Update

Files changed:
- .pre-commit-config.yaml (+2 -2)
- model.py (+10 -10)
- pyproject.toml (+7 -2)
- utils.py (+1 -1)
.pre-commit-config.yaml CHANGED
@@ -14,13 +14,13 @@ repos:
       - id: requirements-txt-fixer
       - id: trailing-whitespace
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.
+    rev: v0.11.10
     hooks:
       - id: ruff
         args: ["--fix"]
       - id: ruff-format
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.
+    rev: v1.15.0
     hooks:
       - id: mypy
         args: ["--ignore-missing-imports"]
model.py CHANGED
@@ -9,7 +9,7 @@ from diffusers.utils import export_to_ply
 
 
 class Model:
-    def __init__(self):
+    def __init__(self) -> None:
         self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
         self.pipe = ShapEPipeline.from_pretrained("openai/shap-e", torch_dtype=torch.float16)
         self.pipe.to(self.device)
@@ -23,9 +23,9 @@ class Model:
         mesh = mesh.apply_transform(rot)
         rot = trimesh.transformations.rotation_matrix(np.pi, [0, 1, 0])
         mesh = mesh.apply_transform(rot)
-
-
-
+        with tempfile.NamedTemporaryFile(suffix=".glb", delete=False) as mesh_path:
+            mesh.export(mesh_path.name, file_type="glb")
+            return mesh_path.name
 
     def run_text(self, prompt: str, seed: int = 0, guidance_scale: float = 15.0, num_steps: int = 64) -> str:
         generator = torch.Generator(device=self.device).manual_seed(seed)
@@ -36,9 +36,9 @@ class Model:
             num_inference_steps=num_steps,
             output_type="mesh",
         ).images
-
-
-
+        with tempfile.NamedTemporaryFile(suffix=".ply", delete=False, mode="w+b") as ply_path:
+            export_to_ply(images[0], ply_path.name)
+            return self.to_glb(ply_path.name)
 
     def run_image(
         self, image: PIL.Image.Image, seed: int = 0, guidance_scale: float = 3.0, num_steps: int = 64
@@ -51,6 +51,6 @@ class Model:
             num_inference_steps=num_steps,
             output_type="mesh",
         ).images
-
-
-
+        with tempfile.NamedTemporaryFile(suffix=".ply", delete=False, mode="w+b") as ply_path:
+            export_to_ply(images[0], ply_path.name)
+            return self.to_glb(ply_path.name)
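The new lines in model.py write the Shap-E outputs through NamedTemporaryFile context managers (with delete=False, so the files survive the with block) and return the temporary file paths. A minimal usage sketch, assuming the Model class above is importable as model.Model in the Space; the module path and prompt are illustrative, not part of this commit:

import model

shap_e = model.Model()  # loads openai/shap-e on CUDA if available, else CPU
glb_path = shap_e.run_text("a shark", seed=0, guidance_scale=15.0, num_steps=64)
# run_text renders a mesh, exports it to a temporary .ply via export_to_ply,
# converts it to .glb through to_glb, and returns the .glb path.
print(glb_path)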
pyproject.toml CHANGED
@@ -29,8 +29,7 @@ ignore = [
     "D213", # multi-line-summary-second-line
     "E501", # line-too-long
     "SIM117", # multiple-with-statements
-
-extend-ignore = [
+    #
     "D100", # undocumented-public-module
     "D101", # undocumented-public-class
     "D102", # undocumented-public-method
@@ -51,5 +50,11 @@ unfixable = [
     "F401", # unused-import
 ]
 
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[tool.ruff.lint.per-file-ignores]
+"*.ipynb" = ["T201", "T203"]
+
 [tool.ruff.format]
 docstring-code-format = true
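The added [tool.ruff.lint.pydocstyle] table tells ruff's pydocstyle rules to expect Google-style docstrings, and the per-file-ignores entry lets notebooks keep print/pprint calls (T201 and T203 are ruff's flake8-print "print found" / "pprint found" rules). A small illustration of the Google docstring layout those rules check for; the function is invented for this note and is not from the repo:

def scale_seed(seed: int, factor: int) -> int:
    """Scale a seed value.

    Args:
        seed: Base seed.
        factor: Multiplier applied to the seed.

    Returns:
        The scaled seed.
    """
    return seed * factor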
utils.py CHANGED
@@ -5,5 +5,5 @@ from settings import MAX_SEED
 
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
     if randomize_seed:
-        seed = random.randint(0, MAX_SEED)
+        seed = random.randint(0, MAX_SEED)  # noqa: S311
     return seed
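The added # noqa: S311 suppresses ruff's flake8-bandit rule S311, which warns that the random module is not suitable for security or cryptographic purposes; that is fine here because the value only seeds the diffusion sampler. A short sketch of the distinction, with an assumed MAX_SEED bound (the real constant lives in settings.py and is not shown in this diff):

import random
import secrets

MAX_SEED = 2**32 - 1  # assumed bound, for illustration only

seed = random.randint(0, MAX_SEED)  # reproducible sampling seed; S311 safely ignored
token = secrets.token_hex(16)  # reach for secrets when unpredictability actually matters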