import hashlib import logging import os import time import uuid from datetime import datetime, timedelta from bson import ObjectId from fastapi import FastAPI, File, Form, Header, HTTPException, UploadFile from fastapi.responses import FileResponse from fastapi.staticfiles import StaticFiles from gradio_client import Client, handle_file from pymongo import MongoClient from dotenv import load_dotenv # Load environment variables load_dotenv() logging.basicConfig(level=logging.INFO) logger = logging.getLogger("app") # Initialize FastAPI app = FastAPI() # Initialize Hugging Face Client (background remover Space) - lazy initialization client = None def get_client(): """Lazy initialization of Gradio Client with timeout handling""" global client if client is None: max_retries = 3 retry_delay = 2 # seconds for attempt in range(max_retries): try: # Initialize Gradio Client client = Client("LogicGoInfotechSpaces/background-remover") print(f"Gradio Client initialized successfully (attempt {attempt + 1})") break except Exception as e: if attempt < max_retries - 1: print(f"Error initializing Gradio Client (attempt {attempt + 1}/{max_retries}): {e}") print(f"Retrying in {retry_delay} seconds...") time.sleep(retry_delay) else: print(f"Failed to initialize Gradio Client after {max_retries} attempts: {e}") raise return client # Create static folder to store files os.makedirs("static", exist_ok=True) app.mount("/static", StaticFiles(directory="static"), name="static") # MongoDB connections and config # Use an ObjectId for the default category so it is stored consistently DEFAULT_CATEGORY_FALLBACK = ObjectId("69368f2f2e46bd68ae188963") primary_mongo_uri = os.getenv("MONGODB_URI") or os.getenv("MONGO_URI") primary_db_name = os.getenv("MONGODB_DB_NAME", "BG_DB") admin_mongo_uri = os.getenv("MONGODB_ADMIN") or os.getenv("MONGO_URI_ADMIN") admin_db_name = os.getenv("MONGODB_ADMIN_DB_NAME", "adminPanel") mongo_client = None mongo_collection = None admin_client = None media_clicks_collection = None if primary_mongo_uri: try: mongo_client = MongoClient(primary_mongo_uri) db = mongo_client.get_database(primary_db_name) mongo_collection = db["bgremove_logs"] except Exception as e: logger.error("MongoDB (primary) connection error: %s", e) if admin_mongo_uri: try: admin_client = MongoClient(admin_mongo_uri) admin_db = admin_client.get_database(admin_db_name) media_clicks_collection = admin_db["media_clicks"] logger.info("Admin DB in use: %s", admin_db_name) except Exception as e: logger.error("MongoDB (admin) connection error: %s", e) def _hash_to_object_id(value: str) -> ObjectId: digest = hashlib.sha1(value.encode("utf-8")).hexdigest()[:24] return ObjectId(digest) def normalize_user_id(raw_user_id): """Accepts ObjectId string, numeric string/int, or missing -> returns ObjectId.""" if raw_user_id is None: return ObjectId(), "generated" value = str(raw_user_id).strip() if value == "": return ObjectId(), "generated_empty" try: return ObjectId(value), "parsed_objectid" except Exception: pass if value.isdigit(): return _hash_to_object_id(value), "numeric_hashed" return _hash_to_object_id(value), "hashed" def normalize_category_id(raw_category_id): if raw_category_id is None: return DEFAULT_CATEGORY_FALLBACK, "default_missing" value = str(raw_category_id).strip() if value == "": return DEFAULT_CATEGORY_FALLBACK, "default_empty" # Always store categoryId as an ObjectId in Mongo try: return ObjectId(value), "provided_objectid" except Exception: # If somehow not a valid hex string, fall back to hashing return _hash_to_object_id(value), "provided_hashed" ##-------NEW CATEGORY CLICK-------##### def log_media_click(raw_user_id, raw_category_id): if media_clicks_collection is None: logger.info("Admin DB unavailable, skipping media click log") return user_oid, _ = normalize_user_id(raw_user_id) category_id, _ = normalize_category_id(raw_category_id) now = datetime.utcnow() # Normalize today to UTC midnight today_date = datetime(now.year, now.month, now.day) try: # -------------------------------------------------- # STEP 1: ENSURE USER DOC EXISTS # -------------------------------------------------- media_clicks_collection.update_one( {"userId": user_oid}, { "$setOnInsert": { "userId": user_oid, "createdAt": now, "ai_edit_complete": 0, "ai_edit_daily_count": [] }, "$set": { "ai_edit_last_date": now, "updatedAt": now } }, upsert=True ) # -------------------------------------------------- # STEP 2: DAILY COUNT (STRICT, UNIQUE, ORDERED) # -------------------------------------------------- doc = media_clicks_collection.find_one( {"userId": user_oid}, {"ai_edit_daily_count": 1} ) daily_entries = doc.get("ai_edit_daily_count", []) if doc else [] # Build UNIQUE date → count map daily_map = { entry["date"]: entry["count"] for entry in daily_entries } # Find last known date last_date = max(daily_map.keys()) if daily_map else today_date # Fill ALL missing days with count = 0 next_day = last_date + timedelta(days=1) while next_day < today_date: daily_map.setdefault(next_day, 0) next_day += timedelta(days=1) # Mark today as used (binary) daily_map[today_date] = 1 # Rebuild list: OLD → NEW final_daily_entries = [ {"date": d, "count": daily_map[d]} for d in sorted(daily_map.keys()) ] # Keep last 32 days only final_daily_entries = final_daily_entries[-32:] # Atomic replace (NO $push) media_clicks_collection.update_one( {"userId": user_oid}, { "$set": { "ai_edit_daily_count": final_daily_entries, "updatedAt": now } } ) # -------------------------------------------------- # STEP 3: NORMALIZE OLD STRING CATEGORY IDs TO ObjectId # -------------------------------------------------- # If there is an existing category entry where categoryId was stored # as a string, convert that field to an ObjectId so future updates # always work with a consistent type. category_id_str = str(category_id) media_clicks_collection.update_one( { "userId": user_oid, "categories.categoryId": category_id_str, }, { "$set": { "categories.$.categoryId": category_id, } }, ) # -------------------------------------------------- # STEP 4: UPDATE EXISTING CATEGORY (DATES CAN REPEAT) # -------------------------------------------------- update_existing = media_clicks_collection.update_one( { "userId": user_oid, "categories.categoryId": category_id, }, { "$inc": { "categories.$.click_count": 1, "ai_edit_complete": 1, }, "$set": { "categories.$.lastClickedAt": now, "ai_edit_last_date": now, "updatedAt": now, }, }, ) if update_existing.matched_count > 0: return # -------------------------------------------------- # STEP 5: PUSH NEW CATEGORY (ORDER = TIME) # -------------------------------------------------- media_clicks_collection.update_one( {"userId": user_oid}, { "$inc": {"ai_edit_complete": 1}, "$set": { "ai_edit_last_date": now, "updatedAt": now, }, "$push": { "categories": { "categoryId": category_id, "click_count": 1, "lastClickedAt": now, } }, }, upsert=True, ) except Exception as exc: logger.error("Failed to log media click: %s", exc) # def log_media_click(raw_user_id, raw_category_id): # if media_clicks_collection is None: # logger.info("Admin DB unavailable, skipping media click log") # return # user_oid, _ = normalize_user_id(raw_user_id) # category_id, _ = normalize_category_id(raw_category_id) # now = datetime.utcnow() # # Normalize to UTC midnight # today_date = datetime(now.year, now.month, now.day) # try: # # -------------------------------------------------- # # STEP 1: ENSURE USER DOC + DAILY FIELD # # -------------------------------------------------- # media_clicks_collection.update_one( # {"userId": user_oid}, # { # "$setOnInsert": { # "userId": user_oid, # "createdAt": now, # "ai_edit_complete": 0, # "ai_edit_daily_count": [] # }, # "$set": { # "ai_edit_last_date": now, # "updatedAt": now # } # }, # upsert=True # ) # # -------------------------------------------------- # # STEP 2: DAILY COUNT LOGIC (FIXED) # # -------------------------------------------------- # doc = media_clicks_collection.find_one( # {"userId": user_oid}, # {"ai_edit_daily_count": 1} # ) # daily_entries = doc.get("ai_edit_daily_count", []) if doc else [] # daily_updates = [] # if not daily_entries: # # FIRST EVER USAGE → ONLY TODAY # daily_updates.append({"date": today_date, "count": 1}) # else: # existing_dates = {entry["date"].date() for entry in daily_entries} # last_date = max(entry["date"] for entry in daily_entries) # # Fill skipped days between last_date and today-1 # next_day = last_date + timedelta(days=1) # while next_day.date() < today_date.date(): # if next_day.date() not in existing_dates: # daily_updates.append({"date": next_day, "count": 0}) # next_day += timedelta(days=1) # # Add today if missing # if today_date.date() not in existing_dates: # daily_updates.append({"date": today_date, "count": 1}) # # Push updates if any # if daily_updates: # media_clicks_collection.update_one( # {"userId": user_oid}, # {"$push": {"ai_edit_daily_count": {"$each": daily_updates}}} # ) # # Sort oldest → newest and trim to last 32 entries # doc = media_clicks_collection.find_one({"userId": user_oid}, {"ai_edit_daily_count": 1}) # daily_entries = doc.get("ai_edit_daily_count", []) if doc else [] # daily_entries.sort(key=lambda x: x["date"]) # if len(daily_entries) > 32: # daily_entries = daily_entries[-32:] # media_clicks_collection.update_one( # {"userId": user_oid}, # {"$set": {"ai_edit_daily_count": daily_entries}} # ) # # -------------------------------------------------- # # STEP 3: TRY UPDATING EXISTING CATEGORY # # -------------------------------------------------- # update_existing = media_clicks_collection.update_one( # { # "userId": user_oid, # "categories.categoryId": category_id, # }, # { # "$inc": { # "categories.$.click_count": 1, # "ai_edit_complete": 1, # }, # "$set": { # "categories.$.lastClickedAt": now, # "ai_edit_last_date": now, # "updatedAt": now, # }, # }, # ) # if update_existing.matched_count > 0: # return # # -------------------------------------------------- # # STEP 4: PUSH NEW CATEGORY # # -------------------------------------------------- # media_clicks_collection.update_one( # {"userId": user_oid}, # { # "$inc": { # "ai_edit_complete": 1, # }, # "$set": { # "ai_edit_last_date": now, # "updatedAt": now, # }, # "$push": { # "categories": { # "categoryId": category_id, # "click_count": 1, # "lastClickedAt": now, # } # }, # }, # upsert=True, # ) # except Exception as exc: # logger.error("Failed to log media click: %s", exc) # Health endpoint @app.get("/health") def health(): return {"status": "ok"} # Upload source image @app.post("/source") async def upload_source(file: UploadFile = File(...)): file_id = str(uuid.uuid4()) + "_" + file.filename file_path = os.path.join("static", file_id) with open(file_path, "wb") as f: f.write(await file.read()) return {"source_path": f"/static/{file_id}"} # Background remover (protected) @app.post("/bg-remove") async def bg_remove( file: UploadFile = File(...), authorization: str = Header(None), user_id: str = Form(None), category_id: str = Form(None), categoryId: str = Form(None), ): import time start_time = time.time() # start timer dt_now = datetime.utcnow() # UTC datetime # Token check if authorization != "Bearer logicgo@123": # Log fail if mongo_collection is not None: try: mongo_collection.insert_one({ "status": "fail", "ts": int(time.time()), "datetime": dt_now, "filename": file.filename, "error": "Unauthorized", "response_time_ms": 0 }) except Exception as e: print("Mongo insert error:", e) raise HTTPException(status_code=401, detail="Unauthorized") # Save uploaded file temporarily file_id = str(uuid.uuid4()) + "_" + file.filename input_path = os.path.join("static", file_id) with open(input_path, "wb") as f: f.write(await file.read()) try: # Call Hugging Face Space via gradio_client hf_client = get_client() result_path = hf_client.predict( f=handle_file(input_path), api_name="/png" ) # Ensure output has .png extension output_id = f"bgremoved_{os.path.splitext(file_id)[0]}.png" output_path = os.path.join("static", output_id) os.replace(result_path, output_path) end_time = time.time() response_time_ms = (end_time - start_time) * 1000 # in ms # Log success if mongo_collection is not None: try: mongo_collection.insert_one({ "status": "success", "ts": int(time.time()), "datetime": dt_now, "bg_removed_path": f"/static/{output_id}", "filename": output_id, "response_time_ms": response_time_ms }) except Exception as e: print("Mongo insert error:", e) log_media_click( raw_user_id=user_id, raw_category_id=category_id or categoryId, ) return { "bg_removed_path": f"/static/{output_id}", "filename": output_id, "response_time_ms": response_time_ms } except Exception as e: # Remove temporary file if exists if os.path.exists(input_path): os.remove(input_path) end_time = time.time() response_time_ms = (end_time - start_time) * 1000 # in ms # Log fail if mongo_collection is not None: try: mongo_collection.insert_one({ "status": "fail", "ts": int(time.time()), "datetime": dt_now, "filename": file.filename, "error": str(e), "response_time_ms": response_time_ms }) except Exception as e2: print("Mongo insert error:", e2) raise HTTPException( status_code=503, detail=f"Background removal service unavailable: {str(e)}" ) # @app.post("/bg-remove") # async def bg_remove( # file: UploadFile = File(...), # authorization: str = Header(None) # ): # import time # start_time = time.time() # start timer # # Token check # if authorization != "Bearer logicgo@123": # raise HTTPException(status_code=401, detail="Unauthorized") # # Save uploaded file temporarily # file_id = str(uuid.uuid4()) + "_" + file.filename # input_path = os.path.join("static", file_id) # with open(input_path, "wb") as f: # f.write(await file.read()) # # Call Hugging Face Space via gradio_client (returns transparent PNG) # try: # hf_client = get_client() # result_path = hf_client.predict( # f=handle_file(input_path), # api_name="/png" # ) # except Exception as e: # if os.path.exists(input_path): # os.remove(input_path) # raise HTTPException( # status_code=503, # detail=f"Background removal service unavailable: {str(e)}" # ) # # Ensure output has .png extension # output_id = f"bgremoved_{os.path.splitext(file_id)[0]}.png" # output_path = os.path.join("static", output_id) # # Move the transparent PNG to static folder # os.replace(result_path, output_path) # # Calculate response time # end_time = time.time() # response_time_ms = (end_time - start_time) * 1000 # in milliseconds # # Log to MongoDB # if mongo_collection is not None: # try: # mongo_collection.insert_one({ # "ts": int(time.time()), # "bg_removed_path": f"/static/{output_id}", # "filename": output_id, # "response_time_ms": response_time_ms # }) # except Exception as e: # print("Mongo insert error:", e) # return { # "bg_removed_path": f"/static/{output_id}", # "filename": output_id, # "response_time_ms": response_time_ms # } # Preview processed image @app.get("/preview/{filename}") def preview(filename: str): file_path = os.path.join("static", filename) if not os.path.exists(file_path): raise HTTPException(status_code=404, detail="File not found") return FileResponse(file_path, media_type="image/png") # Download processed image @app.get("/download/{filename}") def download(filename: str): file_path = os.path.join("static", filename) if not os.path.exists(file_path): raise HTTPException(status_code=404, detail="File not found") return FileResponse(file_path, media_type="image/png", filename=filename) # import hashlib # import logging # import os # import time # import uuid # from datetime import datetime, timedelta # from bson import ObjectId # from fastapi import FastAPI, File, Form, Header, HTTPException, UploadFile # from fastapi.responses import FileResponse # from fastapi.staticfiles import StaticFiles # from gradio_client import Client, handle_file # from pymongo import MongoClient # from dotenv import load_dotenv # # Load environment variables # load_dotenv() # logging.basicConfig(level=logging.INFO) # logger = logging.getLogger("app") # # Initialize FastAPI # app = FastAPI() # # Initialize Hugging Face Client (background remover Space) - lazy initialization # client = None # def get_client(): # """Lazy initialization of Gradio Client with timeout handling""" # global client # if client is None: # max_retries = 3 # retry_delay = 2 # seconds # for attempt in range(max_retries): # try: # # Initialize Gradio Client # client = Client("LogicGoInfotechSpaces/background-remover") # print(f"Gradio Client initialized successfully (attempt {attempt + 1})") # break # except Exception as e: # if attempt < max_retries - 1: # print(f"Error initializing Gradio Client (attempt {attempt + 1}/{max_retries}): {e}") # print(f"Retrying in {retry_delay} seconds...") # time.sleep(retry_delay) # else: # print(f"Failed to initialize Gradio Client after {max_retries} attempts: {e}") # raise # return client # # Create static folder to store files # os.makedirs("static", exist_ok=True) # app.mount("/static", StaticFiles(directory="static"), name="static") # # MongoDB connections and config # DEFAULT_CATEGORY_FALLBACK = "69368f2f2e46bd68ae188963" # primary_mongo_uri = os.getenv("MONGODB_URI") or os.getenv("MONGO_URI") # primary_db_name = os.getenv("MONGODB_DB_NAME", "BG_DB") # admin_mongo_uri = os.getenv("MONGODB_ADMIN") or os.getenv("MONGO_URI_ADMIN") # admin_db_name = os.getenv("MONGODB_ADMIN_DB_NAME", "adminPanel") # mongo_client = None # mongo_collection = None # admin_client = None # media_clicks_collection = None # if primary_mongo_uri: # try: # mongo_client = MongoClient(primary_mongo_uri) # db = mongo_client.get_database(primary_db_name) # mongo_collection = db["bgremove_logs"] # except Exception as e: # logger.error("MongoDB (primary) connection error: %s", e) # if admin_mongo_uri: # try: # admin_client = MongoClient(admin_mongo_uri) # admin_db = admin_client.get_database(admin_db_name) # media_clicks_collection = admin_db["media_clicks"] # logger.info("Admin DB in use: %s", admin_db_name) # except Exception as e: # logger.error("MongoDB (admin) connection error: %s", e) # def _hash_to_object_id(value: str) -> ObjectId: # digest = hashlib.sha1(value.encode("utf-8")).hexdigest()[:24] # return ObjectId(digest) # def normalize_user_id(raw_user_id): # """Accepts ObjectId string, numeric string/int, or missing -> returns ObjectId.""" # if raw_user_id is None: # return ObjectId(), "generated" # value = str(raw_user_id).strip() # if value == "": # return ObjectId(), "generated_empty" # try: # return ObjectId(value), "parsed_objectid" # except Exception: # pass # if value.isdigit(): # return _hash_to_object_id(value), "numeric_hashed" # return _hash_to_object_id(value), "hashed" # def normalize_category_id(raw_category_id): # if raw_category_id is None: # return DEFAULT_CATEGORY_FALLBACK, "default_missing" # value = str(raw_category_id).strip() # if value == "": # return DEFAULT_CATEGORY_FALLBACK, "default_empty" # return value, "provided" # ##-------NEW CATEGORY CLICK-------##### # def log_media_click(raw_user_id, raw_category_id): # if media_clicks_collection is None: # logger.info("Admin DB unavailable, skipping media click log") # return # user_oid, _ = normalize_user_id(raw_user_id) # category_id, _ = normalize_category_id(raw_category_id) # now = datetime.utcnow() # # Normalize today to UTC midnight # today_date = datetime(now.year, now.month, now.day) # try: # # -------------------------------------------------- # # STEP 1: ENSURE USER DOC EXISTS # # -------------------------------------------------- # media_clicks_collection.update_one( # {"userId": user_oid}, # { # "$setOnInsert": { # "userId": user_oid, # "createdAt": now, # "ai_edit_complete": 0, # "ai_edit_daily_count": [] # }, # "$set": { # "ai_edit_last_date": now, # "updatedAt": now # } # }, # upsert=True # ) # # -------------------------------------------------- # # STEP 2: DAILY COUNT (STRICT, UNIQUE, ORDERED) # # -------------------------------------------------- # doc = media_clicks_collection.find_one( # {"userId": user_oid}, # {"ai_edit_daily_count": 1} # ) # daily_entries = doc.get("ai_edit_daily_count", []) if doc else [] # # Build UNIQUE date → count map # daily_map = { # entry["date"]: entry["count"] # for entry in daily_entries # } # # Find last known date # last_date = max(daily_map.keys()) if daily_map else today_date # # Fill ALL missing days with count = 0 # next_day = last_date + timedelta(days=1) # while next_day < today_date: # daily_map.setdefault(next_day, 0) # next_day += timedelta(days=1) # # Mark today as used (binary) # daily_map[today_date] = 1 # # Rebuild list: OLD → NEW # final_daily_entries = [ # {"date": d, "count": daily_map[d]} # for d in sorted(daily_map.keys()) # ] # # Keep last 32 days only # final_daily_entries = final_daily_entries[-32:] # # Atomic replace (NO $push) # media_clicks_collection.update_one( # {"userId": user_oid}, # { # "$set": { # "ai_edit_daily_count": final_daily_entries, # "updatedAt": now # } # } # ) # # -------------------------------------------------- # # STEP 3: UPDATE EXISTING CATEGORY (DATES CAN REPEAT) # # -------------------------------------------------- # update_existing = media_clicks_collection.update_one( # { # "userId": user_oid, # "categories.categoryId": category_id, # }, # { # "$inc": { # "categories.$.click_count": 1, # "ai_edit_complete": 1, # }, # "$set": { # "categories.$.lastClickedAt": now, # "ai_edit_last_date": now, # "updatedAt": now, # }, # }, # ) # if update_existing.matched_count > 0: # return # # -------------------------------------------------- # # STEP 4: PUSH NEW CATEGORY (ORDER = TIME) # # -------------------------------------------------- # media_clicks_collection.update_one( # {"userId": user_oid}, # { # "$inc": {"ai_edit_complete": 1}, # "$set": { # "ai_edit_last_date": now, # "updatedAt": now, # }, # "$push": { # "categories": { # "categoryId": category_id, # "click_count": 1, # "lastClickedAt": now, # } # }, # }, # upsert=True, # ) # except Exception as exc: # logger.error("Failed to log media click: %s", exc) # # def log_media_click(raw_user_id, raw_category_id): # # if media_clicks_collection is None: # # logger.info("Admin DB unavailable, skipping media click log") # # return # # user_oid, _ = normalize_user_id(raw_user_id) # # category_id, _ = normalize_category_id(raw_category_id) # # now = datetime.utcnow() # # # Normalize to UTC midnight # # today_date = datetime(now.year, now.month, now.day) # # try: # # # -------------------------------------------------- # # # STEP 1: ENSURE USER DOC + DAILY FIELD # # # -------------------------------------------------- # # media_clicks_collection.update_one( # # {"userId": user_oid}, # # { # # "$setOnInsert": { # # "userId": user_oid, # # "createdAt": now, # # "ai_edit_complete": 0, # # "ai_edit_daily_count": [] # # }, # # "$set": { # # "ai_edit_last_date": now, # # "updatedAt": now # # } # # }, # # upsert=True # # ) # # # -------------------------------------------------- # # # STEP 2: DAILY COUNT LOGIC (FIXED) # # # -------------------------------------------------- # # doc = media_clicks_collection.find_one( # # {"userId": user_oid}, # # {"ai_edit_daily_count": 1} # # ) # # daily_entries = doc.get("ai_edit_daily_count", []) if doc else [] # # daily_updates = [] # # if not daily_entries: # # # FIRST EVER USAGE → ONLY TODAY # # daily_updates.append({"date": today_date, "count": 1}) # # else: # # existing_dates = {entry["date"].date() for entry in daily_entries} # # last_date = max(entry["date"] for entry in daily_entries) # # # Fill skipped days between last_date and today-1 # # next_day = last_date + timedelta(days=1) # # while next_day.date() < today_date.date(): # # if next_day.date() not in existing_dates: # # daily_updates.append({"date": next_day, "count": 0}) # # next_day += timedelta(days=1) # # # Add today if missing # # if today_date.date() not in existing_dates: # # daily_updates.append({"date": today_date, "count": 1}) # # # Push updates if any # # if daily_updates: # # media_clicks_collection.update_one( # # {"userId": user_oid}, # # {"$push": {"ai_edit_daily_count": {"$each": daily_updates}}} # # ) # # # Sort oldest → newest and trim to last 32 entries # # doc = media_clicks_collection.find_one({"userId": user_oid}, {"ai_edit_daily_count": 1}) # # daily_entries = doc.get("ai_edit_daily_count", []) if doc else [] # # daily_entries.sort(key=lambda x: x["date"]) # # if len(daily_entries) > 32: # # daily_entries = daily_entries[-32:] # # media_clicks_collection.update_one( # # {"userId": user_oid}, # # {"$set": {"ai_edit_daily_count": daily_entries}} # # ) # # # -------------------------------------------------- # # # STEP 3: TRY UPDATING EXISTING CATEGORY # # # -------------------------------------------------- # # update_existing = media_clicks_collection.update_one( # # { # # "userId": user_oid, # # "categories.categoryId": category_id, # # }, # # { # # "$inc": { # # "categories.$.click_count": 1, # # "ai_edit_complete": 1, # # }, # # "$set": { # # "categories.$.lastClickedAt": now, # # "ai_edit_last_date": now, # # "updatedAt": now, # # }, # # }, # # ) # # if update_existing.matched_count > 0: # # return # # # -------------------------------------------------- # # # STEP 4: PUSH NEW CATEGORY # # # -------------------------------------------------- # # media_clicks_collection.update_one( # # {"userId": user_oid}, # # { # # "$inc": { # # "ai_edit_complete": 1, # # }, # # "$set": { # # "ai_edit_last_date": now, # # "updatedAt": now, # # }, # # "$push": { # # "categories": { # # "categoryId": category_id, # # "click_count": 1, # # "lastClickedAt": now, # # } # # }, # # }, # # upsert=True, # # ) # # except Exception as exc: # # logger.error("Failed to log media click: %s", exc) # # Health endpoint # @app.get("/health") # def health(): # return {"status": "ok"} # # Upload source image # @app.post("/source") # async def upload_source(file: UploadFile = File(...)): # file_id = str(uuid.uuid4()) + "_" + file.filename # file_path = os.path.join("static", file_id) # with open(file_path, "wb") as f: # f.write(await file.read()) # return {"source_path": f"/static/{file_id}"} # # Background remover (protected) # @app.post("/bg-remove") # async def bg_remove( # file: UploadFile = File(...), # authorization: str = Header(None), # user_id: str = Form(None), # category_id: str = Form(None), # categoryId: str = Form(None), # ): # import time # start_time = time.time() # start timer # dt_now = datetime.utcnow() # UTC datetime # # Token check # if authorization != "Bearer logicgo@123": # # Log fail # if mongo_collection is not None: # try: # mongo_collection.insert_one({ # "status": "fail", # "ts": int(time.time()), # "datetime": dt_now, # "filename": file.filename, # "error": "Unauthorized", # "response_time_ms": 0 # }) # except Exception as e: # print("Mongo insert error:", e) # raise HTTPException(status_code=401, detail="Unauthorized") # # Save uploaded file temporarily # file_id = str(uuid.uuid4()) + "_" + file.filename # input_path = os.path.join("static", file_id) # with open(input_path, "wb") as f: # f.write(await file.read()) # try: # # Call Hugging Face Space via gradio_client # hf_client = get_client() # result_path = hf_client.predict( # f=handle_file(input_path), # api_name="/png" # ) # # Ensure output has .png extension # output_id = f"bgremoved_{os.path.splitext(file_id)[0]}.png" # output_path = os.path.join("static", output_id) # os.replace(result_path, output_path) # end_time = time.time() # response_time_ms = (end_time - start_time) * 1000 # in ms # # Log success # if mongo_collection is not None: # try: # mongo_collection.insert_one({ # "status": "success", # "ts": int(time.time()), # "datetime": dt_now, # "bg_removed_path": f"/static/{output_id}", # "filename": output_id, # "response_time_ms": response_time_ms # }) # except Exception as e: # print("Mongo insert error:", e) # log_media_click( # raw_user_id=user_id, # raw_category_id=category_id or categoryId, # ) # return { # "bg_removed_path": f"/static/{output_id}", # "filename": output_id, # "response_time_ms": response_time_ms # } # except Exception as e: # # Remove temporary file if exists # if os.path.exists(input_path): # os.remove(input_path) # end_time = time.time() # response_time_ms = (end_time - start_time) * 1000 # in ms # # Log fail # if mongo_collection is not None: # try: # mongo_collection.insert_one({ # "status": "fail", # "ts": int(time.time()), # "datetime": dt_now, # "filename": file.filename, # "error": str(e), # "response_time_ms": response_time_ms # }) # except Exception as e2: # print("Mongo insert error:", e2) # raise HTTPException( # status_code=503, # detail=f"Background removal service unavailable: {str(e)}" # ) # # @app.post("/bg-remove") # # async def bg_remove( # # file: UploadFile = File(...), # # authorization: str = Header(None) # # ): # # import time # # start_time = time.time() # start timer # # # Token check # # if authorization != "Bearer logicgo@123": # # raise HTTPException(status_code=401, detail="Unauthorized") # # # Save uploaded file temporarily # # file_id = str(uuid.uuid4()) + "_" + file.filename # # input_path = os.path.join("static", file_id) # # with open(input_path, "wb") as f: # # f.write(await file.read()) # # # Call Hugging Face Space via gradio_client (returns transparent PNG) # # try: # # hf_client = get_client() # # result_path = hf_client.predict( # # f=handle_file(input_path), # # api_name="/png" # # ) # # except Exception as e: # # if os.path.exists(input_path): # # os.remove(input_path) # # raise HTTPException( # # status_code=503, # # detail=f"Background removal service unavailable: {str(e)}" # # ) # # # Ensure output has .png extension # # output_id = f"bgremoved_{os.path.splitext(file_id)[0]}.png" # # output_path = os.path.join("static", output_id) # # # Move the transparent PNG to static folder # # os.replace(result_path, output_path) # # # Calculate response time # # end_time = time.time() # # response_time_ms = (end_time - start_time) * 1000 # in milliseconds # # # Log to MongoDB # # if mongo_collection is not None: # # try: # # mongo_collection.insert_one({ # # "ts": int(time.time()), # # "bg_removed_path": f"/static/{output_id}", # # "filename": output_id, # # "response_time_ms": response_time_ms # # }) # # except Exception as e: # # print("Mongo insert error:", e) # # return { # # "bg_removed_path": f"/static/{output_id}", # # "filename": output_id, # # "response_time_ms": response_time_ms # # } # # Preview processed image # @app.get("/preview/{filename}") # def preview(filename: str): # file_path = os.path.join("static", filename) # if not os.path.exists(file_path): # raise HTTPException(status_code=404, detail="File not found") # return FileResponse(file_path, media_type="image/png") # # Download processed image # @app.get("/download/{filename}") # def download(filename: str): # file_path = os.path.join("static", filename) # if not os.path.exists(file_path): # raise HTTPException(status_code=404, detail="File not found") # return FileResponse(file_path, media_type="image/png", filename=filename)