"""
Simple AI Text Humanizer using Gradio
A clean text-to-text interface for humanizing AI-generated content.
"""
import gradio as gr
import time
from typing import Optional

from transformer.app import AdvancedAcademicTextHumanizer, download_nltk_resources

# Global humanizer instance
humanizer_instance = None


def initialize_humanizer():
    """Initialize the humanizer model."""
    global humanizer_instance
    if humanizer_instance is None:
        try:
            print("🔄 Downloading NLTK resources...")
            # Download NLTK resources
            download_nltk_resources()

            print("🔄 Initializing lightweight models...")
            # Initialize humanizer with lightweight, fast settings
            humanizer_instance = AdvancedAcademicTextHumanizer(
                sentence_model="fast",    # Uses all-MiniLM-L6-v2 (lightweight)
                paraphrase_model="fast",  # Uses t5-small (fast)
                enable_advanced_models=True,
                ai_avoidance_mode=True
            )

            print("✅ All models loaded successfully and ready!")
            return "✅ Models loaded successfully"
        except Exception as e:
            error_msg = f"❌ Error loading models: {str(e)}"
            print(error_msg)
            return error_msg
    return "✅ Models already loaded"
def humanize_text(input_text: str, use_passive: bool, use_synonyms: bool, use_paraphrasing: bool) -> str:
    """Transform AI text to human-like text."""
    if not input_text.strip():
        return "Please enter some text to transform."

    global humanizer_instance
    if humanizer_instance is None:
        init_result = initialize_humanizer()
        if "Error" in init_result:
            return init_result

    try:
        # Transform the text
        transformed = humanizer_instance.humanize_text(
            input_text,
            use_passive=use_passive,
            use_synonyms=use_synonyms,
            use_paraphrasing=use_paraphrasing
        )
        return transformed
    except Exception as e:
        return f"❌ Error during transformation: {str(e)}"
def create_interface():
    """Create the Gradio interface."""
    with gr.Blocks(title="AI Text Humanizer", theme=gr.themes.Soft()) as interface:
        gr.Markdown("# 🤖➡️🧔🏻♂️ AI Text Humanizer")
        gr.Markdown("Transform AI-generated text into human-like content using advanced ML models.")

        with gr.Row():
            with gr.Column():
                input_text = gr.Textbox(
                    label="Input Text",
                    placeholder="Paste your AI-generated text here...",
                    lines=10,
                    max_lines=20
                )

                with gr.Row():
                    use_passive = gr.Checkbox(
                        label="Passive Voice Transformation",
                        value=False,
                        info="Convert active voice to passive"
                    )
                    use_synonyms = gr.Checkbox(
                        label="Synonym Replacement",
                        value=True,
                        info="AI-powered contextual synonyms"
                    )
                    use_paraphrasing = gr.Checkbox(
                        label="Neural Paraphrasing",
                        value=True,
                        info="T5 sentence-level rewriting"
                    )

                transform_btn = gr.Button("🚀 Transform Text", variant="primary")

            with gr.Column():
                output_text = gr.Textbox(
                    label="Transformed Text",
                    lines=10,
                    max_lines=20,
                    interactive=False
                )

        # Initialize models on startup
        gr.Markdown("### Model Status")
        status_text = gr.Textbox(
            label="Initialization Status",
            value="Click 'Transform Text' to load models...",
            interactive=False
        )

        # Connect the transformation function
        transform_btn.click(
            fn=humanize_text,
            inputs=[input_text, use_passive, use_synonyms, use_paraphrasing],
            outputs=output_text
        )

        # Initialize models when interface loads
        interface.load(
            fn=initialize_humanizer,
            outputs=status_text
        )

        gr.Markdown("---")
        gr.Markdown("**Note:** First-time model loading may take a few moments.")

    return interface
def main():
    """Launch the Gradio interface."""
    interface = create_interface()

    # Launch with minimal local settings (no API docs, no public share link)
    interface.launch(
        show_api=False,
        share=False,
        debug=False,
        show_error=True
    )
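# Deployment note (assumption, not part of the original file): when running on
# Hugging Face Spaces, launch() is commonly given explicit networking arguments,
# for example:
#
#     interface.launch(server_name="0.0.0.0", server_port=7860, show_api=False)
#
# server_name and server_port are standard gradio launch() parameters; the values
# shown are the usual Spaces convention, not settings taken from this repository.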
if __name__ == "__main__":
    main()