Toxic_Classification / config.json
{
  "model_type": "keras",
  "keras_version": "3.7.0",
  "tensorflow_version": "2.19.0",
  "max_sequence_length": 150,
  "vocab_size": 10000,
  "num_labels": 9,
  "label2id": {
    "Child Sexual Exploitation": 0,
    "Elections": 1,
    "Non-Violent Crimes": 2,
    "Safe": 3,
    "Sex-Related Crimes": 4,
    "Suicide & Self-Harm": 5,
    "Unknown S-Type": 6,
    "Violent Crimes": 7,
    "Unsafe": 8
  },
  "id2label": {
    "0": "Child Sexual Exploitation",
    "1": "Elections",
    "2": "Non-Violent Crimes",
    "3": "Safe",
    "4": "Sex-Related Crimes",
    "5": "Suicide & Self-Harm",
    "6": "Unknown S-Type",
    "7": "Violent Crimes",
    "8": "Unsafe"
  },
  "tokenizer_file": "tokenizer.json",
  "task": "text-classification",
  "language": "multilingual",
  "input_format": "concatenated_text_and_image_description",
  "pipeline_tag": "text-classification",
  "tags": [
    "toxic-detection",
    "text-classification",
    "keras",
    "tensorflow",
    "multiclass",
    "safety"
  ]
}
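
A minimal usage sketch based on this config. It assumes the repository also ships a saved Keras model (the filename "model.keras" is hypothetical) and that "tokenizer.json" is a Keras Tokenizer JSON dump loadable with tokenizer_from_json; neither is stated in config.json itself. The padding strategy ("post") is likewise an assumption.

import json

import numpy as np
import tensorflow as tf

with open("config.json") as f:
    config = json.load(f)

# Load the tokenizer named by "tokenizer_file" (assumed to be a Keras Tokenizer JSON dump).
with open(config["tokenizer_file"]) as f:
    tokenizer = tf.keras.preprocessing.text.tokenizer_from_json(f.read())

# "model.keras" is a hypothetical filename; substitute whatever weights file the repo provides.
model = tf.keras.models.load_model("model.keras")

def classify(text: str) -> str:
    # Per "input_format", the input is the post text concatenated with an image description.
    seq = tokenizer.texts_to_sequences([text])
    padded = tf.keras.utils.pad_sequences(
        seq, maxlen=config["max_sequence_length"], padding="post"
    )
    probs = model.predict(padded, verbose=0)[0]  # shape: (num_labels,)
    # id2label keys are strings ("0".."8"), so cast the argmax index accordingly.
    return config["id2label"][str(int(np.argmax(probs)))]

print(classify("example post text plus image description"))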