fallacyfinder / config.json
{
  "activation": "gelu",
  "architectures": [
    "DistilBertForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "custom_metadata": {
    "average_confidence": 0.982,
    "creation_date": "2024",
    "fallacy_types": [
      "ad_hominem",
      "appeal_to_authority",
      "appeal_to_emotion",
      "cherry_picking",
      "darvo",
      "false_dichotomy",
      "gaslighting",
      "gish_gallop",
      "kafkatrapping",
      "motte_and_bailey",
      "moving_goalposts",
      "no_fallacy",
      "sealioning",
      "slippery_slope",
      "strawman",
      "whataboutism"
    ],
    "model_version": "1.0.0",
    "test_accuracy": 1.0,
    "training_dataset_size": 3200,
    "training_framework": "transformers"
  },
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": "ad_hominem",
    "1": "appeal_to_authority",
    "2": "appeal_to_emotion",
    "3": "cherry_picking",
    "4": "darvo",
    "5": "false_dichotomy",
    "6": "gaslighting",
    "7": "gish_gallop",
    "8": "kafkatrapping",
    "9": "motte_and_bailey",
    "10": "moving_goalposts",
    "11": "no_fallacy",
    "12": "sealioning",
    "13": "slippery_slope",
    "14": "strawman",
    "15": "whataboutism"
  },
  "initializer_range": 0.02,
  "label2id": {
    "ad_hominem": 0,
    "appeal_to_authority": 1,
    "appeal_to_emotion": 2,
    "cherry_picking": 3,
    "darvo": 4,
    "false_dichotomy": 5,
    "gaslighting": 6,
    "gish_gallop": 7,
    "kafkatrapping": 8,
    "motte_and_bailey": 9,
    "moving_goalposts": 10,
    "no_fallacy": 11,
    "sealioning": 12,
    "slippery_slope": 13,
    "strawman": 14,
    "whataboutism": 15
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "pad_token_id": 0,
  "problem_type": "single_label_classification",
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "task_specific_params": {
    "text-classification": {
      "num_labels": 16,
      "problem_type": "single_label_classification"
    }
  },
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.53.0",
  "vocab_size": 30522
}
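
For reference, a minimal inference sketch in Python showing how this config drives a 16-way single-label classifier: the model emits one logit per label, and the id2label map above turns the argmax index back into a fallacy name. The repo id "SamanthaStorm/fallacyfinder" is an assumption inferred from this page; substitute the actual Hub repo id or a local directory containing this config.json.

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Assumption: adjust to the real Hub repo id or a local checkpoint directory.
repo_id = "SamanthaStorm/fallacyfinder"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

text = "Everyone knows the experts agree with me, so you must be wrong."
# max_position_embeddings is 512, so truncate inputs to that length.
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 16): one logit per label

probs = torch.softmax(logits, dim=-1)
pred_id = int(probs.argmax(dim=-1))
# id2label from this config maps the index to a name,
# e.g. 1 -> "appeal_to_authority", 11 -> "no_fallacy".
print(model.config.id2label[pred_id], float(probs[0, pred_id]))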