lysandre (HF Staff) committed
Commit b3936bc · verified · 1 parent: e832268

Update with commit 84710a4291c3ca4d4b3d65d5a011ff83af243c1d


See: https://github.com/huggingface/transformers/commit/84710a4291c3ca4d4b3d65d5a011ff83af243c1d

Files changed (2):
  1. frameworks.json +1 -0
  2. pipeline_tags.json +1 -0
frameworks.json CHANGED
@@ -316,6 +316,7 @@
  {"model_type":"vitpose_backbone","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"vits","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"vivit","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
+ {"model_type":"vjepa2","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoTokenizer"}
  {"model_type":"wav2vec2","pytorch":true,"tensorflow":true,"flax":true,"processor":"AutoProcessor"}
  {"model_type":"wav2vec2-bert","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
  {"model_type":"wav2vec2-conformer","pytorch":true,"tensorflow":false,"flax":false,"processor":"AutoProcessor"}
pipeline_tags.json CHANGED
@@ -1238,6 +1238,7 @@
  {"model_class":"UniSpeechSatForXVector","pipeline_tag":"audio-xvector","auto_class":"AutoModelForAudioXVector"}
  {"model_class":"UniSpeechSatModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"UnivNetModel","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
+ {"model_class":"VJEPA2Model","pipeline_tag":"feature-extraction","auto_class":"AutoModel"}
  {"model_class":"VanForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}
  {"model_class":"VanModel","pipeline_tag":"image-feature-extraction","auto_class":"AutoModel"}
  {"model_class":"ViTForImageClassification","pipeline_tag":"image-classification","auto_class":"AutoModelForImageClassification"}