dataset_info:
- config_name: I2E-CIFAR10
features:
- name: file_path
dtype: string
- name: label
dtype:
class_label:
names:
'0': '0'
'1': '1'
'2': '2'
'3': '3'
'4': '4'
'5': '5'
'6': '6'
'7': '7'
'8': '8'
'9': '9'
- name: data
dtype: binary
splits:
- name: train
num_bytes: 1646538890
num_examples: 50000
- name: validation
num_bytes: 329298890
num_examples: 10000
download_size: 464478602
dataset_size: 1975837780
- config_name: I2E-CIFAR100
features:
- name: file_path
dtype: string
- name: label
dtype:
class_label:
names:
'0': '0'
'1': '1'
'2': '2'
'3': '3'
'4': '4'
'5': '5'
'6': '6'
'7': '7'
'8': '8'
'9': '9'
'10': '10'
'11': '11'
'12': '12'
'13': '13'
'14': '14'
'15': '15'
'16': '16'
'17': '17'
'18': '18'
'19': '19'
'20': '20'
'21': '21'
'22': '22'
'23': '23'
'24': '24'
'25': '25'
'26': '26'
'27': '27'
'28': '28'
'29': '29'
'30': '30'
'31': '31'
'32': '32'
'33': '33'
'34': '34'
'35': '35'
'36': '36'
'37': '37'
'38': '38'
'39': '39'
'40': '40'
'41': '41'
'42': '42'
'43': '43'
'44': '44'
'45': '45'
'46': '46'
'47': '47'
'48': '48'
'49': '49'
'50': '50'
'51': '51'
'52': '52'
'53': '53'
'54': '54'
'55': '55'
'56': '56'
'57': '57'
'58': '58'
'59': '59'
'60': '60'
'61': '61'
'62': '62'
'63': '63'
'64': '64'
'65': '65'
'66': '66'
'67': '67'
'68': '68'
'69': '69'
'70': '70'
'71': '71'
'72': '72'
'73': '73'
'74': '74'
'75': '75'
'76': '76'
'77': '77'
'78': '78'
'79': '79'
'80': '80'
'81': '81'
'82': '82'
'83': '83'
'84': '84'
'85': '85'
'86': '86'
'87': '87'
'88': '88'
'89': '89'
'90': '90'
'91': '91'
'92': '92'
'93': '93'
'94': '94'
'95': '95'
'96': '96'
'97': '97'
'98': '98'
'99': '99'
- name: data
dtype: binary
splits:
- name: train
num_bytes: 1646583890
num_examples: 50000
- name: validation
num_bytes: 329307890
num_examples: 10000
download_size: 462298257
dataset_size: 1975891780
- config_name: I2E-Caltech101
features:
- name: file_path
dtype: string
- name: label
dtype:
class_label:
names:
'0': Faces
'1': Faces_easy
'2': Leopards
'3': Motorbikes
'4': accordion
'5': airplanes
'6': anchor
'7': ant
'8': barrel
'9': bass
'10': beaver
'11': binocular
'12': bonsai
'13': brain
'14': brontosaurus
'15': buddha
'16': butterfly
'17': camera
'18': cannon
'19': car_side
'20': ceiling_fan
'21': cellphone
'22': chair
'23': chandelier
'24': cougar_body
'25': cougar_face
'26': crab
'27': crayfish
'28': crocodile
'29': crocodile_head
'30': cup
'31': dalmatian
'32': dollar_bill
'33': dolphin
'34': dragonfly
'35': electric_guitar
'36': elephant
'37': emu
'38': euphonium
'39': ewer
'40': ferry
'41': flamingo
'42': flamingo_head
'43': garfield
'44': gerenuk
'45': gramophone
'46': grand_piano
'47': hawksbill
'48': headphone
'49': hedgehog
'50': helicopter
'51': ibis
'52': inline_skate
'53': joshua_tree
'54': kangaroo
'55': ketch
'56': lamp
'57': laptop
'58': llama
'59': lobster
'60': lotus
'61': mandolin
'62': mayfly
'63': menorah
'64': metronome
'65': minaret
'66': nautilus
'67': octopus
'68': okapi
'69': pagoda
'70': panda
'71': pigeon
'72': pizza
'73': platypus
'74': pyramid
'75': revolver
'76': rhino
'77': rooster
'78': saxophone
'79': schooner
'80': scissors
'81': scorpion
'82': sea_horse
'83': snoopy
'84': soccer_ball
'85': stapler
'86': starfish
'87': stegosaurus
'88': stop_sign
'89': strawberry
'90': sunflower
'91': tick
'92': trilobite
'93': umbrella
'94': watch
'95': water_lilly
'96': wheelchair
'97': wild_cat
'98': windsor_chair
'99': wrench
'100': yin_yang
- name: data
dtype: binary
splits:
- name: train
num_bytes: 872272607
num_examples: 8677
download_size: 344357976
dataset_size: 872272607
- config_name: I2E-Caltech256
features:
- name: file_path
dtype: string
- name: label
dtype:
class_label:
names:
'0': 001.ak47
'1': 002.american-flag
'2': 003.backpack
'3': 004.baseball-bat
'4': 005.baseball-glove
'5': 006.basketball-hoop
'6': 007.bat
'7': 008.bathtub
'8': 009.bear
'9': 010.beer-mug
'10': 011.billiards
'11': 012.binoculars
'12': 013.birdbath
'13': 014.blimp
'14': 015.bonsai-101
'15': 016.boom-box
'16': 017.bowling-ball
'17': 018.bowling-pin
'18': 019.boxing-glove
'19': 020.brain-101
'20': 021.breadmaker
'21': 022.buddha-101
'22': 023.bulldozer
'23': 024.butterfly
'24': 025.cactus
'25': 026.cake
'26': 027.calculator
'27': 028.camel
'28': 029.cannon
'29': 030.canoe
'30': 031.car-tire
'31': 032.cartman
'32': 033.cd
'33': 034.centipede
'34': 035.cereal-box
'35': 036.chandelier-101
'36': 037.chess-board
'37': 038.chimp
'38': 039.chopsticks
'39': 040.cockroach
'40': 041.coffee-mug
'41': 042.coffin
'42': 043.coin
'43': 044.comet
'44': 045.computer-keyboard
'45': 046.computer-monitor
'46': 047.computer-mouse
'47': 048.conch
'48': 049.cormorant
'49': 050.covered-wagon
'50': 051.cowboy-hat
'51': 052.crab-101
'52': 053.desk-globe
'53': 054.diamond-ring
'54': 055.dice
'55': 056.dog
'56': 057.dolphin-101
'57': 058.doorknob
'58': 059.drinking-straw
'59': 060.duck
'60': 061.dumb-bell
'61': 062.eiffel-tower
'62': 063.electric-guitar-101
'63': 064.elephant-101
'64': 065.elk
'65': 066.ewer-101
'66': 067.eyeglasses
'67': 068.fern
'68': 069.fighter-jet
'69': 070.fire-extinguisher
'70': 071.fire-hydrant
'71': 072.fire-truck
'72': 073.fireworks
'73': 074.flashlight
'74': 075.floppy-disk
'75': 076.football-helmet
'76': 077.french-horn
'77': 078.fried-egg
'78': 079.frisbee
'79': 080.frog
'80': 081.frying-pan
'81': 082.galaxy
'82': 083.gas-pump
'83': 084.giraffe
'84': 085.goat
'85': 086.golden-gate-bridge
'86': 087.goldfish
'87': 088.golf-ball
'88': 089.goose
'89': 090.gorilla
'90': 091.grand-piano-101
'91': 092.grapes
'92': 093.grasshopper
'93': 094.guitar-pick
'94': 095.hamburger
'95': 096.hammock
'96': 097.harmonica
'97': 098.harp
'98': 099.harpsichord
'99': 100.hawksbill-101
'100': 101.head-phones
'101': 102.helicopter-101
'102': 103.hibiscus
'103': 104.homer-simpson
'104': 105.horse
'105': 106.horseshoe-crab
'106': 107.hot-air-balloon
'107': 108.hot-dog
'108': 109.hot-tub
'109': 110.hourglass
'110': 111.house-fly
'111': 112.human-skeleton
'112': 113.hummingbird
'113': 114.ibis-101
'114': 115.ice-cream-cone
'115': 116.iguana
'116': 117.ipod
'117': 118.iris
'118': 119.jesus-christ
'119': 120.joy-stick
'120': 121.kangaroo-101
'121': 122.kayak
'122': 123.ketch-101
'123': 124.killer-whale
'124': 125.knife
'125': 126.ladder
'126': 127.laptop-101
'127': 128.lathe
'128': 129.leopards-101
'129': 130.license-plate
'130': 131.lightbulb
'131': 132.light-house
'132': 133.lightning
'133': 134.llama-101
'134': 135.mailbox
'135': 136.mandolin
'136': 137.mars
'137': 138.mattress
'138': 139.megaphone
'139': 140.menorah-101
'140': 141.microscope
'141': 142.microwave
'142': 143.minaret
'143': 144.minotaur
'144': 145.motorbikes-101
'145': 146.mountain-bike
'146': 147.mushroom
'147': 148.mussels
'148': 149.necktie
'149': 150.octopus
'150': 151.ostrich
'151': 152.owl
'152': 153.palm-pilot
'153': 154.palm-tree
'154': 155.paperclip
'155': 156.paper-shredder
'156': 157.pci-card
'157': 158.penguin
'158': 159.people
'159': 160.pez-dispenser
'160': 161.photocopier
'161': 162.picnic-table
'162': 163.playing-card
'163': 164.porcupine
'164': 165.pram
'165': 166.praying-mantis
'166': 167.pyramid
'167': 168.raccoon
'168': 169.radio-telescope
'169': 170.rainbow
'170': 171.refrigerator
'171': 172.revolver-101
'172': 173.rifle
'173': 174.rotary-phone
'174': 175.roulette-wheel
'175': 176.saddle
'176': 177.saturn
'177': 178.school-bus
'178': 179.scorpion-101
'179': 180.screwdriver
'180': 181.segway
'181': 182.self-propelled-lawn-mower
'182': 183.sextant
'183': 184.sheet-music
'184': 185.skateboard
'185': 186.skunk
'186': 187.skyscraper
'187': 188.smokestack
'188': 189.snail
'189': 190.snake
'190': 191.sneaker
'191': 192.snowmobile
'192': 193.soccer-ball
'193': 194.socks
'194': 195.soda-can
'195': 196.spaghetti
'196': 197.speed-boat
'197': 198.spider
'198': 199.spoon
'199': 200.stained-glass
'200': 201.starfish-101
'201': 202.steering-wheel
'202': 203.stirrups
'203': 204.sunflower-101
'204': 205.superman
'205': 206.sushi
'206': 207.swan
'207': 208.swiss-army-knife
'208': 209.sword
'209': 210.syringe
'210': 211.tambourine
'211': 212.teapot
'212': 213.teddy-bear
'213': 214.teepee
'214': 215.telephone-box
'215': 216.tennis-ball
'216': 217.tennis-court
'217': 218.tennis-racket
'218': 219.theodolite
'219': 220.toaster
'220': 221.tomato
'221': 222.tombstone
'222': 223.top-hat
'223': 224.touring-bike
'224': 225.tower-pisa
'225': 226.traffic-light
'226': 227.treadmill
'227': 228.triceratops
'228': 229.tricycle
'229': 230.trilobite-101
'230': 231.tripod
'231': 232.t-shirt
'232': 233.tuning-fork
'233': 234.tweezer
'234': 235.umbrella-101
'235': 236.unicorn
'236': 237.vcr
'237': 238.video-projector
'238': 239.washing-machine
'239': 240.watch-101
'240': 241.waterfall
'241': 242.watermelon
'242': 243.welding-mask
'243': 244.wheelbarrow
'244': 245.windmill
'245': 246.wine-bottle
'246': 247.xylophone
'247': 248.yarmulke
'248': 249.yo-yo
'249': 250.zebra
'250': 251.airplanes-101
'251': 252.car-side-101
'252': 253.faces-easy-101
'253': 254.greyhound
'254': 255.tennis-shoes
'255': 256.toad
'256': 257.clutter
- name: data
dtype: binary
splits:
- name: train
num_bytes: 3076928106
num_examples: 30607
download_size: 1165568633
dataset_size: 3076928106
- config_name: I2E-FashionMNIST
features:
- name: file_path
dtype: string
- name: label
dtype:
class_label:
names:
'0': '0'
'1': '1'
'2': '2'
'3': '3'
'4': '4'
'5': '5'
'6': '6'
'7': '7'
'8': '8'
'9': '9'
- name: data
dtype: binary
splits:
- name: train
num_bytes: 132648890
num_examples: 60000
- name: validation
num_bytes: 22098890
num_examples: 10000
download_size: 68196022
dataset_size: 154747780
- config_name: I2E-ImageNet
features:
- name: file_path
dtype: string
- name: label
dtype:
class_label:
names:
'0': n01440764
'1': n01443537
'2': n01484850
'3': n01491361
'4': n01494475
'5': n01496331
'6': n01498041
'7': n01514668
'8': n01514859
'9': n01518878
'10': n01530575
'11': n01531178
'12': n01532829
'13': n01534433
'14': n01537544
'15': n01558993
'16': n01560419
'17': n01580077
'18': n01582220
'19': n01592084
'20': n01601694
'21': n01608432
'22': n01614925
'23': n01616318
'24': n01622779
'25': n01629819
'26': n01630670
'27': n01631663
'28': n01632458
'29': n01632777
'30': n01641577
'31': n01644373
'32': n01644900
'33': n01664065
'34': n01665541
'35': n01667114
'36': n01667778
'37': n01669191
'38': n01675722
'39': n01677366
'40': n01682714
'41': n01685808
'42': n01687978
'43': n01688243
'44': n01689811
'45': n01692333
'46': n01693334
'47': n01694178
'48': n01695060
'49': n01697457
'50': n01698640
'51': n01704323
'52': n01728572
'53': n01728920
'54': n01729322
'55': n01729977
'56': n01734418
'57': n01735189
'58': n01737021
'59': n01739381
'60': n01740131
'61': n01742172
'62': n01744401
'63': n01748264
'64': n01749939
'65': n01751748
'66': n01753488
'67': n01755581
'68': n01756291
'69': n01768244
'70': n01770081
'71': n01770393
'72': n01773157
'73': n01773549
'74': n01773797
'75': n01774384
'76': n01774750
'77': n01775062
'78': n01776313
'79': n01784675
'80': n01795545
'81': n01796340
'82': n01797886
'83': n01798484
'84': n01806143
'85': n01806567
'86': n01807496
'87': n01817953
'88': n01818515
'89': n01819313
'90': n01820546
'91': n01824575
'92': n01828970
'93': n01829413
'94': n01833805
'95': n01843065
'96': n01843383
'97': n01847000
'98': n01855032
'99': n01855672
'100': n01860187
'101': n01871265
'102': n01872401
'103': n01873310
'104': n01877812
'105': n01882714
'106': n01883070
'107': n01910747
'108': n01914609
'109': n01917289
'110': n01924916
'111': n01930112
'112': n01943899
'113': n01944390
'114': n01945685
'115': n01950731
'116': n01955084
'117': n01968897
'118': n01978287
'119': n01978455
'120': n01980166
'121': n01981276
'122': n01983481
'123': n01984695
'124': n01985128
'125': n01986214
'126': n01990800
'127': n02002556
'128': n02002724
'129': n02006656
'130': n02007558
'131': n02009229
'132': n02009912
'133': n02011460
'134': n02012849
'135': n02013706
'136': n02017213
'137': n02018207
'138': n02018795
'139': n02025239
'140': n02027492
'141': n02028035
'142': n02033041
'143': n02037110
'144': n02051845
'145': n02056570
'146': n02058221
'147': n02066245
'148': n02071294
'149': n02074367
'150': n02077923
'151': n02085620
'152': n02085782
'153': n02085936
'154': n02086079
'155': n02086240
'156': n02086646
'157': n02086910
'158': n02087046
'159': n02087394
'160': n02088094
'161': n02088238
'162': n02088364
'163': n02088466
'164': n02088632
'165': n02089078
'166': n02089867
'167': n02089973
'168': n02090379
'169': n02090622
'170': n02090721
'171': n02091032
'172': n02091134
'173': n02091244
'174': n02091467
'175': n02091635
'176': n02091831
'177': n02092002
'178': n02092339
'179': n02093256
'180': n02093428
'181': n02093647
'182': n02093754
'183': n02093859
'184': n02093991
'185': n02094114
'186': n02094258
'187': n02094433
'188': n02095314
'189': n02095570
'190': n02095889
'191': n02096051
'192': n02096177
'193': n02096294
'194': n02096437
'195': n02096585
'196': n02097047
'197': n02097130
'198': n02097209
'199': n02097298
'200': n02097474
'201': n02097658
'202': n02098105
'203': n02098286
'204': n02098413
'205': n02099267
'206': n02099429
'207': n02099601
'208': n02099712
'209': n02099849
'210': n02100236
'211': n02100583
'212': n02100735
'213': n02100877
'214': n02101006
'215': n02101388
'216': n02101556
'217': n02102040
'218': n02102177
'219': n02102318
'220': n02102480
'221': n02102973
'222': n02104029
'223': n02104365
'224': n02105056
'225': n02105162
'226': n02105251
'227': n02105412
'228': n02105505
'229': n02105641
'230': n02105855
'231': n02106030
'232': n02106166
'233': n02106382
'234': n02106550
'235': n02106662
'236': n02107142
'237': n02107312
'238': n02107574
'239': n02107683
'240': n02107908
'241': n02108000
'242': n02108089
'243': n02108422
'244': n02108551
'245': n02108915
'246': n02109047
'247': n02109525
'248': n02109961
'249': n02110063
'250': n02110185
'251': n02110341
'252': n02110627
'253': n02110806
'254': n02110958
'255': n02111129
'256': n02111277
'257': n02111500
'258': n02111889
'259': n02112018
'260': n02112137
'261': n02112350
'262': n02112706
'263': n02113023
'264': n02113186
'265': n02113624
'266': n02113712
'267': n02113799
'268': n02113978
'269': n02114367
'270': n02114548
'271': n02114712
'272': n02114855
'273': n02115641
'274': n02115913
'275': n02116738
'276': n02117135
'277': n02119022
'278': n02119789
'279': n02120079
'280': n02120505
'281': n02123045
'282': n02123159
'283': n02123394
'284': n02123597
'285': n02124075
'286': n02125311
'287': n02127052
'288': n02128385
'289': n02128757
'290': n02128925
'291': n02129165
'292': n02129604
'293': n02130308
'294': n02132136
'295': n02133161
'296': n02134084
'297': n02134418
'298': n02137549
'299': n02138441
'300': n02165105
'301': n02165456
'302': n02167151
'303': n02168699
'304': n02169497
'305': n02172182
'306': n02174001
'307': n02177972
'308': n02190166
'309': n02206856
'310': n02219486
'311': n02226429
'312': n02229544
'313': n02231487
'314': n02233338
'315': n02236044
'316': n02256656
'317': n02259212
'318': n02264363
'319': n02268443
'320': n02268853
'321': n02276258
'322': n02277742
'323': n02279972
'324': n02280649
'325': n02281406
'326': n02281787
'327': n02317335
'328': n02319095
'329': n02321529
'330': n02325366
'331': n02326432
'332': n02328150
'333': n02342885
'334': n02346627
'335': n02356798
'336': n02361337
'337': n02363005
'338': n02364673
'339': n02389026
'340': n02391049
'341': n02395406
'342': n02396427
'343': n02397096
'344': n02398521
'345': n02403003
'346': n02408429
'347': n02410509
'348': n02412080
'349': n02415577
'350': n02417914
'351': n02422106
'352': n02422699
'353': n02423022
'354': n02437312
'355': n02437616
'356': n02441942
'357': n02442845
'358': n02443114
'359': n02443484
'360': n02444819
'361': n02445715
'362': n02447366
'363': n02454379
'364': n02457408
'365': n02480495
'366': n02480855
'367': n02481823
'368': n02483362
'369': n02483708
'370': n02484975
'371': n02486261
'372': n02486410
'373': n02487347
'374': n02488291
'375': n02488702
'376': n02489166
'377': n02490219
'378': n02492035
'379': n02492660
'380': n02493509
'381': n02493793
'382': n02494079
'383': n02497673
'384': n02500267
'385': n02504013
'386': n02504458
'387': n02509815
'388': n02510455
'389': n02514041
'390': n02526121
'391': n02536864
'392': n02606052
'393': n02607072
'394': n02640242
'395': n02641379
'396': n02643566
'397': n02655020
'398': n02666196
'399': n02667093
'400': n02669723
'401': n02672831
'402': n02676566
'403': n02687172
'404': n02690373
'405': n02692877
'406': n02699494
'407': n02701002
'408': n02704792
'409': n02708093
'410': n02727426
'411': n02730930
'412': n02747177
'413': n02749479
'414': n02769748
'415': n02776631
'416': n02777292
'417': n02782093
'418': n02783161
'419': n02786058
'420': n02787622
'421': n02788148
'422': n02790996
'423': n02791124
'424': n02791270
'425': n02793495
'426': n02794156
'427': n02795169
'428': n02797295
'429': n02799071
'430': n02802426
'431': n02804414
'432': n02804610
'433': n02807133
'434': n02808304
'435': n02808440
'436': n02814533
'437': n02814860
'438': n02815834
'439': n02817516
'440': n02823428
'441': n02823750
'442': n02825657
'443': n02834397
'444': n02835271
'445': n02837789
'446': n02840245
'447': n02841315
'448': n02843684
'449': n02859443
'450': n02860847
'451': n02865351
'452': n02869837
'453': n02870880
'454': n02871525
'455': n02877765
'456': n02879718
'457': n02883205
'458': n02892201
'459': n02892767
'460': n02894605
'461': n02895154
'462': n02906734
'463': n02909870
'464': n02910353
'465': n02916936
'466': n02917067
'467': n02927161
'468': n02930766
'469': n02939185
'470': n02948072
'471': n02950826
'472': n02951358
'473': n02951585
'474': n02963159
'475': n02965783
'476': n02966193
'477': n02966687
'478': n02971356
'479': n02974003
'480': n02977058
'481': n02978881
'482': n02979186
'483': n02980441
'484': n02981792
'485': n02988304
'486': n02992211
'487': n02992529
'488': n02999410
'489': n03000134
'490': n03000247
'491': n03000684
'492': n03014705
'493': n03016953
'494': n03017168
'495': n03018349
'496': n03026506
'497': n03028079
'498': n03032252
'499': n03041632
'500': n03042490
'501': n03045698
'502': n03047690
'503': n03062245
'504': n03063599
'505': n03063689
'506': n03065424
'507': n03075370
'508': n03085013
'509': n03089624
'510': n03095699
'511': n03100240
'512': n03109150
'513': n03110669
'514': n03124043
'515': n03124170
'516': n03125729
'517': n03126707
'518': n03127747
'519': n03127925
'520': n03131574
'521': n03133878
'522': n03134739
'523': n03141823
'524': n03146219
'525': n03160309
'526': n03179701
'527': n03180011
'528': n03187595
'529': n03188531
'530': n03196217
'531': n03197337
'532': n03201208
'533': n03207743
'534': n03207941
'535': n03208938
'536': n03216828
'537': n03218198
'538': n03220513
'539': n03223299
'540': n03240683
'541': n03249569
'542': n03250847
'543': n03255030
'544': n03259280
'545': n03271574
'546': n03272010
'547': n03272562
'548': n03290653
'549': n03291819
'550': n03297495
'551': n03314780
'552': n03325584
'553': n03337140
'554': n03344393
'555': n03345487
'556': n03347037
'557': n03355925
'558': n03372029
'559': n03376595
'560': n03379051
'561': n03384352
'562': n03388043
'563': n03388183
'564': n03388549
'565': n03393912
'566': n03394916
'567': n03400231
'568': n03404251
'569': n03417042
'570': n03424325
'571': n03425413
'572': n03443371
'573': n03444034
'574': n03445777
'575': n03445924
'576': n03447447
'577': n03447721
'578': n03450230
'579': n03452741
'580': n03457902
'581': n03459775
'582': n03461385
'583': n03467068
'584': n03476684
'585': n03476991
'586': n03478589
'587': n03481172
'588': n03482405
'589': n03483316
'590': n03485407
'591': n03485794
'592': n03492542
'593': n03494278
'594': n03495258
'595': n03496892
'596': n03498962
'597': n03527444
'598': n03529860
'599': n03530642
'600': n03532672
'601': n03534580
'602': n03535780
'603': n03538406
'604': n03544143
'605': n03584254
'606': n03584829
'607': n03590841
'608': n03594734
'609': n03594945
'610': n03595614
'611': n03598930
'612': n03599486
'613': n03602883
'614': n03617480
'615': n03623198
'616': n03627232
'617': n03630383
'618': n03633091
'619': n03637318
'620': n03642806
'621': n03649909
'622': n03657121
'623': n03658185
'624': n03661043
'625': n03662601
'626': n03666591
'627': n03670208
'628': n03673027
'629': n03676483
'630': n03680355
'631': n03690938
'632': n03691459
'633': n03692522
'634': n03697007
'635': n03706229
'636': n03709823
'637': n03710193
'638': n03710637
'639': n03710721
'640': n03717622
'641': n03720891
'642': n03721384
'643': n03724870
'644': n03729826
'645': n03733131
'646': n03733281
'647': n03733805
'648': n03742115
'649': n03743016
'650': n03759954
'651': n03761084
'652': n03763968
'653': n03764736
'654': n03769881
'655': n03770439
'656': n03770679
'657': n03773504
'658': n03775071
'659': n03775546
'660': n03776460
'661': n03777568
'662': n03777754
'663': n03781244
'664': n03782006
'665': n03785016
'666': n03786901
'667': n03787032
'668': n03788195
'669': n03788365
'670': n03791053
'671': n03792782
'672': n03792972
'673': n03793489
'674': n03794056
'675': n03796401
'676': n03803284
'677': n03804744
'678': n03814639
'679': n03814906
'680': n03825788
'681': n03832673
'682': n03837869
'683': n03838899
'684': n03840681
'685': n03841143
'686': n03843555
'687': n03854065
'688': n03857828
'689': n03866082
'690': n03868242
'691': n03868863
'692': n03871628
'693': n03873416
'694': n03874293
'695': n03874599
'696': n03876231
'697': n03877472
'698': n03877845
'699': n03884397
'700': n03887697
'701': n03888257
'702': n03888605
'703': n03891251
'704': n03891332
'705': n03895866
'706': n03899768
'707': n03902125
'708': n03903868
'709': n03908618
'710': n03908714
'711': n03916031
'712': n03920288
'713': n03924679
'714': n03929660
'715': n03929855
'716': n03930313
'717': n03930630
'718': n03933933
'719': n03935335
'720': n03937543
'721': n03938244
'722': n03942813
'723': n03944341
'724': n03947888
'725': n03950228
'726': n03954731
'727': n03956157
'728': n03958227
'729': n03961711
'730': n03967562
'731': n03970156
'732': n03976467
'733': n03976657
'734': n03977966
'735': n03980874
'736': n03982430
'737': n03983396
'738': n03991062
'739': n03992509
'740': n03995372
'741': n03998194
'742': n04004767
'743': n04005630
'744': n04008634
'745': n04009552
'746': n04019541
'747': n04023962
'748': n04026417
'749': n04033901
'750': n04033995
'751': n04037443
'752': n04039381
'753': n04040759
'754': n04041544
'755': n04044716
'756': n04049303
'757': n04065272
'758': n04067472
'759': n04069434
'760': n04070727
'761': n04074963
'762': n04081281
'763': n04086273
'764': n04090263
'765': n04099969
'766': n04111531
'767': n04116512
'768': n04118538
'769': n04118776
'770': n04120489
'771': n04125021
'772': n04127249
'773': n04131690
'774': n04133789
'775': n04136333
'776': n04141076
'777': n04141327
'778': n04141975
'779': n04146614
'780': n04147183
'781': n04149813
'782': n04152593
'783': n04153751
'784': n04154565
'785': n04162706
'786': n04179913
'787': n04192698
'788': n04200800
'789': n04201297
'790': n04204238
'791': n04204347
'792': n04208210
'793': n04209133
'794': n04209239
'795': n04228054
'796': n04229816
'797': n04235860
'798': n04238763
'799': n04239074
'800': n04243546
'801': n04251144
'802': n04252077
'803': n04252225
'804': n04254120
'805': n04254680
'806': n04254777
'807': n04258138
'808': n04259630
'809': n04263257
'810': n04264628
'811': n04265275
'812': n04266014
'813': n04270147
'814': n04273569
'815': n04275548
'816': n04277352
'817': n04285008
'818': n04286575
'819': n04296562
'820': n04310018
'821': n04311004
'822': n04311174
'823': n04317175
'824': n04325704
'825': n04326547
'826': n04328186
'827': n04330267
'828': n04332243
'829': n04335435
'830': n04336792
'831': n04344873
'832': n04346328
'833': n04347754
'834': n04350905
'835': n04355338
'836': n04355933
'837': n04356056
'838': n04357314
'839': n04366367
'840': n04367480
'841': n04370456
'842': n04371430
'843': n04371774
'844': n04372370
'845': n04376876
'846': n04380533
'847': n04389033
'848': n04392985
'849': n04398044
'850': n04399382
'851': n04404412
'852': n04409515
'853': n04417672
'854': n04418357
'855': n04423845
'856': n04428191
'857': n04429376
'858': n04435653
'859': n04442312
'860': n04443257
'861': n04447861
'862': n04456115
'863': n04458633
'864': n04461696
'865': n04462240
'866': n04465501
'867': n04467665
'868': n04476259
'869': n04479046
'870': n04482393
'871': n04483307
'872': n04485082
'873': n04486054
'874': n04487081
'875': n04487394
'876': n04493381
'877': n04501370
'878': n04505470
'879': n04507155
'880': n04509417
'881': n04515003
'882': n04517823
'883': n04522168
'884': n04523525
'885': n04525038
'886': n04525305
'887': n04532106
'888': n04532670
'889': n04536866
'890': n04540053
'891': n04542943
'892': n04548280
'893': n04548362
'894': n04550184
'895': n04552348
'896': n04553703
'897': n04554684
'898': n04557648
'899': n04560804
'900': n04562935
'901': n04579145
'902': n04579432
'903': n04584207
'904': n04589890
'905': n04590129
'906': n04591157
'907': n04591713
'908': n04592741
'909': n04596742
'910': n04597913
'911': n04599235
'912': n04604644
'913': n04606251
'914': n04612504
'915': n04613696
'916': n06359193
'917': n06596364
'918': n06785654
'919': n06794110
'920': n06874185
'921': n07248320
'922': n07565083
'923': n07579787
'924': n07583066
'925': n07584110
'926': n07590611
'927': n07613480
'928': n07614500
'929': n07615774
'930': n07684084
'931': n07693725
'932': n07695742
'933': n07697313
'934': n07697537
'935': n07711569
'936': n07714571
'937': n07714990
'938': n07715103
'939': n07716358
'940': n07716906
'941': n07717410
'942': n07717556
'943': n07718472
'944': n07718747
'945': n07720875
'946': n07730033
'947': n07734744
'948': n07742313
'949': n07745940
'950': n07747607
'951': n07749582
'952': n07753113
'953': n07753275
'954': n07753592
'955': n07754684
'956': n07760859
'957': n07768694
'958': n07802026
'959': n07831146
'960': n07836838
'961': n07860988
'962': n07871810
'963': n07873807
'964': n07875152
'965': n07880968
'966': n07892512
'967': n07920052
'968': n07930864
'969': n07932039
'970': n09193705
'971': n09229709
'972': n09246464
'973': n09256479
'974': n09288635
'975': n09332890
'976': n09399592
'977': n09421951
'978': n09428293
'979': n09468604
'980': n09472597
'981': n09835506
'982': n10148035
'983': n10565667
'984': n11879895
'985': n11939491
'986': n12057211
'987': n12144580
'988': n12267677
'989': n12620546
'990': n12768682
'991': n12985857
'992': n12998815
'993': n13037406
'994': n13040303
'995': n13044778
'996': n13052670
'997': n13054560
'998': n13133613
'999': n15075141
- name: data
dtype: binary
splits:
- name: train
num_bytes: 128798692994
num_examples: 1281167
- name: validation
num_bytes: 5027050000
num_examples: 50000
download_size: 57961329620
dataset_size: 133825742994
- config_name: I2E-MNIST
features:
- name: file_path
dtype: string
- name: label
dtype:
class_label:
names:
'0': '0'
'1': '1'
'2': '2'
'3': '3'
'4': '4'
'5': '5'
'6': '6'
'7': '7'
'8': '8'
'9': '9'
- name: data
dtype: binary
splits:
- name: train
num_bytes: 132648890
num_examples: 60000
- name: validation
num_bytes: 22098890
num_examples: 10000
download_size: 60473109
dataset_size: 154747780
- config_name: I2E-Mini-ImageNet
features:
- name: file_path
dtype: string
- name: label
dtype:
class_label:
names:
'0': n01532829
'1': n01558993
'2': n01704323
'3': n01749939
'4': n01770081
'5': n01843383
'6': n01855672
'7': n01910747
'8': n01930112
'9': n01981276
'10': n02074367
'11': n02089867
'12': n02091244
'13': n02091831
'14': n02099601
'15': n02101006
'16': n02105505
'17': n02108089
'18': n02108551
'19': n02108915
'20': n02110063
'21': n02110341
'22': n02111277
'23': n02113712
'24': n02114548
'25': n02116738
'26': n02120079
'27': n02129165
'28': n02138441
'29': n02165456
'30': n02174001
'31': n02219486
'32': n02443484
'33': n02457408
'34': n02606052
'35': n02687172
'36': n02747177
'37': n02795169
'38': n02823428
'39': n02871525
'40': n02950826
'41': n02966193
'42': n02971356
'43': n02981792
'44': n03017168
'45': n03047690
'46': n03062245
'47': n03075370
'48': n03127925
'49': n03146219
'50': n03207743
'51': n03220513
'52': n03272010
'53': n03337140
'54': n03347037
'55': n03400231
'56': n03417042
'57': n03476684
'58': n03527444
'59': n03535780
'60': n03544143
'61': n03584254
'62': n03676483
'63': n03770439
'64': n03773504
'65': n03775546
'66': n03838899
'67': n03854065
'68': n03888605
'69': n03908618
'70': n03924679
'71': n03980874
'72': n03998194
'73': n04067472
'74': n04146614
'75': n04149813
'76': n04243546
'77': n04251144
'78': n04258138
'79': n04275548
'80': n04296562
'81': n04389033
'82': n04418357
'83': n04435653
'84': n04443257
'85': n04509417
'86': n04515003
'87': n04522168
'88': n04596742
'89': n04604644
'90': n04612504
'91': n06794110
'92': n07584110
'93': n07613480
'94': n07697537
'95': n07747607
'96': n09246464
'97': n09256479
'98': n13054560
'99': n13133613
- name: data
dtype: binary
splits:
- name: train
num_bytes: 6031941884
num_examples: 60000
download_size: 2568434568
dataset_size: 6031941884
configs:
- config_name: I2E-CIFAR10
data_files:
- split: train
path: I2E-CIFAR10/train-*
- split: validation
path: I2E-CIFAR10/validation-*
- config_name: I2E-CIFAR100
data_files:
- split: train
path: I2E-CIFAR100/train-*
- split: validation
path: I2E-CIFAR100/validation-*
- config_name: I2E-Caltech101
data_files:
- split: train
path: I2E-Caltech101/train-*
- config_name: I2E-Caltech256
data_files:
- split: train
path: I2E-Caltech256/train-*
- config_name: I2E-FashionMNIST
data_files:
- split: train
path: I2E-FashionMNIST/train-*
- split: validation
path: I2E-FashionMNIST/validation-*
- config_name: I2E-ImageNet
data_files:
- split: train
path: I2E-ImageNet/train-*
- split: validation
path: I2E-ImageNet/validation-*
- config_name: I2E-MNIST
data_files:
- split: train
path: I2E-MNIST/train-*
- split: validation
path: I2E-MNIST/validation-*
- config_name: I2E-Mini-ImageNet
data_files:
- split: train
path: I2E-Mini-ImageNet/train-*
license: mit
task_categories:
- image-classification
- video-classification
tags:
- neuromorphic
- snn
- spiking neural networks
- event
- dvs
- biology
- pytorch
- imagenet
- cifar10
- cifar100
- caltech101
- caltech256
- mnist
- fashionmnist
- mini-imagenet
pretty_name: I2E Neuromorphic Dataset
language:
- en
## Introduction
This repository hosts the I2E-Datasets, a comprehensive suite of neuromorphic datasets generated using the I2E (Image-to-Event) framework. This work has been accepted for Oral Presentation at AAAI 2026.
I2E bridges the data-scarcity gap in Neuromorphic Computing and Spiking Neural Networks (SNNs). By simulating microsaccadic eye movements via highly parallelized convolution, it converts static images into high-fidelity event streams in real time (>300x faster than prior methods).
## Visualization
The following comparisons illustrate the high-fidelity conversion from static RGB images to dynamic event streams using I2E.
More visualization comparisons can be found in Visualization.md.
## Dataset Catalog
We provide a comprehensive collection of standard benchmarks converted into event streams via the I2E algorithm.
### 1. Standard Benchmarks (Classification)
| Config Name | Original Source | Resolution $(H, W)$ | I2E Ratio | Event Rate | Samples (Train/Val) |
|---|---|---|---|---|---|
| `I2E-CIFAR10` | CIFAR-10 | 128 x 128 | 0.07 | 5.86% | 50k / 10k |
| `I2E-CIFAR100` | CIFAR-100 | 128 x 128 | 0.07 | 5.76% | 50k / 10k |
| `I2E-ImageNet` | ILSVRC2012 | 224 x 224 | 0.12 | 6.66% | 1.28M / 50k |
### 2. Transfer Learning & Fine-grained
| Config Name | Original Source | Resolution $(H, W)$ | I2E Ratio | Event Rate | Samples |
|---|---|---|---|---|---|
| `I2E-Caltech101` | Caltech-101 | 224 x 224 | 0.12 | 6.25% | 8.677k |
| `I2E-Caltech256` | Caltech-256 | 224 x 224 | 0.12 | 6.04% | 30.607k |
| `I2E-Mini-ImageNet` | Mini-ImageNet | 224 x 224 | 0.12 | 6.65% | 60k |
### 3. Small Scale / Toy
| Config Name | Original Source | Resolution $(H, W)$ | I2E Ratio | Event Rate | Samples (Train/Val) |
|---|---|---|---|---|---|
| `I2E-MNIST` | MNIST | 32 x 32 | 0.10 | 9.56% | 60k / 10k |
| `I2E-FashionMNIST` | Fashion-MNIST | 32 x 32 | 0.15 | 10.76% | 60k / 10k |
**Coming Soon:** Object Detection and Semantic Segmentation datasets.
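To browse the available configurations programmatically, something like the following should work (a minimal sketch using the standard `datasets` API; the repository ID `UESTC-BICS/I2E` is taken from the loading code in the Usage section below):

```python
from datasets import get_dataset_config_names, load_dataset

# List every I2E configuration hosted in this repository
configs = get_dataset_config_names('UESTC-BICS/I2E')
print(configs)  # e.g. ['I2E-CIFAR10', 'I2E-CIFAR100', ...]

# Load one config/split and inspect a single raw row
ds = load_dataset('UESTC-BICS/I2E', 'I2E-CIFAR10', split='validation')
row = ds[0]
print(row['file_path'], row['label'], len(row['data']))  # source path, class index, packed-event byte count
```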
## Preprocessing Protocol
To ensure reproducibility, we specify the exact pre-processing pipeline applied to the static images before I2E conversion.
The `(H, W)` in the code below corresponds to the "Resolution" column in the Dataset Catalog above.
```python
import torch
from torchvision.transforms import v2

# Standard pre-processing pipeline used for I2E generation
transform_train = v2.Compose([
    # Ensure 3-channel RGB (crucial for grayscale datasets like MNIST)
    v2.Lambda(lambda x: x.convert('RGB')),
    v2.PILToTensor(),
    v2.Resize((H, W), interpolation=v2.InterpolationMode.BICUBIC),
    v2.ToDtype(torch.float32, scale=True),
])
```
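For concreteness, here is a minimal sketch of applying the pipeline to one image, assuming `H, W` were set to the catalog resolution (128 x 128 for I2E-CIFAR10) before building `transform_train`; the file name is a hypothetical placeholder:

```python
from PIL import Image

# Assumes H, W = 128, 128 (I2E-CIFAR10 resolution) when transform_train was built
img = Image.open('example.png')   # hypothetical input image
x = transform_train(img)          # float32 tensor of shape [3, 128, 128], values scaled to [0, 1]
print(x.shape, x.dtype)
```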
## Usage
### Quick Start
You do not need to download any extra scripts. Just copy the code below. It handles the binary unpacking (converting Parquet bytes to PyTorch Tensors) automatically.
```python
import io
import torch
import numpy as np
from datasets import load_dataset
from torch.utils.data import Dataset, DataLoader


# ==================================================================
# 1. Core Decoding Function (handles the binary packing)
# ==================================================================
def unpack_event_data(item, use_io=True):
    """
    Decodes the custom binary format:
    Header (8 bytes) -> Shape (T, C, H, W) -> Body (packed bits)
    """
    if use_io:
        with io.BytesIO(item['data']) as f:
            raw_data = np.load(f)
    else:
        raw_data = np.load(item)

    # Parse header: the first 8 bytes hold four uint16 shape values
    header_size = 4 * 2
    shape_header = raw_data[:header_size].view(np.uint16)
    original_shape = tuple(shape_header)  # (T, C, H, W)

    # Parse body and unpack the bit-packed events
    packed_body = raw_data[header_size:]
    unpacked = np.unpackbits(packed_body)

    # Keep only the valid bits (drop byte-alignment padding)
    num_elements = np.prod(original_shape)
    event_flat = unpacked[:num_elements]
    event_data = event_flat.reshape(original_shape).astype(np.float32).copy()
    return torch.from_numpy(event_data)


# ==================================================================
# 2. Dataset Wrapper
# ==================================================================
class I2E_Dataset(Dataset):
    def __init__(self, cache_dir, config_name, split='train', transform=None, target_transform=None):
        print(f"Loading {config_name} [{split}] from Hugging Face...")
        self.ds = load_dataset('UESTC-BICS/I2E', config_name, split=split,
                               cache_dir=cache_dir, keep_in_memory=False)
        self.transform = transform
        self.target_transform = target_transform

    def __len__(self):
        return len(self.ds)

    def __getitem__(self, idx):
        item = self.ds[idx]
        event = unpack_event_data(item)
        label = item['label']
        if self.transform:
            event = self.transform(event)
        if self.target_transform:
            label = self.target_transform(label)
        return event, label


# ==================================================================
# 3. Run Example
# ==================================================================
if __name__ == "__main__":
    import os
    os.environ['HF_ENDPOINT'] = 'https://hf-mirror.com'  # Optional: HF mirror server for some regions

    DATASET_NAME = 'I2E-CIFAR10'         # Choose your config: 'I2E-CIFAR10', 'I2E-ImageNet', etc.
    CACHE_PATH = 'Your cache path here'  # e.g., './hf_datasets_cache/'

    train_dataset = I2E_Dataset(CACHE_PATH, DATASET_NAME, split='train')
    val_dataset = I2E_Dataset(CACHE_PATH, DATASET_NAME, split='validation')

    train_loader = DataLoader(train_dataset, batch_size=32, shuffle=True,
                              num_workers=32, persistent_workers=True)
    val_loader = DataLoader(val_dataset, batch_size=32, shuffle=False,
                            num_workers=32, persistent_workers=True)

    events, labels = next(iter(train_loader))
    print(f"Loaded Batch Shape: {events.shape}")  # Expect: [32, T, 2, H, W]
    print(f"Labels: {labels}")
```
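Each decoded sample is a dense binary tensor of shape `[T, 2, H, W]`. If your training loop expects a fixed, smaller number of time steps, one option (a sketch under that assumption, not part of the official pipeline) is to pass a transform that accumulates the `T` slices into frames:

```python
import torch

def to_frames(event, num_frames=10):
    """Accumulate a [T, 2, H, W] event tensor into num_frames summed frames."""
    chunks = torch.chunk(event, num_frames, dim=0)       # split along the time axis
    return torch.stack([c.sum(dim=0) for c in chunks])   # -> [num_frames, 2, H, W]

# Reusing I2E_Dataset, CACHE_PATH and DATASET_NAME from the example above
frame_dataset = I2E_Dataset(CACHE_PATH, DATASET_NAME, split='train',
                            transform=lambda e: to_frames(e, num_frames=10))
```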
## Results (SOTA)
Our I2E pre-training sets a new state of the art for Sim-to-Real transfer on CIFAR10-DVS.
| Dataset | Architecture | Method | Top-1 Acc |
|---|---|---|---|
| CIFAR10-DVS (Real) | MS-ResNet18 | Baseline | 65.6% |
| CIFAR10-DVS (Real) | MS-ResNet18 | Transfer-I | 83.1% |
| CIFAR10-DVS (Real) | MS-ResNet18 | Transfer-II (Sim-to-Real) | 92.5% |
For full results and model weights, please visit our GitHub Repo.
## Citation
If you find this work or the models useful, please cite our AAAI 2026 paper:
```bibtex
@article{ma2025i2e,
  title={I2E: Real-Time Image-to-Event Conversion for High-Performance Spiking Neural Networks},
  author={Ma, Ruichen and Meng, Liwei and Qiao, Guanchao and Ning, Ning and Liu, Yang and Hu, Shaogang},
  journal={arXiv preprint arXiv:2511.08065},
  year={2025}
}
```