Detected Pickle imports (237)
- "__torch__.torch.nn.modules.linear.___torch_mangle_42.Linear",
- "__torch__.multimodal.model.multimodal_transformer.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_89.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_134.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_32.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_123.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_10.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_124.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_198.QuickGELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_190.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_214.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_98.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_19.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_1.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_92.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_29.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_56.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.container.___torch_mangle_200.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_103.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_36.Sequential",
- "__torch__.torch.nn.modules.activation.___torch_mangle_22.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_132.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_58.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_156.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_9.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_157.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_162.QuickGELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_199.Linear",
- "__torch__.torch.nn.modules.linear._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_34.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_43.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_11.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.activation.___torch_mangle_49.MultiheadAttention",
- "__torch__.torch.nn.modules.container.___torch_mangle_18.Sequential",
- "__torch__.torch.nn.modules.activation.___torch_mangle_85.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_188.Linear",
- "__torch__.torch.nn.modules.container.___torch_mangle_99.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.Transformer",
- "__torch__.torch.nn.modules.activation.___torch_mangle_150.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_189.QuickGELU",
- "__torch__.torch.nn.modules.container.___torch_mangle_191.Sequential",
- "__torch__.torch.nn.modules.activation.___torch_mangle_4.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_177.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_171.QuickGELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_80.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_12._LinearWithBias",
- "__torch__.torch.nn.modules.linear.___torch_mangle_172.Linear",
- "__torch__.torch.nn.modules.sparse.Embedding",
- "__torch__.torch.nn.modules.linear.___torch_mangle_116.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_111.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_46.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_24.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_82.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_97.QuickGELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_136.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_183.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_211.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_62.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_112.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.container.___torch_mangle_155.Sequential",
- "__torch__.torch.nn.modules.activation.___torch_mangle_195.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_170.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_7.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.Multimodal",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_68.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_164.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_151.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_169.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_54.Sequential",
- "__torch__.torch.nn.modules.activation.___torch_mangle_159.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_39._LinearWithBias",
- "__torch__.torch.nn.modules.container.___torch_mangle_212.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_139.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.container.___torch_mangle_137.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_138.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_76.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_41.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_67.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_130.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_93._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_47.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_158._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_142.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_45.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_74.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_87.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_184.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_20.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_206.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_78.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_16.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_55.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_84._LinearWithBias",
- "__torch__.torch.nn.modules.container.___torch_mangle_110.Sequential",
- "__torch__.torch.nn.modules.activation.___torch_mangle_168.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_115.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_70.QuickGELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_104._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_14.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_48._LinearWithBias",
- "__torch__.torch.nn.modules.container.___torch_mangle_27.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_60.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_66._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_91.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_31.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_122._LinearWithBias",
- "__torch__.torch.nn.modules.activation.MultiheadAttention",
- "__torch__.torch.nn.modules.container.___torch_mangle_209.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_17.Linear",
- "__torch__.torch.nn.modules.container.___torch_mangle_90.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_179.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_160.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_102.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_175.ResidualAttentionBlock",
- "torch.HalfStorage",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_126.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.VisualTransformer",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_73.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_50.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_127.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_52.QuickGELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_194._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_178.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_13.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_213.Transformer",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_205.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_3._LinearWithBias",
- "__torch__.torch.nn.modules.linear.___torch_mangle_143.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_185._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_37.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_33.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_21._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_77.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_146.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_109.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_59.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_83.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_152.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_163.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_176._LinearWithBias",
- "__torch__.torch.nn.modules.linear.___torch_mangle_140._LinearWithBias",
- "__torch__.torch.nn.modules.linear.___torch_mangle_131._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_2.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_180.QuickGELU",
- "__torch__.torch.nn.modules.activation.___torch_mangle_105.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_192.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_181.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_201.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_30._LinearWithBias",
- "torch.FloatStorage",
- "__torch__.torch.nn.modules.activation.___torch_mangle_40.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_69.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_203._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_135.QuickGELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_208.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_64.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_197.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_113._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_193.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.container.___torch_mangle_72.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_147.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_149._LinearWithBias",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_174.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_202.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_38.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_187.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_186.MultiheadAttention",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_153.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_65.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_100.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_204.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_94.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_118.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_28.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_154.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_165.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_95.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_207.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_133.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_148.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_117.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_79.QuickGELU",
- "__torch__.torch.nn.modules.container.___torch_mangle_173.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_101.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.linear.___torch_mangle_15.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_166.ResidualAttentionBlock",
- "__torch__.torch.nn.modules.container.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_26.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_61.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_129.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_23.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_51.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_96.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_107.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_121.ResidualAttentionBlock",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_210.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_145.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_35.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_53.Linear",
- "__torch__.torch.nn.modules.container.___torch_mangle_119.Sequential",
- "__torch__.torch.nn.modules.container.___torch_mangle_128.Sequential",
- "__torch__.torch.nn.modules.activation.___torch_mangle_114.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_141.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_167._LinearWithBias",
- "__torch__.torch.nn.modules.linear.___torch_mangle_125.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_161.Linear",
- "collections.OrderedDict",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_5.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_106.LayerNorm",
- "__torch__.torch.nn.modules.container.___torch_mangle_81.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_144.QuickGELU",
- "__torch__.torch.nn.modules.conv.Conv2d",
- "torch.LongStorage",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_0.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_6.Linear",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_196.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_108.QuickGELU",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_120.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_57._LinearWithBias",
- "__torch__.torch.nn.modules.container.___torch_mangle_182.Sequential",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_25.QuickGELU",
- "__torch__.torch.nn.modules.linear.___torch_mangle_8.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_44.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_75._LinearWithBias",
- "torch._utils._rebuild_tensor_v2",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_86.LayerNorm",
- "__torch__.multimodal.model.multimodal_transformer.___torch_mangle_88.QuickGELU",
- "__torch__.torch.nn.modules.container.___torch_mangle_63.Sequential",
- "__torch__.torch.nn.modules.linear.___torch_mangle_71.Linear",
- "torch.HalfStorage",
- "collections.OrderedDict",
- "torch.DoubleStorage",
- "torch._utils._rebuild_tensor_v2",
- "torch.LongStorage"
Git LFS Details
- SHA256: 5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f
- Pointer size: 134 Bytes
- Size of remote file: 351 MB
Git Large File Storage (LFS) replaces large files with text pointers inside Git, while storing the file contents on a remote server.
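After downloading the 351 MB file, the SHA256 listed above can be used to check integrity. A minimal sketch in Python, assuming the file was saved locally as model.pt (substitute the actual path):

```python
# Verify a downloaded file against the SHA256 from the Git LFS details above.
import hashlib

EXPECTED_SHA256 = "5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f"

def sha256_of(path, chunk_size=1 << 20):
    """Stream the file in 1 MiB chunks so the 351 MB download is not read into memory at once."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

if __name__ == "__main__":
    digest = sha256_of("model.pt")  # hypothetical local filename
    print("OK" if digest == EXPECTED_SHA256 else f"Mismatch: {digest}")
```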