NeuML/txtai-hfposts · Sentence Similarity
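For readers who want to work with these rows programmatically, here is a minimal, hedged sketch of how the dataset could be loaded and a post's plain text reconstructed from its structured `content` tokens (the column schema and sample rows follow below). The split name `train` is an assumption based only on this excerpt, not on the dataset card, and `render` is a hypothetical helper; in the sample rows shown here, concatenating each token's `raw` field reproduces the `rawContent` column, but that may not hold for every row.

```python
# Minimal sketch (assumptions noted): load NeuML/txtai-hfposts and rebuild a
# post's text from its structured `content` tokens.
from datasets import load_dataset

# "train" split name is an assumption; adjust if the dataset uses another split.
ds = load_dataset("NeuML/txtai-hfposts", split="train")

row = ds[0]
print(row["slug"], row["publishedAt"], row["url"])


def render(tokens):
    """Concatenate the `raw` field of every content token.

    In the sample rows below, joining `raw` across text, new_line, link,
    mention, and resource tokens reproduces the `rawContent` column.
    """
    return "".join(tok["raw"] for tok in tokens)


print(render(row["content"]))  # expected to match row["rawContent"] for the sample rows

# Mentions carry the handle in the `user` field (e.g. "clefourrier" in the rows below).
mentions = [tok["user"] for tok in row["content"] if tok["type"] == "mention"]
print(mentions)
```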
slug (string, lengths 15-15) | content (list, lengths 1-129) | rawContent (string, lengths 1-2k) | author (dict) | attachments (list, lengths 0-49) | mentions (list, lengths 0-49) | reactions (list, lengths 0-12) | publishedAt (string, lengths 24-24) | updatedAt (string, lengths 24-24) | commentators (list, lengths 0-52) | url (string, lengths 25-46) | totalUniqueImpressions (int64, 1-42.1k, nullable ⌀) | numComments (int64, 0-621) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
415480285355699 | [
{
"type": "text",
"value": "anychat",
"raw": "anychat",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "supports chatgpt, gemini, perplexity, claude, meta llama, grok all in one app",
"raw": "supports chatgpt, gemini, perplexity, claude, meta llama, grok all in one app",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "try it out there: ",
"raw": "try it out there: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./spaces/akhaliq/anychat",
"href": null,
"resource": {
"type": "space",
"id": "akhaliq/anychat",
"discussionNum": null
},
"url": "https://huggingface.co./spaces/akhaliq/anychat",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | anychat
supports chatgpt, gemini, perplexity, claude, meta llama, grok all in one app
try it out there: https://huggingface.co./spaces/akhaliq/anychat
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg",
"fullname": "AK",
"name": "akhaliq",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 5205,
"isFollowing": false
} | [] | [] | [] | 2024-11-24T15:39:51.000Z | 2024-11-24T15:39:51.254Z | [] | /posts/akhaliq/415480285355699 | null | 0 |
573286768557034 | [
{
"type": "text",
"value": "Interesting long read from ",
"raw": "Interesting long read from ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@evanmiller-anthropic",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "evanmiller-anthropic",
"label": null,
"lang": null
},
{
"type": "text",
"value": " on having a better founded statistical approach to Language Model Evaluations:",
"raw": " on having a better founded statistical approach to Language Model Evaluations:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.anthropic.com/research/statistical-approach-to-model-evals",
"href": "https://www.anthropic.com/research/statistical-approach-to-model-evals",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Worth a read if you're into LLM evaluations!",
"raw": "Worth a read if you're into LLM evaluations!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Cc ",
"raw": "Cc ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@clefourrier",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "clefourrier",
"label": null,
"lang": null
}
] | Interesting long read from @evanmiller-anthropic on having a better founded statistical approach to Language Model Evaluations:
https://www.anthropic.com/research/statistical-approach-to-model-evals
Worth a read if you're into LLM evaluations!
Cc @clefourrier | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1583857746553-5df7e9e5da6d0311fd3d53f9.jpeg",
"fullname": "Thomas Wolf",
"name": "thomwolf",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 704,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5df7e9e5da6d0311fd3d53f9/UOyX5evzJg2CVMd8xoqnb.png"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1644340617257-noauth.png",
"fullname": "Clémentine Fourrier",
"name": "clefourrier",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 459
},
{
"avatarUrl": "/avatars/04591248ad3ace7b5f1122ecddc7efe8.svg",
"fullname": "Evan Miller",
"name": "evanmiller-anthropic",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1
}
] | [] | 2024-11-24T15:29:59.000Z | 2024-11-24T15:31:35.045Z | [] | /posts/thomwolf/573286768557034 | null | 0 |
454790754502988 | [
{
"type": "text",
"value": "For those who want to try out the new ",
"raw": "For those who want to try out the new ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./black-forest-labs/FLUX.1-Redux-dev",
"href": null,
"resource": {
"type": "model",
"id": "black-forest-labs/FLUX.1-Redux-dev",
"discussionNum": null
},
"url": "https://huggingface.co./black-forest-labs/FLUX.1-Redux-dev",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can do this from my latest spaces ",
"raw": "You can do this from my latest spaces ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./spaces/MohamedRashad/Flux-Redux",
"href": null,
"resource": {
"type": "space",
"id": "MohamedRashad/Flux-Redux",
"discussionNum": null
},
"url": "https://huggingface.co./spaces/MohamedRashad/Flux-Redux",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | For those who want to try out the new https://huggingface.co./black-forest-labs/FLUX.1-Redux-dev
You can do this from my latest spaces https://huggingface.co./spaces/MohamedRashad/Flux-Redux | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg",
"fullname": "Mohamed Rashad",
"name": "MohamedRashad",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 141,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-24T14:03:43.000Z | 2024-11-24T14:03:43.928Z | [] | /posts/MohamedRashad/454790754502988 | 35 | 0 |
195659728745561 | [
{
"type": "text",
"value": "Estamos tratando de unir, aunar fuerzas y cooperar en experimentos de IA en América Latina. Te invito a unirte a nosotros en «LatinAI». La idea es compartir y organizar espacios, modelos y conjuntos de datos en español/portugués/guaraní/mapuche o ingles para el desarrollo en América Latina.",
"raw": "Estamos tratando de unir, aunar fuerzas y cooperar en experimentos de IA en América Latina. Te invito a unirte a nosotros en «LatinAI». La idea es compartir y organizar espacios, modelos y conjuntos de datos en español/portugués/guaraní/mapuche o ingles para el desarrollo en América Latina.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Siéntete libre de unirte a la organización : ",
"raw": "Siéntete libre de unirte a la organización : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./LatinAI",
"href": "https://huggingface.co./LatinAI",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "---",
"raw": "---",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We are trying to unite, join forces and cooperate in AI experiments in Latin America. We invite you to join us in “LatinAI”. The idea is to share and organize spaces, models and datasets in Spanish/Portuguese/Guarani/Mapuche or English for development in Latin America.",
"raw": "We are trying to unite, join forces and cooperate in AI experiments in Latin America. We invite you to join us in “LatinAI”. The idea is to share and organize spaces, models and datasets in Spanish/Portuguese/Guarani/Mapuche or English for development in Latin America.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Feel free to join the organization : ",
"raw": "Feel free to join the organization : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./LatinAI",
"href": "https://huggingface.co./LatinAI",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Estamos tratando de unir, aunar fuerzas y cooperar en experimentos de IA en América Latina. Te invito a unirte a nosotros en «LatinAI». La idea es compartir y organizar espacios, modelos y conjuntos de datos en español/portugués/guaraní/mapuche o ingles para el desarrollo en América Latina.
Siéntete libre de unirte a la organización : https://huggingface.co./LatinAI
---
We are trying to unite, join forces and cooperate in AI experiments in Latin America. We invite you to join us in “LatinAI”. The idea is to share and organize spaces, models and datasets in Spanish/Portuguese/Guarani/Mapuche or English for development in Latin America.
Feel free to join the organization : https://huggingface.co./LatinAI | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/65665c2af450504854d60806/_YXDkUoXTrIdPFf94rRh0.jpeg",
"fullname": "Ramon Mayor Martins",
"name": "rmayormartins",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 22,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-24T13:43:00.000Z | 2024-11-24T13:43:00.808Z | [] | /posts/rmayormartins/195659728745561 | 49 | 0 |
418420597611798 | [
{
"type": "text",
"value": "Sorry, I just cannot get the hype behind F5 TTS. It has now gathered a thousand votes in the TTS Arena fork and **has remained in #8 spot** in the _mostly_ Open TTS adversaries.",
"raw": "Sorry, I just cannot get the hype behind F5 TTS. It has now gathered a thousand votes in the TTS Arena fork and **has remained in #8 spot** in the _mostly_ Open TTS adversaries.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The voice sample used is the same as XTTS. F5 has so far been unstable, being unemotional/monotone/depressed and mispronouncing words (_awestruck_).",
"raw": "The voice sample used is the same as XTTS. F5 has so far been unstable, being unemotional/monotone/depressed and mispronouncing words (_awestruck_).",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "If you have suggestions please give feedback in the following thread:",
"raw": "If you have suggestions please give feedback in the following thread:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./spaces/mrfakename/E2-F5-TTS/discussions/32",
"href": null,
"resource": {
"type": "space",
"id": "mrfakename/E2-F5-TTS",
"discussionNum": 32
},
"url": "https://huggingface.co./spaces/mrfakename/E2-F5-TTS/discussions/32",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Sorry, I just cannot get the hype behind F5 TTS. It has now gathered a thousand votes in the TTS Arena fork and **has remained in #8 spot** in the _mostly_ Open TTS adversaries.
The voice sample used is the same as XTTS. F5 has so far been unstable, being unemotional/monotone/depressed and mispronouncing words (_awestruck_).
If you have suggestions please give feedback in the following thread:
https://huggingface.co./spaces/mrfakename/E2-F5-TTS/discussions/32 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63d52e0c4e5642795617f668/ztXLrdFz3gkUJUIIQXfHo.png",
"fullname": "Yanis L",
"name": "Pendrokar",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 15,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/63d52e0c4e5642795617f668/ZDVB0mKa7SNvAQ1xcGEPH.png"
}
] | [] | [
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-24T11:25:35.000Z | 2024-11-24T11:25:35.816Z | [] | /posts/Pendrokar/418420597611798 | 94 | 0 |
319625675629271 | [
{
"type": "text",
"value": "Maybe that post I showed the other day with my Hyperbolic Embeddings getting to perfect loss with RAdam was a one-time fluke, bad test dataset, etc.? Anotha' one! I gave it a test set a PhD student would struggle with. This model is a bit more souped up. Major callouts of the model: High Dimensional Encoding (HDC), Hyperbolic Embeddings, Entropix. Link to the Colab Notebook: ",
"raw": "Maybe that post I showed the other day with my Hyperbolic Embeddings getting to perfect loss with RAdam was a one-time fluke, bad test dataset, etc.? Anotha' one! I gave it a test set a PhD student would struggle with. This model is a bit more souped up. Major callouts of the model: High Dimensional Encoding (HDC), Hyperbolic Embeddings, Entropix. Link to the Colab Notebook: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://colab.research.google.com/drive/1mS-uxhufx-h7eZXL0ZwPMAAXHqSeGZxX?usp=sharing",
"href": "https://colab.research.google.com/drive/1mS-uxhufx-h7eZXL0ZwPMAAXHqSeGZxX?usp=sharing",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Maybe that post I showed the other day with my Hyperbolic Embeddings getting to perfect loss with RAdam was a one-time fluke, bad test dataset, etc.? Anotha' one! I gave it a test set a PhD student would struggle with. This model is a bit more souped up. Major callouts of the model: High Dimensional Encoding (HDC), Hyperbolic Embeddings, Entropix. Link to the Colab Notebook: https://colab.research.google.com/drive/1mS-uxhufx-h7eZXL0ZwPMAAXHqSeGZxX?usp=sharing | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png",
"fullname": "Richard A Aragon",
"name": "TuringsSolutions",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 146,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64274b69ba6cef0a6ebb0fd6/z_vgKFqgTyfz3LxuffCUB.png"
}
] | [] | [] | 2024-11-24T08:53:52.000Z | 2024-11-24T09:35:35.833Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg",
"fullname": "Firstname Lastname",
"name": "takeraparterer",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 29,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png",
"fullname": "Richard A Aragon",
"name": "TuringsSolutions",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 146,
"isFollowing": false
}
] | /posts/TuringsSolutions/319625675629271 | 143 | 4 |
102132857217055 | [
{
"type": "text",
"value": "I created bodybuilder and playful AI this week. Try them!",
"raw": "I created bodybuilder and playful AI this week. Try them!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./migueldeguzmandev/bodybuilder_ai",
"href": null,
"resource": {
"type": "model",
"id": "migueldeguzmandev/bodybuilder_ai",
"discussionNum": null
},
"url": "https://huggingface.co./migueldeguzmandev/bodybuilder_ai",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./migueldeguzmandev/playful_ai",
"href": null,
"resource": {
"type": "model",
"id": "migueldeguzmandev/playful_ai",
"discussionNum": null
},
"url": "https://huggingface.co./migueldeguzmandev/playful_ai",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | I created bodybuilder and playful AI this week. Try them!
https://huggingface.co./migueldeguzmandev/bodybuilder_ai
https://huggingface.co./migueldeguzmandev/playful_ai | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6436066c76dbfd731bce1a44/6Kh5xTS2vMgmzJ-M-pqw7.png",
"fullname": "Miguelito De Guzman",
"name": "migueldeguzmandev",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-24T08:45:15.000Z | 2024-11-24T09:24:00.859Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6436066c76dbfd731bce1a44/6Kh5xTS2vMgmzJ-M-pqw7.png",
"fullname": "Miguelito De Guzman",
"name": "migueldeguzmandev",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
}
] | /posts/migueldeguzmandev/102132857217055 | 119 | 1 |
538076072707429 | [
{
"type": "text",
"value": "Using AI to teach English as a Foreign Language? EFL teachers often have busy schedules, variable class sizes, and unexpected cancellations. Introducting VocabSova: ",
"raw": "Using AI to teach English as a Foreign Language? EFL teachers often have busy schedules, variable class sizes, and unexpected cancellations. Introducting VocabSova: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./spaces/ZennyKenny/VocabSova",
"href": null,
"resource": {
"type": "space",
"id": "ZennyKenny/VocabSova",
"discussionNum": null
},
"url": "https://huggingface.co./spaces/ZennyKenny/VocabSova",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "VocabSova is a simple chatbot interface that helps teachers create topical vocabulary lists, custom worksheets using that vocabulary, and group activities on a defined theme for a specific English-speaking level (according to CEFR international standards).",
"raw": "VocabSova is a simple chatbot interface that helps teachers create topical vocabulary lists, custom worksheets using that vocabulary, and group activities on a defined theme for a specific English-speaking level (according to CEFR international standards).",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "There is a great use case for AI in nearly every field, and language learning is a particularly apt domain in my opinion. VocabSova is in active development during its Alpha release, all feedback welcome.",
"raw": "There is a great use case for AI in nearly every field, and language learning is a particularly apt domain in my opinion. VocabSova is in active development during its Alpha release, all feedback welcome.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Using AI to teach English as a Foreign Language? EFL teachers often have busy schedules, variable class sizes, and unexpected cancellations. Introducting VocabSova: https://huggingface.co./spaces/ZennyKenny/VocabSova
VocabSova is a simple chatbot interface that helps teachers create topical vocabulary lists, custom worksheets using that vocabulary, and group activities on a defined theme for a specific English-speaking level (according to CEFR international standards).
There is a great use case for AI in nearly every field, and language learning is a particularly apt domain in my opinion. VocabSova is in active development during its Alpha release, all feedback welcome. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/JZh4lrjFueJZVqugjoloP.jpeg",
"fullname": "Kenneth Hamilton",
"name": "ZennyKenny",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 33,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-24T04:48:41.000Z | 2024-11-24T04:48:41.173Z | [] | /posts/ZennyKenny/538076072707429 | 209 | 0 |
117573628010199 | [
{
"type": "text",
"value": "Good folks from ",
"raw": "Good folks from ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@amazon",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "amazon",
"label": null,
"lang": null
},
{
"type": "text",
"value": ", ",
"raw": ", ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@Stanford",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "Stanford",
"label": null,
"lang": null
},
{
"type": "text",
"value": ", and other great institutions have released “A Comprehensive Survey of Hallucination Mitigation Techniques in Large Language Models!”",
"raw": ", and other great institutions have released “A Comprehensive Survey of Hallucination Mitigation Techniques in Large Language Models!”",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "This comprehensive survey examines over 32 cutting-edge techniques to combat hallucination in Large Language Models (LLMs). As LLMs become increasingly integral to our daily operations, addressing their tendency to generate ungrounded content is crucial.",
"raw": "This comprehensive survey examines over 32 cutting-edge techniques to combat hallucination in Large Language Models (LLMs). As LLMs become increasingly integral to our daily operations, addressing their tendency to generate ungrounded content is crucial.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Retrieval-Augmented Generation (RAG) Innovations:",
"raw": "Retrieval-Augmented Generation (RAG) Innovations:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Pre-generation retrieval using LLM-Augmenter with Plug-and-Play modules",
"raw": "- Pre-generation retrieval using LLM-Augmenter with Plug-and-Play modules",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Real-time verification through the EVER framework implementing three-stage validation",
"raw": "- Real-time verification through the EVER framework implementing three-stage validation",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Post-generation refinement via the RARR system for automated attribution",
"raw": "- Post-generation refinement via the RARR system for automated attribution",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Advanced Decoding Strategies:",
"raw": "Advanced Decoding Strategies:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Context-Aware Decoding (CAD) utilizing contrastive output distribution",
"raw": "- Context-Aware Decoding (CAD) utilizing contrastive output distribution",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- DoLa's innovative approach of contrasting logit differences between transformer layers",
"raw": "- DoLa's innovative approach of contrasting logit differences between transformer layers",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Knowledge Integration Methods:",
"raw": "Knowledge Integration Methods:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- The RHO framework leveraging entity representations and relation predicates",
"raw": "- The RHO framework leveraging entity representations and relation predicates",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- FLEEK's intelligent fact verification system using curated knowledge graphs",
"raw": "- FLEEK's intelligent fact verification system using curated knowledge graphs",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Novel Loss Functions:",
"raw": "Novel Loss Functions:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Text Hallucination Regularization (THR) derived from mutual information",
"raw": "- Text Hallucination Regularization (THR) derived from mutual information",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- The mFACT metric for evaluating faithfulness in multilingual contexts",
"raw": "- The mFACT metric for evaluating faithfulness in multilingual contexts",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "This research provides a structured taxonomy for categorizing these mitigation techniques, offering valuable insights for practitioners and researchers working with LLMs.",
"raw": "This research provides a structured taxonomy for categorizing these mitigation techniques, offering valuable insights for practitioners and researchers working with LLMs.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "What are your thoughts on hallucination mitigation in LLMs?",
"raw": "What are your thoughts on hallucination mitigation in LLMs?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Good folks from @amazon, @Stanford, and other great institutions have released “A Comprehensive Survey of Hallucination Mitigation Techniques in Large Language Models!”
This comprehensive survey examines over 32 cutting-edge techniques to combat hallucination in Large Language Models (LLMs). As LLMs become increasingly integral to our daily operations, addressing their tendency to generate ungrounded content is crucial.
Retrieval-Augmented Generation (RAG) Innovations:
- Pre-generation retrieval using LLM-Augmenter with Plug-and-Play modules
- Real-time verification through the EVER framework implementing three-stage validation
- Post-generation refinement via the RARR system for automated attribution
Advanced Decoding Strategies:
- Context-Aware Decoding (CAD) utilizing contrastive output distribution
- DoLa's innovative approach of contrasting logit differences between transformer layers
Knowledge Integration Methods:
- The RHO framework leveraging entity representations and relation predicates
- FLEEK's intelligent fact verification system using curated knowledge graphs
Novel Loss Functions:
- Text Hallucination Regularization (THR) derived from mutual information
- The mFACT metric for evaluating faithfulness in multilingual contexts
This research provides a structured taxonomy for categorizing these mitigation techniques, offering valuable insights for practitioners and researchers working with LLMs.
What are your thoughts on hallucination mitigation in LLMs? | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png",
"fullname": "Kuldeep Singh Sidhu",
"name": "singhsidhukuldeep",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 219,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/k5RoT0IOOG9erjBWJVLOj.jpeg"
}
] | [] | [
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-24T02:45:02.000Z | 2024-11-24T14:55:57.726Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/bGo60bmJrLTD0CIERF6GP.png",
"fullname": "I M Weasel",
"name": "imw34531",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 2,
"isFollowing": false
}
] | /posts/singhsidhukuldeep/117573628010199 | 216 | 1 |
378209596329028 | [
{
"type": "text",
"value": "Release a new virtual tryon flux fill finetuning model. Try it here. ",
"raw": "Release a new virtual tryon flux fill finetuning model. Try it here. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./xiaozaa/catvton-flux-alpha",
"href": null,
"resource": {
"type": "model",
"id": "xiaozaa/catvton-flux-alpha",
"discussionNum": null
},
"url": "https://huggingface.co./xiaozaa/catvton-flux-alpha",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Release a new virtual tryon flux fill finetuning model. Try it here.
https://huggingface.co./xiaozaa/catvton-flux-alpha
| {
"avatarUrl": "/avatars/4941f9461c77bb5c5c0b5ec9a6f9efed.svg",
"fullname": "az",
"name": "xiaozaa",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 7,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/75K6VNDnzDG9ihG18C2Ux.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/mXe4GpJHNwxax0maUHf9j.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/eBz7WssbJyWzGURAP_FyI.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/yHUR1ZFr25YYFOIhZ7xIm.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/O3Vj2Em-dB7ECbIYeQNvv.png"
}
] | [] | [
{
"reaction": "👍",
"users": [
"imrankhakwani",
"John6666"
],
"count": 2
}
] | 2024-11-24T02:01:24.000Z | 2024-11-24T02:01:24.673Z | [] | /posts/xiaozaa/378209596329028 | 211 | 0 |
834919494324436 | [
{
"type": "text",
"value": "Repurposed my older AI workstation to a homelab server, it has received 2xV100 + 1xP40",
"raw": "Repurposed my older AI workstation to a homelab server, it has received 2xV100 + 1xP40",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I can reach huge 210k token context size with MegaBeam-Mistral-7B-512k-GGUF ~70+tok/s, or run Llama-3.1-Nemotron-70B-Instruct-HF-GGUF with 50k Context ~10tok/s (V100 only 40k ctx and 15tok/s).",
"raw": "I can reach huge 210k token context size with MegaBeam-Mistral-7B-512k-GGUF ~70+tok/s, or run Llama-3.1-Nemotron-70B-Instruct-HF-GGUF with 50k Context ~10tok/s (V100 only 40k ctx and 15tok/s).",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Also able to Lora finetune with similar performace as an RTX3090.",
"raw": "Also able to Lora finetune with similar performace as an RTX3090.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It moved to the garage to no complaints for the noise from the family. Will move to a Rack soon :D",
"raw": "It moved to the garage to no complaints for the noise from the family. Will move to a Rack soon :D",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Repurposed my older AI workstation to a homelab server, it has received 2xV100 + 1xP40
I can reach huge 210k token context size with MegaBeam-Mistral-7B-512k-GGUF ~70+tok/s, or run Llama-3.1-Nemotron-70B-Instruct-HF-GGUF with 50k Context ~10tok/s (V100 only 40k ctx and 15tok/s).
Also able to Lora finetune with similar performace as an RTX3090.
It moved to the garage to no complaints for the noise from the family. Will move to a Rack soon :D
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg",
"fullname": "Csaba Kecskemeti",
"name": "csabakecskemeti",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 9,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64e6d37e02dee9bcb9d9fa18/9RauPJdJLt2gDQ081Udxw.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64e6d37e02dee9bcb9d9fa18/34pYYbQd46L04JUC4ZVJ6.jpeg"
}
] | [] | [
{
"reaction": "🚀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-24T00:04:02.000Z | 2024-11-24T04:31:05.379Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/6aIIy7eNLLjepZfp3Aym3.png",
"fullname": "George M",
"name": "ge-or-ge",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg",
"fullname": "Csaba Kecskemeti",
"name": "csabakecskemeti",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 9,
"isFollowing": false
}
] | /posts/csabakecskemeti/834919494324436 | 168 | 2 |
308791196500352 | [
{
"type": "text",
"value": "p104-100s are beasts. 8 gigs of VRAM, 12 tok/s on qwen 14b at q4, and 18 tok/s on 7b at q6. best thing - 20 euros each.",
"raw": "p104-100s are beasts. 8 gigs of VRAM, 12 tok/s on qwen 14b at q4, and 18 tok/s on 7b at q6. best thing - 20 euros each.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://furry.engineer/@cappuch/113500349547803802",
"href": "https://furry.engineer/@cappuch/113500349547803802",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | p104-100s are beasts. 8 gigs of VRAM, 12 tok/s on qwen 14b at q4, and 18 tok/s on 7b at q6. best thing - 20 euros each.
https://furry.engineer/@cappuch/113500349547803802 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64da645be42fba08b88d0315/dcBKWq3d3X9QKQbtf8t46.jpeg",
"fullname": "Mikus",
"name": "cappuch",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 9,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🤯",
"users": [
"YaTharThShaRma999",
"AtAndDev",
"John6666"
],
"count": 3
},
{
"reaction": "😎",
"users": [
"csabakecskemeti",
"AtAndDev"
],
"count": 2
}
] | 2024-11-23T21:16:14.000Z | 2024-11-23T21:16:14.425Z | [] | /posts/cappuch/308791196500352 | 453 | 0 |
216590365251377 | [
{
"type": "text",
"value": "SAM: I can segment anything!",
"raw": "SAM: I can segment anything!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "SAMURAI: Hold my sake while I track it through time and space without even training 🎯",
"raw": "SAMURAI: Hold my sake while I track it through time and space without even training 🎯",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Researchers really said what if we gave SAM object permanence and it worked 🤯",
"raw": "Researchers really said what if we gave SAM object permanence and it worked 🤯",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.aimodels.fyi/papers/arxiv/samurai-adapting-segment-anything-model-zero-shot",
"href": "https://www.aimodels.fyi/papers/arxiv/samurai-adapting-segment-anything-model-zero-shot",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | SAM: I can segment anything!
SAMURAI: Hold my sake while I track it through time and space without even training 🎯
Researchers really said what if we gave SAM object permanence and it worked 🤯
https://www.aimodels.fyi/papers/arxiv/samurai-adapting-segment-anything-model-zero-shot | {
"avatarUrl": "/avatars/0bc16a7447cd71ac18828a678313bd83.svg",
"fullname": "Mike Young",
"name": "mikelabs",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 10,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"YaTharThShaRma999",
"mikelabs"
],
"count": 2
},
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-23T17:37:17.000Z | 2024-11-24T00:13:47.361Z | [
{
"avatarUrl": "/avatars/b2b3fe650c1fcc689d74e66f84aa5e5f.svg",
"fullname": "fdgt fgh",
"name": "gfdhujykrdr",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/mikelabs/216590365251377 | 580 | 1 |
782676184452029 | [
{
"type": "text",
"value": "📢 If you were earlier interested in quick translator application for bunch of texts with spans of fixed parts that tolerant for translation, then this post might be relevant! Delighted to share a bulk_translate -- a framework for automatic texts translation with the pre-anotated fixed spans. ",
"raw": "📢 If you were earlier interested in quick translator application for bunch of texts with spans of fixed parts that tolerant for translation, then this post might be relevant! Delighted to share a bulk_translate -- a framework for automatic texts translation with the pre-anotated fixed spans. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📦 ",
"raw": "📦 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://pypi.org/project/bulk-translate/",
"href": "https://pypi.org/project/bulk-translate/",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🌟 ",
"raw": "🌟 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/nicolay-r/bulk-translate",
"href": "https://github.com/nicolay-r/bulk-translate",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔑 Spans allows you to control your objects in texts, so that objects would be tollerant to translator. By default it provides implementation for GoogleTranslate.",
"raw": "🔑 Spans allows you to control your objects in texts, so that objects would be tollerant to translator. By default it provides implementation for GoogleTranslate.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "bulk_translate features: ",
"raw": "bulk_translate features: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "✅ Native Implementation of two translation modes:",
"raw": "✅ Native Implementation of two translation modes:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " - fast-mode: exploits extra chars for grouping text parts into single batch",
"raw": " - fast-mode: exploits extra chars for grouping text parts into single batch",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " - accurate: pefroms individual translation of each text part.",
"raw": " - accurate: pefroms individual translation of each text part.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "✅ No strings: you're free to adopt any LM / LLM backend.",
"raw": "✅ No strings: you're free to adopt any LM / LLM backend.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Support googletrans by default.",
"raw": "Support googletrans by default.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The initial release of the project supports fixed spans as text parts wrapped in square brackets [] with non inner space characters.",
"raw": "The initial release of the project supports fixed spans as text parts wrapped in square brackets [] with non inner space characters.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can play with your data in CSV here on GoogleColab:",
"raw": "You can play with your data in CSV here on GoogleColab:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📒 ",
"raw": "📒 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://colab.research.google.com/github/nicolay-r/bulk-translate/blob/master/bulk_translate_demo.ipynb",
"href": "https://colab.research.google.com/github/nicolay-r/bulk-translate/blob/master/bulk_translate_demo.ipynb",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "👏 This project is based on AREkit 0.25.1 pipelines for deployment lm-based workflows: ",
"raw": "👏 This project is based on AREkit 0.25.1 pipelines for deployment lm-based workflows: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/nicolay-r/AREkit",
"href": "https://github.com/nicolay-r/AREkit",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 📢 If you were earlier interested in quick translator application for bunch of texts with spans of fixed parts that tolerant for translation, then this post might be relevant! Delighted to share a bulk_translate -- a framework for automatic texts translation with the pre-anotated fixed spans.
📦 https://pypi.org/project/bulk-translate/
🌟 https://github.com/nicolay-r/bulk-translate
🔑 Spans allow you to control objects in your texts so that those objects stay intact under translation. By default it provides an implementation for GoogleTranslate.
bulk_translate features:
✅ Native Implementation of two translation modes:
- fast-mode: exploits extra chars for grouping text parts into a single batch
- accurate: performs individual translation of each text part.
✅ No strings: you're free to adopt any LM / LLM backend.
Support googletrans by default.
The initial release of the project supports fixed spans as text parts wrapped in square brackets [] containing no inner space characters.
You can play with your data in CSV here on GoogleColab:
📒 https://colab.research.google.com/github/nicolay-r/bulk-translate/blob/master/bulk_translate_demo.ipynb
👏 This project is based on AREkit 0.25.1 pipelines for deployment lm-based workflows:
https://github.com/nicolay-r/AREkit | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg",
"fullname": "Nicolay Rusnachenko",
"name": "nicolay-r",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 49,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/LcYCqtHaZyyaQaQGGa3ri.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/ZDaa1LmbrZJJOvI8P1_nR.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/n_5T_gXwLWM4raIUkClu6.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/_4SbYk_XtxncAZnwdy044.png"
}
] | [] | [
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-23T12:33:02.000Z | 2024-11-23T12:48:04.858Z | [] | /posts/nicolay-r/782676184452029 | 430 | 0 |
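The bulk_translate post above is about translating text while leaving pre-annotated [spans] untouched. The snippet below is not taken from the bulk_translate codebase; it is a minimal sketch of the same idea using googletrans directly, and the helper name and regex are my own assumptions.

```python
import re
from googletrans import Translator  # pip install googletrans==4.0.0rc1 (sync API)

translator = Translator()

def translate_keeping_spans(text: str, dest: str = "en") -> str:
    """Translate `text` but leave [bracketed] spans exactly as they are.

    Illustrative sketch only: bulk_translate's real implementation differs
    (it batches parts and supports pluggable LM/LLM backends).
    """
    # Split into fixed spans like "[entity]" (no inner spaces) and the free text around them.
    parts = re.split(r"(\[[^\s\]]+\])", text)
    out = []
    for part in parts:
        if part.startswith("[") and part.endswith("]"):
            out.append(part)  # keep the span verbatim
        elif part.strip():
            out.append(translator.translate(part, dest=dest).text)
        else:
            out.append(part)  # preserve whitespace between parts
    return "".join(out)

print(translate_keeping_spans("[Acme_Corp] hat den Vertrag unterschrieben.", dest="en"))
```

bulk_translate itself layers batching ("fast-mode") and swappable translation backends on top of this basic pattern.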
642264011690230 | [
{
"type": "resource",
"value": null,
"raw": "https://hf.co/spaces/hexgrad/Kokoro-TTS",
"href": null,
"resource": {
"type": "space",
"id": "hexgrad/Kokoro-TTS",
"discussionNum": null
},
"url": "https://hf.co/spaces/hexgrad/Kokoro-TTS",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " just got an upgrade that substantially improves TTS naturalness for short bursts while maintaining parity for longer utterances! 🔥",
"raw": " just got an upgrade that substantially improves TTS naturalness for short bursts while maintaining parity for longer utterances! 🔥",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Read more and listen to before/after audio samples at ",
"raw": "Read more and listen to before/after audio samples at ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://hf.co/blog/hexgrad/kokoro-short-burst-upgrade",
"href": "https://hf.co/blog/hexgrad/kokoro-short-burst-upgrade",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "(Probably would have made that Article a Post instead, if audio could be embedded into Posts.)",
"raw": "(Probably would have made that Article a Post instead, if audio could be embedded into Posts.)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | https://hf.co/spaces/hexgrad/Kokoro-TTS just got an upgrade that substantially improves TTS naturalness for short bursts while maintaining parity for longer utterances! 🔥
Read more and listen to before/after audio samples at https://hf.co/blog/hexgrad/kokoro-short-burst-upgrade
(Probably would have made that Article a Post instead, if audio could be embedded into Posts.) | {
"avatarUrl": "/avatars/02074f60a2ef445a29343ed90a303cc6.svg",
"fullname": "Hexgrad",
"name": "hexgrad",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 20,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👍",
"users": [
"John6666",
"ai-everyday",
"victor",
"AtAndDev",
"Norod78"
],
"count": 5
},
{
"reaction": "🔥",
"users": [
"prithivMLmods",
"AtAndDev"
],
"count": 2
}
] | 2024-11-23T00:00:15.000Z | 2024-11-23T18:58:44.764Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/66c26b6fb01b19d8c3c2467b/HIcQYcU6rOilwbuRCRStm.jpeg",
"fullname": "DV",
"name": "Delta-Vector",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 19,
"isFollowing": false
}
] | /posts/hexgrad/642264011690230 | 1,115 | 1 |
622669072793033 | [
{
"type": "text",
"value": "🦋 Hug the butterfly! You can now add your Bluesky handle to your Hugging Face profile! ✨",
"raw": "🦋 Hug the butterfly! You can now add your Bluesky handle to your Hugging Face profile! ✨",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🦋 Hug the butterfly! You can now add your Bluesky handle to your Hugging Face profile! ✨ | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/jG8vFk8ANl3LzPPYgSnYn.jpeg"
}
] | [] | [
{
"reaction": "❤️",
"users": [
"jsulz",
"arthurspapa",
"OmbelineM",
"enzostvs",
"John6666",
"gordy12gg",
"funi58480",
"Clausss"
],
"count": 8
},
{
"reaction": "🤗",
"users": [
"John6666",
"monsoon-nlp"
],
"count": 2
}
] | 2024-11-22T23:14:41.000Z | 2024-11-22T23:14:41.790Z | [] | /posts/fdaudens/622669072793033 | 1,225 | 0 |
153064070369037 | [
{
"type": "text",
"value": "NVIDIA Labs developed SANA model weights and Gradio demo app published —Check out this amazing new Text to Image model by NVIDIA",
"raw": "NVIDIA Labs developed SANA model weights and Gradio demo app published —Check out this amazing new Text to Image model by NVIDIA",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Official repo : ",
"raw": "Official repo : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/NVlabs/Sana",
"href": "https://github.com/NVlabs/Sana",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "1-Click Windows, RunPod, Massed Compute installers and free Kaggle notebook : ",
"raw": "1-Click Windows, RunPod, Massed Compute installers and free Kaggle notebook : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.patreon.com/posts/116474081",
"href": "https://www.patreon.com/posts/116474081",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can follow instructions on the repository to install and use locally. I tested on my Windows RTX 3060 and 3090 GPUs.",
"raw": "You can follow instructions on the repository to install and use locally. I tested on my Windows RTX 3060 and 3090 GPUs.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I have tested some speeds and VRAM usage too",
"raw": "I have tested some speeds and VRAM usage too",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Uses 9.5 GB VRAM but someone reported works good on 8 GB GPUs too",
"raw": "Uses 9.5 GB VRAM but someone reported works good on 8 GB GPUs too",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Default settings per image speeds as below",
"raw": "Default settings per image speeds as below",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Free Kaggle Account Notebook on T4 GPU : 15 second",
"raw": "Free Kaggle Account Notebook on T4 GPU : 15 second",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "RTX 3060 (12 GB) : 9.5 second",
"raw": "RTX 3060 (12 GB) : 9.5 second",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "RTX 3090 : 4 second",
"raw": "RTX 3090 : 4 second",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "RTX 4090 : 2 second",
"raw": "RTX 4090 : 2 second",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "More info : ",
"raw": "More info : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://nvlabs.github.io/Sana/",
"href": "https://nvlabs.github.io/Sana/",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Works great on RunPod and Massed Compute as well (cloud)",
"raw": "Works great on RunPod and Massed Compute as well (cloud)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Sana : Efficient High-Resolution Image Synthesis",
"raw": "Sana : Efficient High-Resolution Image Synthesis",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "with Linear Diffusion Transformer",
"raw": "with Linear Diffusion Transformer",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "About Sana — Taken from official repo",
"raw": "About Sana — Taken from official repo",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "We introduce Sana, a text-to-image framework that can efficiently generate images up to 4096 × 4096 resolution. Sana can synthesize high-resolution, high-quality images with strong text-image alignment at a remarkably fast speed, deployable on laptop GPU. Core designs include: Deep compression autoencoder: unlike traditional AEs, which compress images only 8×, we trained an AE that can compress images 32×, effectively reducing the number of latent tokens. Linear DiT: we replace all vanilla attention in DiT with linear attention, which is more efficient at high resolutions without sacrificing quality. Decoder-only text encoder: we replaced T5 with modern decoder-only small LLM as the text encoder and designed complex human instruction with in-context learning to enhance the image-text alignment. Efficient training and sampling: we propose Flow-DPM-Solver to reduce sampling steps, with efficient caption labeling and selection to accelerate convergence.",
"raw": "We introduce Sana, a text-to-image framework that can efficiently generate images up to 4096 × 4096 resolution. Sana can synthesize high-resolution, high-quality images with strong text-image alignment at a remarkably fast speed, deployable on laptop GPU. Core designs include: Deep compression autoencoder: unlike traditional AEs, which compress images only 8×, we trained an AE that can compress images 32×, effectively reducing the number of latent tokens. Linear DiT: we replace all vanilla attention in DiT with linear attention, which is more efficient at high resolutions without sacrificing quality. Decoder-only text encoder: we replaced T5 with modern decoder-only small LLM as the text encoder and designed complex human instruction with in-context learning to enhance the image-text alignment. Efficient training and sampling: we propose Flow-DPM-Solver to reduce sampling steps, with efficient caption labeling and selection to accelerate convergence.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | NVIDIA Labs' SANA model weights and Gradio demo app have been published. Check out this amazing new text-to-image model by NVIDIA
Official repo : https://github.com/NVlabs/Sana
1-Click Windows, RunPod, Massed Compute installers and free Kaggle notebook : https://www.patreon.com/posts/116474081
You can follow instructions on the repository to install and use locally. I tested on my Windows RTX 3060 and 3090 GPUs.
I have tested some speeds and VRAM usage too
Uses 9.5 GB VRAM, but someone reported it works well on 8 GB GPUs too
Default settings per image speeds as below
Free Kaggle Account Notebook on T4 GPU : 15 second
RTX 3060 (12 GB) : 9.5 second
RTX 3090 : 4 second
RTX 4090 : 2 second
More info : https://nvlabs.github.io/Sana/
Works great on RunPod and Massed Compute as well (cloud)
Sana : Efficient High-Resolution Image Synthesis
with Linear Diffusion Transformer
About Sana — Taken from official repo
We introduce Sana, a text-to-image framework that can efficiently generate images up to 4096 × 4096 resolution. Sana can synthesize high-resolution, high-quality images with strong text-image alignment at a remarkably fast speed, deployable on laptop GPU. Core designs include: Deep compression autoencoder: unlike traditional AEs, which compress images only 8×, we trained an AE that can compress images 32×, effectively reducing the number of latent tokens. Linear DiT: we replace all vanilla attention in DiT with linear attention, which is more efficient at high resolutions without sacrificing quality. Decoder-only text encoder: we replaced T5 with modern decoder-only small LLM as the text encoder and designed complex human instruction with in-context learning to enhance the image-text alignment. Efficient training and sampling: we propose Flow-DPM-Solver to reduce sampling steps, with efficient caption labeling and selection to accelerate convergence.
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1672531901326-6345bd89fe134dfd7a0dba40.png",
"fullname": "Furkan Gözükara",
"name": "MonsterMMORPG",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 376,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/KqOdD2PyaWMen3kxHEst1.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/hG4uxvhJ8TEYXtsP26mCs.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Bbz9kqFRqGjZZyJeddsb7.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/ixI6n8ENWGV-9MSYx72Bs.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/p5tFos_O2wZiK2bHetzT_.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/VBvxObWKdBQfLen0rbsaG.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/c3RNric-IiRyE9gejNhBF.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/k7VQ4_0Sdcxc_OGzGIhFk.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/zRAGD2qVfCsakhTm4BsnA.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Kza50gOKwIoI5fhQTCySH.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/KZrbrog_x7cwk1OG54cPX.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Qj7m4Al2G8MsgR1Fony_O.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/h9ccVFDJMo8zIjOZ1lyXW.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/fC3N8ERv_Rgn3OzcHV35N.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/SIZb4w3xStNrFSRLxGpcj.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/S5EYUHhP5jlZfuO005gIe.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/lgpMVt9NzhzS5GuWzbGy9.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/4OODIr6fREfCoLk-8CFZE.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/gagsaM94j7JPRbctrHVeA.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Z2Cq0mUFzVjz-xwiN39MT.png"
}
] | [] | [
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
},
{
"reaction": "🚀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-22T22:47:00.000Z | 2024-11-22T22:47:00.331Z | [] | /posts/MonsterMMORPG/153064070369037 | 440 | 0 |
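The post above quotes per-image latencies (2-15 s depending on GPU) and roughly 9.5 GB of VRAM. If you want to reproduce that kind of benchmark for whatever text-to-image pipeline you end up running, a small timing and peak-memory harness is enough; the model id below is a placeholder, not a confirmed SANA diffusers checkpoint.

```python
import time
import torch
from diffusers import DiffusionPipeline

# Placeholder checkpoint: substitute the pipeline/checkpoint you actually use.
pipe = DiffusionPipeline.from_pretrained(
    "some-org/some-text-to-image-model", torch_dtype=torch.float16
).to("cuda")

prompt = "a cyberpunk cat operating a coffee stand, 4k, detailed"

# Warm-up run so CUDA kernel compilation does not skew the first measurement.
pipe(prompt, num_inference_steps=20)

torch.cuda.reset_peak_memory_stats()
start = time.perf_counter()
image = pipe(prompt, num_inference_steps=20).images[0]
torch.cuda.synchronize()
elapsed = time.perf_counter() - start

print(f"per-image latency: {elapsed:.2f} s")
print(f"peak VRAM: {torch.cuda.max_memory_allocated() / 1024**3:.1f} GB")
image.save("sample.png")
```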
558505558371853 | [
{
"type": "text",
"value": "All wisper or transcriptions projects spaces (including Zero GPUs plans) are very slow or have many quotas bugs or processing errors. I tested all... anything can transcribe a single 3 min short audio file!!! How it´s possible???",
"raw": "All wisper or transcriptions projects spaces (including Zero GPUs plans) are very slow or have many quotas bugs or processing errors. I tested all... anything can transcribe a single 3 min short audio file!!! How it´s possible???",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | All whisper or transcription project Spaces (including ones on Zero GPU plans) are very slow or have many quota bugs or processing errors. I tested them all... not one can transcribe a single 3-minute audio file!!! How is it possible??? | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/663e7c386304d377fca8552c/__GKPNKAhSi6ZcEk8XoBD.jpeg",
"fullname": "Edney Silva",
"name": "ednsinf",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👀",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-22T22:41:20.000Z | 2024-11-23T16:42:20.668Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png",
"fullname": "John Smith",
"name": "John6666",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 398,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/663e7c386304d377fca8552c/__GKPNKAhSi6ZcEk8XoBD.jpeg",
"fullname": "Edney Silva",
"name": "ednsinf",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "/avatars/c82779fdf94f80cdb5020504f83c818b.svg",
"fullname": "Yatharth Sharma",
"name": "YaTharThShaRma999",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 14,
"isFollowing": false
}
] | /posts/ednsinf/558505558371853 | 276 | 3 |
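One way around slow or quota-limited transcription Spaces is to run Whisper locally (or in a free Colab/Kaggle notebook) with the transformers pipeline; a 3-minute clip is well within reach even on CPU. Standard usage sketch, checkpoint choice is up to you:

```python
from transformers import pipeline

# Any Whisper checkpoint works; smaller ones run fine on CPU for a 3-minute clip.
asr = pipeline(
    "automatic-speech-recognition",
    model="openai/whisper-small",
    chunk_length_s=30,  # long-form audio is split into 30 s chunks
)

result = asr("my_recording.mp3", return_timestamps=True)
print(result["text"])
for chunk in result["chunks"]:
    print(chunk["timestamp"], chunk["text"])
```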
673896178494185 | [
{
"type": "text",
"value": "I'm currently on a push to expand the scope of image based datasets on the Hub. There's certainly a lot already, but for anyone who's looked closely, there's not a whole lot of standardization. I am to fix that, datasets under the ",
"raw": "I'm currently on a push to expand the scope of image based datasets on the Hub. There's certainly a lot already, but for anyone who's looked closely, there's not a whole lot of standardization. I am to fix that, datasets under the ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./timm",
"href": "https://huggingface.co./timm",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " and ",
"raw": " and ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./pixparse",
"href": "https://huggingface.co./pixparse",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " orgs will serve as canonical examples for various task / modality combinations and be useable without fuss in libraries like ",
"raw": " orgs will serve as canonical examples for various task / modality combinations and be useable without fuss in libraries like ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`timm`",
"href": null,
"resource": null,
"url": null,
"code": "timm",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ", ",
"raw": ", ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`OpenCLIP`",
"href": null,
"resource": null,
"url": null,
"code": "OpenCLIP",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ", and hopefully more.",
"raw": ", and hopefully more.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I just uploaded the first multi-label dataset that I'll support with ",
"raw": "I just uploaded the first multi-label dataset that I'll support with ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`timm`",
"href": null,
"resource": null,
"url": null,
"code": "timm",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " scripts soon: ",
"raw": " scripts soon: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./datasets/timm/plant-pathology-2021",
"href": null,
"resource": {
"type": "dataset",
"id": "timm/plant-pathology-2021",
"discussionNum": null
},
"url": "https://huggingface.co./datasets/timm/plant-pathology-2021",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Next up object detection & segmentation! I've got an annotation spec sorted out, a lot of datasets ready to rip, and yeah that means ",
"raw": "Next up object detection & segmentation! I've got an annotation spec sorted out, a lot of datasets ready to rip, and yeah that means ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "inline_code",
"value": null,
"raw": "`timm`",
"href": null,
"resource": null,
"url": null,
"code": "timm",
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " support for object detection, eventually segmentation, is finally under development :O",
"raw": " support for object detection, eventually segmentation, is finally under development :O",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | I'm currently on a push to expand the scope of image based datasets on the Hub. There's certainly a lot already, but for anyone who's looked closely, there's not a whole lot of standardization. I aim to fix that: datasets under the https://huggingface.co./timm and https://huggingface.co./pixparse orgs will serve as canonical examples for various task / modality combinations and be useable without fuss in libraries like `timm`, `OpenCLIP`, and hopefully more.
I just uploaded the first multi-label dataset that I'll support with `timm` scripts soon: https://huggingface.co./datasets/timm/plant-pathology-2021
Next up object detection & segmentation! I've got an annotation spec sorted out, a lot of datasets ready to rip, and yeah that means `timm` support for object detection, eventually segmentation, is finally under development :O
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1667002643224-604a5184dca2c7ac7508b849.jpeg",
"fullname": "Ross Wightman",
"name": "rwightman",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 221,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👍",
"users": [
"John6666",
"davanstrien",
"lhoestq",
"fgdrfgrgrdgdr"
],
"count": 4
}
] | 2024-11-22T21:19:23.000Z | 2024-11-22T21:19:23.859Z | [] | /posts/rwightman/673896178494185 | 720 | 0 |
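For the multi-label dataset mentioned above, the quickest sanity check is loading it with the datasets library and inspecting its features; the column names are whatever the dataset card defines, so the sketch below prints them rather than assuming a schema.

```python
from datasets import load_dataset

# Multi-label plant pathology dataset from the timm org.
ds = load_dataset("timm/plant-pathology-2021", split="train")

print(ds)           # number of rows
print(ds.features)  # image column plus the multi-label annotation columns

example = ds[0]
for name, value in example.items():
    print(name, type(value))
```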
421660573639613 | [
{
"type": "text",
"value": "Trace LLM calls with Arize AI's Phoenix observability dashboards on Hugging Face Spaces! 🚀",
"raw": "Trace LLM calls with Arize AI's Phoenix observability dashboards on Hugging Face Spaces! 🚀",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "✨ I just added a new recipe to the Open-Source AI Cookbook that shows you how to:",
"raw": "✨ I just added a new recipe to the Open-Source AI Cookbook that shows you how to:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "1️⃣ Deploy Phoenix on HF Spaces with persistent storage in a few clicks",
"raw": "1️⃣ Deploy Phoenix on HF Spaces with persistent storage in a few clicks",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "2️⃣ Configure LLM tracing with the 𝗦𝗲𝗿𝘃𝗲𝗿𝗹𝗲𝘀𝘀 𝗜𝗻𝗳𝗲𝗿𝗲𝗻𝗰𝗲 𝗔𝗣𝗜",
"raw": "2️⃣ Configure LLM tracing with the 𝗦𝗲𝗿𝘃𝗲𝗿𝗹𝗲𝘀𝘀 𝗜𝗻𝗳𝗲𝗿𝗲𝗻𝗰𝗲 𝗔𝗣𝗜",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "3️⃣ Observe multi-agent application runs with the CrewAI integration",
"raw": "3️⃣ Observe multi-agent application runs with the CrewAI integration",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "𝗢𝗯𝘀𝗲𝗿𝘃𝗮𝗯𝗶𝗹𝗶𝘁𝘆 𝗶𝘀 𝗰𝗿𝘂𝗰𝗶𝗮𝗹 for building robust LLM apps.",
"raw": "𝗢𝗯𝘀𝗲𝗿𝘃𝗮𝗯𝗶𝗹𝗶𝘁𝘆 𝗶𝘀 𝗰𝗿𝘂𝗰𝗶𝗮𝗹 for building robust LLM apps.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Phoenix makes it easy to visualize trace data, evaluate performance, and track down issues. Give it a try!",
"raw": "Phoenix makes it easy to visualize trace data, evaluate performance, and track down issues. Give it a try!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔗 Cookbook recipe: ",
"raw": "🔗 Cookbook recipe: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./learn/cookbook/en/phoenix_observability_on_hf_spaces",
"href": "https://huggingface.co./learn/cookbook/en/phoenix_observability_on_hf_spaces",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🔗 Phoenix docs: ",
"raw": "🔗 Phoenix docs: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://docs.arize.com/phoenix",
"href": "https://docs.arize.com/phoenix",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Trace LLM calls with Arize AI's Phoenix observability dashboards on Hugging Face Spaces! 🚀
✨ I just added a new recipe to the Open-Source AI Cookbook that shows you how to:
1️⃣ Deploy Phoenix on HF Spaces with persistent storage in a few clicks
2️⃣ Configure LLM tracing with the 𝗦𝗲𝗿𝘃𝗲𝗿𝗹𝗲𝘀𝘀 𝗜𝗻𝗳𝗲𝗿𝗲𝗻𝗰𝗲 𝗔𝗣𝗜
3️⃣ Observe multi-agent application runs with the CrewAI integration
𝗢𝗯𝘀𝗲𝗿𝘃𝗮𝗯𝗶𝗹𝗶𝘁𝘆 𝗶𝘀 𝗰𝗿𝘂𝗰𝗶𝗮𝗹 for building robust LLM apps.
Phoenix makes it easy to visualize trace data, evaluate performance, and track down issues. Give it a try!
🔗 Cookbook recipe: https://huggingface.co./learn/cookbook/en/phoenix_observability_on_hf_spaces
🔗 Phoenix docs: https://docs.arize.com/phoenix | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/61d375fd733d3a83ecd1bba9/oIXwvvs1-HaCnJXMCZgkc.jpeg",
"fullname": "Andrew Reed",
"name": "andrewrreed",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 106,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👀",
"users": [
"John6666",
"funi58480"
],
"count": 2
}
] | 2024-11-22T20:52:04.000Z | 2024-11-22T20:52:04.474Z | [] | /posts/andrewrreed/421660573639613 | 631 | 0 |
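Client-side, the recipe above boils down to pointing an OpenTelemetry exporter at your Phoenix Space and instrumenting your framework. The sketch below follows the pattern in the Phoenix docs, but the exact env var names, the register() signature, and the CrewAI instrumentor package are assumptions that can differ between Phoenix versions, so verify against the linked recipe.

```python
import os
from phoenix.otel import register  # arize-phoenix-otel; API may differ by version

# Point traces at your Phoenix Space (placeholder URL and key, adjust to your setup).
os.environ["PHOENIX_COLLECTOR_ENDPOINT"] = "https://your-username-phoenix.hf.space"
os.environ["PHOENIX_CLIENT_HEADERS"] = "api_key=YOUR_SPACE_TOKEN"  # only if your Space needs auth

tracer_provider = register(project_name="crewai-demo")

# Instrument CrewAI so agent / LLM calls show up as spans
# (assumed package: openinference-instrumentation-crewai).
from openinference.instrumentation.crewai import CrewAIInstrumentor
CrewAIInstrumentor().instrument(tracer_provider=tracer_provider)

# ...now run your CrewAI crew as usual; traces appear in the Phoenix dashboard.
```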
161512292032853 | [
{
"type": "text",
"value": "If I am correct and the LLM model changes the 'shape' of the data as it learns, then I should be able to track and utilize those shape changes as a backpropagation training mechanism, right? Well guess what, I can do that! Entropy, Sparsity, and Density, this is how I can measure the shape of the data the LLM model is creating. Nodes, Clusters, and Edges, these are the mechanisms within the neural network the LLM model updates as it learns these concepts. I measure the effects of these updates, via Entropy, Sparsity, and Density. Check out more in this video: ",
"raw": "If I am correct and the LLM model changes the 'shape' of the data as it learns, then I should be able to track and utilize those shape changes as a backpropagation training mechanism, right? Well guess what, I can do that! Entropy, Sparsity, and Density, this is how I can measure the shape of the data the LLM model is creating. Nodes, Clusters, and Edges, these are the mechanisms within the neural network the LLM model updates as it learns these concepts. I measure the effects of these updates, via Entropy, Sparsity, and Density. Check out more in this video: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://youtu.be/jADTt5HHtiw",
"href": "https://youtu.be/jADTt5HHtiw",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | If I am correct and the LLM model changes the 'shape' of the data as it learns, then I should be able to track and utilize those shape changes as a backpropagation training mechanism, right? Well guess what, I can do that! Entropy, Sparsity, and Density, this is how I can measure the shape of the data the LLM model is creating. Nodes, Clusters, and Edges, these are the mechanisms within the neural network the LLM model updates as it learns these concepts. I measure the effects of these updates, via Entropy, Sparsity, and Density. Check out more in this video: https://youtu.be/jADTt5HHtiw | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png",
"fullname": "Richard A Aragon",
"name": "TuringsSolutions",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 146,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👀",
"users": [
"lunarflu"
],
"count": 1
}
] | 2024-11-17T22:57:57.000Z | 2024-11-18T05:35:31.228Z | [
{
"avatarUrl": "/avatars/709d76cb6f02ae98e13bf8ced95f624d.svg",
"fullname": "Rebelo",
"name": "JonasDiasRebelo",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/TuringsSolutions/161512292032853 | 724 | 2 |
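The post above proposes entropy, sparsity, and density as measurements of the "shape" of what a network has learned. Whatever the merits of using them as a training signal, the three quantities themselves are easy to compute; the histogram-based entropy and the zero threshold below are my own choices, not the author's exact formulas.

```python
import torch

def shape_stats(t: torch.Tensor, eps: float = 1e-6, bins: int = 64):
    """Entropy / sparsity / density of a weight or activation tensor."""
    flat = t.detach().float().flatten()

    # Entropy of the value distribution, estimated from a histogram.
    hist = torch.histc(flat, bins=bins)
    p = hist / hist.sum()
    p = p[p > 0]
    entropy = -(p * p.log()).sum().item()

    # Sparsity: fraction of (near-)zero entries; density is the complement.
    sparsity = (flat.abs() < eps).float().mean().item()
    density = 1.0 - sparsity
    return {"entropy": entropy, "sparsity": sparsity, "density": density}

layer = torch.nn.Linear(128, 64)
print(shape_stats(layer.weight))
```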
716422382816033 | [
{
"type": "text",
"value": "What a brilliant week for Open Source AI!",
"raw": "What a brilliant week for Open Source AI!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Qwen 2.5 Coder by Alibaba - 0.5B / 1.5B / 3B / 7B / 14B/ 32B (Base + Instruct) Code generation LLMs, with 32B tackling giants like Gemnini 1.5 Pro, Claude Sonnet",
"raw": "Qwen 2.5 Coder by Alibaba - 0.5B / 1.5B / 3B / 7B / 14B/ 32B (Base + Instruct) Code generation LLMs, with 32B tackling giants like Gemnini 1.5 Pro, Claude Sonnet",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./collections/Qwen/qwen25-coder-66eaa22e6f99801bf65b0c2f",
"href": null,
"resource": {
"type": "collection",
"id": "Qwen/qwen25-coder-66eaa22e6f99801bf65b0c2f",
"discussionNum": null
},
"url": "https://huggingface.co./collections/Qwen/qwen25-coder-66eaa22e6f99801bf65b0c2f",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "LLM2CLIP from Microsoft - Leverage LLMs to train ultra-powerful CLIP models! Boosts performance over the previous SOTA by ~17%",
"raw": "LLM2CLIP from Microsoft - Leverage LLMs to train ultra-powerful CLIP models! Boosts performance over the previous SOTA by ~17%",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./collections/microsoft/llm2clip-672323a266173cfa40b32d4c",
"href": null,
"resource": {
"type": "collection",
"id": "microsoft/llm2clip-672323a266173cfa40b32d4c",
"discussionNum": null
},
"url": "https://huggingface.co./collections/microsoft/llm2clip-672323a266173cfa40b32d4c",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Athene v2 Chat & Agent by NexusFlow - SoTA general LLM fine-tuned from Qwen 2.5 72B excels at Chat + Function Calling/ JSON/ Agents",
"raw": "Athene v2 Chat & Agent by NexusFlow - SoTA general LLM fine-tuned from Qwen 2.5 72B excels at Chat + Function Calling/ JSON/ Agents",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./collections/Nexusflow/athene-v2-6735b85e505981a794fb02cc",
"href": null,
"resource": {
"type": "collection",
"id": "Nexusflow/athene-v2-6735b85e505981a794fb02cc",
"discussionNum": null
},
"url": "https://huggingface.co./collections/Nexusflow/athene-v2-6735b85e505981a794fb02cc",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Orca Agent Instruct by Microsoft - 1 million instruct pairs covering text editing, creative writing, coding, reading comprehension, etc - permissively licensed",
"raw": "Orca Agent Instruct by Microsoft - 1 million instruct pairs covering text editing, creative writing, coding, reading comprehension, etc - permissively licensed",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./datasets/microsoft/orca-agentinstruct-1M-v1",
"href": null,
"resource": {
"type": "dataset",
"id": "microsoft/orca-agentinstruct-1M-v1",
"discussionNum": null
},
"url": "https://huggingface.co./datasets/microsoft/orca-agentinstruct-1M-v1",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Ultravox by FixieAI - 70B/ 8B model approaching GPT4o level, pick any LLM, train an adapter with Whisper as Audio Encoder",
"raw": "Ultravox by FixieAI - 70B/ 8B model approaching GPT4o level, pick any LLM, train an adapter with Whisper as Audio Encoder",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./collections/reach-vb/ultravox-audio-language-model-release-67373b602af0a52b2a88ae71",
"href": null,
"resource": {
"type": "collection",
"id": "reach-vb/ultravox-audio-language-model-release-67373b602af0a52b2a88ae71",
"discussionNum": null
},
"url": "https://huggingface.co./collections/reach-vb/ultravox-audio-language-model-release-67373b602af0a52b2a88ae71",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "JanusFlow 1.3 by DeepSeek - Next iteration of their Unified MultiModal LLM Janus with RectifiedFlow",
"raw": "JanusFlow 1.3 by DeepSeek - Next iteration of their Unified MultiModal LLM Janus with RectifiedFlow",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./deepseek-ai/JanusFlow-1.3B",
"href": null,
"resource": {
"type": "model",
"id": "deepseek-ai/JanusFlow-1.3B",
"discussionNum": null
},
"url": "https://huggingface.co./deepseek-ai/JanusFlow-1.3B",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Common Corpus by Pleais - 2,003,039,184,047 multilingual, commercially permissive and high quality tokens! ",
"raw": "Common Corpus by Pleais - 2,003,039,184,047 multilingual, commercially permissive and high quality tokens! ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./datasets/PleIAs/common_corpus",
"href": null,
"resource": {
"type": "dataset",
"id": "PleIAs/common_corpus",
"discussionNum": null
},
"url": "https://huggingface.co./datasets/PleIAs/common_corpus",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I'm sure I missed a lot, can't wait for the next week!",
"raw": "I'm sure I missed a lot, can't wait for the next week!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Put down in comments what I missed! 🤗",
"raw": "Put down in comments what I missed! 🤗",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | What a brilliant week for Open Source AI!
Qwen 2.5 Coder by Alibaba - 0.5B / 1.5B / 3B / 7B / 14B / 32B (Base + Instruct) Code generation LLMs, with 32B tackling giants like Gemini 1.5 Pro, Claude Sonnet
https://huggingface.co./collections/Qwen/qwen25-coder-66eaa22e6f99801bf65b0c2f
LLM2CLIP from Microsoft - Leverage LLMs to train ultra-powerful CLIP models! Boosts performance over the previous SOTA by ~17%
https://huggingface.co./collections/microsoft/llm2clip-672323a266173cfa40b32d4c
Athene v2 Chat & Agent by NexusFlow - SoTA general LLM fine-tuned from Qwen 2.5 72B excels at Chat + Function Calling/ JSON/ Agents
https://huggingface.co./collections/Nexusflow/athene-v2-6735b85e505981a794fb02cc
Orca Agent Instruct by Microsoft - 1 million instruct pairs covering text editing, creative writing, coding, reading comprehension, etc - permissively licensed
https://huggingface.co./datasets/microsoft/orca-agentinstruct-1M-v1
Ultravox by FixieAI - 70B/ 8B model approaching GPT4o level, pick any LLM, train an adapter with Whisper as Audio Encoder
https://huggingface.co./collections/reach-vb/ultravox-audio-language-model-release-67373b602af0a52b2a88ae71
JanusFlow 1.3 by DeepSeek - Next iteration of their Unified MultiModal LLM Janus with RectifiedFlow
https://huggingface.co./deepseek-ai/JanusFlow-1.3B
Common Corpus by Pleais - 2,003,039,184,047 multilingual, commercially permissive and high quality tokens!
https://huggingface.co./datasets/PleIAs/common_corpus
I'm sure I missed a lot, can't wait for the next week!
Put down in comments what I missed! 🤗 | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1655385361868-61b85ce86eb1f2c5e6233736.jpeg",
"fullname": "Vaibhav Srivastav",
"name": "reach-vb",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 460,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"YaTharThShaRma999",
"AdinaY",
"cosmosgenius",
"s3nh",
"davidberenstein1957",
"John6666",
"Ameeeee",
"lunarflu",
"jsulz",
"MingxingLi",
"m-ric",
"fullstuckdev",
"mdpi-ai",
"erinys",
"IBOYAI"
],
"count": 15
},
{
"reaction": "👍",
"users": [
"victor",
"Vazdru",
"davidberenstein1957",
"nicolollo",
"lunarflu",
"fullstuckdev",
"Maitt"
],
"count": 7
},
{
"reaction": "🤗",
"users": [
"prithivMLmods",
"lunarflu",
"jsulz",
"fullstuckdev"
],
"count": 4
},
{
"reaction": "🚀",
"users": [
"John6666",
"lunarflu",
"fullstuckdev",
"erinys"
],
"count": 4
}
] | 2024-11-17T21:01:34.000Z | 2024-11-17T21:01:34.035Z | [] | /posts/reach-vb/716422382816033 | 3,996 | 0 |
449943263175424 | [
{
"type": "text",
"value": "next version of ",
"raw": "next version of ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./datasets/sequelbox/Celestia",
"href": null,
"resource": {
"type": "dataset",
"id": "sequelbox/Celestia",
"discussionNum": null
},
"url": "https://huggingface.co./datasets/sequelbox/Celestia",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " will be ",
"raw": " will be ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./datasets/microsoft/orca-agentinstruct-1M-v1",
"href": null,
"resource": {
"type": "dataset",
"id": "microsoft/orca-agentinstruct-1M-v1",
"discussionNum": null
},
"url": "https://huggingface.co./datasets/microsoft/orca-agentinstruct-1M-v1",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " style. coming soon",
"raw": " style. coming soon",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | next version of https://huggingface.co./datasets/sequelbox/Celestia will be https://huggingface.co./datasets/microsoft/orca-agentinstruct-1M-v1 style. coming soon | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png",
"fullname": "t.d.a.g.",
"name": "sequelbox",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 51,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👍",
"users": [
"John6666",
"lunarflu",
"thomwolf"
],
"count": 3
},
{
"reaction": "🚀",
"users": [
"zoeywin"
],
"count": 1
}
] | 2024-11-17T18:26:14.000Z | 2024-11-19T13:16:35.127Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1583857746553-5df7e9e5da6d0311fd3d53f9.jpeg",
"fullname": "Thomas Wolf",
"name": "thomwolf",
"type": "user",
"isPro": true,
"isHf": true,
"isMod": false,
"followerCount": 704,
"isFollowing": false
}
] | /posts/sequelbox/449943263175424 | 1,133 | 1 |
194933978747638 | [
{
"type": "text",
"value": "Minimalistic Adapters 🎃",
"raw": "Minimalistic Adapters 🎃",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚀Demo Here:",
"raw": "🚀Demo Here:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./spaces/prithivMLmods/FLUX-LoRA-DLC",
"href": null,
"resource": {
"type": "space",
"id": "prithivMLmods/FLUX-LoRA-DLC",
"discussionNum": null
},
"url": "https://huggingface.co./spaces/prithivMLmods/FLUX-LoRA-DLC",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚀Model:",
"raw": "🚀Model:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "{ Quote Tuner } : ",
"raw": "{ Quote Tuner } : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Quote-LoRA",
"href": null,
"resource": {
"type": "model",
"id": "prithivMLmods/Flux.1-Dev-Quote-LoRA",
"discussionNum": null
},
"url": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Quote-LoRA",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "{ Stamp Art } : ",
"raw": "{ Stamp Art } : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Stamp-Art-LoRA",
"href": null,
"resource": {
"type": "model",
"id": "prithivMLmods/Flux.1-Dev-Stamp-Art-LoRA",
"discussionNum": null
},
"url": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Stamp-Art-LoRA",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "{ Hand Sticky } : ",
"raw": "{ Hand Sticky } : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Hand-Sticky-LoRA",
"href": null,
"resource": {
"type": "model",
"id": "prithivMLmods/Flux.1-Dev-Hand-Sticky-LoRA",
"discussionNum": null
},
"url": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Hand-Sticky-LoRA",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "{ Poster HQ } : ",
"raw": "{ Poster HQ } : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Poster-HQ-LoRA",
"href": null,
"resource": {
"type": "model",
"id": "prithivMLmods/Flux.1-Dev-Poster-HQ-LoRA",
"discussionNum": null
},
"url": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Poster-HQ-LoRA",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "{ Ctoon Min } : ",
"raw": "{ Ctoon Min } : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Ctoon-LoRA",
"href": null,
"resource": {
"type": "model",
"id": "prithivMLmods/Flux.1-Dev-Ctoon-LoRA",
"discussionNum": null
},
"url": "https://huggingface.co./prithivMLmods/Flux.1-Dev-Ctoon-LoRA",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚀Collection:",
"raw": "🚀Collection:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "{ Flux LoRA Collection} : ",
"raw": "{ Flux LoRA Collection} : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be",
"href": null,
"resource": {
"type": "collection",
"id": "prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be",
"discussionNum": null
},
"url": "https://huggingface.co./collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "{ LoRA Space Collection } : ",
"raw": "{ LoRA Space Collection } : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./collections/prithivMLmods/lora-space-collections-6714b72e0d49e1c97fbd6a32",
"href": null,
"resource": {
"type": "collection",
"id": "prithivMLmods/lora-space-collections-6714b72e0d49e1c97fbd6a32",
"discussionNum": null
},
"url": "https://huggingface.co./collections/prithivMLmods/lora-space-collections-6714b72e0d49e1c97fbd6a32",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚀For More Visit",
"raw": "🚀For More Visit",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./strangerzonehf",
"href": "https://huggingface.co./strangerzonehf",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ".",
"raw": ".",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ".",
"raw": ".",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ".",
"raw": ".",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🤗@prithivMLmods ",
"raw": "🤗@prithivMLmods ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Minimalistic Adapters 🎃
🚀Demo Here:
https://huggingface.co./spaces/prithivMLmods/FLUX-LoRA-DLC
🚀Model:
{ Quote Tuner } : https://huggingface.co./prithivMLmods/Flux.1-Dev-Quote-LoRA
{ Stamp Art } : https://huggingface.co./prithivMLmods/Flux.1-Dev-Stamp-Art-LoRA
{ Hand Sticky } : https://huggingface.co./prithivMLmods/Flux.1-Dev-Hand-Sticky-LoRA
{ Poster HQ } : https://huggingface.co./prithivMLmods/Flux.1-Dev-Poster-HQ-LoRA
{ Ctoon Min } : https://huggingface.co./prithivMLmods/Flux.1-Dev-Ctoon-LoRA
🚀Collection:
{ Flux LoRA Collection } : https://huggingface.co./collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be
{ LoRA Space Collection } : https://huggingface.co./collections/prithivMLmods/lora-space-collections-6714b72e0d49e1c97fbd6a32
🚀For More Visit
https://huggingface.co./strangerzonehf
.
.
.
🤗@prithivMLmods
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/UVtVbF_3rdt0DC8xTkpL1.jpeg",
"fullname": "Prithiv Sakthi",
"name": "prithivMLmods",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 393,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/12Hjd_RsUd59yyHJOTDQj.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/LTpJ-onsbsFsVK6iJC_ys.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/-UgwQiG_3Y5B8D-k85cK_.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/Op2WNMPcugdMNWbztS5CN.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/3302JOoBc5WDYP_nKpDaN.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/EYAsUaQql55ZXljMfIbLX.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/gm8LkgtcQDvw7wgnO5tfq.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/pwt53LD9f-qW1jE0HWCIB.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/F8LJ03rEWMp5mthrS6CTM.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/C-SxwKF0vHIA-NIB5ZOKf.png"
},
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/KQwN20D2aGlqRTxmcW6cI.mp4"
}
] | [] | [
{
"reaction": "🤗",
"users": [
"Ngrthm",
"RenderIo",
"darksfx",
"ai4life44",
"hypergod",
"rdrede",
"victor",
"John6666",
"merve",
"clem",
"OmbelineM"
],
"count": 11
},
{
"reaction": "❤️",
"users": [
"RenderIo",
"Csplk",
"hypergod",
"diabolic6045",
"clem",
"Ngrthm",
"ayush7",
"kimleang123"
],
"count": 8
},
{
"reaction": "🔥",
"users": [
"hypergod",
"ai4life44",
"prefetching",
"clem"
],
"count": 4
},
{
"reaction": "🚀",
"users": [
"darksfx",
"clem",
"Ngrthm"
],
"count": 3
}
] | 2024-11-17T17:14:32.000Z | 2024-11-17T22:45:55.762Z | [
{
"avatarUrl": "/avatars/b2725bb163fa15d6c5856121780d52eb.svg",
"fullname": "Ci Splunk",
"name": "Csplk",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 43,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/UVtVbF_3rdt0DC8xTkpL1.jpeg",
"fullname": "Prithiv Sakthi",
"name": "prithivMLmods",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 393,
"isFollowing": false
}
] | /posts/prithivMLmods/194933978747638 | 3,866 | 3 |
982072243005650 | [
{
"type": "text",
"value": "Kohya brought massive improvements to FLUX LoRA (as low as 4 GB GPUs) and DreamBooth / Fine-Tuning (as low as 6 GB GPUs) training - check attached images in full size to see full details",
"raw": "Kohya brought massive improvements to FLUX LoRA (as low as 4 GB GPUs) and DreamBooth / Fine-Tuning (as low as 6 GB GPUs) training - check attached images in full size to see full details",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can download all configs and full instructions",
"raw": "You can download all configs and full instructions",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "> ",
"raw": "> ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.patreon.com/posts/112099700",
"href": "https://www.patreon.com/posts/112099700",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " - Fine Tuning post",
"raw": " - Fine Tuning post",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "> ",
"raw": "> ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.patreon.com/posts/110879657",
"href": "https://www.patreon.com/posts/110879657",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " - LoRA post",
"raw": " - LoRA post",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Kohya brought massive improvements to FLUX LoRA and DreamBooth / Fine-Tuning (min 6GB GPU) training.",
"raw": "Kohya brought massive improvements to FLUX LoRA and DreamBooth / Fine-Tuning (min 6GB GPU) training.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Now as low as 4GB GPUs can train FLUX LoRA with decent quality and 24GB and below GPUs got a huge speed boost when doing Full DreamBooth / Fine-Tuning training",
"raw": "Now as low as 4GB GPUs can train FLUX LoRA with decent quality and 24GB and below GPUs got a huge speed boost when doing Full DreamBooth / Fine-Tuning training",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You need minimum 4GB GPU to do a FLUX LoRA training and minimum 6 GB GPU to do FLUX DreamBooth / Full Fine-Tuning training. It is just mind blowing.",
"raw": "You need minimum 4GB GPU to do a FLUX LoRA training and minimum 6 GB GPU to do FLUX DreamBooth / Full Fine-Tuning training. It is just mind blowing.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can download all configs and full instructions > ",
"raw": "You can download all configs and full instructions > ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://www.patreon.com/posts/112099700",
"href": "https://www.patreon.com/posts/112099700",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The above post also has 1-click installers and downloaders for Windows, RunPod and Massed Compute",
"raw": "The above post also has 1-click installers and downloaders for Windows, RunPod and Massed Compute",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The model downloader scripts also updated and downloading 30+GB models takes total 1 minute on Massed Compute",
"raw": "The model downloader scripts also updated and downloading 30+GB models takes total 1 minute on Massed Compute",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "You can read the recent updates here : ",
"raw": "You can read the recent updates here : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/kohya-ss/sd-scripts/tree/sd3?tab=readme-ov-file#recent-updates",
"href": "https://github.com/kohya-ss/sd-scripts/tree/sd3?tab=readme-ov-file#recent-updates",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "This is the Kohya GUI branch : ",
"raw": "This is the Kohya GUI branch : ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/bmaltais/kohya_ss/tree/sd3-flux.1",
"href": "https://github.com/bmaltais/kohya_ss/tree/sd3-flux.1",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Key thing to reduce VRAM usage is using block swap",
"raw": "Key thing to reduce VRAM usage is using block swap",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Kohya implemented the logic of OneTrainer to improve block swapping speed significantly and now it is supported for LoRAs as well",
"raw": "Kohya implemented the logic of OneTrainer to improve block swapping speed significantly and now it is supported for LoRAs as well",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Now you can do FP16 training with LoRAs on 24 GB and below GPUs",
"raw": "Now you can do FP16 training with LoRAs on 24 GB and below GPUs",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Now you can train a FLUX LoRA on a 4 GB GPU - key is FP8, block swap and using certain layers training (remember single layer LoRA training)",
"raw": "Now you can train a FLUX LoRA on a 4 GB GPU - key is FP8, block swap and using certain layers training (remember single layer LoRA training)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It took me more than 1 day to test all newer configs, their VRAM demands, their relative step speeds and prepare the configs :)",
"raw": "It took me more than 1 day to test all newer configs, their VRAM demands, their relative step speeds and prepare the configs :)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Kohya brought massive improvements to FLUX LoRA (as low as 4 GB GPUs) and DreamBooth / Fine-Tuning (as low as 6 GB GPUs) training - check attached images in full size to see full details
You can download all configs and full instructions
> https://www.patreon.com/posts/112099700 - Fine Tuning post
> https://www.patreon.com/posts/110879657 - LoRA post
Kohya brought massive improvements to FLUX LoRA and DreamBooth / Fine-Tuning (min 6GB GPU) training.
Now as low as 4GB GPUs can train FLUX LoRA with decent quality and 24GB and below GPUs got a huge speed boost when doing Full DreamBooth / Fine-Tuning training
You need minimum 4GB GPU to do a FLUX LoRA training and minimum 6 GB GPU to do FLUX DreamBooth / Full Fine-Tuning training. It is just mind blowing.
You can download all configs and full instructions > https://www.patreon.com/posts/112099700
The above post also has 1-click installers and downloaders for Windows, RunPod and Massed Compute
The model downloader scripts were also updated, and downloading the 30+GB models takes a total of 1 minute on Massed Compute
You can read the recent updates here : https://github.com/kohya-ss/sd-scripts/tree/sd3?tab=readme-ov-file#recent-updates
This is the Kohya GUI branch : https://github.com/bmaltais/kohya_ss/tree/sd3-flux.1
Key thing to reduce VRAM usage is using block swap
Kohya implemented the logic of OneTrainer to improve block swapping speed significantly and now it is supported for LoRAs as well
Now you can do FP16 training with LoRAs on 24 GB and below GPUs
Now you can train a FLUX LoRA on a 4 GB GPU - key is FP8, block swap and using certain layers training (remember single layer LoRA training)
It took me more than 1 day to test all newer configs, their VRAM demands, their relative step speeds and prepare the configs :) | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1672531901326-6345bd89fe134dfd7a0dba40.png",
"fullname": "Furkan Gözükara",
"name": "MonsterMMORPG",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 376,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/OLpWbbp__ZGrxkDvAku7a.jpeg"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Hn8LnZDOI6GVbz1NXho9Z.jpeg"
}
] | [] | [
{
"reaction": "🚀",
"users": [
"MonsterMMORPG",
"John6666",
"Aduagba1",
"carlizor"
],
"count": 4
},
{
"reaction": "❤️",
"users": [
"MonsterMMORPG",
"remjie",
"jayavibhav",
"DennyDenn"
],
"count": 4
},
{
"reaction": "🔥",
"users": [
"MonsterMMORPG",
"carlizor",
"ilikeprivacy"
],
"count": 3
},
{
"reaction": "🤗",
"users": [
"MonsterMMORPG",
"prithivMLmods"
],
"count": 2
},
{
"reaction": "👀",
"users": [
"MonsterMMORPG"
],
"count": 1
},
{
"reaction": "😎",
"users": [
"MonsterMMORPG"
],
"count": 1
},
{
"reaction": "➕",
"users": [
"MonsterMMORPG"
],
"count": 1
},
{
"reaction": "🧠",
"users": [
"MonsterMMORPG"
],
"count": 1
},
{
"reaction": "👍",
"users": [
"MonsterMMORPG"
],
"count": 1
},
{
"reaction": "🤝",
"users": [
"MonsterMMORPG"
],
"count": 1
},
{
"reaction": "🤯",
"users": [
"MonsterMMORPG"
],
"count": 1
}
] | 2024-11-17T14:49:39.000Z | 2024-11-17T14:49:39.775Z | [] | /posts/MonsterMMORPG/982072243005650 | 2,368 | 0 |
953449438611686 | [
{
"type": "text",
"value": "Ok RNNs can rap too:)",
"raw": "Ok RNNs can rap too:)",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Here we implement the seminal RNN paper “Generating Text with Recurrent Neural Networks\"- we train a character-level multiplicative recurrent neural network model (~250k params) for 1000 epochs with Adam opt on 2pac's \"Hit 'em Up\", sample was fun lol.",
"raw": "Here we implement the seminal RNN paper “Generating Text with Recurrent Neural Networks\"- we train a character-level multiplicative recurrent neural network model (~250k params) for 1000 epochs with Adam opt on 2pac's \"Hit 'em Up\", sample was fun lol.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Code: ",
"raw": "Code: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://github.com/Jaykef/ai-algorithms/blob/main/generating_texts_with_rnns.ipynb",
"href": "https://github.com/Jaykef/ai-algorithms/blob/main/generating_texts_with_rnns.ipynb",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Ok RNNs can rap too:)
Here we implement the seminal RNN paper "Generating Text with Recurrent Neural Networks" - we train a character-level multiplicative recurrent neural network model (~250k params) for 1000 epochs with Adam opt on 2pac's "Hit 'em Up", sample was fun lol.
Code: https://github.com/Jaykef/ai-algorithms/blob/main/generating_texts_with_rnns.ipynb | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg",
"fullname": "Jaward Sesay",
"name": "Jaward",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 191,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/VLgYTC3kfxsoMHmKkD8Fo.mp4"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/4-z07k3Yar-e7-AKi_7Dh.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/v-KvBI3106FyIuVZMn_hr.png"
}
] | [] | [
{
"reaction": "👀",
"users": [
"John6666",
"YaTharThShaRma999",
"gabr7elferreira"
],
"count": 3
},
{
"reaction": "🔥",
"users": [
"doguscank"
],
"count": 1
}
] | 2024-11-17T13:02:24.000Z | 2024-11-17T13:02:24.594Z | [] | /posts/Jaward/953449438611686 | 1,592 | 0 |
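The post above trains a character-level RNN on song lyrics. As a rough sketch of that kind of setup (not the author's notebook), the snippet below trains a character-level language model with Adam in PyTorch; it uses a plain GRU instead of the paper's multiplicative RNN, and the lyrics path, model sizes, and step count are placeholder assumptions.

```python
# Minimal character-level language model sketch (plain GRU, not the paper's
# multiplicative RNN). "lyrics.txt" and all hyperparameters are placeholders.
import torch
import torch.nn as nn

text = open("lyrics.txt", encoding="utf-8").read()   # training corpus (assumed path)
chars = sorted(set(text))
stoi = {c: i for i, c in enumerate(chars)}
data = torch.tensor([stoi[c] for c in text], dtype=torch.long)

class CharRNN(nn.Module):
    def __init__(self, vocab, emb=64, hidden=256):
        super().__init__()
        self.embed = nn.Embedding(vocab, emb)
        self.rnn = nn.GRU(emb, hidden, batch_first=True)
        self.head = nn.Linear(hidden, vocab)

    def forward(self, x, h=None):
        out, h = self.rnn(self.embed(x), h)
        return self.head(out), h

model = CharRNN(len(chars))
opt = torch.optim.Adam(model.parameters(), lr=3e-3)
seq_len, batch = 128, 32

for step in range(1000):  # the post mentions 1000 epochs; these are simple steps
    starts = torch.randint(0, len(data) - seq_len - 1, (batch,)).tolist()
    x = torch.stack([data[i:i + seq_len] for i in starts])
    y = torch.stack([data[i + 1:i + seq_len + 1] for i in starts])
    logits, _ = model(x)
    loss = nn.functional.cross_entropy(logits.reshape(-1, len(chars)), y.reshape(-1))
    opt.zero_grad(); loss.backward(); opt.step()

# Sample: feed the model its own predictions one character at a time.
with torch.no_grad():
    idx, h = torch.tensor([[stoi[text[0]]]]), None
    out = [text[0]]
    for _ in range(300):
        logits, h = model(idx, h)
        idx = torch.multinomial(logits[:, -1].softmax(-1), 1)
        out.append(chars[idx.item()])
print("".join(out))
```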
611948696998118 | [
{
"type": "text",
"value": "Finaly I realesed mediapipe-face animation space.",
"raw": "Finaly I realesed mediapipe-face animation space.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Mediapipe 68-points Eyes-Closed and Mouth-Opened",
"raw": "Mediapipe 68-points Eyes-Closed and Mouth-Opened",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./spaces/Akjava/mediapipe-68-facial-guide-eyes-closed-mouth-opened",
"href": null,
"resource": {
"type": "space",
"id": "Akjava/mediapipe-68-facial-guide-eyes-closed-mouth-opened",
"discussionNum": null
},
"url": "https://huggingface.co./spaces/Akjava/mediapipe-68-facial-guide-eyes-closed-mouth-opened",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "[Article]Results: Converted Guide Images(eyes-closed and mouth-opened) with Flux.1 schenll img2img/inpaint",
"raw": "[Article]Results: Converted Guide Images(eyes-closed and mouth-opened) with Flux.1 schenll img2img/inpaint",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./blog/Akjava/result-guide-image-eyes-mouth",
"href": "https://huggingface.co./blog/Akjava/result-guide-image-eyes-mouth",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "All the other tools listed are designed to support Mediapipe Face Animation",
"raw": "All the other tools listed are designed to support Mediapipe Face Animation",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./collections/Akjava/mediapipe-tools-672ffe8ee7b62763c31b70c7",
"href": "https://huggingface.co./collections/Akjava/mediapipe-tools-672ffe8ee7b62763c31b70c7",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://huggingface.co./collections/Akjava/webp-3-frame-talking-animation-tools-672819ce4989f354cdbcc739",
"href": "https://huggingface.co./collections/Akjava/webp-3-frame-talking-animation-tools-672819ce4989f354cdbcc739",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Finally I released the mediapipe-face animation space.
Mediapipe 68-points Eyes-Closed and Mouth-Opened
https://huggingface.co./spaces/Akjava/mediapipe-68-facial-guide-eyes-closed-mouth-opened
[Article] Results: Converted Guide Images (eyes-closed and mouth-opened) with Flux.1 schnell img2img/inpaint
https://huggingface.co./blog/Akjava/result-guide-image-eyes-mouth
All the other tools listed are designed to support Mediapipe Face Animation
https://huggingface.co./collections/Akjava/mediapipe-tools-672ffe8ee7b62763c31b70c7
https://huggingface.co./collections/Akjava/webp-3-frame-talking-animation-tools-672819ce4989f354cdbcc739 | {
"avatarUrl": "/avatars/fb866e3758189d70488fc6a879151f45.svg",
"fullname": "Akihito Miyazaki",
"name": "Akjava",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 4,
"isFollowing": false
} | [] | [] | [
{
"reaction": "👍",
"users": [
"John6666"
],
"count": 1
}
] | 2024-11-17T12:43:13.000Z | 2024-11-17T12:43:13.043Z | [] | /posts/Akjava/611948696998118 | 410 | 0 |
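The mediapipe space above builds 68-point facial guide images. Below is a minimal sketch of the underlying landmark extraction with MediaPipe's FaceMesh solution; the mapping from the 468 FaceMesh points down to a 68-point guide layout, and the eyes-closed / mouth-opened editing, are not reproduced here, and the image path is a placeholder.

```python
# Minimal sketch: extract face landmarks with MediaPipe's FaceMesh solution and
# draw them as a simple guide overlay. "face.jpg" is a placeholder path.
import cv2
import mediapipe as mp

image = cv2.imread("face.jpg")
rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)

with mp.solutions.face_mesh.FaceMesh(static_image_mode=True, max_num_faces=1) as face_mesh:
    result = face_mesh.process(rgb)

if result.multi_face_landmarks:
    h, w = image.shape[:2]
    landmarks = result.multi_face_landmarks[0].landmark  # 468 normalized (x, y, z) points
    points = [(int(p.x * w), int(p.y * h)) for p in landmarks]
    for x, y in points:
        cv2.circle(image, (x, y), 1, (0, 255, 0), -1)
    cv2.imwrite("face_guide.png", image)
```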
478756824597278 | [
{
"type": "text",
"value": "Good folks at ",
"raw": "Good folks at ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@nvidia",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "nvidia",
"label": null,
"lang": null
},
{
"type": "text",
"value": " and @Tsinghua_Uni have released LLAMA-MESH - A Revolutionary Approach to 3D Content Generation!",
"raw": " and @Tsinghua_Uni have released LLAMA-MESH - A Revolutionary Approach to 3D Content Generation!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "This innovative framework enables the direct generation of 3D meshes from natural language prompts while maintaining strong language capabilities.",
"raw": "This innovative framework enables the direct generation of 3D meshes from natural language prompts while maintaining strong language capabilities.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Here is the Architecture & Implementation!",
"raw": "Here is the Architecture & Implementation!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ">> Core Components",
"raw": ">> Core Components",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Model Foundation ",
"raw": "Model Foundation ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- If you haven't guessed it yet, it's built on the LLaMA-3.1-8B-Instruct base model ",
"raw": "- If you haven't guessed it yet, it's built on the LLaMA-3.1-8B-Instruct base model ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Maintains original language capabilities while adding 3D generation ",
"raw": "- Maintains original language capabilities while adding 3D generation ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Context length is set to 8,000 tokens ",
"raw": "- Context length is set to 8,000 tokens ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "3D Representation Strategy ",
"raw": "3D Representation Strategy ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Uses the OBJ file format for mesh representation ",
"raw": "- Uses the OBJ file format for mesh representation ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Quantizes vertex coordinates into 64 discrete bins per axis ",
"raw": "- Quantizes vertex coordinates into 64 discrete bins per axis ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Sorts vertices by z-y-x coordinates, from lowest to highest ",
"raw": "- Sorts vertices by z-y-x coordinates, from lowest to highest ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Sorts faces by the lowest vertex indices for consistency ",
"raw": "- Sorts faces by the lowest vertex indices for consistency ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Data Processing Pipeline ",
"raw": "Data Processing Pipeline ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Filters meshes to a maximum of 500 faces for computational efficiency ",
"raw": "- Filters meshes to a maximum of 500 faces for computational efficiency ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Applies random rotations (0°, 90°, 180°, 270°) for data augmentation ",
"raw": "- Applies random rotations (0°, 90°, 180°, 270°) for data augmentation ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Generates ~125k mesh variations from 31k base meshes ",
"raw": "- Generates ~125k mesh variations from 31k base meshes ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Uses Cap3D-generated captions for text descriptions ",
"raw": "- Uses Cap3D-generated captions for text descriptions ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": ">> Training Framework",
"raw": ">> Training Framework",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Dataset Composition ",
"raw": "Dataset Composition ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 40% Mesh Generation tasks ",
"raw": "- 40% Mesh Generation tasks ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 20% Mesh Understanding tasks ",
"raw": "- 20% Mesh Understanding tasks ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 40% General Conversation (UltraChat dataset) ",
"raw": "- 40% General Conversation (UltraChat dataset) ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 8x training turns for generation, 4x for understanding ",
"raw": "- 8x training turns for generation, 4x for understanding ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Training Configuration ",
"raw": "Training Configuration ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Deployed on 32 A100 GPUs (for Nvidia, this is literally in-house) ",
"raw": "- Deployed on 32 A100 GPUs (for Nvidia, this is literally in-house) ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 21,000 training iterations ",
"raw": "- 21,000 training iterations ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Global batch size: 128 ",
"raw": "- Global batch size: 128 ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- AdamW optimizer with a 1e-5 learning rate ",
"raw": "- AdamW optimizer with a 1e-5 learning rate ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- 30-step warmup with cosine scheduling ",
"raw": "- 30-step warmup with cosine scheduling ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Total training time: approximately 3 days (based on the paper) ",
"raw": "- Total training time: approximately 3 days (based on the paper) ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "This research opens exciting possibilities for intuitive 3D content creation through natural language interaction. The future of digital design is conversational!",
"raw": "This research opens exciting possibilities for intuitive 3D content creation through natural language interaction. The future of digital design is conversational!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Good folks at @nvidia and @Tsinghua_Uni have released LLAMA-MESH - A Revolutionary Approach to 3D Content Generation!
This innovative framework enables the direct generation of 3D meshes from natural language prompts while maintaining strong language capabilities.
Here is the Architecture & Implementation!
>> Core Components
Model Foundation
- If you haven't guessed it yet, it's built on the LLaMA-3.1-8B-Instruct base model
- Maintains original language capabilities while adding 3D generation
- Context length is set to 8,000 tokens
3D Representation Strategy
- Uses the OBJ file format for mesh representation
- Quantizes vertex coordinates into 64 discrete bins per axis
- Sorts vertices by z-y-x coordinates, from lowest to highest
- Sorts faces by the lowest vertex indices for consistency
Data Processing Pipeline
- Filters meshes to a maximum of 500 faces for computational efficiency
- Applies random rotations (0°, 90°, 180°, 270°) for data augmentation
- Generates ~125k mesh variations from 31k base meshes
- Uses Cap3D-generated captions for text descriptions
>> Training Framework
Dataset Composition
- 40% Mesh Generation tasks
- 20% Mesh Understanding tasks
- 40% General Conversation (UltraChat dataset)
- 8x training turns for generation, 4x for understanding
Training Configuration
- Deployed on 32 A100 GPUs (for Nvidia, this is literally in-house)
- 21,000 training iterations
- Global batch size: 128
- AdamW optimizer with a 1e-5 learning rate
- 30-step warmup with cosine scheduling
- Total training time: approximately 3 days (based on the paper)
This research opens exciting possibilities for intuitive 3D content creation through natural language interaction. The future of digital design is conversational! | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png",
"fullname": "Kuldeep Singh Sidhu",
"name": "singhsidhukuldeep",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 219,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/7UzRyFrbCXT2wC_QDLKLx.mp4"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"Mrdesigner14",
"John6666",
"roland0822",
"DatGG",
"KadirErturk",
"EdilCamil"
],
"count": 6
},
{
"reaction": "🚀",
"users": [
"John6666",
"casper911"
],
"count": 2
},
{
"reaction": "👍",
"users": [
"csabakecskemeti",
"gauravpatil"
],
"count": 2
}
] | 2024-11-17T07:57:31.000Z | 2024-11-17T07:57:31.455Z | [] | /posts/singhsidhukuldeep/478756824597278 | 2,281 | 0 |
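The architecture notes above describe how LLAMA-MESH serializes meshes as text. The sketch below follows that description rather than NVIDIA's code: quantize vertex coordinates into 64 bins per axis, sort vertices by z-y-x, sort faces by their lowest vertex index, and emit OBJ lines; the toy triangle at the end is only for illustration.

```python
# Sketch of the mesh pre-processing described in the post: 64-bin coordinate
# quantization, z-y-x vertex sorting, face sorting, and OBJ text output.
import numpy as np

def mesh_to_obj_tokens(vertices: np.ndarray, faces: np.ndarray, bins: int = 64) -> str:
    # Normalize into a unit cube, then snap each coordinate to one of `bins` values.
    lo, hi = vertices.min(0), vertices.max(0)
    quant = np.round((vertices - lo) / np.maximum(hi - lo, 1e-8) * (bins - 1)).astype(int)

    # Sort vertices by (z, y, x), lowest first, and remap face indices accordingly.
    order = np.lexsort((quant[:, 0], quant[:, 1], quant[:, 2]))
    remap = np.empty(len(order), dtype=int)
    remap[order] = np.arange(len(order))
    faces = remap[faces]

    # Sort faces by their lowest vertex index for a consistent serialization.
    faces = faces[np.argsort(faces.min(1))]

    lines = [f"v {x} {y} {z}" for x, y, z in quant[order]]
    lines += [f"f {a + 1} {b + 1} {c + 1}" for a, b, c in faces]  # OBJ is 1-indexed
    return "\n".join(lines)

# Toy usage: a single triangle.
v = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.5]])
f = np.array([[0, 1, 2]])
print(mesh_to_obj_tokens(v, f))
```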
578160125260008 | [
{
"type": "text",
"value": "OmniVision-968M: a new local VLM for edge devices, fast & small but performant",
"raw": "OmniVision-968M: a new local VLM for edge devices, fast & small but performant",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "💨 a new vision language model with 9x less image tokens, super efficient ",
"raw": "💨 a new vision language model with 9x less image tokens, super efficient ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📖 aligned with DPO for reducing hallucinations",
"raw": "📖 aligned with DPO for reducing hallucinations",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "⚡️ Apache 2.0 license 🔥",
"raw": "⚡️ Apache 2.0 license 🔥",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Demo hf.co/spaces/NexaAIDev/omnivlm-dpo-demo",
"raw": "Demo hf.co/spaces/NexaAIDev/omnivlm-dpo-demo",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Model ",
"raw": "Model ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./NexaAIDev/omnivision-968M",
"href": null,
"resource": {
"type": "model",
"id": "NexaAIDev/omnivision-968M",
"discussionNum": null
},
"url": "https://huggingface.co./NexaAIDev/omnivision-968M",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | OmniVision-968M: a new local VLM for edge devices, fast & small but performant
💨 a new vision language model with 9x fewer image tokens, super efficient 
📖 aligned with DPO for reducing hallucinations
⚡️ Apache 2.0 license 🔥
Demo hf.co/spaces/NexaAIDev/omnivlm-dpo-demo
Model https://huggingface.co./NexaAIDev/omnivision-968M | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1648113222875-6141a88b3a0ec78603c9e784.png",
"fullname": "Merve Noyan",
"name": "merve",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 5589,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/UpftcDUFh7eDXfvTbRROY.jpeg"
}
] | [] | [
{
"reaction": "🔥",
"users": [
"YaTharThShaRma999",
"John6666",
"quyet7779",
"typesdigital",
"Csplk",
"Norod78",
"not-lain",
"Sri-Vigneshwar-DJ",
"ai-everyday",
"victor",
"lhoestq",
"Nydaym",
"Catering3733",
"ogozcelik",
"ucsahin",
"appvoid",
"FGOTYT",
"OmbelineM"
],
"count": 18
},
{
"reaction": "👀",
"users": [
"Csplk",
"maxiw",
"not-lain",
"ucsahin"
],
"count": 4
},
{
"reaction": "🤗",
"users": [
"prithivMLmods",
"ucsahin"
],
"count": 2
}
] | 2024-11-16T23:26:19.000Z | 2024-11-18T16:19:47.318Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/6048ea0c0f59ab4b614f1836/8Eg8IyPtJgOHmywcJ7E8a.jpeg",
"fullname": "RITABRATA MAITI",
"name": "ritabratamaiti",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 2,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg",
"fullname": "leroy Samuel Dyer",
"name": "LeroyDyer",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 84,
"isFollowing": false
}
] | /posts/merve/578160125260008 | 4,724 | 4 |
269038377723431 | [
{
"type": "text",
"value": "Kokoro: a small, fast 80M param TTS model hosted on ZeroGPU at ",
"raw": "Kokoro: a small, fast 80M param TTS model hosted on ZeroGPU at ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://hf.co/spaces/hexgrad/Kokoro-TTS",
"href": null,
"resource": {
"type": "space",
"id": "hexgrad/Kokoro-TTS",
"discussionNum": null
},
"url": "https://hf.co/spaces/hexgrad/Kokoro-TTS",
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Kokoro: a small, fast 80M param TTS model hosted on ZeroGPU at https://hf.co/spaces/hexgrad/Kokoro-TTS | {
"avatarUrl": "/avatars/02074f60a2ef445a29343ed90a303cc6.svg",
"fullname": "Hexgrad",
"name": "hexgrad",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 20,
"isFollowing": false
} | [] | [] | [
{
"reaction": "🔥",
"users": [
"YaTharThShaRma999",
"John6666",
"Pendrokar",
"Sri-Vigneshwar-DJ",
"ai-everyday",
"bendangelo",
"ecyht2",
"merve",
"deki",
"victor",
"s3nh",
"Gatozu35",
"fireblade2534"
],
"count": 13
}
] | 2024-11-16T22:37:07.000Z | 2024-11-18T07:27:42.811Z | [
{
"avatarUrl": "/avatars/a1d86d990de3b90ed8fdb29c60337219.svg",
"fullname": "Be",
"name": "bendangelo",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 1,
"isFollowing": false
},
{
"avatarUrl": "/avatars/52a153d04d325469e1be69bce610ebe5.svg",
"fullname": "ecyht2",
"name": "ecyht2",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 3,
"isFollowing": false
}
] | /posts/hexgrad/269038377723431 | 3,146 | 3 |
143199321478313 | [
{
"type": "text",
"value": "🚀 Introducing the Model Drops Tracker! 🕵️♂️",
"raw": "🚀 Introducing the Model Drops Tracker! 🕵️♂️",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Feeling overwhelmed by the AI model release frenzy? 🤯 You're not alone!",
"raw": "Feeling overwhelmed by the AI model release frenzy? 🤯 You're not alone!",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I built this simple tool to help us all keep up:",
"raw": "I built this simple tool to help us all keep up:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Filter recent models from the 🤗 Hub",
"raw": "- Filter recent models from the 🤗 Hub",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Set minimum likes threshold",
"raw": "- Set minimum likes threshold",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "- Choose how recent you want to go",
"raw": "- Choose how recent you want to go",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Try it out and let me know what you think: ",
"raw": "Try it out and let me know what you think: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./spaces/fdaudens/Model-Drops-Tracker",
"href": null,
"resource": {
"type": "space",
"id": "fdaudens/Model-Drops-Tracker",
"discussionNum": null
},
"url": "https://huggingface.co./spaces/fdaudens/Model-Drops-Tracker",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Any features you'd like to see added?",
"raw": "Any features you'd like to see added?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "#AIModels",
"raw": "#AIModels",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🚀 Introducing the Model Drops Tracker! 🕵️♂️
Feeling overwhelmed by the AI model release frenzy? 🤯 You're not alone!
I built this simple tool to help us all keep up:
- Filter recent models from the 🤗 Hub
- Set minimum likes threshold
- Choose how recent you want to go
Try it out and let me know what you think: https://huggingface.co./spaces/fdaudens/Model-Drops-Tracker
Any features you'd like to see added?
#AIModels | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/3UslU7TB7CHobIwSadfUa.mp4"
}
] | [] | [
{
"reaction": "🚀",
"users": [
"davidberenstein1957",
"ajibawa-2023",
"jdzw2014",
"lucianosb",
"ecyht2",
"jgitsolutions",
"John6666",
"LewSypher",
"Nymbo"
],
"count": 9
},
{
"reaction": "👍",
"users": [
"adorkin"
],
"count": 1
}
] | 2024-07-24T19:36:33.000Z | 2024-07-26T18:33:31.204Z | [
{
"avatarUrl": "/avatars/52a153d04d325469e1be69bce610ebe5.svg",
"fullname": "ecyht2",
"name": "ecyht2",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 3,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/648a8d2d4ea19a8097e1c0d7/PKDiLe0WCwzNVWgvLvqr7.jpeg",
"fullname": "Henry Holloway",
"name": "henryholloway",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/fdaudens/143199321478313 | 2,286 | 3 |
336620283743824 | [
{
"type": "text",
"value": "🤖💡Just tried out ",
"raw": "🤖💡Just tried out ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "mention",
"value": null,
"raw": "@m-ric",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": "m-ric",
"label": null,
"lang": null
},
{
"type": "text",
"value": " 's new LLaMA-3.1 70B agent for data analysis. Impressive stuff. ",
"raw": " 's new LLaMA-3.1 70B agent for data analysis. Impressive stuff. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🚢📊 Fed it the Titanic passenger dataset with minimal instructions. The agent autonomously dug in, tested hypotheses, and reached some intriguing conclusions:",
"raw": "🚢📊 Fed it the Titanic passenger dataset with minimal instructions. The agent autonomously dug in, tested hypotheses, and reached some intriguing conclusions:",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "\"Lower class passengers less likely to survive, slight negative correlation with age, and positive correlation between fare price and survival.\" ",
"raw": "\"Lower class passengers less likely to survive, slight negative correlation with age, and positive correlation between fare price and survival.\" ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "📈It even generated charts to visualize the findings! ",
"raw": "📈It even generated charts to visualize the findings! ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🧠💼 Great potential for business intelligence, research, and decision-making when we can just upload datasets and let AI agents loose on them. ",
"raw": "🧠💼 Great potential for business intelligence, research, and decision-making when we can just upload datasets and let AI agents loose on them. ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "👉 Check it out: ",
"raw": "👉 Check it out: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./spaces/m-ric/agent-data-analyst",
"href": null,
"resource": {
"type": "space",
"id": "m-ric/agent-data-analyst",
"discussionNum": null
},
"url": "https://huggingface.co./spaces/m-ric/agent-data-analyst",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "🤔 Any particular use cases you're excited about?",
"raw": "🤔 Any particular use cases you're excited about?",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " ",
"raw": " ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "#AIinDataAnalysis #MachineLearning #DataScience",
"raw": "#AIinDataAnalysis #MachineLearning #DataScience",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | 🤖💡Just tried out @m-ric 's new LLaMA-3.1 70B agent for data analysis. Impressive stuff.
🚢📊 Fed it the Titanic passenger dataset with minimal instructions. The agent autonomously dug in, tested hypotheses, and reached some intriguing conclusions:
"Lower class passengers less likely to survive, slight negative correlation with age, and positive correlation between fare price and survival."
📈It even generated charts to visualize the findings!
🧠💼 Great potential for business intelligence, research, and decision-making when we can just upload datasets and let AI agents loose on them.
👉 Check it out: https://huggingface.co./spaces/m-ric/agent-data-analyst
🤔 Any particular use cases you're excited about?
#AIinDataAnalysis #MachineLearning #DataScience | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
} | [
{
"type": "video",
"url": "https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/iDJVQOcxoCJXAMFVwitoq.qt"
}
] | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg",
"fullname": "Aymeric Roucher",
"name": "m-ric",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 494
}
] | [
{
"reaction": "🔥",
"users": [
"osanseviero",
"m-ric"
],
"count": 2
}
] | 2024-07-24T15:12:35.000Z | 2024-07-25T17:17:10.453Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg",
"fullname": "Florent Daudens",
"name": "fdaudens",
"type": "user",
"isPro": false,
"isHf": true,
"isMod": false,
"followerCount": 384,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63c83abc46421a2efe8160d0/Yy6IvAusEgxQ0qhCvB0Ka.jpeg",
"fullname": "Mac Szankin",
"name": "macsz",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": null,
"isFollowing": false
}
] | /posts/fdaudens/336620283743824 | 814 | 3 |
317039567154641 | [
{
"type": "text",
"value": "Hello there, ",
"raw": "Hello there, ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "New model released, my goal was to try finetune on the last Llama-3.1-8B-Instruct but not a small train, I wanted to do something useful.",
"raw": "New model released, my goal was to try finetune on the last Llama-3.1-8B-Instruct but not a small train, I wanted to do something useful.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "One of the rare model that I didn't made for RP, or in the goal to uncensor it (but I did anyway kek).",
"raw": "One of the rare model that I didn't made for RP, or in the goal to uncensor it (but I did anyway kek).",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The model was trained on 9M Claude conversations ONLY, giving him another writting style.",
"raw": "The model was trained on 9M Claude conversations ONLY, giving him another writting style.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude",
"href": null,
"resource": {
"type": "model",
"id": "Undi95/Meta-Llama-3.1-8B-Claude",
"discussionNum": null
},
"url": "https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " > OG release fp32, it's the epoch 2",
"raw": " > OG release fp32, it's the epoch 2",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-bf16",
"href": null,
"resource": {
"type": "model",
"id": "Undi95/Meta-Llama-3.1-8B-Claude-bf16",
"discussionNum": null
},
"url": "https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-bf16",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " > Base model resharded in bf16 waiting for available quant without issues",
"raw": " > Base model resharded in bf16 waiting for available quant without issues",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Since it's frustrating to be censored using a local model, orthogonal activation steering was used, trying to force the model to never refuse a prompt.",
"raw": "Since it's frustrating to be censored using a local model, orthogonal activation steering was used, trying to force the model to never refuse a prompt.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-68fail-3000total",
"href": null,
"resource": {
"type": "model",
"id": "Undi95/Meta-Llama-3.1-8B-Claude-68fail-3000total",
"discussionNum": null
},
"url": "https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-68fail-3000total",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " > Uncensored model, refuse 68 times on 3000 toxic prompt",
"raw": " > Uncensored model, refuse 68 times on 3000 toxic prompt",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "resource",
"value": null,
"raw": "https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-39fail-3000total",
"href": null,
"resource": {
"type": "model",
"id": "Undi95/Meta-Llama-3.1-8B-Claude-39fail-3000total",
"discussionNum": null
},
"url": "https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-39fail-3000total",
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": " > Uncensored model, refuse 39 times on 3000 toxic prompt",
"raw": " > Uncensored model, refuse 39 times on 3000 toxic prompt",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "It still refuse some prompt but the majority of them is uncensored. OAS can make a model more dumb or make the base perplexity go higher, so I didn't snipe for 0 refusal.",
"raw": "It still refuse some prompt but the majority of them is uncensored. OAS can make a model more dumb or make the base perplexity go higher, so I didn't snipe for 0 refusal.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "I don't do non-RP model a lot so any feedback is welcome, I would like to re-use this base for some others future project if needed.",
"raw": "I don't do non-RP model a lot so any feedback is welcome, I would like to re-use this base for some others future project if needed.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Hello there,
New model released. My goal was to try a finetune of the latest Llama-3.1-8B-Instruct, but not a small train; I wanted to do something useful.
One of the rare models that I didn't make for RP, or with the goal of uncensoring it (but I did anyway kek).
The model was trained on 9M Claude conversations ONLY, giving it another writing style.
https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude > OG release in fp32, it's epoch 2
https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-bf16 > Base model resharded in bf16, waiting for an available quant without issues
Since it's frustrating to be censored using a local model, orthogonal activation steering was used, trying to force the model to never refuse a prompt.
https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-68fail-3000total > Uncensored model, refuses 68 times on 3000 toxic prompts
https://huggingface.co./Undi95/Meta-Llama-3.1-8B-Claude-39fail-3000total > Uncensored model, refuses 39 times on 3000 toxic prompts
It still refuses some prompts, but the majority of them are uncensored. OAS can make a model dumber or make the base perplexity go higher, so I didn't snipe for 0 refusals.
I don't do non-RP models a lot, so any feedback is welcome; I would like to re-use this base for some other future projects if needed. | {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63ab1241ad514ca8d1430003/d-43TcOxG-zqAbzrH2m7H.png",
"fullname": "Undi",
"name": "Undi95",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3311,
"isFollowing": false
} | [] | [] | [
{
"reaction": "❤️",
"users": [
"Chief-Inspector",
"MarinaraSpaghetti",
"Herman555",
"John6666",
"AtonMountlook",
"Ramikan-BR",
"DuckyBlender",
"osanseviero",
"mambiux",
"den0620",
"win10"
],
"count": 11
}
] | 2024-07-24T15:08:08.000Z | 2024-07-24T22:22:31.327Z | [
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/666627d86cd2ef174a6e2257/QVRd7WN6kVCtT5BDpf8vq.png",
"fullname": "Invisietch",
"name": "invisietch",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 54,
"isFollowing": false
},
{
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/63ab1241ad514ca8d1430003/d-43TcOxG-zqAbzrH2m7H.png",
"fullname": "Undi",
"name": "Undi95",
"type": "user",
"isPro": true,
"isHf": false,
"isMod": false,
"followerCount": 3311,
"isFollowing": false
},
{
"avatarUrl": "/avatars/18daa2d580f5f35cf850bc9df8a03755.svg",
"fullname": "Sporkness",
"name": "SporkySporkness",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 5,
"isFollowing": false
}
] | /posts/Undi95/317039567154641 | 10,458 | 4 |
746206932023722 | [
{
"type": "text",
"value": "Professional Threads Post Writer",
"raw": "Professional Threads Post Writer",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://hf.co/chat/assistant/66a0ee6fc397ecb70cee100d",
"href": "https://hf.co/chat/assistant/66a0ee6fc397ecb70cee100d",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Professional Threads Post Writer
https://hf.co/chat/assistant/66a0ee6fc397ecb70cee100d | {
"avatarUrl": "/avatars/d773a7dd9b706759131fc482ab71ced7.svg",
"fullname": "[email protected]",
"name": "Taf2023",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 8,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/64841af2295256340e4b9f88/KjqQvbwV8O9Jtz_-L49tS.webp"
}
] | [] | [] | 2024-07-24T12:11:18.000Z | 2024-07-24T12:11:18.282Z | [] | /posts/Taf2023/746206932023722 | 491 | 0 |
999630929802342 | [
{
"type": "text",
"value": "Llama 3.1 405B Instruct beats GPT-4o on MixEval-Hard",
"raw": "Llama 3.1 405B Instruct beats GPT-4o on MixEval-Hard",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Just ran MixEval for 405B, Sonnet-3.5 and 4o, with 405B landing right between the other two at 66.19",
"raw": "Just ran MixEval for 405B, Sonnet-3.5 and 4o, with 405B landing right between the other two at 66.19",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "The GPT-4o result of 64.7 replicated locally but Sonnet-3.5 actually scored 70.25/69.45 in my replications 🤔 Still well ahead of the other 2 though.",
"raw": "The GPT-4o result of 64.7 replicated locally but Sonnet-3.5 actually scored 70.25/69.45 in my replications 🤔 Still well ahead of the other 2 though.",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Sammple of 1 of the eval calls here: ",
"raw": "Sammple of 1 of the eval calls here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://wandb.ai/morgan/MixEval/weave/calls/07b05ae2-2ef5-4525-98a6-c59963b76fe1",
"href": "https://wandb.ai/morgan/MixEval/weave/calls/07b05ae2-2ef5-4525-98a6-c59963b76fe1",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "text",
"value": "Quick auto-logging tracing for openai-compatible clients and many more here: ",
"raw": "Quick auto-logging tracing for openai-compatible clients and many more here: ",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "link",
"value": null,
"raw": "https://wandb.github.io/weave/quickstart/",
"href": "https://wandb.github.io/weave/quickstart/",
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
},
{
"type": "new_line",
"value": null,
"raw": "\n",
"href": null,
"resource": null,
"url": null,
"code": null,
"user": null,
"label": null,
"lang": null
}
] | Llama 3.1 405B Instruct beats GPT-4o on MixEval-Hard
Just ran MixEval for 405B, Sonnet-3.5 and 4o, with 405B landing right between the other two at 66.19
The GPT-4o result of 64.7 replicated locally but Sonnet-3.5 actually scored 70.25/69.45 in my replications 🤔 Still well ahead of the other 2 though.
Sample of 1 of the eval calls here: https://wandb.ai/morgan/MixEval/weave/calls/07b05ae2-2ef5-4525-98a6-c59963b76fe1
Quick auto-logging tracing for openai-compatible clients and many more here: https://wandb.github.io/weave/quickstart/
| {
"avatarUrl": "https://cdn-avatars.huggingface.co/v1/production/uploads/1618571183509-5f05a97d5d08220171a0ad9d.png",
"fullname": "Morgan McGuire",
"name": "morgan",
"type": "user",
"isPro": false,
"isHf": false,
"isMod": false,
"followerCount": 18,
"isFollowing": false
} | [
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f05a97d5d08220171a0ad9d/gaQche2YCXq0TTnmmH2Ol.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f05a97d5d08220171a0ad9d/TbXxia4lQrX5KLU-6405Z.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f05a97d5d08220171a0ad9d/TdTwni8FlXwUvBwEdrXFC.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f05a97d5d08220171a0ad9d/r0Zz8rrXpvj0oQ6avJWNi.png"
},
{
"type": "image",
"url": "https://cdn-uploads.huggingface.co/production/uploads/5f05a97d5d08220171a0ad9d/gxIoZoSeQFVMtnBcD08nK.png"
}
] | [] | [
{
"reaction": "👍",
"users": [
"zenosai",
"macsz",
"Corvius"
],
"count": 3
},
{
"reaction": "🔥",
"users": [
"Rohitkhatri75436"
],
"count": 1
}
] | 2024-07-24T11:27:03.000Z | 2024-07-24T11:27:03.893Z | [] | /posts/morgan/999630929802342 | 1,296 | 0 |
This dataset contains posts scraped from https://huggingface.co./posts.
It includes all posts published from the launch date on December 23, 2023, up to November 24, 2024, at 15:40.
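Below is a minimal, untested sketch of how one might load and filter this dump with the 🤗 datasets library. The repo ID in the snippet is a placeholder, and the column names used (rawContent, reactions, publishedAt, url) are assumptions inferred from the row structure shown above; adjust both to match the actual published dataset.

# Sketch: load the posts dump and keep the most-reacted-to recent posts.
# NOTE: "your-namespace/hf-posts" is a placeholder repo ID, and the column
# names below (rawContent, reactions, publishedAt, url) are assumptions
# taken from the row structure shown above.
from datasets import load_dataset

posts = load_dataset("your-namespace/hf-posts", split="train")

def total_reactions(row):
    # Each entry in `reactions` looks like {"reaction": "...", "users": [...], "count": N}.
    return sum(r.get("count", 0) for r in (row.get("reactions") or []))

popular = posts.filter(lambda row: total_reactions(row) >= 10)
popular = popular.sort("publishedAt", reverse=True)  # ISO timestamps sort lexicographically

for row in popular.select(range(min(5, len(popular)))):
    print(row["publishedAt"], row["url"])
    print(row["rawContent"][:120].replace("\n", " "))

The same pattern extends to any other column in the dump.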