🐦 Twitter Post Details

Viewing an enriched Twitter post

@haoailab

(1/5) FP4 hardware is here, but 4-bit attention still kills model quality, blocking true end-to-end FP4 serving. To fix that, we propose Attn-QAT, the first systematic study of quantization-aware training for attention. The result: FP4 attention quality is comparable to BF16 attention with 1.1x–1.5x higher throughput than SageAttention3 on an RTX 5090 and 1.39x speedup over FlashAttention-4 on a B200. Blog: https://t.co/NxVSXKWEgI Code: https://t.co/6irFgQ7GeM Checkpoints: https://t.co/GsrzbJlRY8

Media 2
Media 3
Media 4

📊 Media Metadata

{
  "media": [
    {
      "url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/2042343429108351116/media_0.mp4",
      "media_url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/2042343429108351116/media_0.mp4",
      "type": "video",
      "filename": "media_0.mp4"
    },
    {
      "url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/2042343429108351116/media_1.jpg",
      "media_url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/2042343429108351116/media_1.jpg",
      "type": "photo",
      "filename": "media_1.jpg"
    },
    {
      "url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/2042343429108351116/media_2.jpg",
      "media_url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/2042343429108351116/media_2.jpg",
      "type": "photo",
      "filename": "media_2.jpg"
    },
    {
      "url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/2042343429108351116/media_3.jpg",
      "media_url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/2042343429108351116/media_3.jpg",
      "type": "photo",
      "filename": "media_3.jpg"
    }
  ],
  "processed_at": "2026-04-10T02:23:14.525924",
  "pipeline_version": "2.0"
}

🔧 Raw API Response

{
  "type": "tweet",
  "id": "2042343429108351116",
  "url": "https://x.com/haoailab/status/2042343429108351116",
  "twitterUrl": "https://twitter.com/haoailab/status/2042343429108351116",
  "text": "(1/5) FP4 hardware is here, but 4-bit attention still kills model quality, blocking true end-to-end FP4 serving.\nTo fix that, we propose Attn-QAT, the first systematic study of quantization-aware training for attention.\n\nThe result: FP4 attention quality is comparable to BF16 attention with 1.1x–1.5x higher throughput than SageAttention3 on an RTX 5090 and 1.39x speedup over FlashAttention-4 on a B200.\n\nBlog: https://t.co/NxVSXKWEgI\nCode: https://t.co/6irFgQ7GeM\nCheckpoints: https://t.co/GsrzbJlRY8",
  "source": "Twitter for iPhone",
  "retweetCount": 19,
  "replyCount": 5,
  "likeCount": 91,
  "quoteCount": 3,
  "viewCount": 8434,
  "createdAt": "Thu Apr 09 20:46:33 +0000 2026",
  "lang": "en",
  "bookmarkCount": 63,
  "isReply": false,
  "inReplyToId": null,
  "conversationId": "2042343429108351116",
  "displayTextRange": [
    0,
    276
  ],
  "inReplyToUserId": null,
  "inReplyToUsername": null,
  "author": {
    "type": "user",
    "userName": "haoailab",
    "url": "https://x.com/haoailab",
    "twitterUrl": "https://twitter.com/haoailab",
    "id": "1768529398448492544",
    "name": "Hao AI Lab",
    "isVerified": false,
    "isBlueVerified": true,
    "verifiedType": null,
    "profilePicture": "https://pbs.twimg.com/profile_images/1768567902129807360/paum8hpu_normal.jpg",
    "coverPicture": "https://pbs.twimg.com/profile_banners/1768529398448492544/1710495064",
    "description": "",
    "location": "",
    "followers": 5367,
    "following": 369,
    "status": "",
    "canDm": true,
    "canMediaTag": true,
    "createdAt": "Fri Mar 15 06:47:30 +0000 2024",
    "entities": {
      "description": {
        "urls": []
      },
      "url": {}
    },
    "fastFollowersCount": 0,
    "favouritesCount": 833,
    "hasCustomTimelines": true,
    "isTranslator": false,
    "mediaCount": 189,
    "statusesCount": 532,
    "withheldInCountries": [],
    "affiliatesHighlightedLabel": {},
    "possiblySensitive": false,
    "pinnedTweetIds": [
      "2042343429108351116"
    ],
    "profile_bio": {
      "description": "Hao AI Lab at UCSD. Our mission is to democratize large machine learning models, algorithms, and their underlying systems.",
      "entities": {
        "description": {
          "hashtags": [],
          "symbols": [],
          "urls": [],
          "user_mentions": []
        },
        "url": {
          "urls": [
            {
              "display_url": "haoailab.com",
              "expanded_url": "https://haoailab.com/",
              "indices": [
                0,
                23
              ],
              "url": "https://t.co/pnLI3ZLosW"
            }
          ]
        }
      }
    },
    "isAutomated": false,
    "automatedBy": null
  },
  "extendedEntities": {
    "media": [
      {
        "additional_media_info": {
          "monetizable": true
        },
        "allow_download_status": {
          "allow_download": true
        },
        "display_url": "pic.twitter.com/UdZSnlPEH0",
        "expanded_url": "https://twitter.com/haoailab/status/2042343429108351116/video/1",
        "ext_media_availability": {
          "status": "Available"
        },
        "id_str": "2042342874386481152",
        "indices": [
          277,
          300
        ],
        "media_key": "13_2042342874386481152",
        "media_results": {
          "id": "QXBpTWVkaWFSZXN1bHRzOgwABAoAARxX3AYBW0AAAAA=",
          "result": {
            "__typename": "ApiMedia",
            "id": "QXBpTWVkaWE6DAAECgABHFfcBgFbQAAAAA==",
            "media_key": "13_2042342874386481152"
          }
        },
        "media_url_https": "https://pbs.twimg.com/amplify_video_thumb/2042342874386481152/img/SK5STUUqmsVLlBAH.jpg",
        "original_info": {
          "focus_rects": [],
          "height": 2338,
          "width": 1080
        },
        "sizes": {
          "large": {
            "h": 2048,
            "w": 946
          }
        },
        "type": "video",
        "url": "https://t.co/UdZSnlPEH0",
        "video_info": {
          "aspect_ratio": [
            540,
            1169
          ],
          "duration_millis": 25333,
          "variants": [
            {
              "content_type": "application/x-mpegURL",
              "url": "https://video.twimg.com/amplify_video/2042342874386481152/pl/z6SqR_PpYkitKtM_.m3u8?tag=21&v=cfc"
            },
            {
              "bitrate": 632000,
              "content_type": "video/mp4",
              "url": "https://video.twimg.com/amplify_video/2042342874386481152/vid/avc1/320x692/hvZe_82IqJhbCg64.mp4?tag=21"
            },
            {
              "bitrate": 950000,
              "content_type": "video/mp4",
              "url": "https://video.twimg.com/amplify_video/2042342874386481152/vid/avc1/480x1038/cO3cOdF5cJ7-0eHB.mp4?tag=21"
            },
            {
              "bitrate": 2176000,
              "content_type": "video/mp4",
              "url": "https://video.twimg.com/amplify_video/2042342874386481152/vid/avc1/720x1558/AvS3NNfcHuy-K9dT.mp4?tag=21"
            },
            {
              "bitrate": 10368000,
              "content_type": "video/mp4",
              "url": "https://video.twimg.com/amplify_video/2042342874386481152/vid/avc1/1080x2338/VlcccWtcrFAVqSgg.mp4?tag=21"
            }
          ]
        }
      }
    ]
  },
  "card": null,
  "place": {},
  "entities": {
    "hashtags": [],
    "symbols": [],
    "timestamps": [],
    "urls": [
      {
        "display_url": "haoailab.com/blogs/attn-qat/",
        "expanded_url": "https://haoailab.com/blogs/attn-qat/",
        "indices": [
          413,
          436
        ],
        "url": "https://t.co/NxVSXKWEgI"
      },
      {
        "display_url": "github.com/hao-ai-lab/Fas…",
        "expanded_url": "https://github.com/hao-ai-lab/FastVideo/pull/1225",
        "indices": [
          443,
          466
        ],
        "url": "https://t.co/6irFgQ7GeM"
      },
      {
        "display_url": "huggingface.co/FastVideo/14B_…",
        "expanded_url": "https://huggingface.co/FastVideo/14B_qat_400",
        "indices": [
          480,
          503
        ],
        "url": "https://t.co/GsrzbJlRY8"
      }
    ],
    "user_mentions": []
  },
  "quoted_tweet": null,
  "retweeted_tweet": null,
  "isLimitedReply": false,
  "communityInfo": null,
  "article": null
}