🐦 Twitter Post Details

@arankomatsuzaki

Samsung presents aespa

Towards Next-Level Post-Training Quantization of Hyper-Scale Transformers

Outperforms conventional quantization schemes by a significant margin, particularly for low-bit precision (INT2)

https://t.co/StOzcGQz2k https://t.co/qXudeN8dLL

Media 1 (photo): https://pbs.twimg.com/media/GGWCtORbgAADEFb.png

📊 Media Metadata

{
  "data": [
    {
      "id": "",
      "type": "photo",
      "url": null,
      "media_url": "https://pbs.twimg.com/media/GGWCtORbgAADEFb.png",
      "media_url_https": null,
      "display_url": null,
      "expanded_url": null
    }
  ],
  "score": 1.0,
  "scored_at": "2025-08-09T13:46:07.551030",
  "import_source": "manual_curation_2024",
  "links_checked": true,
  "checked_at": "2025-08-10T10:31:55.070513",
  "media": [
    {
      "type": "photo",
      "url": "https://crmoxkoizveukayfjuyo.supabase.co/storage/v1/object/public/media/posts/1757955118862159983/media_0.png?",
      "filename": "media_0.png",
      "original_url": "https://pbs.twimg.com/media/GGWCtORbgAADEFb.png"
    }
  ],
  "storage_migrated": true
}
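
The enrichment record keeps both the mirrored copy of each media item (under media[].url) and its original Twitter CDN address (original_url). A minimal sketch, in Python, of how such a record could be read back, assuming it has been saved to a file; the field names come from the JSON above, while the filename media_metadata.json and the helper name resolve_media_urls are illustrative:

import json

def resolve_media_urls(record):
    # Prefer the mirrored storage copy; fall back to the Twitter CDN URL
    # when no mirror exists. Field names follow the metadata block above.
    urls = []
    for item in record.get("media", []):
        url = item.get("url") or item.get("original_url")
        if url:
            urls.append({"type": item.get("type"), "url": url})
    return urls

# Example usage: load the metadata block above from a file and list usable URLs.
with open("media_metadata.json") as f:
    record = json.load(f)
print(resolve_media_urls(record))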

🔧 Raw API Response

{
  "user": {
    "created_at": "2016-11-04T06:57:37.000Z",
    "default_profile_image": false,
    "description": "ML research & startup with @EnricoShippole",
    "fast_followers_count": 0,
    "favourites_count": 9746,
    "followers_count": 86607,
    "friends_count": 79,
    "has_custom_timelines": true,
    "is_translator": false,
    "listed_count": 1174,
    "location": "",
    "media_count": 1789,
    "name": "Aran Komatsuzaki",
    "normal_followers_count": 86607,
    "possibly_sensitive": false,
    "profile_image_url_https": "https://pbs.twimg.com/profile_images/1561220982328754176/JOYS5kab_normal.jpg",
    "screen_name": "arankomatsuzaki",
    "statuses_count": 4388,
    "translator_type": "none",
    "url": "https://t.co/aZGCShnLYq",
    "verified": true,
    "withheld_in_countries": [],
    "id_str": "794433401591693312"
  },
  "id": "1757955118862159983",
  "conversation_id": "1757955118862159983",
  "full_text": "Samsung presents aespa\n\nTowards Next-Level Post-Training Quantization of Hyper-Scale Transformers\n\nOutperforms conventional quantization schemes by a significant margin, particularly for low-bit precision (INT2)\n\nhttps://t.co/StOzcGQz2k https://t.co/qXudeN8dLL",
  "reply_count": 1,
  "retweet_count": 18,
  "favorite_count": 90,
  "hashtags": [],
  "symbols": [],
  "user_mentions": [],
  "urls": [
    {
      "url": "https://t.co/StOzcGQz2k",
      "expanded_url": "https://arxiv.org/abs/2402.08958",
      "display_url": "arxiv.org/abs/2402.08958"
    }
  ],
  "media": [
    {
      "media_url": "https://pbs.twimg.com/media/GGWCtORbgAADEFb.png",
      "type": "photo"
    }
  ],
  "url": "https://twitter.com/arankomatsuzaki/status/1757955118862159983",
  "created_at": "2024-02-15T02:28:56.000Z",
  "#sort_index": "1757955118862159983",
  "view_count": 15568,
  "quote_count": 4,
  "is_quote_tweet": false,
  "is_retweet": false,
  "is_pinned": false,
  "is_truncated": false,
  "startUrl": "https://twitter.com/arankomatsuzaki/status/1757955118862159983"
}
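
For downstream use, a raw response like the one above can be reduced to a handful of fields (author, text, expanded links, engagement counts). A small sketch, assuming the response has already been parsed into a Python dict; the key names are taken from the JSON above, while the function name summarize_tweet and the output layout are illustrative:

def summarize_tweet(raw):
    # Condense a raw API response (shaped like the block above) into a summary dict.
    return {
        "author": raw["user"]["screen_name"],
        "posted_at": raw["created_at"],
        "text": raw["full_text"],
        "links": [u["expanded_url"] for u in raw.get("urls", [])],
        "images": [m["media_url"] for m in raw.get("media", [])],
        "engagement": {
            "likes": raw.get("favorite_count", 0),
            "retweets": raw.get("retweet_count", 0),
            "replies": raw.get("reply_count", 0),
            "quotes": raw.get("quote_count", 0),
            "views": raw.get("view_count", 0),
        },
    }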