🐦 Twitter Post Details

Viewing an enriched Twitter post

@andersonbcdefg

gte-tiny not small enough for you? Consider bge-micro, an embeddings model with 1/4 the layers of bge-small. It's not SOTA—compressing a model this much does incur degradation—but it's still good enough to be competitive with all-MiniLM-L6-v2, with 1/2 the non-embedding params!

📊 Media Metadata

{
  "media": [
    {
      "id": "",
      "type": "photo",
      "url": null,
      "media_url": "https://pbs.twimg.com/media/F72n0vpb0AAA3Lf.png",
      "media_url_https": null,
      "display_url": null,
      "expanded_url": null
    }
  ],
  "nlp": {
    "processed_at": "2025-08-06T12:42:28.931018",
    "sentiment": "neutral",
    "topics": [
      "Machine Learning",
      "Deep Learning",
      "Neural Networks",
      "AI Applications"
    ],
    "ner": {
      "entities": [
        {
          "entity": "gte-tiny",
          "type": "model"
        },
        {
          "entity": "bge-micro",
          "type": "model"
        },
        {
          "entity": "bge-small",
          "type": "model"
        },
        {
          "entity": "all-MiniLM-L6-v2",
          "type": "model"
        },
        {
          "entity": "non-embedding params",
          "type": "parameter"
        }
      ]
    }
  },
  "original_structure": "had_media_only"
}

🔧 Raw API Response

{
  "user": {
    "created_at": "2020-08-29T23:49:56.000Z",
    "default_profile_image": false,
    "description": "🤖 Computer scientist, next-word-prediction enjoyer\n📊 Prev. research fellow @ Stanford RegLab\n🛠️ bUiLdiNg sOmeThiNg nEw (galactic ai)\n🏳️‍🌈",
    "fast_followers_count": 0,
    "favourites_count": 5602,
    "followers_count": 1639,
    "friends_count": 1595,
    "has_custom_timelines": true,
    "is_translator": false,
    "listed_count": 35,
    "location": "San Francisco, CA",
    "media_count": 159,
    "name": "Ben (48/100)",
    "normal_followers_count": 1639,
    "possibly_sensitive": false,
    "profile_banner_url": "https://pbs.twimg.com/profile_banners/1299856802268377090/1671821579",
    "profile_image_url_https": "https://pbs.twimg.com/profile_images/1299864018081865729/CMlOyn1u_normal.jpg",
    "screen_name": "andersonbcdefg",
    "statuses_count": 2782,
    "translator_type": "none",
    "url": "https://t.co/UFbYZQueec",
    "verified": false,
    "withheld_in_countries": [],
    "id_str": "1299856802268377090"
  },
  "id": "1710708732534108413",
  "conversation_id": "1710708732534108413",
  "full_text": "gte-tiny not small enough for you? Consider bge-micro, an embeddings model with 1/4 the layers of bge-small. It's not SOTA—compressing a model this much does incur degradation—but it's still good enough to be competitive with all-MiniLM-L6-v2, with 1/2 the non-embedding params!",
  "reply_count": 7,
  "retweet_count": 16,
  "favorite_count": 127,
  "hashtags": [],
  "symbols": [],
  "user_mentions": [],
  "urls": [],
  "media": [
    {
      "media_url": "https://pbs.twimg.com/media/F72n0vpb0AAA3Lf.png",
      "type": "photo"
    }
  ],
  "url": "https://twitter.com/andersonbcdefg/status/1710708732534108413",
  "created_at": "2023-10-07T17:28:39.000Z",
  "#sort_index": "1710708732534108413",
  "view_count": 66162,
  "quote_count": 3,
  "is_quote_tweet": false,
  "is_retweet": false,
  "is_pinned": false,
  "is_truncated": false,
  "startUrl": "https://twitter.com/andersonbcdefg/status/1710708732534108413"
}