🐦 Twitter Post Details


@LiorOnAI

Google might've created the successor of the Transformer architecture.

It's a new architecture that pairs attention with a learnable long-term memory module.

Attention handles short-term context with accurate dependency modeling.

The memory module stores and retrieves long-past information, even during test time. https://t.co/HO1cS3RIG1

Media 1 (photo, 1256×408): https://pbs.twimg.com/media/GoF-oXoW4AAodzR.jpg
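The post appears to describe Google's Titans line of work: attention for short-term context paired with a neural long-term memory that keeps adapting at inference time. As a rough illustration only, here is a minimal PyTorch sketch of that idea; every class, method, and hyperparameter below is invented for illustration and is not Google's actual implementation.

import torch
import torch.nn as nn
import torch.nn.functional as F

class NeuralMemory(nn.Module):
    # Hypothetical long-term memory: a small MLP trained online to map
    # keys to values. Because write() takes a gradient step on its own
    # loss, the memory keeps learning even at test time.
    def __init__(self, dim, lr=0.01):
        super().__init__()
        self.net = nn.Sequential(nn.Linear(dim, dim), nn.SiLU(), nn.Linear(dim, dim))
        self.lr = lr

    def read(self, q):
        with torch.no_grad():
            return self.net(q)

    def write(self, k, v):
        # "Surprise" signal = reconstruction error on the new chunk.
        with torch.enable_grad():
            loss = F.mse_loss(self.net(k), v)
            grads = torch.autograd.grad(loss, list(self.net.parameters()))
        with torch.no_grad():
            for p, g in zip(self.net.parameters(), grads):
                p.sub_(self.lr * g)

class HybridBlock(nn.Module):
    # Attention over a recent window (short-term, exact dependency
    # modeling) fused with reads from the long-term memory.
    def __init__(self, dim, heads=4, window=128):
        super().__init__()
        self.attn = nn.MultiheadAttention(dim, heads, batch_first=True)
        self.memory = NeuralMemory(dim)
        self.gate = nn.Linear(2 * dim, dim)
        self.window = window

    def forward(self, x):
        recent = x[:, -self.window:]                     # short-term context
        attn_out, _ = self.attn(recent, recent, recent)  # exact dependencies
        mem_out = self.memory.read(recent)               # long-past retrieval
        out = self.gate(torch.cat([attn_out, mem_out], dim=-1))
        self.memory.write(recent.detach(), attn_out.detach())  # test-time update
        return out

x = torch.randn(2, 256, 64)   # (batch, sequence, dim)
y = HybridBlock(dim=64)(x)
print(y.shape)                # torch.Size([2, 128, 64])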

📊 Media Metadata

{
  "score": 0.83,
  "scored_at": "2025-08-09T13:47:19.520101",
  "import_source": "unknown_source",
  "links_checked": true,
  "checked_at": "2025-08-10T10:33:16.209344",
  "original_structure": "had_data_only",
  "media": [
    {
      "id": "1909946948984954880",
      "type": "photo",
      "url": "https://t.co/HO1cS3RIG1",
      "media_url": null,
      "media_url_https": "https://pbs.twimg.com/media/GoF-oXoW4AAodzR.jpg",
      "display_url": "pic.x.com/HO1cS3RIG1",
      "expanded_url": "https://x.com/LiorOnAI/status/1909946951732281610/photo/1"
    }
  ]
}
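Note that the media array above carries a null legacy media_url alongside a populated media_url_https. A small helper like the following (a sketch written against this exact blob; the field names come from the metadata above) picks out the usable HTTPS URL:

metadata = {
    "media": [
        {
            "type": "photo",
            "media_url_https": "https://pbs.twimg.com/media/GoF-oXoW4AAodzR.jpg",
        }
    ]
}

def best_media_url(metadata):
    # Prefer the populated media_url_https; the legacy media_url is null here.
    for item in metadata.get("media", []):
        if item.get("type") == "photo":
            return item.get("media_url_https") or item.get("expanded_url")
    return None

print(best_media_url(metadata))
# -> https://pbs.twimg.com/media/GoF-oXoW4AAodzR.jpg

Appending ?name=orig to a pbs.twimg.com URL generally requests the original resolution (1256×408, per the raw response below).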

🔧 Raw API Response

{
  "tweet": {
    "bookmark_count": 356,
    "bookmarked": false,
    "created_at": "Wed Apr 09 12:30:13 +0000 2025",
    "conversation_id_str": "1909946951732281610",
    "display_text_range": [
      0,
      273
    ],
    "entities": {
      "hashtags": [],
      "media": [
        {
          "display_url": "pic.x.com/HO1cS3RIG1",
          "expanded_url": "https://x.com/LiorOnAI/status/1909946951732281610/photo/1",
          "id_str": "1909946948984954880",
          "indices": [
            274,
            297
          ],
          "media_key": "3_1909946948984954880",
          "media_url_https": "https://pbs.twimg.com/media/GoF-oXoW4AAodzR.jpg",
          "type": "photo",
          "url": "https://t.co/HO1cS3RIG1",
          "ext_media_availability": {
            "status": "Available"
          },
          "features": {
            "large": {
              "faces": []
            },
            "medium": {
              "faces": []
            },
            "small": {
              "faces": []
            },
            "orig": {
              "faces": []
            }
          },
          "sizes": {
            "large": {
              "h": 408,
              "w": 1256,
              "resize": "fit"
            },
            "medium": {
              "h": 390,
              "w": 1200,
              "resize": "fit"
            },
            "small": {
              "h": 221,
              "w": 680,
              "resize": "fit"
            },
            "thumb": {
              "h": 150,
              "w": 150,
              "resize": "crop"
            }
          },
          "original_info": {
            "height": 408,
            "width": 1256,
            "focus_rects": [
              {
                "x": 0,
                "y": 0,
                "w": 729,
                "h": 408
              },
              {
                "x": 15,
                "y": 0,
                "w": 408,
                "h": 408
              },
              {
                "x": 40,
                "y": 0,
                "w": 358,
                "h": 408
              },
              {
                "x": 117,
                "y": 0,
                "w": 204,
                "h": 408
              },
              {
                "x": 0,
                "y": 0,
                "w": 1256,
                "h": 408
              }
            ]
          },
          "media_results": {
            "result": {
              "media_key": "3_1909946948984954880"
            }
          }
        }
      ],
      "symbols": [],
      "timestamps": [],
      "urls": [],
      "user_mentions": []
    },
    "extended_entities": {
      "media": [
        {
          "display_url": "pic.x.com/HO1cS3RIG1",
          "expanded_url": "https://x.com/LiorOnAI/status/1909946951732281610/photo/1",
          "id_str": "1909946948984954880",
          "indices": [
            274,
            297
          ],
          "media_key": "3_1909946948984954880",
          "media_url_https": "https://pbs.twimg.com/media/GoF-oXoW4AAodzR.jpg",
          "type": "photo",
          "url": "https://t.co/HO1cS3RIG1",
          "ext_media_availability": {
            "status": "Available"
          },
          "features": {
            "large": {
              "faces": []
            },
            "medium": {
              "faces": []
            },
            "small": {
              "faces": []
            },
            "orig": {
              "faces": []
            }
          },
          "sizes": {
            "large": {
              "h": 408,
              "w": 1256,
              "resize": "fit"
            },
            "medium": {
              "h": 390,
              "w": 1200,
              "resize": "fit"
            },
            "small": {
              "h": 221,
              "w": 680,
              "resize": "fit"
            },
            "thumb": {
              "h": 150,
              "w": 150,
              "resize": "crop"
            }
          },
          "original_info": {
            "height": 408,
            "width": 1256,
            "focus_rects": [
              {
                "x": 0,
                "y": 0,
                "w": 729,
                "h": 408
              },
              {
                "x": 15,
                "y": 0,
                "w": 408,
                "h": 408
              },
              {
                "x": 40,
                "y": 0,
                "w": 358,
                "h": 408
              },
              {
                "x": 117,
                "y": 0,
                "w": 204,
                "h": 408
              },
              {
                "x": 0,
                "y": 0,
                "w": 1256,
                "h": 408
              }
            ]
          },
          "media_results": {
            "result": {
              "media_key": "3_1909946948984954880"
            }
          }
        }
      ]
    },
    "favorite_count": 514,
    "favorited": false,
    "full_text": "Google might've created the successor of the Transformer architecture.\n\nIt's a new architecture that pairs attention with a learnable long-term memory module. \n\nAttention handles short-term context with accurate dependency modeling.\n\nThe memory module stores and retrieves… https://t.co/HO1cS3RIG1",
    "is_quote_status": false,
    "lang": "en",
    "possibly_sensitive": false,
    "possibly_sensitive_editable": true,
    "quote_count": 6,
    "reply_count": 22,
    "retweet_count": 77,
    "retweeted": false,
    "user_id_str": "931470139",
    "id_str": "1909946951732281610",
    "note_tweet": {
      "is_expandable": true,
      "note_tweet_results": {
        "result": {
          "id": "Tm90ZVR3ZWV0OjE5MDk5NDY5NTE2NTY3NDI5MTI=",
          "text": "Google might've created the successor of the Transformer architecture.\n\nIt's a new architecture that pairs attention with a learnable long-term memory module. \n\nAttention handles short-term context with accurate dependency modeling.\n\nThe memory module stores and retrieves long-past information, even during test time.",
          "entity_set": {
            "hashtags": [],
            "symbols": [],
            "urls": [],
            "user_mentions": []
          }
        }
      }
    }
  },
  "user": {
    "__typename": "User",
    "id": "VXNlcjo5MzE0NzAxMzk=",
    "rest_id": "931470139",
    "affiliates_highlighted_label": {},
    "has_graduated_access": true,
    "is_blue_verified": true,
    "profile_image_shape": "Circle",
    "legacy": {
      "can_dm": true,
      "can_media_tag": false,
      "created_at": "Wed Nov 07 07:19:36 +0000 2012",
      "default_profile": false,
      "default_profile_image": false,
      "description": "Covering the latest in AI development • ML Eng since 2017 • Building @AlphaSignalAI into the #1 source of dev news in AI → At 240k readers.",
      "entities": {
        "description": {
          "urls": []
        },
        "url": {
          "urls": [
            {
              "display_url": "alphasignal.ai",
              "expanded_url": "https://alphasignal.ai",
              "url": "https://t.co/AyubevaLcb",
              "indices": [
                0,
                23
              ]
            }
          ]
        }
      },
      "fast_followers_count": 0,
      "favourites_count": 6205,
      "followers_count": 103547,
      "friends_count": 1587,
      "has_custom_timelines": true,
      "is_translator": false,
      "listed_count": 2098,
      "location": "SF (let’s hang)",
      "media_count": 609,
      "name": "Lior⚡",
      "normal_followers_count": 103547,
      "pinned_tweet_ids_str": [],
      "possibly_sensitive": false,
      "profile_banner_url": "https://pbs.twimg.com/profile_banners/931470139/1747705414",
      "profile_image_url_https": "https://pbs.twimg.com/profile_images/1817166841992597504/zLRK5Qx__normal.jpg",
      "profile_interstitial_type": "",
      "screen_name": "LiorOnAI",
      "statuses_count": 3439,
      "translator_type": "none",
      "url": "https://t.co/AyubevaLcb",
      "verified": false,
      "want_retweets": false,
      "withheld_in_countries": []
    },
    "professional": {
      "rest_id": "1621155519586271232",
      "professional_type": "Creator",
      "category": []
    },
    "tipjar_settings": {}
  },
  "views": "30834"
}
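One detail worth noting in the response above: full_text is truncated at display_text_range [0, 273], while the complete post lives under note_tweet.note_tweet_results.result.text. A sketch for pulling the useful fields out of a response shaped like this one (the paths are taken from the blob above; other API shapes may differ):

def summarize(resp):
    tweet = resp["tweet"]
    # Long posts ("notes") carry their untruncated text under note_tweet;
    # fall back to full_text for ordinary tweets.
    note = (tweet.get("note_tweet", {})
                 .get("note_tweet_results", {})
                 .get("result", {}))
    return {
        "author": resp["user"]["legacy"]["screen_name"],
        "text": note.get("text") or tweet["full_text"],
        "likes": tweet["favorite_count"],
        "retweets": tweet["retweet_count"],
        "replies": tweet["reply_count"],
        "bookmarks": tweet["bookmark_count"],
        "views": int(resp["views"]),
    }

# For the response above: author "LiorOnAI", 514 likes, 77 retweets,
# 22 replies, 356 bookmarks, 30834 views.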