
Next-Gen HBM4 Memory Reportedly Features Significantly Bumped Up Bandwidth

DigiTimes reports that HBM4 could reach a memory bus width of up to 2048 bits, opening up immense potential for GPUs and the AI segment.

Samsung & SK Hynix are Reportedly Working on "2000 I/O Ports" on HBM4, Expect Serious HPC Output in Future AI GPUs

Citing Seoul Economy, DigiTimes reveals that next-gen HBM memory could see a huge jump in memory bandwidth, with an expected bump of around 2x. To put the impact in perspective, HBM hasn't seen an advancement in its memory interface width since 2015; every generation up to HBM3e has kept the same 1024-bit interface per stack.
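To see why the wider interface matters, peak HBM bandwidth per stack is simply the interface width multiplied by the per-pin data rate. The sketch below is a rough back-of-the-envelope calculation, not an official spec: the ~9.6 Gb/s HBM3e pin rate and the assumption that HBM4 keeps a similar pin rate are illustrative assumptions.

```python
# Back-of-the-envelope HBM bandwidth math. The per-pin data rates below are
# illustrative assumptions, not official spec values.

def peak_bandwidth_tbs(bus_width_bits: int, pin_rate_gbps: float) -> float:
    """Peak per-stack bandwidth in TB/s: width (bits) * per-pin rate (Gb/s) / 8 / 1000."""
    return bus_width_bits * pin_rate_gbps / 8 / 1000

# HBM3e today: 1024-bit interface per stack at roughly 9.6 Gb/s per pin (vendor figures vary).
hbm3e_stack = peak_bandwidth_tbs(1024, 9.6)   # ~1.23 TB/s per stack

# Hypothetical HBM4: 2048-bit interface ("~2000 I/O"), assuming the pin rate stays similar.
hbm4_stack = peak_bandwidth_tbs(2048, 9.6)    # ~2.46 TB/s per stack -- the reported ~2x jump

print(f"HBM3e: {hbm3e_stack:.2f} TB/s per stack, hypothetical HBM4: {hbm4_stack:.2f} TB/s per stack")
```

Doubling the interface width doubles peak bandwidth even if per-pin speeds stand still, which is exactly why the reported 2048-bit move translates into a ~2x jump.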

While this development looks great on paper, it certainly comes with a lot of "ifs", mainly concerning how manufacturers will manage the per-pin data transfer rate and the necessary changes to individual memory stacks. Currently, the industry has seen HBM3e integrated with the latest AI GPUs, reaching up to 5 TB/s of bandwidth per chip and bringing decent performance uplifts over NVIDIA's highly popular H100 AI GPUs.
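For context on the ~5 TB/s per-chip figure: aggregate GPU memory bandwidth is simply the per-stack bandwidth multiplied by the number of stacks on the package. The stack count and per-stack rate below are assumptions chosen only to show how the arithmetic lands near the quoted number.

```python
# Aggregate GPU bandwidth from multiple HBM stacks (assumed values, for illustration only).
per_stack_tbs = 1.2   # approximate HBM3e per-stack bandwidth from the calculation above
stack_count = 4       # assumed number of stacks on one GPU package

total = per_stack_tbs * stack_count
print(f"~{total:.1f} TB/s aggregate")  # ~4.8 TB/s, in the ballpark of the quoted ~5 TB/s per chip
```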

DigiTimes' report claims that Samsung and SK Hynix are moving towards integrating roughly "2000 I/O" ports, i.e. a 2048-bit interface, into the next-gen HBM4 memory standard. In layman's terms, this means much greater bandwidth to feed computation, along with support for much larger LLMs, which is a key factor in the next-gen approach to genAI development. The report doesn't point to an officially confirmed product; the industry will ultimately reach that point, but for now the milestone is still far away, and we will discuss why below.

The AI industry is currently in a paradigm shift, with genAI capabilities being infused into consumer applications, which has led the tech behemoths into an apparent "race." This has brought immense demand for AI GPUs, which require HBM as a primary component, and right now memory manufacturers are focused on delivering adequate supply. Don't get me wrong, innovation within the HBM market is certainly coming, but it is unlikely to arrive in the next few years, unless there is something "cooking" that we aren't aware of yet.

Written by Muhammad Zuhair

Source: Wccftech
