
Samsung Intros 9.8 Gbps HBM3E “Shinebolt”, 32 Gbps GDDR7, 7.5 Gbps LPDDR5x CAMM2 Memory


Samsung has officially introduced its next-gen memory technologies, including HBM3E, GDDR7, and LPDDR5X CAMM2, during its Memory Tech Day 2023.

Samsung Goes All Out With Next-Gen Memory Technologies Including HBM3E, GDDR7, LPDDR5x CAMM2 & More

We have already reported on the development of Samsung's HBM3E memory, codenamed "Shinebolt", and GDDR7 for next-generation AI, gaming, and data center applications. These are the two biggest highlights of Memory Tech Day 2023, but Samsung has plenty more going on.

Samsung HBM3E "Shinebolt" Memory For AI & Data Centers

Building on Samsung’s expertise in commercializing the industry’s first HBM2 and opening the HBM market for high-performance computing (HPC) in 2016, the company today revealed its next-generation HBM3E DRAM, named Shinebolt. Samsung’s Shinebolt will power next-generation AI applications, improving total cost of ownership (TCO) and speeding up AI-model training and inference in the data center.

Image Source: Samsung

The HBM3E boasts an impressive per-pin speed of 9.8 gigabits per second (Gbps), meaning it can achieve transfer rates exceeding 1.2 terabytes per second (TB/s). To enable higher-layer stacks and improve thermal characteristics, Samsung has optimized its non-conductive film (NCF) technology to eliminate gaps between chip layers and maximize thermal conductivity. Samsung's 8H and 12H HBM3 products are currently in mass production, and samples of Shinebolt are shipping to customers.
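
As a rough sanity check, the quoted figure follows from the standard 1024-bit interface of a single HBM stack (listed in the comparison table below). A minimal Python sketch of the arithmetic:

```python
# Per-stack HBM3E bandwidth: per-pin speed x interface width, converted to bytes.
PIN_SPEED_GBPS = 9.8         # Shinebolt per-pin transfer rate, Gbps
INTERFACE_WIDTH_BITS = 1024  # I/O pins per HBM stack (see table below)

bandwidth_gbs = PIN_SPEED_GBPS * INTERFACE_WIDTH_BITS / 8
print(f"{bandwidth_gbs:.1f} GB/s per stack")  # ~1254.4 GB/s, i.e. just over 1.2 TB/s
```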

Leaning into its strength as a total semiconductor solutions provider, the company also plans to offer a custom turnkey service that combines next-generation HBM, advanced packaging technologies, and foundry offerings.

HBM Memory Specifications Comparison

| DRAM | HBM1 | HBM2 | HBM2e | HBM3 | HBM3 Gen2 | HBMNext (HBM4) |
| --- | --- | --- | --- | --- | --- | --- |
| I/O (Bus Interface) | 1024 | 1024 | 1024 | 1024 | 1024-2048 | 1024-2048 |
| Prefetch (I/O) | 2 | 2 | 2 | 2 | 2 | 2 |
| Maximum Bandwidth | 128 GB/s | 256 GB/s | 460.8 GB/s | 819.2 GB/s | 1.2 TB/s | 1.5 - 2.0 TB/s |
| DRAM ICs Per Stack | 4 | 8 | 8 | 12 | 8-12 | 8-12 |
| Maximum Capacity | 4 GB | 8 GB | 16 GB | 24 GB | 24 - 36 GB | 36-64 GB |
| tRC | 48ns | 45ns | 45ns | TBA | TBA | TBA |
| tCCD | 2ns (=1tCK) | 2ns (=1tCK) | 2ns (=1tCK) | TBA | TBA | TBA |
| VPP | External VPP | External VPP | External VPP | External VPP | External VPP | TBA |
| VDD | 1.2V | 1.2V | 1.2V | TBA | TBA | TBA |
| Command Input | Dual Command | Dual Command | Dual Command | Dual Command | Dual Command | Dual Command |

Samsung GDDR7 - 32 Gbps & 32 Gb DRAM For Next-Gen Gaming Graphics

Other products highlighted at the event include the 32Gb DDR5 DRAM with the industry’s highest capacity, the industry’s first 32Gbps GDDR7, and the petabyte-scale PBSSD, which offers a significant boost to storage capabilities for server applications.

According to Samsung, the GDDR7 memory will offer a 40% performance boost and a 20% power efficiency improvement over the current fastest 24 Gbps GDDR6 DRAM, with die capacities of up to 16 Gb. The first products will be rated at transfer speeds of up to 32 Gbps, a 33% improvement over GDDR6, and will deliver up to 1.5 TB/s of bandwidth on a 384-bit bus interface.

Image Source: Samsung

Following is the bandwidth a 32 Gbps pin speed would offer across multiple bus configurations (a short calculation sketch follows the list):

  • 512-bit - 2048 GB/s (2.0 TB/s)
  • 384-bit - 1536 GB/s (1.5 TB/s)
  • 320-bit - 1280 GB/s (1.3 TB/s)
  • 256-bit - 1024 GB/s (1.0 TB/s)
  • 192-bit - 768 GB/s
  • 128-bit - 512 GB/s
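
These figures follow directly from multiplying the per-pin rate by the bus width and dividing by eight to convert bits to bytes. A minimal Python sketch that reproduces the list above:

```python
# Bandwidth (GB/s) = pin speed (Gbps) x bus width (bits) / 8
PIN_SPEED_GBPS = 32  # GDDR7 per-pin transfer rate

for bus_width_bits in (512, 384, 320, 256, 192, 128):
    bandwidth_gbs = PIN_SPEED_GBPS * bus_width_bits / 8
    print(f"{bus_width_bits}-bit -> {bandwidth_gbs:.0f} GB/s ({bandwidth_gbs / 1000:.1f} TB/s)")
```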

The company has also tested early samples running at speeds of up to 36 Gbps, though we doubt those will be mass-produced in sufficient quantities to supply the next-gen gaming and AI GPU lineups.

GDDR7 memory will also offer 20% higher efficiency, which matters given how much power memory consumes on high-end GPUs. Samsung says its GDDR7 DRAM will include technology specifically optimized for high-speed workloads, along with a low-operating-voltage option designed for power-conscious applications such as laptops. For thermals, the new memory standard will utilize an epoxy molding compound (EMC) with high thermal conductivity, which reduces thermal resistance by up to 70%. It was reported back in August that Samsung was sampling its GDDR7 DRAM to NVIDIA for early evaluation on its next-gen gaming graphics cards.

GDDR Graphics Memory Evolution:

| GRAPHICS MEMORY | GDDR5X | GDDR6 | GDDR6X | GDDR7 |
| --- | --- | --- | --- | --- |
| Workload | Gaming | Gaming / AI | Gaming / AI | Gaming / AI |
| Platform (Example) | GeForce GTX 1080 Ti | GeForce RTX 2080 Ti | GeForce RTX 4090 | GeForce RTX 5090? |
| Number of Placements | 12 | 12 | 12 | 12? |
| Gb/s/pin | 11.4 | 14-16 | 19-24 | 32-36 |
| GB/s/placement | 45 | 56-64 | 76-96 | 128-144 |
| GB/s/system | 547 | 672-768 | 912-1152 | 1536-1728 |
| Configuration (Example) | 384 IO (12pcs x 32 IO package) | 384 IO (12pcs x 32 IO package) | 384 IO (12pcs x 32 IO package) | 384 IO (12pcs x 32 IO package)? |
| Frame Buffer of Typical System | 12 GB | 12 GB | 24 GB | 24 GB? |
| Average Device Power (pJ/bit) | 8.0 | 7.5 | 7.25 | TBD |
| Typical IO Channel | PCB (P2P SM) | PCB (P2P SM) | PCB (P2P SM) | PCB (P2P SM) |

Samsung LPDDR5x For Next-Gen CAMM2 Modules, Slimming Down Mobile Designs

To process data-intensive tasks, today's AI technologies are moving toward a hybrid model that distributes workloads between cloud and edge devices. Accordingly, Samsung introduced a range of memory solutions offering high performance, high capacity, low power consumption, and small form factors at the edge.

In addition to the industry's first 7.5 Gbps LPDDR5X CAMM2, which is expected to be a true game changer in the next-generation PC and laptop DRAM market, the company also showcased its 9.6 Gbps LPDDR5X DRAM, LLW DRAM specialized for on-device AI, next-generation Universal Flash Storage (UFS), and the high-capacity Quad-Level Cell (QLC) BM9C1 SSD for PCs.

GPU Memory Technology Updates

| Graphics Card Name | Memory Technology | Memory Speed | Memory Bus | Memory Bandwidth | Release |
| --- | --- | --- | --- | --- | --- |
| AMD Radeon R9 Fury X | HBM1 | 1.0 Gbps | 4096-bit | 512 GB/s | 2015 |
| NVIDIA GTX 1080 | GDDR5X | 10.0 Gbps | 256-bit | 320 GB/s | 2016 |
| NVIDIA Tesla P100 | HBM2 | 1.4 Gbps | 4096-bit | 720 GB/s | 2016 |
| NVIDIA Titan Xp | GDDR5X | 11.4 Gbps | 384-bit | 547 GB/s | 2017 |
| AMD RX Vega 64 | HBM2 | 1.9 Gbps | 2048-bit | 483 GB/s | 2017 |
| NVIDIA Titan V | HBM2 | 1.7 Gbps | 3072-bit | 652 GB/s | 2017 |
| NVIDIA Tesla V100 | HBM2 | 1.7 Gbps | 4096-bit | 901 GB/s | 2017 |
| NVIDIA RTX 2080 Ti | GDDR6 | 14.0 Gbps | 384-bit | 672 GB/s | 2018 |
| AMD Instinct MI100 | HBM2 | 2.4 Gbps | 4096-bit | 1229 GB/s | 2020 |
| NVIDIA A100 80 GB | HBM2e | 3.2 Gbps | 5120-bit | 2039 GB/s | 2020 |
| NVIDIA RTX 3090 | GDDR6X | 19.5 Gbps | 384-bit | 936.2 GB/s | 2020 |
| AMD Instinct MI200 | HBM2e | 3.2 Gbps | 8192-bit | 3200 GB/s | 2021 |
| NVIDIA RTX 3090 Ti | GDDR6X | 21.0 Gbps | 384-bit | 1008 GB/s | 2022 |
| NVIDIA H100 80 GB | HBM3/E | 2.6 Gbps | 5120-bit | 1681 GB/s | 2022 |

Written by Hassan Mujtaba

Source: Wccftech
