<?xml version="1.0" encoding="UTF-8"?><rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	>

<channel>
	<title>LLAMA 3.1 &#8211; SFC Sports and Entertainment</title>
	<atom:link href="https://www.spursfancave.com/tag/llama-3-1/feed/" rel="self" type="application/rss+xml" />
	<link>https://www.spursfancave.com</link>
	<description>It’s all about the Fans!</description>
	<lastBuildDate>Fri, 11 Oct 2024 01:48:00 +0000</lastBuildDate>
	<language>en</language>
	<sy:updatePeriod>hourly</sy:updatePeriod>
	<sy:updateFrequency>1</sy:updateFrequency>
	<generator>https://wordpress.org/?v=6.9.4</generator>
	<item>
		<title>Llama 3.1 405B Achieves 1.5x Throughput Boost with NVIDIA H200 GPUs and NVLink</title>
		<link>https://www.spursfancave.com/llama-3-1-405b-achieves-1-5x-throughput-boost-with-nvidia-h200-gpus-and-nvlink/</link>
					<comments>https://www.spursfancave.com/llama-3-1-405b-achieves-1-5x-throughput-boost-with-nvidia-h200-gpus-and-nvlink/#respond</comments>
		
		<dc:creator><![CDATA[Rik Xperty]]></dc:creator>
		<pubDate>Fri, 11 Oct 2024 01:48:00 +0000</pubDate>
				<category><![CDATA[Crypto]]></category>
		<category><![CDATA[AI]]></category>
		<category><![CDATA[Core]]></category>
		<category><![CDATA[GPUS]]></category>
		<category><![CDATA[LLAMA 3.1]]></category>
		<category><![CDATA[LLAMA 3.1 405B]]></category>
		<category><![CDATA[LLMS]]></category>
		<category><![CDATA[news]]></category>
		<category><![CDATA[NVidia]]></category>
		<category><![CDATA[Performance]]></category>
		<category><![CDATA[techniques]]></category>
		<category><![CDATA[TENSOR]]></category>
		<guid isPermaLink="false">https://www.spursfancave.com/llama-3-1-405b-achieves-1-5x-throughput-boost-with-nvidia-h200-gpus-and-nvlink/</guid>

					<description><![CDATA[<div class="media_block"><a href="https://image.blockchain.news:443/features/D8E08E86F8EDBDDCD68414CF49BDD8B1401B11A69515DFF98E6B2B03EE9CF9D7.jpg"><img src="https://image.blockchain.news:443/features/D8E08E86F8EDBDDCD68414CF49BDD8B1401B11A69515DFF98E6B2B03EE9CF9D7.jpg" class="media_thumbnail"/></a></div>]]></description>
		
					<wfw:commentRss>https://www.spursfancave.com/llama-3-1-405b-achieves-1-5x-throughput-boost-with-nvidia-h200-gpus-and-nvlink/feed/</wfw:commentRss>
			<slash:comments>0</slash:comments>
		
		
			</item>
		<item>
		<title>NVIDIA Enhances Llama 3.1 405B Performance with TensorRT Model Optimizer</title>
		<link>https://www.spursfancave.com/nvidia-enhances-llama-3-1-405b-performance-with-tensorrt-model-optimizer/</link>
					<comments>https://www.spursfancave.com/nvidia-enhances-llama-3-1-405b-performance-with-tensorrt-model-optimizer/#respond</comments>
		
		<dc:creator><![CDATA[Rik Xperty]]></dc:creator>
		<pubDate>Thu, 29 Aug 2024 16:10:12 +0000</pubDate>
				<category><![CDATA[Crypto]]></category>
		<category><![CDATA[AI]]></category>
		<category><![CDATA[language model]]></category>
		<category><![CDATA[Large Language Model]]></category>
		<category><![CDATA[LLAMA 3.1]]></category>
		<category><![CDATA[LLAMA 3.1 405B]]></category>
		<category><![CDATA[Meta]]></category>
		<category><![CDATA[news]]></category>
		<category><![CDATA[NVidia]]></category>
		<category><![CDATA[Performance]]></category>
		<category><![CDATA[TENSORRT]]></category>
		<guid isPermaLink="false">https://www.spursfancave.com/nvidia-enhances-llama-3-1-405b-performance-with-tensorrt-model-optimizer/</guid>

					<description><![CDATA[<div class="media_block"><a href="https://image.blockchain.news:443/features/D8E08E86F8EDBDDCD68414CF49BDD8B1401B11A69515DFF98E6B2B03EE9CF9D7.jpg"><img src="https://image.blockchain.news:443/features/D8E08E86F8EDBDDCD68414CF49BDD8B1401B11A69515DFF98E6B2B03EE9CF9D7.jpg" class="media_thumbnail"/></a></div>]]></description>
		
					<wfw:commentRss>https://www.spursfancave.com/nvidia-enhances-llama-3-1-405b-performance-with-tensorrt-model-optimizer/feed/</wfw:commentRss>
			<slash:comments>0</slash:comments>
		
		
			</item>
		<item>
		<title>Llama 3.1 Now Optimized for AMD Platforms from Data Center to AI PCs</title>
		<link>https://www.spursfancave.com/llama-3-1-now-optimized-for-amd-platforms-from-data-center-to-ai-pcs/</link>
					<comments>https://www.spursfancave.com/llama-3-1-now-optimized-for-amd-platforms-from-data-center-to-ai-pcs/#respond</comments>
		
		<dc:creator><![CDATA[Rik Xperty]]></dc:creator>
		<pubDate>Tue, 23 Jul 2024 18:13:00 +0000</pubDate>
				<category><![CDATA[Crypto]]></category>
		<category><![CDATA[AI]]></category>
		<category><![CDATA[AMD]]></category>
		<category><![CDATA[Center]]></category>
		<category><![CDATA[Data]]></category>
		<category><![CDATA[DATA CENTER]]></category>
		<category><![CDATA[LLAMA 3.1]]></category>
		<category><![CDATA[Meta]]></category>
		<category><![CDATA[news]]></category>
		<guid isPermaLink="false">https://www.spursfancave.com/llama-3-1-now-optimized-for-amd-platforms-from-data-center-to-ai-pcs/</guid>

					<description><![CDATA[<div class="media_block"><a href="https://image.blockchain.news:443/features/9EE0AA0B1F3BD20BCCC7C00CFFD871DCA35D15A36341179DDF6072A1E953606D.jpg"><img src="https://image.blockchain.news:443/features/9EE0AA0B1F3BD20BCCC7C00CFFD871DCA35D15A36341179DDF6072A1E953606D.jpg" class="media_thumbnail"/></a></div>]]></description>
		
					<wfw:commentRss>https://www.spursfancave.com/llama-3-1-now-optimized-for-amd-platforms-from-data-center-to-ai-pcs/feed/</wfw:commentRss>
			<slash:comments>0</slash:comments>
		
		
			</item>
	</channel>
</rss>
