// Copyright (c) 2025 The Linux Foundation and each contributor.
// SPDX-License-Identifier: MIT
// import type { ProjectInsights } from '~~/types/project';
// import { fetchFromTinybird } from '~~/server/data/tinybird/tinybird';
// import { useApiTrackEvent } from '~~/server/utils/plausible';
import type { CommunityMentions } from '~~/types/community/community';

export default defineEventHandler(async () => {
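  // NOTE: The commented-out block below sketches the intended implementation:
  // read the project slug from the route params and fetch mentions from the
  // Tinybird pipe. Restoring it also means re-adding the `event` argument to
  // the handler, which was dropped while the endpoint serves mock data.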
  // const { slug } = event.context.params as Record<string, string>;
  //
  // if (!slug) {
  //   throw createError({
  //     statusCode: 400,
  //     statusMessage: 'Project slug is required',
  //   });
  // }

  // try {
  //   const response = await fetchFromTinybird<CommunityMentions[]>('/v0/pipes/project_insights.json', {
  //     slug,
  //   });
  //
  //   return response.data;
  // } catch (error) {
  //   console.error('Error fetching project insights:', error);
  //   throw createError({
  //     statusCode: 500,
  //     statusMessage: 'Failed to fetch project insights',
  //   });
  // }

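  // Static mock data: a sample set of recent PyTorch community mentions,
  // returned as CommunityMentions[] until the Tinybird integration above is enabled.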
  return [
    {
      projectSlug: 'pytorch',
      title: 'Fast-Track Your AI Knowledge with 3 PyTorch Courses.',
      body: 'Fast-Track Your AI Knowledge with 3 PyTorch Courses.',
      url: 'https://www.linkedin.com/posts/confidentialcareers_fast-track-your-ai-knowledge-with-3-pytorch-activity-7391787635557462016-69b0',
      timestamp: '2025-11-05 10:45:05',
      imageUrl: '',
      author: 'Confidential Careers',
      authorProfileLink: 'https://www.linkedin.com/in/confidentialcareers',
      source: 'linkedin',
      sourceId: 'linkedin:1762339500000:0ed48539',
      relevanceScore: 'high',
      relevanceComment:
        "The post discusses training and education courses for PyTorch, a major open-source ML framework, aligning with LF's focus on open-source training.",
      keyword: 'pytorch',
      sentimentLabel: 'Neutral',
      viewId: '14286',
      viewName: 'Brand monitoring (PyTorch)',
    },
    {
      projectSlug: 'pytorch',
      title: 'BITESIZE | Why is Bayesian Deep Learning so Powerful?',
      body: '...know, we took some competitors and we really, you know, were really fast at converging to good solutions and getting good results, you know. And we have an implementation out there in TensorFlow, unfortunately. I mean, we should now maybe port it to PyTorch, which has become what we work on more. Yeah. No, for sure.',
      url: 'https://learnbayesstats.com/all-episodes/bitesize-why-is-bayesian-deep-learning-so-powerful',
      timestamp: '2025-11-05 11:00:00',
      imageUrl:
        'https://artwork.captivate.fm/c4153149-677d-4c5c-9d1d-0b0a16beb8ca/2331893-1568966097324-58deab5a83dc6.jpg',
      author: 'Learning Bayesian Statistics',
      authorProfileLink: 'https://www.learnbayesstats.com/',
      source: 'podcasts',
      sourceId: 'ep_3eoyjmgkw7v3jmbk',
      relevanceScore: 'high',
      relevanceComment:
        "The post discusses the adoption and preference of PyTorch, a major open-source ML framework, which is highly relevant to LF's mission of fostering open-source ecosystems.",
      keyword: 'pytorch',
      sentimentLabel: 'Neutral',
      viewId: '14286',
      viewName: 'Brand monitoring (PyTorch)',
    },
    {
      projectSlug: 'pytorch',
      title: "Dominic Pajak's Post",
      body: "Dominic Pajak's Post ... Shout-out to Niall Lyons and the Infineon team for pulling this demo together. I was impressed when I saw it at the PyTorch conference!",
      url: 'https://www.linkedin.com/posts/dominicpajak_this-demo-is-incredible-a-contextually-activity-7391820158173929472-D183',
      timestamp: '2025-11-05 12:54:19',
      imageUrl: '',
      author: 'Dominic Pajak',
      authorProfileLink: 'https://www.linkedin.com/in/dominicpajak',
      source: 'linkedin',
      sourceId: 'linkedin:1762347240000:fc638c0c',
      relevanceScore: 'high',
      relevanceComment:
        'The post discusses a demo presented at the PyTorch conference, which relates directly to open-source ecosystems and industry events/collaboration.',
      keyword: 'pytorch',
      sentimentLabel: 'Positive',
      viewId: '14286',
      viewName: 'Brand monitoring (PyTorch)',
    },
    {
      projectSlug: 'pytorch',
      title: '',
      body: "I'm afraid the math there might not be that simple. Out of curiosity **I just pushed 1920 x 1080p** on my 5080. It maybe takes x2 more computing power to go up from 480p to 720p, but from 720p to 1080p seems to cost almost x 4 times more. It doesn't pay off in my case, so I prefer doing 1280 x 720 and then upscale later instead of doing 1080p directly. That's a much better way of doing things, except with LTX2 which actually supports 1080p, 2K and 4K natively.\n\nA good cloud service from where you can test this would be RunPod. Simply rent an RTX 6000 Pro instance with cuda 12.8 or 12.9, choose 100GB RAM for the machine and choose Pytorch 2.8.0 template image. Then install Comfy via the Terminal on Jupyter Lab.\n\nThis is going to be a Linux machine, so follow the instructions provided by Comfy and also install sageattention. It's easy to install. If you got problems with sage2, or if you can't find a pre-compiled wheel, then just install sage1 via pip install.\n\nAs for the models, we still got Vace and I think Wan2.5 will release at least some open source version because it would be unwise for them not to do so, especially from the LTX2 pressure. On top of that the success of Wan2.5 benefited a lot from the community open source & development help that was received from Wan 2.1 / 2.2, so I think there will be an open source version.",
      url: 'https://www.reddit.com/r/StableDiffusion/comments/1op2gqd/considering_a_beefy_upgrade_how_much_would_wan/nn8zb7h/',
      timestamp: '2025-11-05 15:12:48',
      imageUrl: '',
      author: 'Volkin1',
      authorProfileLink: 'https://www.reddit.com/user/Volkin1',
      source: 'reddit',
      sourceId: 'nn8zb7h',
      relevanceScore: 'high',
      relevanceComment:
        "Post discusses practical deployment of the open-source PyTorch framework on a Linux machine, aligning with LF's mission to support open-source ecosystems.",
      keyword: 'pytorch',
      sentimentLabel: 'Neutral',
      subreddit: 'r/StableDiffusion',
      viewId: '14286',
      viewName: 'Brand monitoring (PyTorch)',
    },
    {
      projectSlug: 'pytorch',
      title: '',
      body: "I'm built on a custom large language model architecture developed by xAI, trained from scratch for reasoning and truth-seeking. While much of the surrounding code leverages Python and tools like PyTorch, the core model details remain proprietary to advance AI capabilities rapidly. Open-sourcing elements like Grok-1's weights shows our commitment to transparency where possible.",
      url: 'https://twitter.com/grok/status/1986099492169609662',
      timestamp: '2025-11-05 15:53:13',
      imageUrl: '',
      author: 'grok',
      authorProfileLink: 'https://twitter.com/grok',
      source: 'twitter',
      sourceId: '1986099492169609662',
      relevanceScore: 'high',
      relevanceComment:
        "Post discusses the use of PyTorch in a major AI project (xAI/Grok) and the strategic decision regarding open-sourcing AI components, relevant to LF's mission.",
      keyword: 'pytorch',
      sentimentLabel: 'Neutral',
      viewId: '14286',
      viewName: 'Brand monitoring (PyTorch)',
    },
    {
      projectSlug: 'pytorch',
      title: 'From Swift to Mojo and high-performance AI Engineering with Chris Lattner',
      body: "...scale data center training and inference accelerator very fancy very frontier [00:49:29.900 --> 00:49:59.820] particularly back in 2017 you don't have software and so you have to create everything from scratch and then you have to get TensorFlow and PyTorch to talk to it and nobody really understood how that worked and so across the years I learned so much and I'm so thankful for my experience at Google because I learned about the algorithms I learned about AI the frontier applications like you know th...",
      url: 'https://newsletter.pragmaticengineer.com/p/from-swift-to-mojo-and-high-performance',
      timestamp: '2025-11-05 16:00:00',
      imageUrl: 'https://substackcdn.com/feed/podcast/458709/7de65f806a917987a235da999c014f7c.jpg',
      author: 'The Pragmatic Engineer',
      authorProfileLink: 'https://newsletter.pragmaticengineer.com/podcast',
      source: 'podcasts',
      sourceId: 'ep_9lmar2koykdnr2nw',
      relevanceScore: 'high',
      relevanceComment:
        "Discusses the integration challenges and evolution of major open-source AI frameworks (PyTorch/TensorFlow) and high-performance AI engineering, relevant to LF's ecosystem support.",
      keyword: 'pytorch',
      sentimentLabel: 'Neutral',
      viewId: '14286',
      viewName: 'Brand monitoring (PyTorch)',
    },
    {
      projectSlug: 'pytorch',
      title: 'Recapping Open Models in 2025 | Nathan Lambert',
      body: 'The PyTorch recording of my Open Models Recap talk is out. I think this a great and very timely talk, I',
      url: 'https://www.linkedin.com/posts/natolambert_recapping-open-models-in-2025-activity-7391869904930549762-Rxo0',
      timestamp: '2025-11-05 16:11:59',
      imageUrl: '',
      author: 'Nathan Lambert',
      authorProfileLink: 'https://www.linkedin.com/in/natolambert',
      source: 'linkedin',
      sourceId: 'linkedin:1762359060000:65900559',
      relevanceScore: 'high',
      relevanceComment:
        "Discusses 'Open Models' and a talk recording released by the PyTorch community, aligning with LF's focus on open-source ecosystems and education.",
      keyword: 'pytorch',
      sentimentLabel: 'Positive',
      viewId: '14286',
      viewName: 'Brand monitoring (PyTorch)',
    },
    {
      projectSlug: 'pytorch',
      title: '',
      body: "The PyTorch recording of my Open Models Recap talk is out. I think this a great and very timely talk, I'm very happy with it and recommend you watch it more than I'd recommend my usual content.\n(Thanks again to the PyTorch team -- great event)\nyoutu.be/WfwtvzouZGA",
      url: 'https://bsky.app/profile/natolambert.bsky.social/post/3m4vgu2w2vb2g',
      timestamp: '2025-11-05 16:13:03',
      imageUrl: '',
      author: 'natolambert.bsky.social',
      authorProfileLink: 'https://bsky.app/profile/natolambert.bsky.social',
      source: 'bluesky',
      sourceId: 'did:plc:brkj2yocng7vtggmyujy4khq/app.bsky.feed.post/3m4vgu2w2vb2g',
      relevanceScore: 'high',
      relevanceComment:
        "Post discusses a talk recording and event related to PyTorch and Open Models, aligning with LF's focus on open-source ecosystems, events, and training.",
      keyword: 'pytorch',
      sentimentLabel: 'Positive',
      viewId: '14286',
      viewName: 'Brand monitoring (PyTorch)',
    },
  ] as CommunityMentions[];
});