BEGIN:VCALENDAR
VERSION:2.0
PRODID:-// - ECPv6.15.16//NONSGML v1.0//EN
CALSCALE:GREGORIAN
METHOD:PUBLISH
X-ORIGINAL-URL:https://www.neuropac.info
X-WR-CALDESC:Events for NeuroPAC
REFRESH-INTERVAL;VALUE=DURATION:PT1H
X-Robots-Tag:noindex
X-PUBLISHED-TTL:PT1H
BEGIN:VTIMEZONE
TZID:America/New_York
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20220313T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20221106T060000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20230312T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20231105T060000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20240310T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20241103T060000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20250309T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20251102T060000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20260308T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20261101T060000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20220327T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20221030T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20230326T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20231029T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20251026T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20260329T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20261025T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Amsterdam
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20251026T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20260329T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20261025T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Stockholm
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20251026T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20260329T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20261025T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Zurich
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20230326T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20231029T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20251026T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:America/Los_Angeles
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20220313T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20221106T090000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20230312T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20231105T090000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20240310T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20241103T090000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:UTC
BEGIN:STANDARD
TZOFFSETFROM:+0000
TZOFFSETTO:+0000
TZNAME:UTC
DTSTART:20220101T000000
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
DTSTART;TZID=America/New_York:20251105T140000
DTEND;TZID=America/New_York:20251106T173000
DTSTAMP:20260417T090814Z
CREATED:20250920T192257Z
LAST-MODIFIED:20250920T192257Z
UID:10000355-1762351200-1762450200@www.neuropac.info
SUMMARY:SNUFA 2025 — Spiking Neural Networks as Universal Function Approximators
DESCRIPTION:SNUFA 2025 (Spiking Neural Networks as Universal Function Approximators) is a virtual workshop taking place on 5–6 November 2025. The workshop brings together researchers and practitioners to explore the theory\, applications\, and universality of spiking neural networks\, featuring invited talks\, contributed presentations\, flash talks\, and a virtual poster session. \nKey Focus: Understanding how spiking neural networks can serve as universal function approximators for neuromorphic computing and AI applications. \nRegistration:\nhttps://www.eventbrite.co.uk/e/snufa-2025-tickets-1549418545579\nAttendance is free\, but registration is required (to receive the streaming links).\n  \nAbstract submission\nDeadline: Sept 26\, 2025 (anywhere on earth)
URL:https://www.neuropac.info/event/snufa-2025-spiking-neural-networks-as-universal-function-approximators/
LOCATION:Online
CATEGORIES:Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20250902
DTEND;VALUE=DATE:20250904
DTSTAMP:20260417T090814Z
CREATED:20250529T105105Z
LAST-MODIFIED:20250529T105105Z
UID:10000333-1756771200-1756943999@www.neuropac.info
SUMMARY:4th Cognition and Natural Sensory Processing (CNSP) Workshop
DESCRIPTION:We’re happy to announce that the 4th Cognition and Natural Sensory Processing (CNSP) Workshop will take place virtually on September 2-3. \nThe workshop will be two days of talks and hands-on tutorials relating to analysis of neural data involving natural\, continuous sensory inputs (e.g.\, speech/music listening). Our keynote speakers will be Dr. Jean-Rémi King and Dr. Sam Nastase. The workshop will also feature interactive tutorial sessions on the analysis of continuous sensory data. Please see our workshop page for more information and register here! And we invite PIs to share this with their teams. \nWe are delighted to invite students and postdocs to submit proposals for a talk + tutorial involving analysis of continuous sensory neural data from their work. Please submit your proposal here. \nWorkshop page: https://cnspworkshop.net/workshops.html\nRegistration: https://cnspworkshop.net/registration.html
URL:https://www.neuropac.info/event/4th-cognition-and-natural-sensory-processing-cnsp-workshop/
LOCATION:Online
CATEGORIES:Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20250825
DTEND;VALUE=DATE:20250902
DTSTAMP:20260417T090814Z
CREATED:20250627T223114Z
LAST-MODIFIED:20250627T223114Z
UID:10000341-1756080000-1756771199@www.neuropac.info
SUMMARY:Computational Neuroscience\, Neurotechnology and Neuro-inspired AI (ISRC-CN³)
DESCRIPTION:We are delighted to invite you to apply to the 5th Computational Neuroscience\, Neurotechnology and Neuro-inspired AI (CN³) Summer School\, taking place from 25 August to 1 September 2025 at Ulster University\, Derry~Londonderry\, UK. A hybrid format is available\, allowing participation either in-person or online. This summer school offers a unique\, immersive experience for students\, early-career researchers\, and professionals interested in the intersection of neuroscience\, artificial intelligence\, and neurotechnology.\n\nOrganised by the Intelligent Systems Research Centre (ISRC) at Derry~Londonderry\, this summer school provides interdisciplinary training at the intersection of neuroscience and AI. The programme is designed to equip participants with both theoretical understanding and hands-on experience in emerging neurotechnologies.\n\nKey Highlights:\n• Lectures by international experts in computational neuroscience and AI\n• Practical workshops in Python\, MATLAB\, and neuro-modelling tools\n• Sessions on brain-computer interfaces\, cognitive robotics\, neuromorphic computing\, and more\n• Opportunities to present and discuss research\n• Networking with global peers and experts\n• Exposure to entrepreneurship in neurotechnology and translational research\n\nThe summer school is ideal for:\n• Final-year undergraduate students\n• MSc and PhD students\n• Postdoctoral researchers\n• Industry professionals seeking cross-disciplinary knowledge\n\nApplication Deadlines:\n• In-person (visa-required): 15 June 2025\n• In-person (non-visa): 15 July 2025\n• Online participation: 31 July 2025\n\nFurther details and registration instructions can be found at:\nhttps://www.ulster.ac.uk/conference/isrc-cn3-summer-school
URL:https://www.neuropac.info/event/computational-neuroscience-neurotechnology-and-neuro-inspired-ai-isrc-cn%c2%b3/
LOCATION:Online
CATEGORIES:School
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20250714
DTEND;VALUE=DATE:20250726
DTSTAMP:20260417T090814Z
CREATED:20250529T111246Z
LAST-MODIFIED:20250529T111246Z
UID:10000338-1752451200-1753487999@www.neuropac.info
SUMMARY:NeuroAI Live Online Course by Neuromatch
DESCRIPTION:Full time\, 2 Week\, Live Instruction Course \nWhat are common principles of natural and artificial intelligence? \nThe core challenge of intelligence is generalization. Neuroscience\, cognitive science\, and AI are all questing for principles that help generalization. Major system features that affect generalization include: task structure (multitasking\, multiple inputs with same output and vice versa)\, microcircuitry (nonlinearities\, canonical motifs and their operations\, sparsity)\, macrocircuitry or architecture (e.g. modules for memory\, information segregation\, weight sharing by input symmetry or common development)\, learning rules (synaptic plasticity\, modulation)\, and data stream (e.g. curriculum). \nWe aim to present current understanding of how these issues arise in both natural and artificial intelligence\, comparing how these system features affect representations\, computations\, and learning. We provide case studies and coding exercises that illustrate these issues in neuroscience\, cognitive science and AI. \n\nLearning Goal 1: A common understanding and vocabulary to describe challenges faced by naturally intelligent systems\n\nDescribe core shared concepts in neuroscience\, cognitive science and machine learning and how they differ to each other\nDescribe and implement different ways in which an ANN can be compared to a BNN\nDescribe multiple scales of computation\, and multiple scales of study (e.g. 
Marr’s levels\, what/how/why?)\n\n\nLearning Goal 2: Experience a multiplicity of approaches and interests at the intersection of neuro and AI; be able to describe some of these approaches and interests\nLearning Goal 3: Be able to practically implement NeuroAI models\n\nCoding and training models\nAdding more features to existing models\nDebugging (within guardrails)\nInterpreting\, analyzing and critiquing existing models\n\n\nLearning Goal 4: Complete research that deals with difficulties in NeuroAI\n\nWriting down a problem in a way that makes it tractable\nInteracting with other people from other disciplines fruitfully\nDo research (reading papers\, implementing previous SOTA\, coding new methods\, evaluating diff methods) in NeuroAI\nCommunicating their research in ways that are comprehensible to their target audience\n\n\n\nAll our content is open source\, you can see the NeuroAI course book here.
URL:https://www.neuropac.info/event/neuroai-live-online-course-by-neuromatch/
LOCATION:Online
CATEGORIES:School
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20250617
DTEND;VALUE=DATE:20250619
DTSTAMP:20260417T090814Z
CREATED:20250205T092325Z
LAST-MODIFIED:20250205T092325Z
UID:10000318-1750118400-1750291199@www.neuropac.info
SUMMARY:NEST Conference 2025
DESCRIPTION:The NEST Initiative is excited to invite everyone interested in Neural Simulation Technology and the NEST Simulator to the virtual NEST Conference 2025. The NEST Conference provides an opportunity for the NEST Community to meet\, exchange success stories\, swap advice\, learn about current developments in and around NEST spiking network simulation and its application. We particularly encourage young scientists to participate in the conference! \nThis year’s conference will again take place as a virtual conference on Tuesday/Wednesday 17/18 June 2025.
URL:https://www.neuropac.info/event/nest-conference-2025/
LOCATION:Online
CATEGORIES:Conference
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20250401T140000
DTEND;TZID=Europe/Berlin:20250401T150000
DTSTAMP:20260417T090814Z
CREATED:20250404T174514Z
LAST-MODIFIED:20250404T174514Z
UID:10000330-1743516000-1743519600@www.neuropac.info
SUMMARY:NHR PerfLab Seminar: Neuromorphic Computing from the Computer Science Perspective – Algorithms and Applications
DESCRIPTION:Speaker: Catherine Schuman\, Department of Electrical Engineering and Computer Science\, University of Tennessee \nDate and time: Tuesday\, April 1\, 2025\, 2:00 p.m. CEST \nZoom: https://go-nhr.de/perflab-seminar \nAbstract Neuromorphic computing is a popular technology for the future of computing.  Much of the focus in neuromorphic computing research and development has focused on new architectures\, devices\, and materials\, rather than in the software\, algorithms\, and applications of these systems.  In this talk\, I will overview the field of neuromorphic from the computer science perspective.  I will give an introduction to spiking neural networks\, as well as some of the most common algorithms used in the field.  Finally\, I will discuss the potential for using neuromorphic systems in real-world applications\, from scientific data analysis to autonomous vehicles.
URL:https://www.neuropac.info/event/nhr-perflab-seminar-neuromorphic-computing-from-the-computer-science-perspective-algorithms-and-applications/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Amsterdam:20250212T130000
DTEND;TZID=Europe/Amsterdam:20250212T143000
DTSTAMP:20260417T090814Z
CREATED:20250205T093937Z
LAST-MODIFIED:20250205T093937Z
UID:10000325-1739365200-1739370600@www.neuropac.info
SUMMARY:Seminar by Francky Catthoor: Where digital becomes almost-neural: variability phenomena at the extreme miniaturization limit of 'classical' microchips
DESCRIPTION:Francky Catthoor\nHe is an expert with a life-long research record on (digital) microchip design\, and he has also collaborated with neuromorphic computing researchers. In this special seminar\, he will give an informal introduction to the challenges of extreme miniaturization\, after which we will enjoy an open discussion round. It is not unlikely that we can learn a lot for our CogniGron research! \nSeminar title\nWhere digital becomes almost-neural: variability phenomena at the\nextreme miniaturization limit of ‘classical’ microchips. \nSeminar abstract\nWhen transistor sizes are pushed to the physical limits of\nminiaturization\, effects that are known from brains and analogue\nneuromorphic substrates appear and have to be dealt with. These\nphenomena include stochasticity\, drifting dynamics\, device mismatch\,\nimportance of signal travel delays and multi-timescale synchronization\,\nageing. In the world of ‘classical’ digital microchip engineering\, these\neffects are recognized and dealt with by architectural and\ncontrol-theoretic measures.
URL:https://www.neuropac.info/event/seminar-by-francky-catthoor-where-digital-becomes-almost-neural-variability-phenomena-at-the-extreme-miniaturization-limit-of-classical-microchips/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Stockholm:20250203T150000
DTEND;TZID=Europe/Stockholm:20250203T160000
DTSTAMP:20260417T090814Z
CREATED:20250205T092621Z
LAST-MODIFIED:20250205T092621Z
UID:10000319-1738594800-1738598400@www.neuropac.info
SUMMARY:SmallTalks "Brain-inspired neuromorphic computing using two dimensional materials"
DESCRIPTION:Welcome to a seminar in the series SmallTalks [about Nanoscience] arranged by Nano​ Area of Advance. \nSpeaker: Sameer Kumar Mallik\, Postdoc\, Quantum Device Physics\, Microtechnology and Nanoscience \nCoffee will be served before the start of the seminar. Students are welcome to participate! \nAbstract: Neuromorphic computing is a cutting-edge approach to designing computer systems inspired by the structure and functioning of the human brain. Unlike conventional computers\, which rely on von Neumann processing techniques\, neuromorphic systems use artificial neurons and synapses to mimic human cognition abilities such as pattern recognition\, sensory processing\, and decision-making more efficiently. Two-dimensional (2D) materials\, such as graphene\, transition metal dichalcogenides (TMDs)\, and hexagonal boron nitride\, have emerged as promising candidates for advancing neuromorphic systems. These materials exhibit unique electrical\, optical\, and mechanical properties\, including atomic thinness\, tunable bandgaps\, high carrier mobility\, and scalability\, which make them ideal for constructing energy-efficient\, high-performance synaptic and neuronal components. This presentation explores how 2D materials could revolutionize technologies like artificial intelligence\, smart devices\, and low-power electronics for a more sustainable and connected future.
URL:https://www.neuropac.info/event/smalltalks-brain-inspired-neuromorphic-computing-using-two-dimensional-materials/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20241220
DTEND;VALUE=DATE:20241221
DTSTAMP:20260417T090814Z
CREATED:20241202T111936Z
LAST-MODIFIED:20241202T111936Z
UID:10000309-1734652800-1734739199@www.neuropac.info
SUMMARY:ONM Student Talk: Ram Gaurav @ Virginia Tech
DESCRIPTION:Ramashish Gaurav (Ram) is a 3rd year Ph.D. student at Virginia Tech\, USA. He is supervised by Prof. Yang (Cindy) Yi in her BRICC Lab\, ECE @ VT. Of late\, Ram has been working on reservoir-based spiking models for Time Series Classification (TSC). Reservoir Computing is a well-established domain for time-series processing where a reservoir of statically (and recurrently) connected neurons compute high-dimensional temporal features\, over which a linear readout layer learns the mapping to the output. \nIn his recent work [1]\, Ram designed the Legendre-SNN (LSNN)\, a simple – yet high performing SNN model (for univariate TSC) where he has used the Legendre Delay Network (LDN) [2] as a non-spiking reservoir (in fact\, the LDN in LSNN is implemented with just basic matrix-operations). In a subsequent work (currently under review)\, he extended his LSNN to DeepLSNN that accounts for multivariate time-series signals too; upon experimenting with it\, he found that DeepLSNN models outperform a popular (and complex) LSTM-Conv integrated model [3] on more than 30% of 101 TSC datasets. His latest work is on the evaluation of Legendre-SNN on the Loihi-2 chip [4] — on which this talk is focused at. \nTalk details here\, time TBA.
URL:https://www.neuropac.info/event/onm-student-talk-ram-gaurav-virginia-tech/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20241105
DTEND;VALUE=DATE:20241107
DTSTAMP:20260417T090814Z
CREATED:20241003T122750Z
LAST-MODIFIED:20241003T122750Z
UID:10000301-1730764800-1730937599@www.neuropac.info
SUMMARY:SNUFA: Spiking Neural networks as Universal Function Approximators
DESCRIPTION:SNUFA is an online workshop and community focused on research advances in the field of “Spiking Networks as Universal Function Approximators.”\nThe annual SNUFA online workshop brings together researchers in spiking neural networks to present their work and discuss translating these findings into a better understanding of neural circuits and novel brain-inspired computing approaches. Topics of interest include artificial and biologically plausible learning algorithms and the dissection of trained spiking circuits toward understanding neural processing.
URL:https://www.neuropac.info/event/snufa-spiking-neural-networks-as-universal-function-approximators/
LOCATION:Online
CATEGORIES:Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/New_York:20240504T110000
DTEND;TZID=America/New_York:20240504T121500
DTSTAMP:20260417T090814Z
CREATED:20240428T081014Z
LAST-MODIFIED:20240428T081014Z
UID:10000285-1714820400-1714824900@www.neuropac.info
SUMMARY:Sangyeob Kim @ ONM - C-DNN and C-Transformer: Mixing ANNs and SNNs for the Best of Both Worlds
DESCRIPTION:From the Open Neuromorphic website. \nSangyeob and his team have developed a C-DNN processor that effectively processes object recognition workloads\, achieving 51.3% higher energy efficiency compared to the previous state-of-the-art processor. Subsequently\, they have applied C-DNN not only to image classification but also to other applications\, and have developed the C-Transformer\, which applies this technique to a Large Language Model (LLM). As a result\, they demonstrate that the energy consumed in LLM can be reduced by 30% to 72% using the C-DNN technique\, compared to the previous state-of-the-art processor. In this talk\, we will introduce the processor developed for C-DNN and C-Transformer\, and discuss how neuromorphic computing can be used in actual applications in the future. \n\n\nAbout the Speaker\nSangyeob Kim (Student Member\, IEEE) received the B.S.\, M.S. and Ph.D. degrees from the School of Electrical Engineering\, Korea Advanced Institute of Science and Technology (KAIST)\, Daejeon\, South Korea\, in 2018\, 2020 and 2023\, respectively. He is currently a Post-Doctoral Associate with the KAIST. His current research interests include energy-efficient system-on-chip design\, especially focused on deep neural network accelerators\, neuromorphic hardware\, and computing-in-memory accelerators.
URL:https://www.neuropac.info/event/sangyeob-kim-onm-c-dnn-and-c-transformer-mixing-anns-and-snns-for-the-best-of-both-worlds/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20240430
DTEND;VALUE=DATE:20240501
DTSTAMP:20260417T090814Z
CREATED:20240219T093217Z
LAST-MODIFIED:20240219T093217Z
UID:10000281-1714435200-1714521599@www.neuropac.info
SUMMARY:ICONS 2024 Paper Deadline
DESCRIPTION:CALL FOR PAPERS: International Conference on Neuromorphic Systems (ICONS) 2024\nImportant Dates:\nApril 30\, 2024: Submissions due \nMay 15\, 2024: Reviews due \nJune 6\, 2024: Decision notification \nJuly 30-August 2\, 2024: Conference \nWith the looming end of the “Moore’s Law” era\, there is an emerging challenge to “create a new type of computer that can proactively interpret and learn from data\, solve unfamiliar problems using what it has learned\, and operate with the energy efficiency of the human brain.” \nNeuromorphic computing will play a major role in this challenge and has the potential to transform the way we use computers through new materials\, new brain-inspired chips\, greater understanding of neuroscience\, and breakthroughs in machine understanding/intelligence. Neuromorphic computing systems have the potential to mimic the functionality of neural systems in the brain\, which we believe will lead to more powerful and efficient computing paradigms. The goal of this conference is to bring together leading researchers in neuromorphic computing to present new research\, develop new collaborations\, and provide a forum to publish work in this area. 
\nRESEARCH PAPERS ARE REQUESTED FOR TOPICS ON NEUROMORPHIC COMPUTING\, SPECIFICALLY IN FOUR FOCUS AREAS:\n\nSystems\, architectures\, and circuits\n\nNetwork\, neuron\, and synapse models\nNon-von Neumann computing architectures and models\nEmerging devices and hardware implementations\nEvent or spike-based systems\nNeuromorphic circuits\nNovel brain-inspired system architectures\n\n\nMachine intelligence algorithms for programming or training neuromorphic devices\n\nSupervised\, unsupervised and self-supervised learning methods\nBiologically-inspired algorithms\nAdaptations to existing algorithms for use on or with neuromorphic systems\n\n\nApplications for and use-cases of neuromorphic systems\n\nApplications where neuromorphic systems have the potential to outperform state-of-the-art techniques\nSuggestions for benchmark tasks for neuromorphic computing\nNeuromorphic datasets\n\n\nSupporting software and systems for neuromorphic systems\n\nEfficient simulation techniques for hardware and large-scale networks\nCompilers and programming frameworks\nVisualization tools\n\n\n\nNote: Submissions outside the scope of these areas\, including materials science and neuroscience\, will also be considered (especially for lightning talks and posters)\, although they are not the focus of this conference. \nWE ARE ACCEPTING SUBMISSIONS IN THE FOLLOWING FORMATS:\n\nFull papers (6-8 pages)\, which will be considered for full (20 minute) presentations. Full papers should present original research and will be included in the conference proceedings. The page limit includes references\, appendix and any other material that would accompany the paper.\nShort papers (3-4 pages)\, which will be considered for full presentations and/or lightning talks. Short papers can be position papers or present preliminary results and will be included in the conference proceedings. 
The page limit includes references\, appendix and any other material that would accompany the paper.\nExtended abstracts (1 page) for lightning talks and/or poster presentations. Extended abstracts will not be included in the conference proceedings.\nTutorial submissions (2-3 pages) for 1-2 hour tutorial sessions. Tutorials should include a hands-on component for tutorial attendees to work on or interact with neuromorphic software or hardware. Tutorials should be led by no more than three facilitators. Unlike paper and abstract submissions\, tutorial submissions should be submitted via email to Melika Payvand and Maryam Parsa at melika [at] ini.uzh.ch and mparsa [at] gmu.edu.\nSpecial session submissions (2-3 pages) for 1-2 hour special sessions. Special sessions should include invited presentations on a specific topic. Special session submissions will not be included in the conference proceedings. Unlike paper and abstract submissions\, special session submissions should be submitted via email to Melika Payvand and Maryam Parsa at melika [at] ini.uzh.ch and mparsa [at] gmu.edu\n\nProgram Co-Chairs:\nMelika Payvand\, Institute of Neuroinformatics\, University of Zurich and ETH Zurich \nMaryam Parsa\, George Mason University \nSubmission Website: Coming soon
URL:https://www.neuropac.info/event/icons-2024-paper-deadline/
LOCATION:Online
CATEGORIES:Conference,Deadline
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20240418T230000
DTEND;TZID=Europe/Berlin:20240419T003000
DTSTAMP:20260417T090814Z
CREATED:20231103T153206Z
LAST-MODIFIED:20240428T080746Z
UID:10000267-1713481200-1713486600@www.neuropac.info
SUMMARY:Tobias Fischer @ ONM - Advances in Neuromorphic Visual Place Recognition
DESCRIPTION:From the Open Neuromorphic website. \nAbout the Speaker\nTobias conducts interdisciplinary research at the intersection of intelligent robotics\, computer vision\, and computational cognition. My main goal is to develop high-performing\, bio-inspired computer vision algorithms that simultaneously examine animals/humans and robots’ perceptional capabilities. He is a Lecturer (Assistant Professor) in Queensland University of Technology’s Centre for Robotics. He joined the Centre as an Associate Investigator and Research Fellow in January 2020. Previously\, he was a postdoctoral researcher in the Personal Robotics Lab at Imperial College London. He received a PhD from Imperial College in January 2019. His thesis was awarded the UK Best Thesis in Robotics Award 2018 and the Eryl Cadwaladr Davies Award for the best thesis in Imperial’s EEE Department in 2017-2018. He previously received an M.Sc. degree (distinction) in Artificial Intelligence from The University of Edinburgh in 2014 and a B.Sc. degree in Computer Engineering from Ilmenau University of Technology\, Germany\, in 2013. His works have attracted two best poster awards\, one best paper award\, and he was the senior author of the winning submission to the Facebook Mapillary Place Recognition Challenge 2020.
URL:https://www.neuropac.info/event/tobias-fischer-onm-advances-in-neuromorphic-visual-place-recognition/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20240326
DTEND;VALUE=DATE:20240329
DTSTAMP:20260417T090814Z
CREATED:20240316T233432Z
LAST-MODIFIED:20240316T234415Z
UID:10000284-1711411200-1711670399@www.neuropac.info
SUMMARY:Neuromorphic technology: a giant leap for AI
DESCRIPTION:This event will convene a community of researchers and innovators working on cutting-edge neuromorphic hardware and brain-inspired algorithms. This is an AI UK Fringe event supported by the Alan Turing Institute. \nWhile the transformative potential of neuromorphic technology for the future of AI and computing has gained recognition in the USA National AI R&D Strategic Plan\, a similar community has only recently taken root in the UK. To secure UK leadership in this strategically important technology requires urgent attention. Recognizing this imperative\, Innovate UK’s Horizon Scanning team has embarked on the task of consolidating UK neuromorphic research and innovation. The proposed event aims to catalyze and amplify these efforts\, fostering collaboration and knowledge exchange within a broad neuromorphic stakeholder community. Loughborough University is one of the key research centres in this field\, having a track record of hosting three recent world-leading neuromorphic workshops and established a diverse network both within the UK and across the globe. \nRegister via: this form
URL:https://www.neuropac.info/event/neuromorphic-technology-a-giant-leap-for-ai/
LOCATION:Online
CATEGORIES:Symposium,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20240305T180000
DTEND;TZID=Europe/Berlin:20240305T193000
DTSTAMP:20260417T090814Z
CREATED:20240130T001106Z
LAST-MODIFIED:20240308T111533Z
UID:10000277-1709661600-1709667000@www.neuropac.info
SUMMARY:Maxence Ernoult @ ONM: Accelerating Neuromorphic Inference and Training at the Edge @ Rain
DESCRIPTION:From the Open Neuromorphic website. \n\n\nMaxence will present us Rain’s vision and technological roadmap to build hardware optimized for inference and training at the edge including both the hardware and algorithm aspects with an emphasis on why physical and mathematical principles matter more to him than biological inspiration. \n\n\n\n\n\nAbout the Speaker\nMaxence Ernoult graduated from Ecole Polytechnique and the University of Cambridge in 2016\, specializing in applied mathematics and theoretical physics. His PhD research was conducted in neuromorphic computing at Sorbonne University\, in collaboration with Mila. During this time\, he specialized in developing hardware-friendly alternatives to backpropagation and played a significant role in scaling up several of these alternatives\, including Equilibrium Propagation and Difference Target Propagation. This work was undertaken alongside notable figures such as Ben Scellier\, Blake Richards\, and Yoshua Bengio. In 2021\, Maxence joined IBM Research\, focusing on AI safety. Subsequently\, in 2022\, he began a new position at Rain.
URL:https://www.neuropac.info/event/maxence-ernoult-onm-accelerating-neuromorphic-inference-and-training-at-the-edge-rain/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Zurich:20240227T180000
DTEND;TZID=Europe/Zurich:20240227T193000
DTSTAMP:20260417T090814
CREATED:20240105T080743Z
LAST-MODIFIED:20240308T111522Z
UID:10000275-1709056800-1709062200@www.neuropac.info
SUMMARY:Aaron Spieler @ ONM - The ELM Neuron: An Efficient and Expressive Cortical Neuron Model Can Solve Long-Horizon Tasks
DESCRIPTION:From the Open Neuromorphic website. \nBiological cortical neurons are remarkably sophisticated computational devices\, temporally integrating their vast synaptic input over an intricate dendritic tree\, subject to complex\, nonlinearly interacting internal biological processes. \nWith the aim to explore the computational implications of leaky memory units and nonlinear dendritic processing\, we introduce the Expressive Leaky Memory (ELM) neuron model\, a biologically inspired phenomenological model of a cortical neuron. Remarkably\, by exploiting a few such slowly decaying memory-like hidden states and two-layered nonlinear integration of synaptic input\, our ELM neuron can accurately match the aforementioned input-output relationship with under ten-thousand trainable parameters. \nWe evaluate the model on various tasks with demanding temporal structures\, including the Long Range Arena (LRA) datasets\, as well as a novel neuromorphic dataset based on the Spiking Heidelberg Digits dataset (SHD-Adding). The ELM neuron reliably outperforms the classic Transformer or Chrono-LSTM architectures on these tasks\, even solving the Pathfinder-X task with over 70% accuracy (16k context length). \n\n\n\n\n\nAbout the Speaker\nAaron Spieler is a computational neuroscientist passionate about exploring the intersection of deep learning and neuroscience. After earning his Bachelor’s in Computer Science from the University of Potsdam\, he undertook an extended internship at Amazon Web Services working in deep learning based forecasting\, before further specializing with a Master’s in Computational Neuroscience at the University of Tübingen. Throughout his Master’s thesis and a subsequent internship at the Max Planck Institute for Intelligent Systems\, Aaron focused on phenomenological neuron modeling with applications to long-range prediction tasks. Pursuing this work allowed him to collaborate with excellent researchers from diverse backgrounds\, including Prof. 
Bernhard Schölkopf and Prof. Anna Levina.
URL:https://www.neuropac.info/event/aaron-spieler-onm-the-elm-neuron-an-efficient-and-expressive-cortical-neuron-model-can-solve-long-horizon-tasks/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Zurich:20240205T180000
DTEND;TZID=Europe/Zurich:20240205T200000
DTSTAMP:20260417T090814
CREATED:20240105T080554Z
LAST-MODIFIED:20240105T080554Z
UID:10000274-1707156000-1707163200@www.neuropac.info
SUMMARY:Jens E. Pedersen @ ONM - NIR: A Unified Instruction Set for Brain-Inspired Computing
DESCRIPTION:Have you wondered how to use neuromorphic hardware platforms? \nAre you depressed by your power bill after you bought your >400W GPU rig? \nThen you came to the right place! \nIn this workshop\, we will show you how to move models from your favourite framework directly to neuromorphic hardware with 1-2 lines of code! We will present the technology behind\, the Neuromorphic Intermediate Representation \, and demonstrate how we can use it to run a live spiking convnet on the Speck chip. \nNIR is currently supported by Intel Loihi \, Speck \, SpiNNaker2 \, Xylo and a host of simulators\, including Norse \, snnTorch \, and Spyx . \nJoin us on the 5th of February to get your own hands-on experience with NIR and neuromorphic hardware! \nAll it requires is a computer and a bit of Python knowledge. \nAgenda: \n\n18:00 – 19:00: NIR introduction\n\nMotivation: coupling neuromorphic hardware and software\nDemonstrating NIR: from PyTorch to Speck\nQ&A\n\n\n19:00 – 20:00: Workshop\n\nHands-on experience with NIR via Jupyter Notebooks or custom models\nQ&A and collaborative discussions\n\n\n\nSpeakers: \n\nJens E. Pedersen \, PhD at the Neurocomputing Systems lab at KTH Royal Institute of Technology\, Sweden\n\nNote: The event will be hosted virtually. Stay tuned for the video link and further updates. \n\n\n\n\n\nAbout the Speaker\nJens is a computer scientist studying his PhD in neuromorphic computing at the KTH Royal Institute of Technology. Jens co-authored the Norse simulator and the AEStream event-based streaming library.
URL:https://www.neuropac.info/event/jens-e-pedersen-onm-nir-a-unified-instruction-set-for-brain-inspired-computing/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Zurich:20240125T180000
DTEND;TZID=Europe/Zurich:20240125T193000
DTSTAMP:20260417T090814
CREATED:20240105T080357Z
LAST-MODIFIED:20240105T080357Z
UID:10000273-1706205600-1706211000@www.neuropac.info
SUMMARY:Carlos Ortega-Otero @ ONM - IBM NorthPole: Neural Inference at the Frontier of Energy\, Space\, and Time
DESCRIPTION:Abstract \nComputing\, since its inception\, has been processor-centric\, with memory separated from compute. Inspired by the organic brain and optimized for inorganic silicon\, NorthPole is a neural inference architecture that blurs this boundary by eliminating off-chip memory\, intertwining compute with memory on-chip\, and appearing externally as an active memory chip. NorthPole is a low-precision\, massively parallel\, densely interconnected\, energy-efficient\, and spatial computing architecture with a co-optimized\, high-utilization programming model. \nOn the ResNet50 benchmark image classification network\, relative to a graphics processing unit (GPU) that uses a comparable 12-nanometer technology process\, NorthPole achieves a 25 times higher energy metric of frames per second (FPS) per watt\, a 5 times higher space metric of FPS per transistor\, and a 22 times lower time metric of latency. Similar results are reported for the Yolo-v4 detection network. \nNorthPole outperforms all prevalent architectures\, even those that use more-advanced technology processes. \nAbout the Speaker \n\n\n\n\n\nDr. Carlos Ortega-Otero is a Sr. Research Staff Member at IBM driven by a passion in Circuit Design\, Neuromorphic Chip Architectures\, Low-Power Circuits and Physical Design optimizations. He earned his Ph.D. from Cornell University under the guidance of Prof. Rajit Manohar. \nThroughout his career\, he has worked in groundbreaking projects\, including Ultra-Low Power Asynchronous Sensor Network nodes\, Medical Implantable Wireless Sensors\, The TrueNorth Brain-Inspired Chip\, and the NorthPole Project. At IBM\, Carlos works under the leadership of Dr. Dharmendra Modha in the Brain-Inspired Computing Group. \nHe plays key roles in Architecture\, Specification\, Digital Implementation\, Physical Design\, Timing Signoff\, and Manufacturing teams of the NorthPole Project. 
Carlos is proud to be part of the Brain-Inspired Computing Group at IBM that continues to shape the future of Integrated Circuits and AI.
URL:https://www.neuropac.info/event/carlos-ortega-otero-onm-ibm-northpole-neural-inference-at-the-frontier-of-energy-space-and-time/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Zurich:20240115T180000
DTEND;TZID=Europe/Zurich:20240115T193000
DTSTAMP:20260417T090814
CREATED:20240105T080205Z
LAST-MODIFIED:20240105T080205Z
UID:10000272-1705341600-1705347000@www.neuropac.info
SUMMARY:Cristian Axenie @ ONM - Hybrid Learning for Event-Based Visual Motion Detection and Tracking of Pedestrians
DESCRIPTION:The Vision Zero Program’s purpose is to reduce traffic-related fatalities and serious injuries while promoting equitable\, safe\, and healthy mobility for all. Ultimately\, the challenge is to detect pedestrians during the day and especially at night in order to implement safety measures. \nThe current study introduces an award-winning low-power solution employing neuromorphic visual sensing and hybrid neuro-statistical processing developed by the Technische Hochschule Nürnberg team for the TinyML Vision Zero San Jose Competition. The solution proposes a novel neuromorphic edge fusion of spiking neural networks and event-based expectation maximization for the detection and tracking of pedestrians and bicyclists. \nWe provide a deployment-ready evaluation of the detection performance along with robustness\, energy footprint\, and weatherization while emphasizing the advantages of the neuro-statistical edge solution and its city-level scaling capabilities. \n\n\nAbout the Speaker\nDr. Axenie is Professor of Artificial Intelligence and Research Group Leader in Cognitive Neurocomputing at the Technische Hochschule Nürnberg Georg Simon Ohm in Germany. \nAfter earning a Dr. Eng. Sc. in Neuroscience and Robotics from the Technical University of Munich in 2016\, Dr. Axenie joined the Huawei Research Center in Munich. Between 2017 and 2023 Dr. Axenie was Staff Research Engineer with Huawei Research Center. At the same time\, Dr. Axenie was the Principal Investigator and Head of the Audi Konfuzius-Institut Ingolstadt Laboratory at the Technische Hochschule Ingolstadt. \nDr. Axenie is a seasoned researcher with more than 15 years of academic research and more than 10 years of industrial research experience. His research was disseminated in more than 50 peer-reviewed publications and more than 10 patents. Currently Dr. Axenie focuses on sustainable and efficient deployment of intelligent algorithms for sensor fusion and closed-loop control.
URL:https://www.neuropac.info/event/cristian-axenie-onm-hybrid-learning-for-event-based-visual-motion-detection-and-tracking-of-pedestrians/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20231219T180000
DTEND;TZID=Europe/Berlin:20231219T193000
DTSTAMP:20260417T090814
CREATED:20231103T152925Z
LAST-MODIFIED:20231103T152925Z
UID:10000265-1703008800-1703014200@www.neuropac.info
SUMMARY:Brad Aimone @ ONM - Programming Scalable Neuromorphic Algorithms With Fugu
DESCRIPTION:From the Open Neuromorphic website \nExplore neural-inspired computing with Brad Aimone\, a leading neuroscientist at Sandia Labs. Join us for insights into next-gen technology and neuroscience.
URL:https://www.neuropac.info/event/brad-aimone-onm-programming-scalable-neuromorphic-algorithms-with-fugu/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20231213T060000
DTEND;TZID=Europe/Berlin:20231213T080000
DTSTAMP:20260417T090814
CREATED:20231130T123001Z
LAST-MODIFIED:20231130T123015Z
UID:10000271-1702447200-1702454400@www.neuropac.info
SUMMARY:Kade Heckel @ ONM - Neuromorphic Hackathon with Spyx
DESCRIPTION:From the open-neuromorphic.org website: \nJoin us on December 13th for an exciting Spyx hackathon and ONM talk! Learn how to use and contribute to Spyx \, a high-performance spiking neural network library\, and gain insights into the latest developments in neuromorphic frameworks. The session will cover Spyx’s utilization of memory and GPU to maximize training throughput\, along with discussions on the evolving landscape of neuromorphic computing. \nDon’t miss this opportunity to engage with experts\, collaborate on cutting-edge projects\, and explore the potential of Spyx in shaping the future of neuromorphic computing. Whether you’re a seasoned developer or just curious about the field\, this event promises valuable insights and hands-on experience. \nAgenda: \n\n18:00 – 19:00: Spyx Introduction\n\nDive into Spyx\, its features\, and how to contribute\nHands-on session: Explore Spyx functionalities and tackle real-world challenges\nQ&A and collaborative discussions\n\n\n19:00 – 20:00: Hackathon\n\nCollaborate on cutting-edge projects and explore the potential of Spyx\nQ&A and collaborative discussions\n\n\n\nSpeakers: \n\nKade Heckel\n\nNote: The event will be hosted virtually. Stay tuned for the video link and further updates. Let’s come together to push the boundaries of neuromorphic computing!
URL:https://www.neuropac.info/event/kade-heckel-onm-neuromorphic-hackathon-with-spyx/
LOCATION:Online
CATEGORIES:Talk,Tutorial,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/Los_Angeles:20231205T080000
DTEND;TZID=America/Los_Angeles:20231205T090000
DTSTAMP:20260417T090814
CREATED:20231130T122725Z
LAST-MODIFIED:20231130T122725Z
UID:10000270-1701763200-1701766800@www.neuropac.info
SUMMARY:Michael Jurado @ INRC - Enhancing Performance and Efficiency of SNNs
DESCRIPTION:Title:\nEnhancing Performance and Efficiency of SNNs: From Spike-Based Loss Improvements to Synaptic Sparsification Techniques. \nAbstract:\nThe introduction of offline training capabilities like Spike Layer Error Reassignment in Time (SLAYER) and advancements in the probabilistic interpretations of Spiking Neural Network (SNN) output reinforce SNNs as a viable alternative to Artificial Neural Networks (ANNs). However\, special care must be taken during Surrogate Gradient (SG) training to achieve desired performance and efficiency. This talk will cover our recent work in improving spike-based loss functions for SNNs as well as sparsifying SNNs for low cost\, high performant neuromorphic computing. \nSpikemax was previously introduced as a family of differentiable loss methods which use windowed spike counts to form classification probabilities. We modify the Spikemaxs loss method to use rates and a scaling parameter instead of counts to form Scaled-Spikemax. Our mathematical analysis shows that an appropriate scaling term can yield less coarse probability outputs from the SNN and help smooth the gradient of the loss during training. Experimentally\, we show that Scaled-Spikemax achieves faster training convergence than Spikemax and results in relative improvements of 4.2% and 9.9% in accuracy for NMNIST and N-TIDIGITS18\, respectively. We then extend Scaled-Spikemax to construct a spike-based loss function for multi-label classification called Spikemoid. The viability of Spikemoid is shown via the first known multi-label classification results on N-TIDIGITS18 and 2NMNIST\, a novel variation of NMNIST that superimposes event-driven sensory data. \nHowever\, SNNs trained through SG methods oftentimes use dense or convolutional connections which are not always suitable for Loihi2. In order to minimize core usage and power consumption on chip\, we employ synaptic pruning techniques as part of our SNN training pipelines. 
We demonstrate the effectiveness of synaptic pruning techniques for ANN to SNN conversion of vgg16 on Loihi1 as well as for a lava-dl trained SNN for the Intel DNS Challenge. This latter approach involved the use of Gradual Magnitude Pruning (GMP) applied during SLAYER training\, which reduced the memory footprint of the baseline SDNN by 50-75%. We highlight infrastructure changes to netX which enable conversion of lava-dl trained SNNs into sparsity aware lava processes. \nMeeting link to join is available to INRC members and affiliates on the INRC Forum Schedule (click here). \nIf you are not yet a member of the INRC\, please see the “Joining the INRC link” below. \nBio: Michael Jurado is a research engineer at the Georgia Tech Research Institute. He studied computer science at Georgia Tech and received his master’s degree in Machine Learning in 2022. Lately\, Michael has been studying and developing neuromorphic algorithms for edge computing and is a regular contributor to the lava code base. In his free time\, he likes to read and study languages. \n\n\n\n\n\n\n\n\nFor the recording and slides\, see the full INRC Forum 2023 Schedule (accessible only to INRC Affiliates and Engaged Members). \nIf you are interested in becoming a member\, here is the information about ”Joining the INRC.”
URL:https://www.neuropac.info/event/michael-jurado-inrc-enhancing-performance-and-efficiency-of-snns/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/Los_Angeles:20231128T080000
DTEND;TZID=America/Los_Angeles:20231128T090000
DTSTAMP:20260417T090814
CREATED:20231130T122249Z
LAST-MODIFIED:20231130T122249Z
UID:10000269-1701158400-1701162000@www.neuropac.info
SUMMARY:Jannik Luboeinski @ INRC - Brian2Lava: connecting the Brian 2 simulator to neuromorphic hardware
DESCRIPTION:Abstract:\nNeuromorphic hardware allows for fast and energy-efficient simulation of spiking neural networks. However\, the usage of such devices is still a challenge\, as it requires detailed knowledge about the neuromorphic hardware as well as the used software interface\, e.g.\, the Lava framework for neuromorphic computing spearheaded by Intel. This stands in contrast to the relative ease of simulating spiking neural networks on conventional CPU or GPU architectures\, for which user-friendly simulation environments exist. The Brian 2 simulator\, for instance\, allows to readily define a spiking neural network with a set of equations\, handling all subsequent hardware interactions. \nTo link the best of both worlds\, we are developing Brian2Lava. Brian2Lava combines the intuitive user interface of Brian 2 with the functionality of Lava. By means of a so-called device for Brian 2\, Brian2Lava seamlessly generates and executes the desired simulations in Lava without the need for users to write additional code. At the current stage\, Brian2Lava supports most Brian 2 features when executing Lava on CPU\, and a selection of essential features for the execution on Intel’s Loihi 2 chip. We are constantly working to expand the number of features supported with the chip\, aiming to enable users to flexibly execute simulations on different hardware platforms. \nIn summary\, by bridging the gap between user-friendly model definition and neuromorphic implementation\, Brian2Lava empowers engineers and neuroscientists alike to leverage the potential of neuromorphic hardware with greater ease and efficiency. \nBio: Jannik Luboeinski is currently a postdoctoral researcher at University of Göttingen. He received his B.Sc. and M.Sc. degrees in Physics from Technical University of Darmstadt and Goethe University Frankfurt\, respectively. From 2017 to 2021\, he did his Ph.D. 
with Christian Tetzlaff at University of Göttingen\, investigating the role of two-phase synaptic plasticity in recurrent spiking neural networks\, which resulted in the publication of several journal papers. In 2021\, Dr. Luboeinski continued to work in the group of Professor Tetzlaff (now Computational Synaptic Physiology Group) as a postdoctoral researcher. A major aim of his research is to identify properties that enable efficient memory processes in biological and artificial neural systems. His work currently focuses on neuromorphic computing and the development of simulation software for recurrent spiking neural networks. \nMeeting link to join is available to INRC members and affiliates on the INRC Forum Schedule (click here). \nIf you are not yet a member of the INRC\, please see the “Joining the INRC link” below. \nFor the recording and slides\, see the full INRC Forum 2023 Schedule (accessible only to INRC Affiliates and Engaged Members). \nIf you are interested in becoming a member\, here is the information about ”Joining the INRC.”
URL:https://www.neuropac.info/event/jannik-lubeoinski-inrc-brian2lava-connecting-the-brian-2-simulator-to-neuromorphic-hardware/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20231116T180000
DTEND;TZID=Europe/Berlin:20231116T190000
DTSTAMP:20260417T090814
CREATED:20231103T152759Z
LAST-MODIFIED:20231103T152928Z
UID:10000264-1700157600-1700161200@www.neuropac.info
SUMMARY:Timoleon Moraitis @ ONM - Making Neuromorphic Computing Mainstream
DESCRIPTION:From the Open Neuromorphic website \nJoin us for a workshop with Timoleon Moraitis\, research group leader in neuromorphic computing\, at the interface of computational neuroscience with artificial intelligence.
URL:https://www.neuropac.info/event/timoleon-moraitis-onm-making-neuromorphic-computing-mainstream/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20231107
DTEND;VALUE=DATE:20231109
DTSTAMP:20260417T090814
CREATED:20230828T171403Z
LAST-MODIFIED:20230828T171403Z
UID:10000239-1699315200-1699487999@www.neuropac.info
SUMMARY:SNUFA Workshop 2023
DESCRIPTION:SNUFA is an online workshop and community focused on research advances in the field of “Spiking Networks as Universal Function Approximators”. The annual SNUFA online workshop brings together researchers in spiking neural networks to present their work and discuss translating these findings into a better understanding of neural circuits and novel brain-inspired computing approaches. Topics of interest include artificial and biologically plausible learning algorithms and the dissection of trained spiking circuits toward understanding neural processing. \nThe workshops are organised by Dan Goodman and Friedemann Zenke. \nRegister or submit on the website: https://snufa.net/2023/
URL:https://www.neuropac.info/event/snufa-workshop-2023/
LOCATION:Online
CATEGORIES:Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20230926T180000
DTEND;TZID=Europe/Berlin:20230926T193000
DTSTAMP:20260417T090814
CREATED:20230925T101429Z
LAST-MODIFIED:20230925T101429Z
UID:10000246-1695751200-1695756600@www.neuropac.info
SUMMARY:Giulia D’Angelo: What’s catching your eye? The visual attention mechanism
DESCRIPTION:Abstract\nEvery agent\, whether animal or robotic\, needs to process its visual sensory input in an efficient way\, to allow understanding of\, and interaction with\, the environment. The process of filtering relevant information out of the continuous bombardment of complex sensory data is called selective attention. Visual attention is the result of the complex interplay between bottom-up and top-down mechanisms to perceptually organise and understand the scene. Giulia will describe how to approach visual attention using bio-inspired models emulating the human visual system to allow robots to interact with their surroundings. \nSpeaker’s bio\nGiulia D’Angelo is a postdoctoral researcher in neuroengineering in the EDPR laboratory at the Italian Institute of Technology. She obtained a B.Sc. in biomedical engineering and an M.Sc. in neuroengineering\, developing a neuromorphic visual system at the King’s College of London. She successfully defended her Ph.D. VIVA in 2022 at the University of Manchester\, proposing a biologically plausible model for event-driven saliency-based visual attention. She is currently working on bio-inspired visual algorithms exploiting neuromorphic platforms.
URL:https://www.neuropac.info/event/giulia-dangelo-whats-catching-your-eye-the-visual-attention-mechanism/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=UTC:20230713T163000
DTEND;TZID=UTC:20230713T173000
DTSTAMP:20260417T090814
CREATED:20230127T222256Z
LAST-MODIFIED:20230828T170621Z
UID:10000047-1689265800-1689269400@www.neuropac.info
SUMMARY:Theory of Neuromorphic Computing
DESCRIPTION:Recurring discussion meeting by researchers interested in the theory of neuromorphic computing. \nHosted by Arne Diehl and Johan Kwisthout of Radboud University. To join the meetings\, please contact Arne Diehl: arne.diehl@donders.ru.nl.
URL:https://www.neuropac.info/event/theory-of-neuromorphic-computing/2023-07-13/
LOCATION:Online
CATEGORIES:Discussion
ORGANIZER;CN="Arne Diehl":MAILTO:arne.diehl@donders.ru.nl
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=UTC:20230629T163000
DTEND;TZID=UTC:20230629T173000
DTSTAMP:20260417T090814
CREATED:20230127T222256Z
LAST-MODIFIED:20230828T170621Z
UID:10000046-1688056200-1688059800@www.neuropac.info
SUMMARY:Theory of Neuromorphic Computing
DESCRIPTION:Recurring discussion meeting by researchers interested in the theory of neuromorphic computing. \nHosted by Arne Diehl and Johan Kwisthout of Radboud University. To join the meetings\, please contact Arne Diehl: arne.diehl@donders.ru.nl.
URL:https://www.neuropac.info/event/theory-of-neuromorphic-computing/2023-06-29/
LOCATION:Online
CATEGORIES:Discussion
ORGANIZER;CN="Arne Diehl":MAILTO:arne.diehl@donders.ru.nl
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/Los_Angeles:20230627T080000
DTEND;TZID=America/Los_Angeles:20230627T090000
DTSTAMP:20260417T090814
CREATED:20230626T220627Z
LAST-MODIFIED:20230626T220627Z
UID:10000238-1687852800-1687856400@www.neuropac.info
SUMMARY:INRC Forum: Robert Legenstein
DESCRIPTION:Memory-enriched computation and learning through synaptic and non-synaptic plasticity\nAbstract:Virtually any task faced by humans has a temporal component and therefore demands some form of memory. Consequently\, a variety of memory systems and mechanisms have been shown to exist in the brain of humans and other animals. These memory systems operate on a multitude of time scales\, from seconds to years. Yet\, it is still not well understood how memory is implemented in the brain and how cortical neuronal networks utilize these systems for computation. In this talk\, I will present some recent models that extend (spiking and non-spiking) neural network models with memory using Hebbian and non-Hebbian types of plasticity. I will discuss the similarities between these models and transformers\, arguably the most powerful models for sequence processing in the area of machine learning. I will show that Hebbian plasticity can significantly increase the computational and learning capabilities of spiking neural networks. Further\, I will show how neurons with non-synaptic plasticity can be utilized for memory and how networks of such neurons can be trained without the need to backpropagate errors through time. \nBio: Dr. Robert Legenstein received his PhD in computer science from the Graz University of Technology\, Graz\, Austria\, in 2002. He is a full professor at the Department of Computer Science\, TU Graz\, head of the Institute for Theoretical Computer Science\, and leading the Graz Center for Machine Learning. Dr. Legenstein has served as associate editor of IEEE Transactions on Neural Networks and Learning Systems (2012-2016). He is an action editor for Transactions on Machine Learning Research\, and he was on the program committee for NeurIPS and ICLR several times. 
His primary research interests are learning in models for biological networks of neurons and neuromorphic hardware\, probabilistic neural computation\, novel brain-inspired architectures for computation and learning\, and memristor-based computing concepts. \nFor the meeting link\, see the full INRC Forum Spring 2023 Schedule (accessible only to INRC Affiliates and Fully Engaged Members).
URL:https://www.neuropac.info/event/inrc-forum-robert-legenstein/
LOCATION:Online
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/Los_Angeles:20230620T080000
DTEND;TZID=America/Los_Angeles:20230620T090000
DTSTAMP:20260417T090814
CREATED:20230618T010420Z
LAST-MODIFIED:20230618T010420Z
UID:10000237-1687248000-1687251600@www.neuropac.info
SUMMARY:INRC Forum: Wolfgang Maass\, Christoph Stoeckl & Yukun Yang
DESCRIPTION:Local prediction-learning in high-dimensional spaces enables neural networks to plan\nAbstract: Being able to plan a sequence of actions in order to reach a goal\, or more generally to solve a problem\, is a cornerstone of higher brain function. But compelling models which explain how the brain can achieve that are missing. We show that local synaptic plasticity enables a neural network to create high-dimensional representations of actions and sensory inputs so that they encode salient information about their relationship. In fact\, it can create a cognitive map that reduces planning to a simple geometric problem in a high-dimensional space that can easily be solved by a neural network. This method also explains how self-supervised learning enables a neural network to control a complex muscle system so that it can handle locomotion challenges that never occurred during learning. The underlying learning strategy bears some similarity to self-attention networks (Transformers). But it does not require non-local learning rules or very large datasets. Hence it is suitable for implementation in highly energy-efficient neuromorphic hardware\, in particular for on-chip learning on Loihi 2.\nOne goal of our presentation will be to initiate discussions about the relation of this learning-based use of large vectors to other VSA approaches\, its relation to Transformers\, and possible applications in robotics. \nBio: Wolfgang Maass is a Professor of Computer Science at Technische Universität Graz. He received his PhD (1974) and Habilitation (1978) in Mathematics from Ludwig-Maximilians-Universität in Munich. He conducted research at MIT\, the University of Chicago\, and UC Berkeley\, as a Heisenberg Fellow of the Deutsche Forschungsgemeinschaft. He has been the Editor of Machine Learning (1995-1997)\, Archive for Mathematical Logic (1987-2000)\, and Biological Cybernetics (2006-present). 
He was also a Sloan Fellow at the Computational Neurobiology Lab of the Salk Institute in La Jolla\, California from 1997-1998. Since 2005\, he has been an Adjunct Fellow of the Frankfurt Institute of Advanced Studies (FIAS).\nChristoph Stoeckl is a Postdoc researcher at Technische Universität Graz working in the intersection between computational neuroscience and AI. His research interests include neuromorphic hardware as well as exploring connections between Transformers and neural networks. Before joining the research lab of Prof. Maass\, he obtained a Master’s degree in Computer Science also at TU Graz.\nYukun Yang is a 1st-year Doctoral Student at Technische Universität Graz\, supervised by Prof. Wolfgang Maass. His primary research interest is at the intersection of AI and neuroscience\, with a focus on discovering the learning principles of the brain and its neuromorphic applications. Before joining TU Graz\, he earned M.S. in the ECE Department at Duke University in 2020. Earlier\, he received B.E. in Information Engineering from Xi’an Jiaotong University in 2018. \nFor the meeting link\, see the full INRC Forum Spring 2023 Schedule (accessible only to INRC Affiliates and Fully Engaged Members).
URL:https://www.neuropac.info/event/inrc-forum-tu-graz/
LOCATION:Online
END:VEVENT
END:VCALENDAR