BEGIN:VCALENDAR
VERSION:2.0
PRODID:-// - ECPv6.15.16//NONSGML v1.0//EN
CALSCALE:GREGORIAN
METHOD:PUBLISH
X-ORIGINAL-URL:https://www.neuropac.info
X-WR-CALDESC:Events for NeuroPAC
REFRESH-INTERVAL;VALUE=DURATION:PT1H
X-Robots-Tag:noindex
X-PUBLISHED-TTL:PT1H
BEGIN:VTIMEZONE
TZID:America/Los_Angeles
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20240310T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20241103T090000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20250309T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20251102T090000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20260308T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20261101T090000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Helsinki
BEGIN:DAYLIGHT
TZOFFSETFROM:+0200
TZOFFSETTO:+0300
TZNAME:EEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0300
TZOFFSETTO:+0200
TZNAME:EET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0200
TZOFFSETTO:+0300
TZNAME:EEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0300
TZOFFSETTO:+0200
TZNAME:EET
DTSTART:20251026T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0200
TZOFFSETTO:+0300
TZNAME:EEST
DTSTART:20260329T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0300
TZOFFSETTO:+0200
TZNAME:EET
DTSTART:20261025T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/London
BEGIN:DAYLIGHT
TZOFFSETFROM:+0000
TZOFFSETTO:+0100
TZNAME:BST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0100
TZOFFSETTO:+0000
TZNAME:GMT
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0000
TZOFFSETTO:+0100
TZNAME:BST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0100
TZOFFSETTO:+0000
TZNAME:GMT
DTSTART:20251026T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0000
TZOFFSETTO:+0100
TZNAME:BST
DTSTART:20260329T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0100
TZOFFSETTO:+0000
TZNAME:GMT
DTSTART:20261025T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20220327T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20221030T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20230326T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20231029T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20251026T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20260329T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20261025T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Amsterdam
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20220327T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20221030T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20230326T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20231029T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20251026T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20260329T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20261025T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Stockholm
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20251026T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20260329T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20261025T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:America/New_York
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20220313T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20221106T060000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20230312T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20231105T060000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20240310T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20241103T060000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20250309T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20251102T060000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
DTSTART:20260308T070000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
DTSTART:20261101T060000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:Europe/Zurich
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20230326T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20231029T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20240331T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20241027T010000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:20250330T010000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:20251026T010000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:America/Los_Angeles
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20220313T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20221106T090000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20230312T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20231105T090000
END:STANDARD
BEGIN:DAYLIGHT
TZOFFSETFROM:-0800
TZOFFSETTO:-0700
TZNAME:PDT
DTSTART:20240310T100000
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:-0700
TZOFFSETTO:-0800
TZNAME:PST
DTSTART:20241103T090000
END:STANDARD
END:VTIMEZONE
BEGIN:VTIMEZONE
TZID:UTC
BEGIN:STANDARD
TZOFFSETFROM:+0000
TZOFFSETTO:+0000
TZNAME:UTC
DTSTART:20220101T000000
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
DTSTART;TZID=America/Los_Angeles:20251113T140000
DTEND;TZID=America/Los_Angeles:20251113T153000
DTSTAMP:20260429T150529Z
CREATED:20250920T190906Z
LAST-MODIFIED:20250920T191120Z
UID:10000354-1763042400-1763047800@www.neuropac.info
SUMMARY:Neuromorphic Engineering for Clinical Care Minisymposium
DESCRIPTION:Conference: IEEE EMBS NER 2025\nSession Host: NIH BRAIN Initiative\nDate: November 13\, 2025\nTime: 2:00 – 3:30 PM (US Pacific Time\, UTC-8)\nLocation: NER 2025 Conference (IEEE EMBS)\n\nThis minisymposium will highlight advances in neuromorphic engineering for clinical care\, with contributions from researchers and clinicians working at the intersection of neural technologies\, healthcare applications\, and brain-inspired computing. Organized by the NIH BRAIN Initiative\, the session aims to foster dialogue between neuroscience\, engineering\, and clinical communities. \n 
URL:https://www.neuropac.info/event/neuromorphic-engineering-for-clinical-care-minisymposium/
LOCATION:Town and Country Resort\, 500 Hotel Cir N\, San Diego\, 92108\, United States
CATEGORIES:Conference,Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Helsinki:20250926T140000
DTEND;TZID=Europe/Helsinki:20250926T150000
DTSTAMP:20260429T150529Z
CREATED:20250923T233543Z
LAST-MODIFIED:20250924T093644Z
UID:10000357-1758895200-1758898800@www.neuropac.info
SUMMARY:AIDA4Edge: Event-Based Selective Attention for Multi-Resolution fast ROI Detection
DESCRIPTION:Event-based vision sensors offer a fundamentally different way of perceiving the world\, by only capturing changes in a scene and enabling low-latency and efficient perception. \nHowever\, the high event rates of modern sensors can make them impractical for edge applications. In this talk\, I will present an Event-based Selective Attention approach that leverages multi-resolution processing for fast Region of Interest (ROI) detection\, validated on a large-scale automotive dataset. Inspired by biological attention\, the method combines coarse global monitoring with fine-grained saliency detection\, dynamically focusing resources only on relevant activity. This enables scalable\, low-power processing while preserving high sensitivity to salient features. \nThe seminar will be delivered by Dr Luca Peres and facilitated by Dr Davide Bertozzi and Dr Oliver Rhodes. \nThe webinar will be held in hybrid format and available online on Teams at this link: https://tinyurl.com/AIDA4Edge-Webinar \nContact: luca.peres-2@manchester.ac.uk
URL:https://www.neuropac.info/event/aida4edge-event-based-selective-attention-for-multi-resolution-fast-roi-detection/
LOCATION:University of Manchester\, Oxford Road\, Manchester\, M13 9PL\, United Kingdom
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/London:20250702T130000
DTEND;TZID=Europe/London:20250702T140000
DTSTAMP:20260429T150529Z
CREATED:20250627T222704Z
LAST-MODIFIED:20250627T222704Z
UID:10000340-1751461200-1751464800@www.neuropac.info
SUMMARY:University of Manchester - Inaugural Lecture: 35 years of Neuromorphic Systems Engineering
DESCRIPTION:Inaugural Lecture: 35 years of Neuromorphic Systems Engineering by Professor André Van Schaik\, Furber Chair in Computer Systems Engineering\, Department of Computer Science \nAbstract: In this Inaugural Lecture I will give an overview of my career in Neuromorphic Engineering\, some recent achievements of the International Centre for Neuromorphic Systems at Western Sydney University\, and my plans for a new International Centre for Neuromorphic Systems at the University of Manchester. \nA brief bio can be found here: https://research.manchester.ac.uk/en/persons/andré-van-schaik
URL:https://www.neuropac.info/event/university-of-manchester-inaugural-lecture-35-years-of-neuromorphic-systems-engineering/
LOCATION:University of Manchester\, Oxford Road\, Manchester\, M13 9PL\, United Kingdom
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20250401T140000
DTEND;TZID=Europe/Berlin:20250401T150000
DTSTAMP:20260429T150529Z
CREATED:20250404T174514Z
LAST-MODIFIED:20250404T174514Z
UID:10000330-1743516000-1743519600@www.neuropac.info
SUMMARY:NHR PerfLab Seminar: Neuromorphic Computing from the Computer Science Perspective – Algorithms and Applications
DESCRIPTION:Speaker: Catherine Schuman\, Department of Electrical Engineering and Computer Science\, University of Tennessee \nDate and time: Tuesday\, April 1\, 2025\, 2:00 p.m. CEST \nZoom: https://go-nhr.de/perflab-seminar \nAbstract Neuromorphic computing is a popular technology for the future of computing.  Much of the focus in neuromorphic computing research and development has focused on new architectures\, devices\, and materials\, rather than in the software\, algorithms\, and applications of these systems.  In this talk\, I will overview the field of neuromorphic from the computer science perspective.  I will give an introduction to spiking neural networks\, as well as some of the most common algorithms used in the field.  Finally\, I will discuss the potential for using neuromorphic systems in real-world applications\, from scientific data analysis to autonomous vehicles.
URL:https://www.neuropac.info/event/nhr-perflab-seminar-neuromorphic-computing-from-the-computer-science-perspective-algorithms-and-applications/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Amsterdam:20250212T130000
DTEND;TZID=Europe/Amsterdam:20250212T143000
DTSTAMP:20260429T150529Z
CREATED:20250205T093937Z
LAST-MODIFIED:20250205T093937Z
UID:10000325-1739365200-1739370600@www.neuropac.info
SUMMARY:Seminar by Francky Catthoor: Where digital becomes almost-neural: variability phenomena at the extreme miniaturization limit of 'classical' microchips
DESCRIPTION:Francky Catthoor\nHe is an expert with a life-long research record on (digital) microchip design\, and he has also collaborated with neuromorphic computing researchers. In this special seminar\, he will give an informal introduction to the challenges of extreme miniaturization\, after which we will enjoy an open discussion round. It is not unlikely that we can learn a lot for our CogniGron research! \nSeminar title\nWhere digital becomes almost-neural: variability phenomena at the\nextreme miniaturization limit of ‘classical’ microchips. \nSeminar abstract\nWhen transistor sizes are pushed to the physical limits of\nminiaturization\, effects that are known from brains and analogue\nneuromorphic substrates appear and have to be dealt with. These\nphenomena include stochasticity\, drifting dynamics\, device mismatch\,\nimportance of signal travel delays and multi-timescale synchronization\,\nageing. In the world of ‘classical’ digital microchip engineering\, these\neffects are recognized and dealt with by architectural and\ncontrol-theoretic measures.
URL:https://www.neuropac.info/event/seminar-by-francky-catthoor-where-digital-becomes-almost-neural-variability-phenomena-at-the-extreme-miniaturization-limit-of-classical-microchips/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Stockholm:20250203T150000
DTEND;TZID=Europe/Stockholm:20250203T160000
DTSTAMP:20260429T150529Z
CREATED:20250205T092621Z
LAST-MODIFIED:20250205T092621Z
UID:10000319-1738594800-1738598400@www.neuropac.info
SUMMARY:SmallTalks "Brain-inspired neuromorphic computing using two dimensional materials"
DESCRIPTION:Welcome to a seminar in the series SmallTalks [about Nanoscience] arranged by Nano​ Area of Advance. \nSpeaker: Sameer Kumar Mallik\, Postdoc\, Quantum Device Physics\, Microtechnology and Nanoscience \nCoffee will be served before the start of the seminar. Students are welcome to participate! \nAbstract: Neuromorphic computing is a cutting-edge approach to designing computer systems inspired by the structure and functioning of the human brain. Unlike conventional computers\, which rely on von Neumann processing techniques\, neuromorphic systems use artificial neurons and synapses to mimic human cognition abilities such as pattern recognition\, sensory processing\, and decision-making more efficiently. Two-dimensional (2D) materials\, such as graphene\, transition metal dichalcogenides (TMDs)\, and hexagonal boron nitride\, have emerged as promising candidates for advancing neuromorphic systems. These materials exhibit unique electrical\, optical\, and mechanical properties\, including atomic thinness\, tunable bandgaps\, high carrier mobility\, and scalability\, which make them ideal for constructing energy-efficient\, high-performance synaptic and neuronal components. This presentation explores how 2D materials could revolutionize technologies like artificial intelligence\, smart devices\, and low-power electronics for a more sustainable and connected future.
URL:https://www.neuropac.info/event/smalltalks-brain-inspired-neuromorphic-computing-using-two-dimensional-materials/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20250124
DTEND;VALUE=DATE:20250125
DTSTAMP:20260429T150529Z
CREATED:20250205T091523Z
LAST-MODIFIED:20250205T091523Z
UID:10000317-1737676800-1737763199@www.neuropac.info
SUMMARY:Neuromorphic Computing for Science
DESCRIPTION:Our brains consume around 20 Watts of power\, a negligible amount in comparison to the multi-million Watts consumed by supercomputers. The emerging paradigm of Neuromorphic Computing draws inspiration from the structure and functioning of the human brain\, particularly this small power consumption and extremely fast response times. \nJoin theoretical and computational physicist Johan Mentink as he presents evidence of why the neuromorphic computing paradigms offer not only much more energy-efficient but also much faster solutions to widely used computational science problems\, with the potential to break existing computational barriers. If you’re interested in the future of computing and how our brains have provided the blueprint\, then you don’t want to miss this event! \nThis event is in partnership with the Embassy of the Kingdom of the Netherlands.
URL:https://www.neuropac.info/event/neuromorphic-computing-for-science/
LOCATION:The Royal Institution\, 21 Albemarle Street\, London\, W1S 4BS\, United Kingdom
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;VALUE=DATE:20241220
DTEND;VALUE=DATE:20241221
DTSTAMP:20260429T150529Z
CREATED:20241202T111936Z
LAST-MODIFIED:20241202T111936Z
UID:10000309-1734652800-1734739199@www.neuropac.info
SUMMARY:ONM Student Talk: Ram Gaurav @ Virginia Tech
DESCRIPTION:Ramashish Gaurav (Ram) is a 3rd year Ph.D. student at Virginia Tech\, USA. He is supervised by Prof. Yang (Cindy) Yi in her BRICC Lab\, ECE @ VT. Of late\, Ram has been working on reservoir-based spiking models for Time Series Classification (TSC). Reservoir Computing is a well-established domain for time-series processing where a reservoir of statically (and recurrently) connected neurons compute high-dimensional temporal features\, over which a linear readout layer learns the mapping to the output. \nIn his recent work [1]\, Ram designed the Legendre-SNN (LSNN)\, a simple – yet high performing SNN model (for univariate TSC) where he has used the Legendre Delay Network (LDN) [2] as a non-spiking reservoir (in fact\, the LDN in LSNN is implemented with just basic matrix-operations). In a subsequent work (currently under review)\, he extended his LSNN to DeepLSNN that accounts for multivariate time-series signals too; upon experimenting with it\, he found that DeepLSNN models outperform a popular (and complex) LSTM-Conv integrated model [3] on more than 30% of 101 TSC datasets. His latest work is on the evaluation of Legendre-SNN on the Loihi-2 chip [4] — on which this talk is focused at. \nTalk details here\, time TBA.
URL:https://www.neuropac.info/event/onm-student-talk-ram-gaurav-virginia-tech/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/New_York:20240504T110000
DTEND;TZID=America/New_York:20240504T121500
DTSTAMP:20260429T150529Z
CREATED:20240428T081014Z
LAST-MODIFIED:20240428T081014Z
UID:10000285-1714820400-1714824900@www.neuropac.info
SUMMARY:Sangyeob Kim @ ONM - C-DNN and C-Transformer: Mixing ANNs and SNNs for the Best of Both Worlds
DESCRIPTION:From the Open Neuromorphic website. \nSangyeob and his team have developed a C-DNN processor that effectively processes object recognition workloads\, achieving 51.3% higher energy efficiency compared to the previous state-of-the-art processor. Subsequently\, they have applied C-DNN not only to image classification but also to other applications\, and have developed the C-Transformer\, which applies this technique to a Large Language Model (LLM). As a result\, they demonstrate that the energy consumed in LLM can be reduced by 30% to 72% using the C-DNN technique\, compared to the previous state-of-the-art processor. In this talk\, we will introduce the processor developed for C-DNN and C-Transformer\, and discuss how neuromorphic computing can be used in actual applications in the future. \n\n\nAbout the Speaker\nSangyeob Kim (Student Member\, IEEE) received the B.S.\, M.S. and Ph.D. degrees from the School of Electrical Engineering\, Korea Advanced Institute of Science and Technology (KAIST)\, Daejeon\, South Korea\, in 2018\, 2020 and 2023\, respectively. He is currently a Post-Doctoral Associate with the KAIST. His current research interests include energy-efficient system-on-chip design\, especially focused on deep neural network accelerators\, neuromorphic hardware\, and computing-in-memory accelerators.
URL:https://www.neuropac.info/event/sangyeob-kim-onm-c-dnn-and-c-transformer-mixing-anns-and-snns-for-the-best-of-both-worlds/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20240418T230000
DTEND;TZID=Europe/Berlin:20240419T003000
DTSTAMP:20260429T150529Z
CREATED:20231103T153206Z
LAST-MODIFIED:20240428T080746Z
UID:10000267-1713481200-1713486600@www.neuropac.info
SUMMARY:Tobias Fischer @ ONM - Advances in Neuromorphic Visual Place Recognition
DESCRIPTION:From the Open Neuromorphic website. \nAbout the Speaker\nTobias conducts interdisciplinary research at the intersection of intelligent robotics\, computer vision\, and computational cognition. My main goal is to develop high-performing\, bio-inspired computer vision algorithms that simultaneously examine animals/humans and robots’ perceptional capabilities. He is a Lecturer (Assistant Professor) in Queensland University of Technology’s Centre for Robotics. He joined the Centre as an Associate Investigator and Research Fellow in January 2020. Previously\, he was a postdoctoral researcher in the Personal Robotics Lab at Imperial College London. He received a PhD from Imperial College in January 2019. His thesis was awarded the UK Best Thesis in Robotics Award 2018 and the Eryl Cadwaladr Davies Award for the best thesis in Imperial’s EEE Department in 2017-2018. He previously received an M.Sc. degree (distinction) in Artificial Intelligence from The University of Edinburgh in 2014 and a B.Sc. degree in Computer Engineering from Ilmenau University of Technology\, Germany\, in 2013. His works have attracted two best poster awards\, one best paper award\, and he was the senior author of the winning submission to the Facebook Mapillary Place Recognition Challenge 2020.
URL:https://www.neuropac.info/event/tobias-fischer-onm-advances-in-neuromorphic-visual-place-recognition/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20240305T180000
DTEND;TZID=Europe/Berlin:20240305T193000
DTSTAMP:20260429T150529Z
CREATED:20240130T001106Z
LAST-MODIFIED:20240308T111533Z
UID:10000277-1709661600-1709667000@www.neuropac.info
SUMMARY:Maxence Ernoult @ ONM: Accelerating Neuromorphic Inference and Training at the Edge @ Rain
DESCRIPTION:From the Open Neuromorphic website. \n\n\nMaxence will present us Rain’s vision and technological roadmap to build hardware optimized for inference and training at the edge including both the hardware and algorithm aspects with an emphasis on why physical and mathematical principles matter more to him than biological inspiration. \n\n\n\n\n\nAbout the Speaker\nMaxence Ernoult graduated from Ecole Polytechnique and the University of Cambridge in 2016\, specializing in applied mathematics and theoretical physics. His PhD research was conducted in neuromorphic computing at Sorbonne University\, in collaboration with Mila. During this time\, he specialized in developing hardware-friendly alternatives to backpropagation and played a significant role in scaling up several of these alternatives\, including Equilibrium Propagation and Difference Target Propagation. This work was undertaken alongside notable figures such as Ben Scellier\, Blake Richards\, and Yoshua Bengio. In 2021\, Maxence joined IBM Research\, focusing on AI safety. Subsequently\, in 2022\, he began a new position at Rain.
URL:https://www.neuropac.info/event/maxence-ernoult-onm-accelerating-neuromorphic-inference-and-training-at-the-edge-rain/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Zurich:20240227T180000
DTEND;TZID=Europe/Zurich:20240227T193000
DTSTAMP:20260429T150529Z
CREATED:20240105T080743Z
LAST-MODIFIED:20240308T111522Z
UID:10000275-1709056800-1709062200@www.neuropac.info
SUMMARY:Aaron Spieler @ ONM - The ELM Neuron: An Efficient and Expressive Cortical Neuron Model Can Solve Long-Horizon Tasks
DESCRIPTION:From the Open Neuromorphic website. \nBiological cortical neurons are remarkably sophisticated computational devices\, temporally integrating their vast synaptic input over an intricate dendritic tree\, subject to complex\, nonlinearly interacting internal biological processes. \nWith the aim to explore the computational implications of leaky memory units and nonlinear dendritic processing\, we introduce the Expressive Leaky Memory (ELM) neuron model\, a biologically inspired phenomenological model of a cortical neuron. Remarkably\, by exploiting a few such slowly decaying memory-like hidden states and two-layered nonlinear integration of synaptic input\, our ELM neuron can accurately match the aforementioned input-output relationship with under ten-thousand trainable parameters. \nWe evaluate the model on various tasks with demanding temporal structures\, including the Long Range Arena (LRA) datasets\, as well as a novel neuromorphic dataset based on the Spiking Heidelberg Digits dataset (SHD-Adding). The ELM neuron reliably outperforms the classic Transformer or Chrono-LSTM architectures on these tasks\, even solving the Pathfinder-X task with over 70% accuracy (16k context length). \n\n\n\n\n\nAbout the Speaker\nAaron Spieler is a computational neuroscientist passionate about exploring the intersection of deep learning and neuroscience. After earning his Bachelor’s in Computer Science from the University of Potsdam\, he undertook an extended internship at Amazon Web Services working in deep learning based forecasting\, before further specializing with a Master’s in Computational Neuroscience at the University of Tübingen. Throughout his Master’s thesis and a subsequent internship at the Max Planck Institute for Intelligent Systems\, Aaron focused on phenomenological neuron modeling with applications to long-range prediction tasks. Pursuing this work allowed him to collaborate with excellent researchers from diverse backgrounds\, including Prof. 
Bernhard Schölkopf and Prof. Anna Levina.
URL:https://www.neuropac.info/event/aaron-spieler-onm-the-elm-neuron-an-efficient-and-expressive-cortical-neuron-model-can-solve-long-horizon-tasks/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Amsterdam:20240207T160000
DTEND;TZID=Europe/Amsterdam:20240207T170000
DTSTAMP:20260429T150529Z
CREATED:20240130T002332Z
LAST-MODIFIED:20240130T002332Z
UID:10000278-1707321600-1707325200@www.neuropac.info
SUMMARY:Mahyar Shahsavari @ ELLIS MeetUp Nijmegen: Revolutionizing Intelligence
DESCRIPTION:Title:\n“Revolutionizing Intelligence: Bridging Minds and Machines with Neuromorphic Computing” \nAbstract\nNeuromorphic event-driven systems emulate the computational mechanisms of the brain through the utilization of spiking neural networks (SNN). Neuromorphic systems serve two primary application domains: simulating neural information processing in neuroscience and acting as accelerators for cognitive computing in engineering applications.\nIn this seminar\, we delve into the core principles of neuromorphic paradigm. Mahyar will discuss how this innovative approach addresses the limitations of traditional AI models in computation\, ushering in a new era of efficiency\, adaptability\, and parallel processing.\nThe presentation highlights key advancements\, such as spiking neural networks and neuromorphic hardware\, demonstrating their pivotal role in achieving brain-like computation and real-time processing. In this seminar the potential applications in industry\, robotics\, edge computing and beyond will be discussed\, ultimately inviting the audience to envision a harmonious integration of minds and machines through the revolutionary lens of neuromorphic computing. \nMicrosoft Teams Seminar Link
URL:https://www.neuropac.info/event/mahyar-shahsavari-ellis-meetup-nijmegen-revolutionizing-intelligence/
LOCATION:Nijmegen\, Nijmegen\, Netherlands
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Zurich:20240205T180000
DTEND;TZID=Europe/Zurich:20240205T200000
DTSTAMP:20260429T150529Z
CREATED:20240105T080554Z
LAST-MODIFIED:20240105T080554Z
UID:10000274-1707156000-1707163200@www.neuropac.info
SUMMARY:Jens E. Pedersen @ ONM - NIR: A Unified Instruction Set for Brain-Inspired Computing
DESCRIPTION:Have you wondered how to use neuromorphic hardware platforms? \nAre you depressed by your power bill after you bought your >400W GPU rig? \nThen you came to the right place! \nIn this workshop\, we will show you how to move models from your favourite framework directly to neuromorphic hardware with 1-2 lines of code! We will present the technology behind\, the Neuromorphic Intermediate Representation \, and demonstrate how we can use it to run a live spiking convnet on the Speck chip. \nNIR is currently supported by Intel Loihi \, Speck \, SpiNNaker2 \, Xylo and a host of simulators\, including Norse \, snnTorch \, and Spyx . \nJoin us on the 5th of February to get your own hands-on experience with NIR and neuromorphic hardware! \nAll it requires is a computer and a bit of Python knowledge. \nAgenda: \n\n18:00 – 19:00: NIR introduction\n\nMotivation: coupling neuromorphic hardware and software\nDemonstrating NIR: from PyTorch to Speck\nQ&A\n\n\n19:00 – 20:00: Workshop\n\nHands-on experience with NIR via Jupyter Notebooks or custom models\nQ&A and collaborative discussions\n\n\n\nSpeakers: \n\nJens E. Pedersen \, PhD at the Neurocomputing Systems lab at KTH Royal Institute of Technology\, Sweden\n\nNote: The event will be hosted virtually. Stay tuned for the video link and further updates. \n\n\n\n\n\nAbout the Speaker\nJens is a computer scientist studying his PhD in neuromorphic computing at the KTH Royal Institute of Technology. Jens co-authored the Norse simulator and the AEStream event-based streaming library.
URL:https://www.neuropac.info/event/jens-e-pedersen-onm-nir-a-unified-instruction-set-for-brain-inspired-computing/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Zurich:20240125T180000
DTEND;TZID=Europe/Zurich:20240125T193000
DTSTAMP:20260429T150529Z
CREATED:20240105T080357Z
LAST-MODIFIED:20240105T080357Z
UID:10000273-1706205600-1706211000@www.neuropac.info
SUMMARY:Carlos Ortega-Otero @ ONM - IBM NorthPole: Neural Inference at the Frontier of Energy\, Space\, and Time
DESCRIPTION:Abstract \nComputing\, since its inception\, has been processor-centric\, with memory separated from compute. Inspired by the organic brain and optimized for inorganic silicon\, NorthPole is a neural inference architecture that blurs this boundary by eliminating off-chip memory\, intertwining compute with memory on-chip\, and appearing externally as an active memory chip. NorthPole is a low-precision\, massively parallel\, densely interconnected\, energy-efficient\, and spatial computing architecture with a co-optimized\, high-utilization programming model. \nOn the ResNet50 benchmark image classification network\, relative to a graphics processing unit (GPU) that uses a comparable 12-nanometer technology process\, NorthPole achieves a 25 times higher energy metric of frames per second (FPS) per watt\, a 5 times higher space metric of FPS per transistor\, and a 22 times lower time metric of latency. Similar results are reported for the Yolo-v4 detection network. \nNorthPole outperforms all prevalent architectures\, even those that use more-advanced technology processes. \nAbout the Speaker \n\n\n\n\n\nDr. Carlos Ortega-Otero is an Sr. Research Staff Member at IBM driven by a passion in Circuit Design\, Neuromorphic Chip Architectures\, Low-Power Circuits and Physical Design optimizations. He earned his Ph.D. from Cornell University under the guidance of Prof. Rajit Manohar. \nThroughout his career\, he has worked in groundbreaking projects\, including Ultra-Low Power Asynchronous Sensor Network nodes\, Medical Implantable Wireless Sensors\, The TrueNorth Brain-Inspired Chip\, and the NorthPole Project. At IBM\, Carlos works under the leadership of Dr. Dharmendra Modha in the Brain-Inspired Computing Group. \nHe plays key roles in Architecture\, Specification\, Digital Implementation\, Physical Design\, Timing Signoff\, and Manufacturing teams of the NorthPole Project. 
Carlos is proud to be part of the Brain-Inspired Computing Group at IBM that continues to shape the future of Integrated Circuits and AI.
URL:https://www.neuropac.info/event/carlos-ortega-otero-onm-ibm-northpole-neural-inference-at-the-frontier-of-energy-space-and-time/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Zurich:20240115T180000
DTEND;TZID=Europe/Zurich:20240115T193000
DTSTAMP:20260429T150529
CREATED:20240105T080205Z
LAST-MODIFIED:20240105T080205Z
UID:10000272-1705341600-1705347000@www.neuropac.info
SUMMARY:Cristian Axenie @ ONM - Hybrid Learning for Event-Based Visual Motion Detection and Tracking of Pedestrians
DESCRIPTION:The Vision Zero Program’s purpose is to reduce traffic-related fatalities and serious injuries while promoting equitable\, safe\, and healthy mobility for all. Ultimately\, the challenge is to detect pedestrians during the day and especially at night in order to implement safety measures. \nThe current study introduces an award-winning low-power solution employing neuromorphic visual sensing and hybrid neuro-statistical processing developed by the Technische Hochschule Nürnberg team for the TinyML Vision Zero San Jose Competition. The solution proposes a novel neuromorphic edge fusion of spiking neural networks and event-based expectation maximization for the detection and tracking of pedestrians and bicyclists. \nWe provide a deployment-ready evaluation of the detection performance along with robustness\, energy footprint\, and weatherization while emphasizing the advantages of the neuro-statistical edge solution and its city-level scaling capabilities. \n\n\nAbout the Speaker\nDr. Axenie is Professor of Artificial Intelligence and Research Group Leader in Cognitive Neurocomputing at the Technische Hochschule Nürnberg Georg Simon Ohm in Germany. \nAfter earning a Dr. Eng. Sc. in Neuroscience and Robotics from the Technical University of Munich in 2016\, Dr. Axenie joined the Huawei Research Center in Munich. Between 2017 and 2023 Dr. Axenie was Staff Research Engineer with Huawei Research Center. At the same time\, Dr. Axenie was the Principal Investigator and Head of the Audi Konfuzius-Institut Ingolstadt Laboratory at the Technische Hochschule Ingolstadt. \nDr. Axenie is a seasoned researcher with more than 15 years of academic research and more than 10 years of industrial research experience. His research was disseminated in more than 50 peer-reviewed publications and more than 10 patents. Currently Dr. Axenie focuses on sustainable and efficient deployment of intelligent algorithms for sensor fusion and closed-loop control.
URL:https://www.neuropac.info/event/cristian-axenie-onm-hybrid-learning-for-event-based-visual-motion-detection-and-tracking-of-pedestrians/
LOCATION:Online
CATEGORIES:Talk,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20231219T180000
DTEND;TZID=Europe/Berlin:20231219T193000
DTSTAMP:20260429T150529
CREATED:20231103T152925Z
LAST-MODIFIED:20231103T152925Z
UID:10000265-1703008800-1703014200@www.neuropac.info
SUMMARY:Brad Aimone @ ONM - Programming Scalable Neuromorphic Algorithms With Fugu
DESCRIPTION:From the Open Neuromorphic website \nExplore neural-inspired computing with Brad Aimone\, a leading neuroscientist at Sandia Labs. Join us for insights into next-gen technology and neuroscience.
URL:https://www.neuropac.info/event/brad-aimone-onm-programming-scalable-neuromorphic-algorithms-with-fugu/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20231213T060000
DTEND;TZID=Europe/Berlin:20231213T080000
DTSTAMP:20260429T150529
CREATED:20231130T123001Z
LAST-MODIFIED:20231130T123015Z
UID:10000271-1702447200-1702454400@www.neuropac.info
SUMMARY:Kade Heckel @ ONM - Neuromorphic Hackathon with Spyx
DESCRIPTION:From the open-neuromorphic.org website: \nJoin us on December 13th for an exciting Spyx hackathon and ONM talk! Learn how to use and contribute to Spyx \, a high-performance spiking neural network library\, and gain insights into the latest developments in neuromorphic frameworks. The session will cover Spyx’s utilization of memory and GPU to maximize training throughput\, along with discussions on the evolving landscape of neuromorphic computing. \nDon’t miss this opportunity to engage with experts\, collaborate on cutting-edge projects\, and explore the potential of Spyx in shaping the future of neuromorphic computing. Whether you’re a seasoned developer or just curious about the field\, this event promises valuable insights and hands-on experience. \nAgenda: \n\n18:00 – 19:00: Spyx Introduction\n\nDive into Spyx\, its features\, and how to contribute\nHands-on session: Explore Spyx functionalities and tackle real-world challenges\nQ&A and collaborative discussions\n\n\n19:00 – 20:00: Hackathon\n\nCollaborate on cutting-edge projects and explore the potential of Spyx\nQ&A and collaborative discussions\n\n\n\nSpeakers: \n\nKade Heckel\n\nNote: The event will be hosted virtually. Stay tuned for the video link and further updates. Let’s come together to push the boundaries of neuromorphic computing!
URL:https://www.neuropac.info/event/kade-heckel-onm-neuromorphic-hackathon-with-spyx/
LOCATION:Online
CATEGORIES:Talk,Tutorial,Workshop
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/Los_Angeles:20231205T080000
DTEND;TZID=America/Los_Angeles:20231205T090000
DTSTAMP:20260429T150529
CREATED:20231130T122725Z
LAST-MODIFIED:20231130T122725Z
UID:10000270-1701763200-1701766800@www.neuropac.info
SUMMARY:Michael Jurado @ INRC - Enhancing Performance and Efficiency of SNNs
DESCRIPTION:Title:\nEnhancing Performance and Efficiency of SNNs: From Spike-Based Loss Improvements to Synaptic Sparsification Techniques. \nAbstract:\nThe introduction of offline training capabilities like Spike Layer Error Reassignment in Time (SLAYER) and advancements in the probabilistic interpretations of Spiking Neural Network (SNN) output reinforce SNNs as a viable alternative to Artificial Neural Networks (ANNs). However\, special care must be taken during Surrogate Gradient (SG) training to achieve desired performance and efficiency. This talk will cover our recent work in improving spike-based loss functions for SNNs as well as sparsifying SNNs for low cost\, high performant neuromorphic computing. \nSpikemax was previously introduced as a family of differentiable loss methods which use windowed spike counts to form classification probabilities. We modify the Spikemaxs loss method to use rates and a scaling parameter instead of counts to form Scaled-Spikemax. Our mathematical analysis shows that an appropriate scaling term can yield less coarse probability outputs from the SNN and help smooth the gradient of the loss during training. Experimentally\, we show that Scaled-Spikemax achieves faster training convergence than Spikemax and results in relative improvements of 4.2% and 9.9% in accuracy for NMNIST and N-TIDIGITS18\, respectively. We then extend Scaled-Spikemax to construct a spike-based loss function for multi-label classification called Spikemoid. The viability of Spikemoid is shown via the first known multi-label classification results on N-TIDIGITS18 and 2NMNIST\, a novel variation of NMNIST that superimposes event-driven sensory data. \nHowever\, SNNs trained through SG methods oftentimes use dense or convolutional connections which are not always suitable for Loihi2. In order to minimize core usage and power consumption on chip\, we employ synaptic pruning techniques as part of our SNN training pipelines. 
We demonstrate the effectiveness of synaptic pruning techniques for ANN to SNN conversion of vgg16 on Loihi1 as well as for a lava-dl trained SNN for the Intel DNS Challenge. This latter approach involved the use of Gradual Magnitude Pruning (GMP) applied during SLAYER training\, which reduced the memory footprint of the baseline SDNN by 50-75%. We highlight infrastructure changes to netX which enable conversion of lava-dl trained SNNs into sparsity aware lava processes. \nMeeting link to join is available to INRC members and affiliates on the INRC Forum Schedule (click here). \nIf you are not yet a member of the INRC\, please see the “Joining the INRC link” below. \nBio: Michael Jurado is a research engineer at the Georgia Tech Research Institute. He studied computer science at Georgia Tech and received his master’s degree in Machine Learning in 2022. Lately\, Michael has been studying and developing neuromorphic algorithms for edge computing and is a regular contributor to the lava code base. In his free time\, he likes to read and study languages. \n\n\n\n\n\n\n\n\nFor the recording and slides\, see the full INRC Forum 2023 Schedule (accessible only to INRC Affiliates and Engaged Members). \nIf you are interested in becoming a member\, here is the information about “Joining the INRC.”
URL:https://www.neuropac.info/event/michael-jurado-inrc-enhancing-performance-and-efficiency-of-snns/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/Los_Angeles:20231128T080000
DTEND;TZID=America/Los_Angeles:20231128T090000
DTSTAMP:20260429T150529
CREATED:20231130T122249Z
LAST-MODIFIED:20231130T122249Z
UID:10000269-1701158400-1701162000@www.neuropac.info
SUMMARY:Jannik Luboeinski @ INRC - Brian2Lava: connecting the Brian 2 simulator to neuromorphic hardware
DESCRIPTION:Abstract:\nNeuromorphic hardware allows for fast and energy-efficient simulation of spiking neural networks. However\, the usage of such devices is still a challenge\, as it requires detailed knowledge about the neuromorphic hardware as well as the used software interface\, e.g.\, the Lava framework for neuromorphic computing spearheaded by Intel. This stands in contrast to the relative ease of simulating spiking neural networks on conventional CPU or GPU architectures\, for which user-friendly simulation environments exist. The Brian 2 simulator\, for instance\, allows to readily define a spiking neural network with a set of equations\, handling all subsequent hardware interactions. \nTo link the best of both worlds\, we are developing Brian2Lava. Brian2Lava combines the intuitive user interface of Brian 2 with the functionality of Lava. By means of a so-called device for Brian 2\, Brian2Lava seamlessly generates and executes the desired simulations in Lava without the need for users to write additional code. At the current stage\, Brian2Lava supports most Brian 2 features when executing Lava on CPU\, and a selection of essential features for the execution on Intel’s Loihi 2 chip. We are constantly working to expand the number of features supported with the chip\, aiming to enable users to flexibly execute simulations on different hardware platforms. \nIn summary\, by bridging the gap between user-friendly model definition and neuromorphic implementation\, Brian2Lava empowers engineers and neuroscientists alike to leverage the potential of neuromorphic hardware with greater ease and efficiency. \nBio: Jannik Luboeinski is currently a postdoctoral researcher at University of Göttingen. He received his B.Sc. and M.Sc. degrees in Physics from Technical University of Darmstadt and Goethe University Frankfurt\, respectively. From 2017 to 2021\, he did his Ph.D. 
with Christian Tetzlaff at University of Göttingen\, investigating the role of two-phase synaptic plasticity in recurrent spiking neural networks\, which resulted in the publication of several journal papers. In 2021\, Dr. Luboeinski continued to work in the group of Professor Tetzlaff (now Computational Synaptic Physiology Group) as a postdoctoral researcher. A major aim of his research is to identify properties that enable efficient memory processes in biological and artificial neural systems. His work currently focuses on neuromorphic computing and the development of simulation software for recurrent spiking neural networks. \nMeeting link to join is available to INRC members and affiliates on the INRC Forum Schedule (click here). \nIf you are not yet a member of the INRC\, please see the “Joining the INRC link” below. \nFor the recording and slides\, see the full INRC Forum 2023 Schedule (accessible only to INRC Affiliates and Engaged Members). \nIf you are interested in becoming a member\, here is the information about ”Joining the INRC.”
URL:https://www.neuropac.info/event/jannik-lubeoinski-inrc-brian2lava-connecting-the-brian-2-simulator-to-neuromorphic-hardware/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20231116T180000
DTEND;TZID=Europe/Berlin:20231116T190000
DTSTAMP:20260429T150529
CREATED:20231103T152759Z
LAST-MODIFIED:20231103T152928Z
UID:10000264-1700157600-1700161200@www.neuropac.info
SUMMARY:Timoleon Moraitis @ ONM - Making Neuromorphic Computing Mainstream
DESCRIPTION:From the Open Neuromorphic website \nJoin us for a workshop with Timoleon Moraitis\, research group leader in neuromorphic computing\, at the interface of computational neuroscience with artificial intelligence.
URL:https://www.neuropac.info/event/timoleon-moraitis-onm-making-neuromorphic-computing-mainstream/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Berlin:20230926T180000
DTEND;TZID=Europe/Berlin:20230926T193000
DTSTAMP:20260429T150529
CREATED:20230925T101429Z
LAST-MODIFIED:20230925T101429Z
UID:10000246-1695751200-1695756600@www.neuropac.info
SUMMARY:Giulia D’Angelo: What’s catching your eye? The visual attention mechanism
DESCRIPTION:Abstract\nEvery agent\, whether animal or robotic\, needs to process its visual sensory input in an efficient way\, to allow understanding of\, and interaction with\, the environment. The process of filtering relevant information out of the continuous bombardment of complex sensory data is called selective attention. Visual attention is the result of the complex interplay between bottom-up and top-down mechanisms to perceptually organise and understand the scene. Giulia will describe how to approach visual attention using bio-inspired models emulating the human visual system to allow robots to interact with their surroundings. \nSpeaker’s bio\nGiulia D’Angelo is a postdoctoral researcher in neuroengineering in the EDPR laboratory at the Italian Institute of Technology. She obtained a B.Sc. in biomedical engineering and an M.Sc. in neuroengineering\, developing a neuromorphic visual system at King’s College London. She successfully defended her Ph.D. VIVA in 2022 at the University of Manchester\, proposing a biologically plausible model for event-driven saliency-based visual attention. She is currently working on bio-inspired visual algorithms exploiting neuromorphic platforms.
URL:https://www.neuropac.info/event/giulia-dangelo-whats-catching-your-eye-the-visual-attention-mechanism/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=America/New_York:20230505T110000
DTEND;TZID=America/New_York:20230505T120000
DTSTAMP:20260429T150529
CREATED:20230423T184025Z
LAST-MODIFIED:20230423T185121Z
UID:10000232-1683284400-1683288000@www.neuropac.info
SUMMARY:Frances Chance - Modeling Coordinate Transformations in Neural and Neuromorphic Systems
DESCRIPTION:Hosted by the Perception and Robotics Group Seminar Series on Robotics and Computer Vision at the University of Maryland. \nAbstract. Animals excel at a wide range of behaviors\, many of which are essential for survival. For example\, dragonflies are aerial predators\, known for both their speed and high success rate\, that must perform fast\, accurate\, and efficient calculations to survive. I will present a neural network model\, inspired by the dragonfly nervous system\, that calculates turning for successful prey interception. The model relies upon a coordinate transformation from eye-coordinates to body-coordinates\, an operation that must be performed by almost any animal nervous system relying upon sensory information to interact with the external world. I will discuss how I and collaborators are combining neuroscience experiments\, modeling studies\, and exploration of neuromorphic architectures to understand how the biological dragonfly nervous system performs coordinate transformations and to develop novel approaches for efficient neural-inspired computation. \nBio. As a computational neuroscientist\, Frances Chance has always been fascinated by how neural circuits compute information. Her current research focuses on applying knowledge of how neural systems operate towards the development of novel neuro-inspired algorithms and brain-based architectures. Frances Chance received her PhD and MS from Brandeis University and her BS from the California Institute of Technology. Currently she is a Principal Member of the Technical Staff at Sandia National Laboratories.
URL:https://www.neuropac.info/event/frances-chance-modeling-coordinate-transformations-in-neural-and-neuromorphic-systems/
LOCATION:University of Maryland\, 8125 Paint Branch Dr (Room IRB 4105)\, College Park\, MD\, 20740\, United States
CATEGORIES:Talk
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Amsterdam:20230418T210000
DTEND;TZID=Europe/Amsterdam:20230418T223000
DTSTAMP:20260429T150529
CREATED:20230320T142706Z
LAST-MODIFIED:20230320T142706Z
UID:10000033-1681851600-1681857000@www.neuropac.info
SUMMARY:NeuroPAC Seminar: Forum on Neuromorphic Navigation
DESCRIPTION:Panelists: \n\nAndrew Davison\, Imperial College\, London\nKostas Daniilidis\, University of Pennsylvania\nMichael Milford\, Queensland University of Technology\n\nJoin the seminar: https://umd.zoom.us/j/93344217202 \nMore information: https://www.neuropac.info/seminars/
URL:https://www.neuropac.info/event/neuropac-seminar-forum-on-neuromorphic-navigation/
LOCATION:Online
CATEGORIES:Symposium,Talk
END:VEVENT
BEGIN:VEVENT
DTSTART:20230214T180000Z
DTEND:20230214T193000Z
DTSTAMP:20260429T150529
CREATED:20230127T223002Z
LAST-MODIFIED:20230127T223002Z
UID:10000006-1676397600-1676403000@www.neuropac.info
SUMMARY:Giorgia Dellaferrera: PEPITA - A forward-forward alternative to backpropagation
DESCRIPTION:Bio: Giorgia Dellaferrera has completed her PhD in computational neuroscience at the Institute of Neuroinformatics (ETH Zurich and the University of Zurich) and IBM Research Zurich with Prof. Indiveri\, Prof. Eleftheriou and Dr. Pantazi. Her doctoral thesis focused on the interplay between neuroscience and artificial intelligence\, with an emphasis on learning mechanisms in brains and machines. During her PhD\, she visited the lab of Prof. Kreiman at the Harvard Medical School (US)\, where she developed a biologically inspired training strategy for artificial neural networks. Before her PhD\, Giorgia obtained a master in Applied Physics at the Swiss Federal Institute of Technology Lausanne (EPFL) and worked as an intern at the Okinawa Institute of Science and Technology\, Logitech\, Imperial College London\, and EPFL.
URL:https://www.neuropac.info/event/giorgia-dellaferrera-pepita-a-forward-forward-alternative-to-backpropagation/
LOCATION:Online
CATEGORIES:Talk
END:VEVENT
END:VCALENDAR