<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:image="http://www.google.com/schemas/sitemap-image/1.1" xmlns:xhtml="http://www.w3.org/1999/xhtml" xmlns:video="http://www.google.com/schemas/sitemap-video/1.1">
  <url>
    <loc>https://www.radicalai.org/blog</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-07-19</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/18-pieces-of-advice-from-our-first-18-interviews</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-07-19</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/3-lessons-from-failing-at-our-logo-design</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-07-19</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1591555515991-10V61J8BMGBLH799AARE/1.jpeg</image:loc>
      <image:title>Blog - &#160;3 Lessons I Learned From Failing at Our Logo Design - Our First Logo</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1591555607159-RBYZS79FXI9VXS6WRIQS/1_hw8Tevd9OW7MwrstA3jlTg.png</image:loc>
      <image:title>Blog - &#160;3 Lessons I Learned From Failing at Our Logo Design - Logo Attempt:</image:title>
      <image:caption>Huge Blunder Edition</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1591555669139-MFFKM6T3T9V9BVXXB5MO/3.png</image:loc>
      <image:title>Blog - &#160;3 Lessons I Learned From Failing at Our Logo Design - Logo Mach 3:</image:title>
      <image:caption>Where we ended up.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/how-technology-shapes-society</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-05-17</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/discerning-the-truth</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-18</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/10-articles</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-18</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/pandemic-teaching-ai-ethics</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-18</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/covid-ai-ethics-checklist</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-18</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/radical-ai-values</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-18</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/tag/society</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/tag/humanities</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/tag/AI</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/tag/technology</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://www.radicalai.org/blog/tag/ethics</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2022-04-20</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes/welcome-back</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2022-04-20</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes/2020-hindsight</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2022-04-20</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes/why-we-do-this</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-12-23</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes/minisode-4</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-10-14</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes/minisode-3</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-06-17</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes/minisode-2</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-05-17</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes/blog-post-title-two-sjtsm</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-05-17</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisodes/welcome-to-radical-ai</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-20</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-12-16</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/misinformation-free-expression</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-03-03</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1614563088829-G2YB36SJKRHNXIXV8MN1/https___cdn.evbuc.com_images_124219397_433994948714_1_original.jpg</image:loc>
      <image:title>Continue The Conversation - Improving Social Media: Misinformation &amp; Free Expression</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/content-moderation</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-03-01</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1611701066533-22RGLG0RH5RW8R1Q648S/https___cdn.evbuc.com_images_121426587_433994948714_1_original.jpg</image:loc>
      <image:title>Continue The Conversation - Improving Social Media: Content Moderation &amp; Democracy</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/business-case-for-ai-ethics</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-01-26</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1608085980730-IFQ0G00VJPBSRK2W6BBG/https___cdn.evbuc.com_images_118467263_433994948714_1_original.jpg</image:loc>
      <image:title>Continue The Conversation - The Business Case for AI Ethics</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/coded-bias</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-12-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1605663406200-S469Y06GLR0WQBDLE6R6/coded-bias.jpeg</image:loc>
      <image:title>Continue The Conversation - Coded Bias, AI, and the Future of Civil Rights</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/social-media-us-election</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-11-18</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1604193155473-4I55YO9WVHM0B1C5V47D/atih-5.jpeg</image:loc>
      <image:title>Continue The Conversation - Social Media's Role in the US Election</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/next-gen-responsible-tech</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-11-01</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1599948251653-FPGO6ON2NIO5ZPP9DHVO/responsible-tech.jpeg</image:loc>
      <image:title>Continue The Conversation - Building the Next Generation of Responsible Technologists &amp; Changemakers</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/big-tech-power-diplomacy</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-09-12</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1598398751520-MTN5RH3NFOL7AFQFHJAO/atih-3-pic.jpeg</image:loc>
      <image:title>Continue The Conversation - Big Tech, Power &amp; Diplomacy</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/data-discrimination-algorithmic-bias</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-08-25</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595982666277-M3EA16YYKKONBU3L6XCH/ATIH-2.jpeg</image:loc>
      <image:title>Continue The Conversation - Data Discrimination &amp; Algorithmic Bias</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/continue-the-conversation/building-anti-racist-tech</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-07-13</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1594568409034-0UG51SCNBRZSJZ3OCVDT/anti-racist-tech.jpeg</image:loc>
      <image:title>Continue The Conversation - Building Anti-Racist Technology &amp; Culture</image:title>
      <image:caption>Details from the original livestream event put on by All Tech is Human</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality/podcast</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-12-20</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality/podcast/7</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-12-20</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1639761902327-8ZKS7Q0SULN5SBB53T4N/MMT+%237.png</image:loc>
      <image:title>Measurementality Podcast - Measurementality: Why AI Registries are Critical for Metrics of Accountability - This is a 10-episode series sponsored by and in collaboration with IEEE SA. For more information on the series, read our Measurementality page. To submit your answers to these 3 questions, read our submission guidelines and submit your thoughts on the IEEE SA website. If you like this show, follow us on Spotify, subscribe on iTunes, and leave us a review!</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality/podcast/6</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-12-17</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1633647844191-3FECZT8AINSAOE66GOHN/MMT+%236.png</image:loc>
      <image:title>Measurementality Podcast - Measurementality: Authentic Accountability for Successful AI with Yoav Schlesinger - This is a 10-episode series sponsored by and in collaboration with IEEE SA. For more information on the series, read our Measurementality page. To submit your answers to these 3 questions, read our submission guidelines and submit your thoughts on the IEEE SA website. If you like this show, follow us on Spotify, subscribe on iTunes, and leave us a review!</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality/podcast/5</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-10-07</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1631836499188-PWW6Q0BSB9RGVK87KH74/MMT+%235.png</image:loc>
      <image:title>Measurementality Podcast - Measurementality: Intergenerational Collaboration with Sinead Bovell - This is a 10-episode series sponsored by and in collaboration with IEEE SA. For more information on the series, read our Measurementality page. To submit your answers to these 3 questions, read our submission guidelines and submit your thoughts on the IEEE SA website. If you like this show, follow us on Spotify, subscribe on iTunes, and leave us a review!</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality/podcast/4</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-09-17</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1623864650832-USCO0A1FRYXTS5F09FBL/MMT+%234.png</image:loc>
      <image:title>Measurementality Podcast - Measurementality: What are we Optimizing for? with Laura Musikanski and Jonathan Stray - This is a 10-episode series sponsored by and in collaboration with IEEE SA. For more information on the series, read our Measurementality page. To submit your answers to these 3 questions, read our submission guidelines and submit your thoughts on the IEEE SA website. If you like this show, follow us on Spotify, subscribe on iTunes, and leave us a review!</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality/podcast/3</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-06-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1618799670678-SCPTF6ZDG9Z49EEHPSEQ/MMT+%233.png</image:loc>
      <image:title>Measurementality Podcast - Measurementality: Counting Mental Health and Caregiving with Amandeep Singh Gill and Riane Eisler - This is a 10-episode series sponsored by and in collaboration with IEEE SA. For more information on the series, read our Measurementality page. To submit your answers to these 3 questions, read our submission guidelines and submit your thoughts on the IEEE SA website. If you like this show, follow us on Spotify, subscribe on iTunes, and leave us a review!</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality/podcast/2</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-04-19</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1614544580138-PT6RSOW4AK0HSTDA21L3/Copy+of+Measurementality+%232.png</image:loc>
      <image:title>Measurementality Podcast - Measurementality: Children’s Data and Sustainability - This is a 10-episode series sponsored by and in collaboration with IEEE SA. For more information on the series, read our Measurementality page. To submit your answers to these 3 questions, read our submission guidelines and submit your thoughts on the IEEE SA website. If you like this show, follow us on Spotify, subscribe on iTunes, and leave us a review!</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality/podcast/1</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-02-28</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1612306116497-WPGSS2QY46JW5IFU554N/Measurementality+%231+%281%29.png</image:loc>
      <image:title>Measurementality Podcast - Measurementality: Defining What Counts in the Algorithmic Age - This is a 10-episode series sponsored by and in collaboration with IEEE SA. For more information on the series, read our Measurementality page. To submit your answers to these 3 questions, read our submission guidelines and submit your thoughts on the IEEE SA website. If you like this show, follow us on Spotify, subscribe on iTunes, and leave us a review!</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/about</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-08-19</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587054986489-Z9X9POBOKGPZQYQ8DRJX/69310026_10214783983140536_9174692323471130624_o.jpg</image:loc>
      <image:title>About - Dylan Doyle-Burke</image:title>
      <image:caption>Tobias Keene, D.D.S. Hailing from Richmond, Virginia, Dr. Tobias Keene brings a bit of unabashed Southern hospitality to all his patients. He moved to Washington, D.C. over thirty years ago as a freshman at Ivy College. Right after graduation, he attended World University’s School of Dentistry. Before opening Keene Dental in 1994, he worked for free clinics and some of the finest practices in the District. He is part of the 123 Dental Association and stays up-to-date on the latest dental discoveries. When not striving to keep his patients happy and healthy, he’s enjoys hiking with his family in Rock Creek Park.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1c5dc7e4-9b92-4b31-836b-054eebbcd731/headshot-long.jpg</image:loc>
      <image:title>About - Jessie J. Smith (Jess)</image:title>
      <image:caption>Jessie J. Smith (Jess) is a fifth year PhD candidate in the Department of Information Science at the University of Colorado Boulder. Jess received her bachelor's degree in software engineering from Cal Poly SLO. Her PhD research focuses on AI ethics, machine learning fairness and bias, and incorporating ethical speculation in the computer science classroom. Jess' dissertation focuses on operationalizing fairness for industry machine learning ecosystems, with special focus on quantitatively measuring the unobservable experience of fairness. Jess loves to engage in public scholarship about her research to encourage transparency and interdisciplinary dialogue about the unintended consequences of technology. Follow Jess on Twitter @_jessiejsmith_</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1612364291007-HAO97C6CIJVIW9ZUZXPO/98FB850D-80A1-45B7-98C0-1B57604D1023.JPG</image:loc>
      <image:title>About - Nikhil Dharmaraj</image:title>
      <image:caption>Tobias Keene, D.D.S. Hailing from Richmond, Virginia, Dr. Tobias Keene brings a bit of unabashed Southern hospitality to all his patients. He moved to Washington, D.C. over thirty years ago as a freshman at Ivy College. Right after graduation, he attended World University’s School of Dentistry. Before opening Keene Dental in 1994, he worked for free clinics and some of the finest practices in the District. He is part of the 123 Dental Association and stays up-to-date on the latest dental discoveries. When not striving to keep his patients happy and healthy, he’s enjoys hiking with his family in Rock Creek Park.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1612364368203-1W3S0Z5P1ENN45K8OINV/IMG_8324.jpg</image:loc>
      <image:title>About - Lena Wang</image:title>
      <image:caption>Lena holds a combined Bachelors of Arts (Honours)/Science (Advanced) from the University of Sydney, with majors in computer science, physics, and philosophy. She is interested in the systemic social consequences of technology developed in a capitalist and colonialist context. In particular, she is concerned with the raced effects of algorithmic bias in carceral technology, the embedding of military purposes in computer vision projects, and the alienation of tech workers from their labour. Follow Lena on Twitter @lenayiwang</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/podcast</loc>
    <changefreq>daily</changefreq>
    <priority>1.0</priority>
    <lastmod>2023-02-28</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168157317-KSINF047CJW11GNMZIP9/15_+IBM%2C+Microsoft%2C+and+Amazon+Disavow+Facial+Recognition+Technology_+What+Do+You+Need+to+Know_+with+Deb+Raji.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1677607847822-MSPL6PUIACGTMY28OF0S/Limitations+of+ChatGPT.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1601489874155-0IZ29OXPFGG52L2XC6V9/Liz+O%27Sullivan.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>Liz</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/531271d1-6c22-4d07-86e9-0ab6dfbfdb47/Decolonial+Digital+Mental+Health+%281%29.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168147930-9WEL59SQ29CCYUNR1019/8_+Love%2C+Challenge%2C+and+Hope_+Building+a+Movement+to+Dismantle+the+New+Jim+Code+with+Ruha+Benjamin.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/dcf844d5-5451-4382-9225-38c31ad924d3/Casteist+Tech+%282%29.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168144072-LTHBGOQZQHA09RHOU09F/4_+Apple+_+Google+Partner+to+Promote+Coronavirus+Contact+Tracing.+Should+You+be+Worried_.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1634308308667-VSFSJ9AGWP5RGSERA8BM/AI+Today.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168151464-ZACCYS1GGJVWMCEL3TCN/9_+Labor+and+Innovation_+Exploring+the+Power+of+Design+and+Storytelling+with+Lilly+Irani.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1622611017944-IMV1VARY3OKR9TFFJ7AQ/Good+Robot.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168143912-8520I12CIZOYVGNXRC9H/1_+Can+a+Machine+Ever+be+Moral_+Robot+Politeness+and+Persuasion+with+Tom+Williams.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1615351685793-ULKU0KFLF4YT49NQ1JDT/computer+is+on+fire.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168143078-VJLKSAGQCTSWRPIXPIKJ/2_+Are+We+Being+Watched_+Unpacking+AI+Surveillance+with+Kandrea+Wade.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1604459687372-H3MKTQSOH4KJSQ50BSXJ/Transparency+Politics.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168144705-LGUSCAYLDH30QR1ESSZS/3_+Have+Classification+Algorithms+Gone+Too+Far_+Exploring+Gender+in+AI+with+Morgan+Klaus+Scheuerman.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1603859485434-9GXXYUZ4Y443GM5QE39Q/Propaganda.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168144946-F0BMP12HM4XESHL8YLFY/5_+Is+God+in+your+iPhone_+Black+Liberation+Theology%2C+Accessibility%2C+and+Digital+Citizenry+with+Shamika+Goddard.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1604435116163-V63HMHM7VUEXAF19EECN/Copy+of+Vote.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168143454-15ZVPVGYOFQ22DYKH3C4/6_+Racism+and+Sexism+in+AI+Technology_+Navigating+Systems+of+Power+with+Sarah+Myers+West.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1602087870932-IQ85TJCPJ6EGB74ZZCCN/PAI+%282%29.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>PAI</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168150586-NPS1X6DZJ3PFVO2VGS2T/10_+Tech+Journalism+and+Ethics_+Where+is+the+Truth+Anyway_+with+Karen+Hao.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1600272910059-0LNMX0B4M2X99ZL2CEUJ/Coalition+for+Critical+Technology.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168153687-5MJ50KGJ6467JTRC0ZH0/11_+Robot+Rights_+Exploring+Algorithmic+Colonization+with+Abeba+Birhane.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1596987128186-I4YVV8KSTWO8VYFIJIAX/AI+4+Social+Good+Panel.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>ai4socialgood panel</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168153265-MTQBT5FKYGKJLRFJZDS2/11_+The+History+that+Defines+our+Technological+Future+with+Archivist+Eun+Seo+Jo.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595257715445-ES63425N1TTLH5TQJN6M/18_+Surveillance%2C+Stigma+%26+Sociotechnical+Design+for+HIV+in+Dating+and+Hookup+Platforms+with+Calvin+Liang%2C+Jevan+Hutson%2C+and+Os+Keyes+%282%29.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>panel-episode</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1593541301445-AT793F57DZ4TH1BT2BBT/Renee+%26+Racial+Bias+%281%29.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168153168-A7OHEDLJE30OKCEBVYQ6/13_+Data+as+Protest_+Data+for+Black+Lives+with+Yeshi+Milner+%283%29.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1593741068671-BPPKTU8NK6977XXH48QK/A+message+from+timnit+gebru.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168155902-TMYAZRWYPTVUM4XBJSF4/14_+Emoji+Design%2C+White+Accountability%2C+and+the+Ethical+Future+of+Chatbots+with+Miriam+Sweeney.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168159611-KUFWLUWXVO860G7VLZ27/16_+The+Power+of+Linguistics_+Unpacking+Natural+Language+Processing+Ethics+with+Emily+Bender.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168158033-CMH9AYWJVXUSY1PO6KZJ/17_+Science+Fiction%2C+Science+Fact%2C+and+AI+Consciousness+with+Beth+Singler.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168155809-TWK4XW02TCD9LGRBYAW0/12_+Confronting+Our+Reality_+Racial+Representation+and+Systemic+Transformation+with+Dr.+Timnit+Gebru+%281%29.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595430908280-CR0VRH6FOBL91LZY9YER/19_+Mary+L.+Gray.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>mary-l-gray</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1596642740824-HAK4KGTKNQDWCT01ZLSB/20_+John+C.+Havens.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>John C. Havens</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1597250585140-4K8NYI1OOYJIPGP8ZZFP/21_+Eric+Rice.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>Eric Rice</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1597844052448-G5YL2OL5UWO9WK48EM4G/22_+Veena+Dubal.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>Veena Dubal</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1605070879829-N326VU4GVEKWLKY8ALWD/Ryan+Calo.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1599072208815-PH69ODB3X2Z5TI4BTCKB/23_+Jennifer+Wortman+Vaughan.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>jenn</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1599628931652-LUVPUYMLIUR50KQDNDOC/24_+Anima.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>anima</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1600870341865-YLJU2GOVA1IV3768BWTE/Michael.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>michael</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1606840000365-VXM7EY8GUD2VE5T3ZYEH/Kate.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1606921573164-NVF91G42QJQHUT05J7OY/Kathy.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1607494452613-T0CJGDAU27ZHISWKNK78/Moses.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1611152969835-TB2TL1F3VS667J567JHG/Meredith+Ringel+Morris.png</image:loc>
      <image:title>Podcast</image:title>
      <image:caption>Meredith Ringel Morris</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1612969606491-9GZ3I1ZBJNF0ZQ91DE5V/Anna+Lenhart.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1614265598366-TPMKFRQKZCCM87HL7ZW3/Zanele+Munyikwa.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1617153752655-KZE9R06KPQF44QIP3L71/Copy+of+Zanele+Munyikwa.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1617755511777-92DSRFZKZ1JODUROB1M8/Kate+Crawford.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1619022625283-4BQ9HKHG03WXUOI93DQK/Cynthia+Bennett.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1620060867655-X8IC02WD65P2OAXW39MI/Steven+Umbrello.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1622134150866-9NADR2TJFARYKYHL41YE/Divya+Siddarth.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1631029506357-XH5IRJMYFU0JSYV1XAWJ/Jason+Edward+Lewis.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1633493133640-DPQH5UXRR231PO9OFYVL/Stevie+Chancellor.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1635911442363-7FXYD4DIHU2B64AYUP8G/Design+Justice.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/0905e3f9-4409-4270-bbb0-244fc604e685/Decolonial+AI+101.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1651010205550-V2BBN6GD9Y0NXCDHCSUZ/Let%27s+talk+about+sex.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/a686801c-a9ad-4715-a176-9616ecb94153/Visualizing+our+Lives+%281%29.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1658877213635-ZRLNXY5H5IEED3H1F96P/Shion+Guha.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/88af9bad-b0b3-459b-a43a-8035527573c8/Tung-Hui+%281%29.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1664375098686-VHDDBHKAQJB5XIUVXLD7/Rebecca+Finlay.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1666794294486-UL3T2YD9AYYHVB072QRX/Seyi+Akiwowo.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/4b0c5fbb-0d29-4524-85a6-62653d73e86a/Screen+Shot+2022-11-30+at+8.42.45+AM.jpg</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1674623287727-E05AM4C9D1VQUI89VBNZ/ChatGPT.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1679494049784-51Y35G401CA7CLE5PQAB/More+than+a+glitch.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1682452374532-HCHROOK6IX4XDRZ9OAZK/Twitter+vs.+Mastodon.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/2af28138-92b7-46e2-8425-24dea3581557/RAI+Goodbye.png</image:loc>
      <image:title>Podcast</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/contact</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-08-19</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1590360985287-LA43465NSXIDWBEN0DLW/radicalAILogoNoText.jpg</image:loc>
      <image:title>Contact</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e4-morgan-scheuerman</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-04-18</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587181542501-HDYWDP6EDGAV8WZXWTN2/morgan_bio.png</image:loc>
      <image:title>E4-Morgan</image:title>
      <image:caption>In this episode of the Radical AI podcast hosts Dylan and Jess interview Morgan Klaus Scheuerman. Morgan is an Information Science PhD student at the University of Colorado interested in exploring the ways individuals with diverse gender identities interact with technology. He grew up in Maryland, where he earned a Bachelor of Arts in Communication &amp; Media Studies with a minor in Gender Studies at Goucher College and a Master of Science in Human-Centered Computing at University of Maryland, Baltimore County. His master's thesis work focused on the way transgender individuals experience safety and bias when interacting with digital technologies. At CU, he works with Jed Brubaker in the Identity Lab. In his spare time, Morgan enjoys travel, hiking, photography and consuming snobby hipster coffee beverages in mid-century modern cafes. You can follow Morgan on Twitter at: @morganklauss</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e2-tom-williams</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-04-18</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587179608715-RTU87HONUBXZ8RSYZ85O/thomas-williams.jpg</image:loc>
      <image:title>E2-Tom</image:title>
      <image:caption>In this episode of the Radical AI podcast Dylan and Jess interview Dr. Tom Williams. Tom Williams is an Assistant Professor of Computer Science at the Colorado School of Mines, where he directs the Mines Interactive Robotics Research Lab. Prior to joining Mines, Tom earned a joint PhD in Computer Science and Cognitive Science from Tufts University in 2017. Tom’s research focuses on enabling and understanding natural language based human-robot interaction that is sensitive to environmental, cognitive, social, and moral context. His work is funded by grants from NSF, ARL, and USAFA, as well as by Early Career awards from both NASA and the US Air Force. You can find more about Tom on Twitter @williamstome You can find out more about the Mirror lab at: MIRRORLab.mines.edu</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e1-welcome</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-04-18</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587179911268-ZJ1YFEQZDI6FV0JVHOZD/radicalboth.JPG</image:loc>
      <image:title>Welcome</image:title>
      <image:caption>In this first episode of the Radical AI podcast hosts Dylan and Jess explore what Radical AI is, why they created this podcast, who the podcast is for, and what listeners can hope to expect from future episodes. Specifically, Dylan and Jess review what their backgrounds are and what Radical AI means to them. Please see the full transcript of the episode below and welcome to the conversation!</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e5-seda-gurses</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-04-18</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587182165920-FOS8Z49X5N2BWC7IM47A/SedaGursesSmall.jpg</image:loc>
      <image:title>E5-Seda</image:title>
      <image:caption>In this episode of the Radical AI podcast hosts Dylan and Jess interview Dr. Seda Gurses. Seda is currently an Associate Professor in the Department of Multi-Actor Systems at the Faculty of Technology Policy and Management, at TU Delft and an affiliate at the COSIC Group at the Department of Electrical Engineering (ESAT), KU Leuven. Her work focuses on privacy enhancing and protective optimization technologies (PETs and POTs), privacy engineering, as well as questions around software infrastructures, social justice and political economy as they intersect with computer science. Recently she co-chaired CRAFT: Critiquing and Rethinking Accountability, Fairness and Transparency, an independently curated program at ACM FAT* together with Seeta Peña Gangadharan and Suresh Venkatasubramanian. In addition to her academic work, she is a member of the arts collective Constant VZW in Brussels and a member of The Institute for Technology in the Public Interest You can follow Seda on twitter at: twitter.com/sedyst Links Mentioned in this Episode Include: The long Tail of Contact Tracing Stop the Apple and Google contact tracing platform. (Or be ready to ditch your smartphone.) The documentation and discussions around DP3T</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e3-kandrea-wade</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-04-18</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587180598466-44X0DSITPZ1I14QCP0YB/download.jpg</image:loc>
      <image:title>E3-Kandrea</image:title>
      <image:caption>In this episode of the Radical AI podcast hosts Dylan and Jess interview Kandrea Wade. Kandrea is a PhD student in the Information Science department at CU Boulder focusing on algorithmic identity and the digital surveillance of marginalized groups. Along with developing her research at CU Boulder, Kandrea seeks to discover and assist in creating proper ethical regulations and education on algorithmic identity and digital literacy. With a background of over 15 years in entertainment and media, her interests have evolved from demographic programming for entertainment and media theory to corporate user ethics and legal protections for the digital citizen. Kandrea holds a BA in technical theatre from The University of Texas at Arlington and an MA in media, culture, and communications from New York University. You can connect with Kandrea on Twitter at: @KandreaWade</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisode-1</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-04-19</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587311409656-D99MATMAM4G5YMEOSV9Y/minisodes.jpg</image:loc>
      <image:title>Minisode 1</image:title>
      <image:caption>In this Minisode hosts Jess and Dylan debrief the breaking news of contact tracing apps, socio-political power structures, and reveal future guests for the show. Every month The Radical AI Podcast releases a Minisode reviewing the previous month's episodes and updating listeners on insider news from the Radical AI world. As always we invite you to please subscribe, rate, and leave a review to show your support!</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e6-shamika-goddard</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2022-01-04</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587565260960-NVKW9VBT315A5XQWHOYM/resized_shamika_goddard.jpg</image:loc>
      <image:title>E6-Shamika</image:title>
      <image:caption>What does it mean to be an embodied black woman in technology spaces? What is techno-womanism? Do spirituality and liberation have a place in our conversations about technology? To answer these questions and more The Radical AI Podcast welcomes Shamika Goddard to the show. Shamika was born and raised in San Antonio, TX, and is the oldest of four children. Attending math and engineering camps for fun in junior high and high school, Shamika was excited about learning and eager to help save the world. After graduating from Stanford University, she served a year with AmeriCorps through Reading Partners in Queens and decided to stay in New York City. She went on to study technology and ethics at Union Theological Seminary in the city of New York and is thrilled to be serving others as a Tech Chaplain. She currently attends CU Boulder's iSchool studying technology, ethics, and social justice issues. Relevant Links: Twitter - @shamikalashawn Website - shamikalashawn.com</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e7-sarah-myers-west</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-04-30</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1588206428614-UMJ3JG7F4Q1FAOAHM4J9/smw.jpg</image:loc>
      <image:title>Episode 7: Racism and Sexism in AI Technology? Navigating Systems of Power with Sarah Myers West</image:title>
      <image:caption>Can you separate technology from power? Is technology ever objective? How do we build technology that meets the needs of everyone? To answer these questions and more The Radical AI Podcast welcomes Dr. Sarah Myers West to the show. Dr. Sarah Myers West is a postdoctoral researcher at the AI Now Institute. Her research centers on the critical study of technology and culture, with an emphasis on historical and ethnographic methods. Dr. West is currently working on a project that addresses the politics of diversity and inclusion in technological communities by exploring the nexus of artificial intelligence, gender, and intersectionality. She received her doctoral degree from the Annenberg School for Communication and Journalism at the University of Southern California in 2018, where her dissertation examined the cultural history and politics of encryption technologies from the 1960s to the present day. If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/episode-8-ruha-benjamin</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-05-15</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1589348727103-OQ0DVZIXAV12AU06QMSD/ruha.jpeg</image:loc>
      <image:title>Love, Challenge, and Hope: Building a Movement to Dismantle the New Jim Code with Ruha Benjamin</image:title>
      <image:caption>How is racism embedded in technological systems? How do we address the root causes of discrimination? How do we as designers and consumers of AI technology reclaim our agency and create a world of equity for all? To answer these questions and more The Radical AI Podcast welcomes Dr. Ruha Benjamin to the show.   Dr. Benjamin is Associate Professor of African American Studies at Princeton University and founder of the Just Data Lab. She is author of People’s Science: Bodies and Rights on the Stem Cell Frontier (2013) and Race After Technology: Abolitionist Tools for the New Jim Code (2019) among other publications. Her work investigates the social dimensions of science, medicine, and technology with a focus on the relationship between innovation and inequity, health and justice, knowledge, and power. To find more information about Ruha Benjamin’s work, you can find her website at ruhabenjamin.com, or you can follow her on Twitter @ruha9. If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e9-lilly-irani</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-09-04</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595168151464-ZACCYS1GGJVWMCEL3TCN/9_+Labor+and+Innovation_+Exploring+the+Power+of+Design+and+Storytelling+with+Lilly+Irani.png</image:loc>
      <image:title>Labor and Innovation: Exploring the Power of Design and Storytelling with Lilly Irani</image:title>
      <image:caption>What is the intersection between labor justice movements and the AI technology industry? How can we use design and ethnography to address the relationship between technology, power, and liberation? To answer these questions and more The Radical AI Podcast welcomes Dr. Lilly Irani to the show.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/minisode-2</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-05-17</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1587311409656-D99MATMAM4G5YMEOSV9Y/minisodes.jpg</image:loc>
      <image:title>Minisode 2</image:title>
      <image:caption>In this Minisode hosts Dylan and Jess celebrate one month since launch and debrief racism and sexism in AI, labor movements in tech spaces, theology, and so much more! Every month The Radical AI Podcast releases a Minisode reviewing the previous month's episodes and updating listeners on insider news from the Radical AI world. As always we invite you to please subscribe, rate, and leave a review to show your support!</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e10-karen-hao</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-05-31</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1590077170140-TZPJG18OVE2COWTWZHL2/karen.jpeg</image:loc>
      <image:title>E10-Karen</image:title>
      <image:caption>What is the role of journalism in telling the stories of tech ethics? How can journalism bridge the gap between technology and public policy? How do we measure truth in journalism, research and beyond? To answer these questions and more The Radical AI Podcast welcomes Karen Hao to the show. Karen is the artificial intelligence reporter for MIT Technology Review. She covers the ethics and social impacts of technology as well as its applications for social good. Karen also writes the AI newsletter, “the Algorithm”, which thoughtfully examines the field’s latest news and research. Previously, Karen was a reporter and data scientist at Quartz and an application engineer at the first startup to spin out of Google X. You can follow Karen Hao on Twitter @_KarenHao . Read Karen’s AI work at the MIT Technology Review. For more of Karen’s work and stories, check out her website. If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e11-abeba-birhane</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-05-31</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1590595566379-G6AHJ2M2BFJM8R2VL619/abeba.jpg</image:loc>
      <image:title>E11-Abeba</image:title>
      <image:caption>Should we grant robots rights? What is moral relationality and how can it be useful for designing machine learning algorithms? What is the algorithmic colonization of Africa and why is it harmful? To answer these questions and more The Radical AI Podcast welcomes Abeba Birhane to the show.  Abeba Birhane is a PhD candidate in cognitive science at University College Dublin in the School of Computer Science. She studies the relationships between emerging technologies, personhood and society. Specifically, Abeba explores how technology can shape what it means to be human. Abeba’s work is incredibly interdisciplinary - bridging the fields of cognitive science, psychology, computer science, critical data studies, and philosophy. You can follow Abeba Birhane on Twitter @Abebab. For more of Abeba’s work, check out her website. Relevant links from the episode: The Value of Machine Learning by Ria Kalluri Towards an anti-fascist AI by Dan Mcquillan Counting the Countless by Os Keyes If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/bonus-episode-eun-seo-jo</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-06-01</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1591038836503-ALAIVKBI4LPJRM2RX7A9/unseojo.jpg</image:loc>
      <image:title>Bonus-Episode-Eun-Seo-Jo</image:title>
      <image:caption>How does your data tell your story? Is historical data political? What do our archives have to do with defining the future of our technology? To answer these questions and more The Radical AI Podcast welcomes Stanford PhD student and archivist Eun Seo Jo to the show.  Eun Seo Jo is a PhD student in History at Stanford University. Her research broadly covers applications of machine learning on historical data and the ethical concerns of using socio-cultural data for AI research and systems. You can follow Eun Seo Jo on Twitter @unsojo. Relevant links from the episode: Lessons from Archives: Strategies for Collecting Sociocultural Data in Machine Learning by Eun Seo Jo and Timnit Gebru Disseminating Research News in HCI: Perceived Hazards, How-To's, and Opportunities for Innovation by C. Estelle Smith, Eduardo Nevarez, and Haiyi Zhu If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e13-yeshi-milner</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-06-10</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1591802763089-YSASZ51HJJQXLA9ODOJ6/yeshi.jpg</image:loc>
      <image:title>E13-Yeshi-Milner</image:title>
      <image:caption>How can we claim agency over data systems to fight for racial justice? What is Data for Black Lives? How can you join the movement? To answer these questions and more we welcome Yeshi Milner to the show. Yeshi Milner is the co-founder and executive director of Data for Black Lives. Raised in Miami, FL, Yeshi began organizing against the school-to-prison pipeline at Power U Center for Social Change as a high school senior. There she developed a lifelong commitment to movement building as a vehicle for creating and sustaining large-scale social change. More recently, Yeshi was a campaign manager at Color of Change, where she spearheaded several major national initiatives, including OrganizeFor, the only online petition platform dedicated to building the political voice of Black people. You can follow or message Yeshi on Twitter @YESHICAN. Relevant links from the episode: Fundraiser for Minneapolis youth Data for Black Lives Website If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e12-timnit-gebru</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-06-10</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1591802348251-IIPC9IUMZN0TMUC6NXG1/timnit.jpg</image:loc>
      <image:title>E12-Timnit-Gebru</image:title>
      <image:caption>How do we respond to the racism in the world we have been given? What does it mean to transform technology systems in the spirit of justice and equity? How do we engage with diversity and representation without reducing our efforts to simple branding and lip service? To answer these questions and more the Radical AI Podcast welcomes one of our heroes Dr. Timnit Gebru to the show.  Dr. Timnit Gebru is a research scientist at Google on the ethical AI team and a co-founder of Black in AI. Timnit previously did her postdoc at Microsoft Research for the FATE (Fairness Transparency Accountability and Ethics in AI) group, where she studied algorithmic bias and the ethical implications underlying any data mining project. She received her Ph.D. from the Stanford Artificial Intelligence Laboratory, studying computer vision under Fei-Fei Li. You can follow Timnit Gebru on Twitter @timnitGebru. Relevant links from the episode: Datasheets for Datasets by Timnit Gebru, Jamie Morgenstern, Briana Vecchione, Jennifer Wortman Vaughan, Hanna Wallach, Hal Daumé III, and Kate Crawford. Black in AI website If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e14-miriam-sweeney</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-06-17</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1592411774394-O0XKZR68IER2EU8M86F7/miriam.png</image:loc>
      <image:title>E14-Miriam-Sweeney</image:title>
      <image:caption>What are the ethics of emoji design and why does it matter? What are some of the ethical concerns we should have about chatbots and virtual assistants? How can these technologies perpetuate racial and gender stereotypes? To answer these questions and more The Radical AI Podcast welcomes Dr. Miriam Sweeney to the show.  Dr. Miriam Sweeney is an assistant professor in the School of Library &amp; Information Studies at the University of Alabama. She is a critical cultural digital media scholar who studies anthropomorphic design, virtual assistants, voice interfaces, and AI through the lenses of race, gender, and sexuality. Her current project, Facing Our Computers: Identity, Interfaces, and Intimate Data, explores the linkages between identity, design, and dataveillance in AI voice assistants, digital assistants, and chatbot interfaces.  You can follow Miriam on Twitter @Miriam_Sweeney. Relevant links from the episode: Q - the first genderless voice If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e15-deb-raji</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-06-24</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1593008547685-KHPJYZFERN7O9AU38N45/deb.jpg</image:loc>
      <image:title>E15-Deb-Raji</image:title>
      <image:caption>What does it mean that IBM, Microsoft, Amazon, and others have distanced themselves from developing facial recognition technology and providing facial recognition data to vendors? Should you be skeptical? Where is the hope? To answer these questions and more we welcome Deb Raji to the show. Deb is a tech fellow at the AI Now Institute Working on critical perspectives to evaluation practice in AI, conducting audits on deployed AI systems and facial recognition, and AI auditing policy. She has worked closely with the Algorithmic Justice League initiative and on several projects to highlight cases of bias in computer vision. Deb was named one of MIT Technology Review’s 35 Innovators Under 35 for her research on the harms of racially biased data in facial recognition technologies. You can follow Deb on Twitter @rajinio. Relevant links from the episode: Algorithmic Justice League Closing the AI Accountability Gap: Defining an End-to-End Framework for Internal Algorithmic Auditing Saving Face: Investigating the Ethical Concerns of Facial Recognition Auditing MIT Technology Review 35 Innovators Under 35 - Inioluwa Deborah Raji If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/bonus-episode-renee-cummings-ei</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-01-21</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1593541301445-AT793F57DZ4TH1BT2BBT/Renee+%26+Racial+Bias+%281%29.png</image:loc>
      <image:title>E16-Renee-Cummings-Bonus</image:title>
      <image:caption>Sponsored by Ethical Intelligence this bonus episode features a presentation delivered by Renee Cummings as a workshop given on 06/24/20. We also welcome Ethical Intelligence CEO Olivia Gambelin to the show as a guest host. Renée Cummings is a criminologist and international criminal justice consultant who specializes in Artificial Intelligence (AI); ethical AI, bias in AI, diversity and inclusion in AI, algorithmic authenticity and accountability, data integrity and equity, AI for social good and social justice in AI policy and governance. She is the CEO of Urban AI. Slides referenced can be found by contacting Ethical Intelligence at ethicalintelligence.co You can follow Ethical Intelligence on Twitter @ethicalaico and you can follow Renée Cummings on Twitter @CummingsRenee. If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e16-emily-bender</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-07-17</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1593631309394-S6HWBK6X0ILANHGITQAS/emily.jpeg</image:loc>
      <image:title>E16-Emily-Bender</image:title>
      <image:caption>What are the societal impacts and ethics of Natural Language Processing (or NLP)? How can language be a form of power? How can we effectively teach ethics in the NLP classroom? How can we promote healthy interdisciplinary collaboration in the development of NLP products? To answer these questions and more we welcome Dr. Emily M. Bender to the show. Dr. Emily M. Bender researches linguistics, computational linguistics, and ethical issues in Natural Language Processing.  Emily is currently a Professor in the Department of Linguistics and an Adjunct Professor in the Department of Computer Science and Engineering at the University of Washington. She is also the faculty director of the CLMS program and the director of the Computational Linguistics Laboratory. You can follow Emily M. Bender on Twitter @emilymbender or check out her personal website and publications. If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod. Relevant Links from the Episode: Emily M. Bender’s Class Materials: Ethics in NLP (2017) (2019) Articles: Tay Chatbot gone wrong on Twitter More on “Language is Power and Power uses Language” Data Statements for Natural Language Processing: Toward Mitigating System Bias and Enabling Better Science “Better, less-stereotyped word vectors” by Robyn Speer People: Lesley Carmichael (LinkedIn) (YouTube) Ryan Calo (UW Webpage) Batya Friedman (UW Webpage) Meg Mitchell (Personal Website) ACL tutorial (T7 here) ACL list of Ethics and NLP courses</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e17-beth-singler</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-07-17</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1594674858089-2U6KE978V43WZBSVLQ2M/43816617704_451a90327e_o.jpg</image:loc>
      <image:title>E17-Beth-Singler</image:title>
      <image:caption>How can Science Fiction be used to get the public involved in the AI Ethics conversation? What are religious studies and how can they relate to AI? Why is it important to distinguish between Science Fiction and Science Fact when it comes to the future of AI? To answer these questions and more we welcome Dr. Beth Singler to the show.  Dr. Beth Singler is a Junior Research Fellow in Artificial Intelligence at the University of Cambridge. Previously, Beth was the post-doctoral Research Associate on the “Human Identity in an age of Nearly-Human Machines” project at the Faraday Institute for Science and Religion. Through her research, Beth explores the social, ethical, philosophical, and religious implications of advances in Artificial Intelligence and robotics. You can follow Dr. Beth Singler on Twitter @BVLSingler. If you enjoy this episode please make sure to subscribe, submit a rating and review, and connect with us on twitter at @radicalaipod. Relevant Links from the Episode: “Blessed by the algorithm”: Theistic conceptions of artificial intelligence in online discourse Beth’s Personal Website</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e18-calvin-jevan-os</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-07-20</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595257715445-ES63425N1TTLH5TQJN6M/18_+Surveillance%2C+Stigma+%26+Sociotechnical+Design+for+HIV+in+Dating+and+Hookup+Platforms+with+Calvin+Liang%2C+Jevan+Hutson%2C+and+Os+Keyes+%282%29.png</image:loc>
      <image:title>E18-Calvin-Jevan-Os</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/what-next</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-10-25</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595125759448-7QJ1LRCXWNT2STNADQNI/First+18+Episodes-V2+%281%29.png</image:loc>
      <image:title>What Next?</image:title>
      <image:caption>Guests and Partnered Speakers From The First 3 Months of The Radical AI Podcast</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e19-mary-l-gray</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-07-22</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1595430908280-CR0VRH6FOBL91LZY9YER/19_+Mary+L.+Gray.png</image:loc>
      <image:title>E19-Mary-L-Gray - In what way does technology make us more or less visible to each other? What is Ghost Work and how might it impact the future of work? How can AI Ethicists relate more intimately with compassion?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e20-john-c-havens</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-08-05</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1596642740824-HAK4KGTKNQDWCT01ZLSB/20_+John+C.+Havens.png</image:loc>
      <image:title>Ethically Aligned Design &amp; Applied AI Ethics with John C. Havens - What is IEEE and what is their “ethically aligned design” initiative? How can positive visions for the future help us create better technology? What do kindness and wellbeing have to do with AI Ethics?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/ai-for-social-good-panel</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-08-09</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1596986368139-HCWW46KNYC4JGIUHZIBX/AI+4+Social+Good+Panel.png</image:loc>
      <image:title>Bonus-Episode-AI4SocialGood-Panel - What is AI for Social Good? In this special bonus panel episode on AI for Social Good 101, we interviewed Dr. Anamika Barman-Adhikari, Dr. Fei Fang, and Dr. Amulya Yadav.</image:title>
      <image:caption>Anamika Barman-Adhikari is an Associate Professor of social work at the University of Denver. She received her Ph.D. in Social Work from University of Southern California.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e21-eric-rice</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1597250585140-4K8NYI1OOYJIPGP8ZZFP/21_+Eric+Rice.png</image:loc>
      <image:title>Finding Joy in Meaningful Work: AI for Social Good in Social Work &amp; Social Justice with Eric Rice - Where is the limit in the use of technology to solve societal problems? How can Social Work utilize AI to address social injustice? To answer these questions and more we welcome Dr. Eric Rice to the show.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e22-veena-dubal</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1597844052448-G5YL2OL5UWO9WK48EM4G/22_+Veena+Dubal.png</image:loc>
      <image:title>E22-Veena-Dubal - What is precarious work and how does it impact the psychology of labor? How might platforms like Uber and Lyft be negatively impacting their workers? How do gig economy apps control the lives of those who use them for work?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e23-jenn-wortman-vaughan</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1599072208815-PH69ODB3X2Z5TI4BTCKB/23_+Jennifer+Wortman+Vaughan.png</image:loc>
      <image:title>E23-Jenn-Wortman-Vaughan - What are the differences between explainability, intelligibility, interpretability, and transparency in Responsible AI? What is human-centered machine learning? Should we be regulating machine learning transparency?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/e24-anima-anandkumar</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1599628931652-LUVPUYMLIUR50KQDNDOC/24_+Anima.png</image:loc>
      <image:title>E24-Anima - What are current attitudes towards AI Ethics from within the tech industry? How can we make computer science a more inclusive discipline for women? What does it mean to democratize AI? Why should we? How can we?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/coalition-for-critical-technology</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1600272910059-0LNMX0B4M2X99ZL2CEUJ/Coalition+for+Critical+Technology.png</image:loc>
      <image:title>Coalition-For-Critical-Technology - What is the tech to prison pipeline? How can we build infrastructures of resistance to it? What role does academia play in perpetuating carceral technology?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/michael-madaio</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1600870341865-YLJU2GOVA1IV3768BWTE/Michael.png</image:loc>
      <image:title>Michael-Madaio - What are the limitations of using checklists for fairness? What are the alternatives? How do we effectively design ethical AI systems around our collective values?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/liz-osullivan</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1601489874155-0IZ29OXPFGG52L2XC6V9/Liz+O%27Sullivan.png</image:loc>
      <image:title>Liz O'Sullivan - What should you know about the state of surveillance in the world today? What can we do as consumers to stop unintentionally contributing to surveillance? The Facial Recognition industry had a reckoning after the murder of George Floyd - are things getting better?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/manipulated-media</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1602087870932-IQ85TJCPJ6EGB74ZZCCN/PAI+%282%29.png</image:loc>
      <image:title>Manipulated Media - What is media integrity? What is media manipulation? What do you need to know about fake news? To answer these questions and more we welcome to the show Claire Leibowicz and Emily Saltz -- two representatives from the Partnership on AI’s AI and Media Integrity team.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/go-vote</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1604435116163-V63HMHM7VUEXAF19EECN/Copy+of+Vote.png</image:loc>
      <image:title>Go Vote! - Welcome to a very special 2020 US Election episode of the Radical AI Podcast in which we engage with the role that technology and social media play in voting practices and discuss what global citizens should know about the impact this election might have on the future of democracy worldwide.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/media</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-10-25</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1603653834203-RGJYO8RHRSCETLG2APWG/rai-experian.png</image:loc>
      <image:title>Media - Experian DataTalk Podcast</image:title>
      <image:caption>September 16, 2020 Jess and Dylan were featured in Experian’s DataTalk Podcast to discuss Radical AI, the mission behind the podcast, AI Ethics 101, and their experience running this project so far.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1603654444487-CN98O8J73T8OU6SDYO4V/Screen+Shot+2020-10-25+at+1.33.50+PM.png</image:loc>
      <image:title>Media - Changemaker’s Interview</image:title>
      <image:caption>September 21, 2020 Dylan and Jess were featured as Changemakers on All Tech is Human, discussing their work with the Radical AI Podcast, their experiences and thoughts about Responsible Tech, and what they’ve learned on their journey so far.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1603654389533-N996BJH3AUU691CGZHJ8/ai-peace.png</image:loc>
      <image:title>Media - AI For Peace Newsletter</image:title>
      <image:caption>August, 2020 The Radical AI Podcast was featured in the AI For Peace Newsletter. The episode of choice for this month’s newsletter was Finding Joy in Meaningful Work: AI for Social Good in Social Work &amp; Social Justice with Eric Rice.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1603587177456-8MU0MC322D01ZQIG1DZ0/responsible-tech-guide.png</image:loc>
      <image:title>Media - Responsible Tech Guide</image:title>
      <image:caption>September 15, 2020 All Tech is Human released their first Responsible Tech Guide where Dylan and Jess were featured as Changemakers on the highly coveted page 157.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/political-ads-propaganda</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1603859485434-9GXXYUZ4Y443GM5QE39Q/Propaganda.png</image:loc>
      <image:title>Political Ads and Propaganda - What should you know about propaganda and political ads in the age of information? How do they impact democracy across the globe? To cover this important topic, we welcome to the show Nayantara Ranganathan and Manuel Beltrán.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/internship</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-11-02</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/tech-and-democracy</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-11-04</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/transparency-politics</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1604459687372-H3MKTQSOH4KJSQ50BSXJ/Transparency+Politics.png</image:loc>
      <image:title>Transparency as a Political Choice - What is the relationship between the government and artificial intelligence? To unpack this timely question we interview Rumman Chowdhury and Mona Sloane.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/robot-regulation</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1605070879829-N326VU4GVEKWLKY8ALWD/Ryan+Calo.png</image:loc>
      <image:title>Robot Regulation - What is robot regulation and why does it matter? To answer this question we welcome to the show Ryan Calo.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/robot-relationships</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1606840000365-VXM7EY8GUD2VE5T3ZYEH/Kate.png</image:loc>
      <image:title>Kate Darling - Have you ever seen a robot and called it cute? Have you ever seen a drone and felt afraid? Have you ever apologized to Siri or yelled at your Roomba to get out of the way? Have you ever named your car? Our relationships with robots are complex and messy. To explore this topic, we interview Kate Darling, a leading expert in Robot Ethics and a Research Specialist at the MIT Media Lab.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/industry-ai-ethics</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1606921573164-NVF91G42QJQHUT05J7OY/Kathy.png</image:loc>
      <image:title>Kathy Baxter - What do you need to know about AI Ethics in the tech industry? To explore this question we welcome Kathy Baxter to the show.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/black-in-ai-academic-program</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1607494452613-T0CJGDAU27ZHISWKNK78/Moses.png</image:loc>
      <image:title>Black in AI Academic Program - In this episode, we interview Moses Namara of Black in AI about the new Black in AI Academic Program, a program that serves as a resource to support Black junior researchers as they apply to graduate programs, navigate graduate school, and enter the postgraduate job market.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/measurementality</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-12-20</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1610919959546-TSDF072GTL9J89W821PS/BW_+webinars+hosted+by+john+c.+havens.png</image:loc>
      <image:title>Measurementality</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1610921927352-6WM89GBUQ6FQ4Y29B6YU/BW_+Reports+hosted+by+john+c.+havens.png</image:loc>
      <image:title>Measurementality</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1610920147406-7WLOSACUEJHZCF25SYER/image-asset.jpeg</image:loc>
      <image:title>Measurementality - “Unless we know what counts in other people’s lives then we cannot assume what we build will honor them.”</image:title>
      <image:caption>— John C. Havens Read the full Measurementality launch article here</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/ability-accessibility-ai</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1611152969835-TB2TL1F3VS667J567JHG/Meredith+Ringel+Morris.png</image:loc>
      <image:title>Meredith Ringel Morris - What should you know about Ability and Accessibility in AI and responsible technology development? In this episode we interview Meredith Ringel Morris.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/anti-trust</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-29</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1612969606491-9GZ3I1ZBJNF0ZQ91DE5V/Anna+Lenhart.png</image:loc>
      <image:title>Anti-Trust: Congress and the Tech Lobby with Anna Lenhart - What should you know about Anti-Trust regulation nationally and internationally? How does the tech sector drive policy? In this episode we interview Anna Lenhart</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/social-inequality-digital-economy</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1614265598366-TPMKFRQKZCCM87HL7ZW3/Zanele+Munyikwa.png</image:loc>
      <image:title>Social Inequality &amp; The Digital Economy - How does the Digital Economy perpetuate social inequality? In this episode we interview Zanele Munyikwa to explore this topic.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/your-computer-is-on-fire</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-03-16</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1615351685793-ULKU0KFLF4YT49NQ1JDT/computer+is+on+fire.png</image:loc>
      <image:title>Your Computer is on Fire - How do we challenge techno-utopianism? How do we dismantle systems of oppression in technology? To answer these questions and more we welcome to the show two editors of the new collection from MIT Press Your Computer is on Fire, Mar Hicks and Kavita Philip.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/bias-in-nlp</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-04-07</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1617153752655-KZE9R06KPQF44QIP3L71/Copy+of+Zanele+Munyikwa.png</image:loc>
      <image:title>Su Lin Blodgett - How do we define bias? Is all bias the same? Is it possible to eliminate bias completely in our AI systems? Should we even try? To answer these questions and more we welcome to the show Su Lin Blodgett.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/atlas-of-ai</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-04-07</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1617755511777-92DSRFZKZ1JODUROB1M8/Kate+Crawford.png</image:loc>
      <image:title>Kate Crawford - What is the Atlas of AI? Why is it important? How is AI an industry of extraction? How is AI impacting the planet? What can be done? To answer these questions and more we welcome to the show Dr. Kate Crawford to discuss Kate's new book Atlas of AI: Power, Politics, and the Planetary Costs of Artificial Intelligence.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/cynthia-bennett</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-05-06</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1619022625283-4BQ9HKHG03WXUOI93DQK/Cynthia+Bennett.png</image:loc>
      <image:title>Cynthia Bennett - How can we center the lived experiences and creativity of people with disabilities in the design of our technology? On this week's episode we welcome Cynthia Bennett to the show.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/killer-robots</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-05-06</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1620060867655-X8IC02WD65P2OAXW39MI/Steven+Umbrello.png</image:loc>
      <image:title>Steven Umbrello - What is Value Sensitive Design and how can it inform the development and deployment of killer robots and autonomous weapon systems? On this week's episode we welcome Steven Umbrello to the show.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/technology-power-curriculum</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2022-08-17</lastmod>
  </url>
  <url>
    <loc>https://www.radicalai.org/decentralizing-ai</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-09-13</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1622133924584-L2QIBA6KU8PXXXBYVH8F/Divya+Siddarth.png</image:loc>
      <image:title>Divya Siddarth - What is decentralized AI and how and why should we design and implement it? To answer this question and more in this episode we interview Divya Siddarth about decentralizing AI, democratization, and how we can utilize the logic of social movements to influence our technology design.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/feminist-ai</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-09-13</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1622648434413-8FL64SU1L9WR9PTJUDGA/Good+Robot+%281%29.png</image:loc>
      <image:title>Feminist AI 101 - What is Feminist AI and how and why should we design and implement it? To answer this question and more in this episode we interview Eleanor Drage and Kerry Mackereth about the ins and outs of Feminist AI.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/casteist-tech</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-09-13</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1624025967485-RPI5S3IS1WIVR57UN0EL/Casteist+Tech+%281%29.png</image:loc>
      <image:title>Casteist Technology and Digital Brahminism - What is casteism and how does it play a role in Silicon Valley and technology development? What is digital brahminism? How do these impact and harm individuals and communities?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/indigenous-ai</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-09-13</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1631029506357-XH5IRJMYFU0JSYV1XAWJ/Jason+Edward+Lewis.png</image:loc>
      <image:title>Indigenous AI 101 - What is Indigenous AI and how might it drive our technology design and implementation?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/mental-health-ai</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-11-09</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1633493133640-DPQH5UXRR231PO9OFYVL/Stevie+Chancellor.png</image:loc>
      <image:title>Predicting Mental Illness Through AI - How is AI used to predict mental illness? What are the benefits and challenges to its use?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/ai-failure</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-11-09</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1634308308667-VSFSJ9AGWP5RGSERA8BM/AI+Today.png</image:loc>
      <image:title>What causes AI to fail? - What causes AI to fail from a business/industry perspective and beyond? What metrics are used to measure and indicate failure? And how can we improve the field of AI by learning from these failures?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/design-justice</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-11-09</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1635911442363-7FXYD4DIHU2B64AYUP8G/Design+Justice.png</image:loc>
      <image:title>Design Justice 101 - What is Design Justice? How can we employ it to disrupt power systems supporting the matrix of domination?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/decolonial-ai-101</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2021-12-08</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/0905e3f9-4409-4270-bbb0-244fc604e685/Decolonial+AI+101.png</image:loc>
      <image:title>Decolonial AI 101 - What is Decolonial AI? How can we apply a postcolonial lens to AI design?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/digital-closet</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2022-04-28</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1651010205550-V2BBN6GD9Y0NXCDHCSUZ/Let%27s+talk+about+sex.png</image:loc>
      <image:title>Let's Talk About Sex - What is the history of digital pornography? How do algorithms perpetuate LGBTQIA+ content censorship? What is the role that content moderation and corporate ownership plays in perpetuating misogyny and heteronormativity?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/data-viz</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-03</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/a686801c-a9ad-4715-a176-9616ecb94153/Visualizing+our+Lives+%281%29.png</image:loc>
      <image:title>Visualizing Our Lives Through Data with Jaime Snyder - How do we see ourselves in data? What is self-tracking and how can we design for visualizing the data of our bodies and mental health? How do we make visualized data more accessible?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/decolonial-digital-mental-health</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-03</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/531271d1-6c22-4d07-86e9-0ab6dfbfdb47/Decolonial+Digital+Mental+Health+%281%29.png</image:loc>
      <image:title>Decolonializing Digital Mental Health - In this episode we have a panel discussion about decolonial digital mental health with three leading experts on the topic: Sachin Pendse, Munmun De Choudhury, and Neha Kumar</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/government-using-ai</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-02</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1658877213635-ZRLNXY5H5IEED3H1F96P/Shion+Guha.png</image:loc>
      <image:title>Should the Government Use AI? - How does the government use algorithms? How do algorithms impact social services, policing, and other public services? And where does Silicon Valley fit in?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/digital-lethargy</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-02</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/88af9bad-b0b3-459b-a43a-8035527573c8/Tung-Hui+%281%29.png</image:loc>
      <image:title>Digital Lethargy - What is Digital Lethargy? How can we adapt to an age of disconnection? How can art act as a force of resistance?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/data-privacy-womens-rights</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-02</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1664375098686-VHDDBHKAQJB5XIUVXLD7/Rebecca+Finlay.png</image:loc>
      <image:title>Data Privacy and Women's Rights - What is the reality of data privacy after the overruling of Roe v. Wade?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/stay-safe-online</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-07</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1666794294486-UL3T2YD9AYYHVB072QRX/Seyi+Akiwowo.png</image:loc>
      <image:title>Stay Safe Online - How can technology be designed to fight online abuse and harassment? What is the difference between cancel culture and appropriate accountability? How can you stay safe online?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/data-senses</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-07</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/4b0c5fbb-0d29-4524-85a6-62653d73e86a/Screen+Shot+2022-11-30+at+8.42.45+AM.jpg</image:loc>
      <image:title>Data Senses - What can our senses teach us about data? What can data teach us about our senses?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/chatgpt</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-07</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1674623287727-E05AM4C9D1VQUI89VBNZ/ChatGPT.png</image:loc>
      <image:title>ChatGPT - This recent natural language chatbot has been getting ALL the hype. In this episode we interview Deep Dhillon about the ins and outs of ChatGPT!</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/chatgpt-limitations</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-01</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1677607847822-MSPL6PUIACGTMY28OF0S/Limitations+of+ChatGPT.png</image:loc>
      <image:title>Limitations of ChatGPT - In this episode, we unpack the limitations of ChatGPT. We interview Dr. Emily M. Bender and Dr. Casey Fiesler about the ethical considerations of ChatGPT, bias and discrimination, and the importance of algorithmic literacy in the face of chatbots.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/more-than-a-glitch</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-03-29</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1679494049784-51Y35G401CA7CLE5PQAB/More+than+a+glitch.png</image:loc>
      <image:title>More Than a Glitch - In this episode, we discuss Meredith Broussard&apos;s influential new book, More than a Glitch: Confronting Race, Gender, and Ability Bias in Tech – published by MIT Press.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/twitter-vs-mastodon</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-04-26</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/1682452374532-HCHROOK6IX4XDRZ9OAZK/Twitter+vs.+Mastodon.png</image:loc>
      <image:title>Twitter vs. Mastodon - In this episode, we interview Dr. Jonathan Flowers about the 101 of Twitter vs. Mastodon, the power dynamics of the fediverse, and potential paths forward in our digital lives.</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://www.radicalai.org/rai-goodbye</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2023-08-19</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/5e9884bbf25f9a7326953521/2af28138-92b7-46e2-8425-24dea3581557/RAI+Goodbye.png</image:loc>
      <image:title>RAI Goodbye - The Radical AI Podcast has unfortunately reached its end. In this episode, Dylan and Jess say goodbye to the podcast and thank you listeners for your unwavering support throughout the years.</image:title>
    </image:image>
  </url>
</urlset>

