<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:image="http://www.google.com/schemas/sitemap-image/1.1" xmlns:xhtml="http://www.w3.org/1999/xhtml">
  <url>
    <loc>https://orcaarisk.com/articles</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-02-26</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2020/2/26/ceo-cathy-oneils-letter-to-the-house-committee-on-financial-services</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-02-26</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2017/1/24/what-is-a-data-audit</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2017-01-24</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/10/12/on-measuring-disparate-impact</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-12</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/10/12/sentencing-more-biased-by-race-than-by-class</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2017-01-25</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476300136470-IMMOLVDA5VW7ZSUNS7NA/image-asset.png</image:loc>
      <image:title>Articles - Sentencing more biased by race than by class</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/10/12/propublica-report-recidivism-risk-models-are-racially-biased</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2017-01-25</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476300042447-0B3SC1S0IXC90ARD5OO3/image-asset.png</image:loc>
      <image:title>Articles - ProPublica report: recidivism risk models are racially biased</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/10/12/algorithms-are-as-biased-as-human-curators</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-12</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/10/12/todd-schneiders-medium-data</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-12</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476299784355-QUKYIB8ZJIWWAS020PNN/image-asset.png</image:loc>
      <image:title>Articles - Todd Schneider's "medium data"</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/10/12/the-shsat-matching-algorithm</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-12</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/10/12/do-charter-schools-cherrypick-students</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-12</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476299439502-IEHEK3TOYT10OHN12DYX/image-asset.png</image:loc>
      <image:title>Articles - Do Charter Schools Cherrypick Students?</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476299473424-3MPYAVMTAM3S6GIVGYF4/image-asset.png</image:loc>
      <image:title>Articles - Do Charter Schools Cherrypick Students?</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476299556920-Z2FHF779I75CCMKP0ACS/image-asset.png</image:loc>
      <image:title>Articles - Do Charter Schools Cherrypick Students?</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476299582526-4EPDSZ0P009X8JVQJ58U/image-asset.png</image:loc>
      <image:title>Articles - Do Charter Schools Cherrypick Students?</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476299352026-VIG2ZWW7A8AKJH2JDCXK/image-asset.png</image:loc>
      <image:title>Articles - Do Charter Schools Cherrypick Students?</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476299317019-66AMHZL5AS7M18CKZSVU/screen-shot-2015-11-19-at-11-06-13-am.png</image:loc>
      <image:title>Articles - Do Charter Schools Cherrypick Students?</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/10/12/debiasing-techniques-in-science</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-12</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1476299657571-69YVWPN9JDPV5IOG5DCU/image-asset.jpeg</image:loc>
      <image:title>Articles - Debiasing techniques in science</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/9/21/orcaas-mission</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-10</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/9/21/blogum-blormum</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-10</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/2016/9/21/ipsummormon</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2016-10-10</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/articles/category/Speaking</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://orcaarisk.com/welcome</loc>
    <changefreq>daily</changefreq>
    <priority>1.0</priority>
    <lastmod>2026-01-28</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1474469975510-1A6KJ33QMSZ1W8ODQDEC/neuralnet_stockimage.jpg</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1474471179182-0U50P98M6IRHQJDR2P4J/services-background.jpg</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1565717725495-E7GD2RWV4M5351161TU3/Copy%2Bof%2BCathy%2BO_Neil%2B%2528125%2529-Full%2BSize.jpg</image:loc>
      <image:title>Welcome - Cathy O’Neil</image:title>
      <image:caption>CEO Cathy has been an independent data science consultant since 2012 and has worked for clients including the Illinois Attorney General’s Office and Consumer Reports. She wrote the book Doing Data Science in 2013 and Weapons of Math Destruction: How Big Data Increases Inequality And Threatens Democracy, released in September 2016.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1565717162551-3CGW8HB5FIV5NHB2D47V/DSC0085wcurversw_crop2_700.jpeg</image:loc>
      <image:title>Welcome - Tom Adams</image:title>
      <image:caption>COO and General Counsel Thomas Adams has over twenty-five years of business and legal experience. He has represented banks, companies and individuals on corporate, securities and business law matters. He also provided strategic advice, litigation support and expert witness testimony on issues relating to the financial crisis. Mr. Adams is an expert in creating solutions and solving problems for complex financial and corporate transactions and has provided strategic advice and analysis to banks, insurance companies, private equity companies, hedge funds and a variety of other companies. He graduated from Fordham Law School in 1989 and Colgate University in 1986. He is admitted to practice in New York.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1624279801369-EAYUCV88PO0EMAGH8AA4/IMG_5132.jpg</image:loc>
      <image:title>Welcome - Jacob Appel</image:title>
      <image:caption>Chief Strategist Jake is ORCAA’s Chief Strategist. He conducts algorithmic audits, and specializes in designing tests and analyses to assess the performance of algorithms and their impacts on stakeholders. Before joining ORCAA he worked with the Behavioral Insights Team, where he advised state and local governments on incorporating behavioral science “nudges” into citizen-facing policies and programs, and testing them with randomized experiments. Jake received a BS in mathematics from Columbia University and an MPA from Princeton School of Public and International Affairs. He coauthored two books: More Than Good Intentions: How a new economics is helping to solve global poverty, and Failing in the Field: What we can learn when field research goes wrong.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1619051555871-RPCCD6VZGDGC4PBRB7LA/MeredithBroussard3_by_Lucy_Baber.jpeg</image:loc>
      <image:title>Welcome - Meredith Broussard</image:title>
      <image:caption>Affiliate Data journalist Meredith Broussard is an associate professor at the Arthur L. Carter Journalism Institute of New York University, research director at the NYU Alliance for Public Interest Technology, and the author of “Artificial Unintelligence: How Computers Misunderstand the World.” Her academic research focuses on artificial intelligence in investigative reporting and ethical AI, with a particular interest in using data analysis for social good. She appeared in the 2020 documentary Coded Bias, an official selection of the Sundance Film Festival that was nominated for an NAACP Image Award. She is an affiliate faculty member at the Moore Sloan Data Science Environment at the NYU Center for Data Science, a 2019 Reynolds Journalism Institute Fellow, and her work has been supported by New America, the Institute of Museum &amp; Library Services, and the Tow Center at Columbia Journalism School. A former features editor at the Philadelphia Inquirer, she has also worked as a software developer at AT&amp;T Bell Labs and the MIT Media Lab. Her features and essays have appeared in The Atlantic, The New York Times, Slate, and other outlets.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1624278503665-4FM6D0MY11QDPRLELG67/Sherry.jpeg</image:loc>
      <image:title>Welcome - Şerife (Sherry) Wong</image:title>
      <image:caption>Affiliate Şerife (Sherry) Wong is an artist and founder of Icarus Salon, an art and research organization exploring the societal implications of emerging technology. She is a researcher at the Berggruen Institute where she focuses on the data economy for the Transformations of the Human program, serves on the board of directors for Digital Peace Now, and is a member of Tech Inquiry. She has been a resident on artificial intelligence at the Rockefeller Foundation Bellagio Center, a jury member at Ars Electronica for the European Commission, and frequently collaborates on AI governance projects with the Center for Advanced Study in the Behavioral Sciences at Stanford. Previously, she created the Impact Residency at Autodesk’s Pier 9 Technology Center where she worked with over 100 leading creative technologists exploring the future of robotics, AR/VR, engineering, computer-aided machining, and machine learning for product development, and worked at the Electronic Frontier Foundation.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1631019745082-98H782Q7SLIOL6D2K632/imagejpeg_0.jpg</image:loc>
      <image:title>Welcome - Betty O’Neil</image:title>
      <image:caption>Affiliate Betty O’Neil (really Elizabeth) is a computer scientist specializing in database internals, and is also interested in how computers can be used to make the world a better place. Like her daughter Cathy, she earned a PhD in Mathematics (Applied in her case) at Harvard to get started. She was a professor at the University of Massachusetts Boston for many years, and now is joining ORCAA’s efforts in using data science in socially responsible ways. She is a co-author of a graduate database textbook. As a database internals expert, she has helped implement several important databases, including Microsoft SQL Server (two patents in 2001), and more recently, Stonebraker’s Vertica and VoltDB. She is a lifelong nerd and can program anything.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/07fced90-697a-43dc-96a4-9856d1598bed/00100dPORTRAIT_00100_BURST20180927143729035_COVER+%281%29+%283%29+%281%29.jpg</image:loc>
      <image:title>Welcome - Deborah Raji</image:title>
      <image:caption>Affiliate Deborah Raji is an affiliate at ORCAA. She has worked closely with the Algorithmic Justice League initiative, founded by Joy Buolamwini of the MIT Media Lab, on several award-winning projects to highlight cases of bias in facial recognition. She was a mentee in Google AI’s flagship research mentorship cohort, working with their Ethical AI team on various projects to operationalize ethical considerations in ML practice, including the Model Cards documentation project, and SMACTR internal auditing framework. She was also recently a research fellow at the Partnership on AI, working on formalizing documentation practice in Machine Learning through their ABOUT ML initiative, as well as pushing forward benchmarking and model evaluation norms. She is a Mozilla fellow and was recently named as one of MIT Tech Review’s 35 Innovators Under 35. She is currently pursuing a Ph.D in Computer Science at UC Berkeley.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/043e2813-d89c-4c86-820b-6ef5c1b144b4/75-754884_female-silhouette-headshot-hd-png-download.jpg</image:loc>
      <image:title>Welcome - Anna Zink</image:title>
      <image:caption>Affiliate Anna Zink is a principal researcher at Chicago Booth's Center for Applied AI where she works on their algorithmic bias initiative. Her research is focused on algorithmic fairness applications in health care, including the evaluation of risk adjustment formulas used for health plan payments. Before receiving her PhD in Health Policy from Harvard University, she worked as a data analyst at Acumen, LLC. where, among a small team of analysts, she partnered with the Department of Justice on cases of Medicare fraud, waste, and abuse and helped develop fraud surveillance methods.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/543cb504-819b-41ed-b69a-032ae3a2df24/EmmaPierson.jpg</image:loc>
      <image:title>Welcome - Emma Pierson</image:title>
      <image:caption>Affiliate Emma Pierson is an assistant professor of computer science at the Jacobs Technion-Cornell Institute at Cornell Tech and the Technion, and a computer science field member at Cornell University. She holds a secondary joint appointment as an Assistant Professor of Population Health Sciences at Weill Cornell Medical College. She develops data science and machine learning methods to study inequality and healthcare. Her work has been recognized by best paper, poster, and talk awards, an NSF CAREER award, a Rhodes Scholarship, Hertz Fellowship, Rising Star in EECS, MIT Technology Review 35 Innovators Under 35, and Forbes 30 Under 30 in Science. Her research has been published at venues including ICML, KDD, WWW, Nature, and Nature Medicine, and she has also written for The New York Times, FiveThirtyEight, Wired, and various other publications.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/eedfb962-11e2-4b3b-8be9-a822a73cf363/Shamus+Photo+Preference.jpg</image:loc>
      <image:title>Welcome - Shamus Khan</image:title>
      <image:caption>Affiliate Shamus Khan is Willard Thorp professor of sociology and American Studies at Princeton University. He is the author of over 100 articles, books, and essays, including Privilege: The Making of an Adolescent Elite at St. Paul’s School, and Sexual Citizens: Sex, Power, and Assault on Campus (with Jennifer Hirsch), one of NPR’s best books of 2020. He writes regularly in the New York Times and Washington Post. He has been awarded Columbia University’s highest teaching honor, the Presidential Teaching Award (2016), and the Zetterberg Prize from the Upsala University for “the best sociologist under 40” (2018).</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/ede5c6c4-5bef-4044-ac09-f0bf575caaea/Velasco_Kristopher_knv346_g-stdnt_2020-5-2.jpg</image:loc>
      <image:title>Welcome - Kristopher Velasco</image:title>
      <image:caption>Affiliate Kristopher Velasco (he/him/his) is an Assistant Professor in the Department of Sociology at Princeton University. Kristopher’s research is driven by one overarching question: how do organizations and institutions facilitate social and cultural change? He addresses this question by focusing on changing understandings of gender and sexuality and the backlash this invites. Kristopher has received awards and grants for his research from the American Sociological Association, American Political Science Association, International Studies Association, Academy of Management, the Ford Foundation, and the National Science Foundation. Kristopher received his B.A. from the University of Kansas and M.A. and Ph.D. from the University of Texas at Austin.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/faf5ed51-e5ed-4180-b375-3093435f5aa2/Adam_Becker_headshot.jpg</image:loc>
      <image:title>Welcome - Adam Becker</image:title>
      <image:caption>Affiliate Adam Becker is a science journalist and author with a PhD in astrophysics. His most recent book, More Everything Forever, dismantles tech billionaires’ grandiose delusions about the future of AI and humanity. In addition to his books, Adam has written for The New York Times, The Atlantic, the BBC, NPR, Fortune, Scientific American, Quanta, New Scientist, and many other publications. He has been a science journalism fellow at the Santa Fe Institute and a science communicator in residence at the Simons Institute for the Theory of Computing. He lives in California.</image:caption>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1649791944908-OZ2P2J521FLTRHAS8RTQ/DORA.png</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1610468105777-UQ4ZPJ825F6QHA2VW7XL/HireVue-logo-color-200.png</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1598967793861-RRY02VB2EYD7PH5XAAPP/ABB_white.jpg</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1598967416771-APV4GOE0RYE9VDF2XAI0/WA+DOL.png</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1598967415648-U6CB86K8FJRIOESNNJ5L/IL+AG.jpg</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1598968570466-3GT5L0HLIT7UCW1ET8HZ/Rentlog2.jpg</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1598967414895-6KFD81CVLFRDUWG9FJUY/Amsterda.jpg</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1598967414715-JKE2A67OYM45UVHUDV2J/Siemens-Logo.png</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1598967415030-4L2L8HCG5P6476ZMYTND/ConsRep.png</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1686769243559-7VPUK2USSIQIN2NJSDAN/DISB.jpeg</image:loc>
      <image:title>Welcome</image:title>
    </image:image>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/5e7c6e8c-8cc6-4e18-bb04-3bb91bb51577/Explainable+Fairness+process+diagram+%2817%29.jpg</image:loc>
      <image:title>Welcome - Make it stand out</image:title>
      <image:caption>Whatever it is, the way you tell your story online can make all the difference.</image:caption>
    </image:image>
  </url>
  <url>
    <loc>https://orcaarisk.com/events</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2019-10-18</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/events/2019/12/10/algorithms-and-you</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-10-18</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/events/2019/10/10/ted-talk</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-10-18</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2024-12-04</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2024/12/4/uber-publishes-orcaas-report-on-its-ai-governance</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2024-12-04</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2024/6/11/featured-article-in-mit-sloan-management-review</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2024-06-11</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2024/2/8/orcaa-joins-the-us-ai-safety-institute</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2024-02-08</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/12/20/bisg-conference-summary-paper-now-available</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-12-20</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/12/20/air-covers-orcaa-octograms-infer-service-for-insurers</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-12-20</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/10/31/cathy-oneil-at-nist-workshop-on-measuring-ai-risks</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-10-31</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/10/30/cathy-oneil-at-ai-insights-forum</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-10-30</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/7/28/jacob-appel-in-wired-big-techs-voluntary-safety-pledges-are-not-enough</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-07-28</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/7/7/axios-reports-on-nyc-bias-audit-law-quoting-jacob-appel</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-07-07</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/7/6/nbc-quotes-cathy-oneil-as-nyc-bias-audits-law-comes-into-effect</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-07-06</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/6/14/announcing-infer-a-new-offering-to-address-unfair-bias-in-insurance</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-06-14</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/5/8/orcaa-and-octagram-at-cas</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-05-08</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/2/15/why-the-colorado-draft-ai-insurance-rules-are-a-big-deal</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-02-15</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2023/2/15/venturebeat-colorado-draft-ai-insurance-rules-are-a-major-leap-forward</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2023-02-15</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2022/10/7/vox-quotes-orcaa-on-white-house-ai-ethics-blueprint</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2022-10-07</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2022/9/12/politico-5-questions-for-cathy-oneil</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2022-09-12</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2022/6/7/orcaa-is-helping-dc-disb-test-for-unintentional-bias-in-auto-insurance</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2022-06-07</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2022/4/12/launch-of-our-work-with-colorados-division-of-insurance</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2022-04-12</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2022/2/23/lexisnexis-features-cathy-oneil-on-new-algorithmic-auditing-laws</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2022-02-23</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/11/18/cathy-oneil-in-the-washington-post-a-chronological-feed-wouldnt-fix-facebook</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-11-18</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/11/17/theguardian-features-cathy-oneil-and-meredith-broussard-on-problematic-policing-technology</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-11-17</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/11/17/fivethirtyeight-features-cathy-oneils-hot-take-on-how-to-fix-facebook</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-11-17</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/10/28/senate-testimony-cathy-oneil-on-social-media-algorithms</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-10-28</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/10/11/bloomberg-opinion-facebooks-algorithms-are-too-big-to-fix</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-10-11</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/10/11/bloomberg-opinion-facebooks-instagram-research-isnt-anything-like-science</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-10-11</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/9/22/our-audit-of-olays-skin-advisor-is-live</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-10-11</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/9/22/the-social-dilemma-wins-two-emmys</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-09-22</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/9/15/forbes-covers-our-audit-with-olay</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-09-22</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/9/15/adexchanger-orcaas-audit-helps-olay-decodethebias</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-09-22</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/5/27/cathy-oneil-in-the-new-yorker-is-online-test-monitoring-here-to-stay</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-05-27</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/2/26/fast-company-the-equitable-tech-movement</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-02-26</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/2/2/bloomberg-opinion-to-protect-consumers-watch-the-finance-algorithms</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-02-02</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/1/25/ceo-cathy-oneil-on-morningbrew-whats-the-single-biggest-obstacle-facing-ai-and-algorithms-in-the-next-five-years</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-01-25</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/1/19/fortune-hirevue-drops-facial-monitoring-amid-ai-algorithm-audit</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-01-19</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2021/1/12/orcaas-audit-of-hirevue-is-live</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2021-01-12</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2020/9/9/bloomberg-opinion-mutant-algorithms-are-coming-for-your-education</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-09-09</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2020/6/19/bloomberg-opinion-heres-an-algorithm-for-defunding-the-police</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-06-19</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2020/6/19/thomson-reuters-foundation-news-in-a-us-first-california-city-set-to-ban-predictive-policing</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-06-19</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2020/4/20/bloomberg-opinion-this-isnt-the-flattened-curve-we-were-promised</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-20</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2020/4/20/bloomberg-opinion-the-covid-19-tracking-app-wont-work</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-20</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2020/4/20/bloomberg-opinion-10-reasons-to-doubt-the-covid-19-data</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-04-20</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2020/2/26/orcaa-to-house-committee-on-financial-services-proposed-rule-creates-loopholes-for-biased-algorithms</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-02-26</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2020/2/12/algorithmwatch-lawmakers-should-provide-rule-based-descriptions-of-what-it-means-not-to-be-racist</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2020-02-12</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2019/11/26/biased-algorithms-biased-world</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-11-26</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2019/11/19/wired-the-apple-card-didnt-see-genderand-thats-the-problem</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-11-19</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2019/11/13/slate-its-this-invisible-system-of-harm-interview-with-cathy-oneil</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-11-13</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2019/11/15/cathy-oneil-on-weapons-of-math-destruction-how-big-data-threatens-democracy</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-11-15</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2019/9/23/how-bias-is-built-into-algorithms</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-11-15</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2018/5/18/this-logo-is-like-an-organic-sticker-for-algorithms</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-11-15</lastmod>
    <image:image>
      <image:loc>https://images.squarespace-cdn.com/content/v1/57e296e703596e711035d689/1573847746680-2K6WVPHS7ODFMAXZUJ21/p-1-is-a-badge-of-algorithmic-fairness-the-next-certified-organic-1.jpg</image:loc>
      <image:title>In the News - Wired - This logo is like an “organic” sticker for algorithms</image:title>
    </image:image>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/2019/11/15/mit-review-this-company-audits-algorithms-to-see-how-biased-they-are</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
    <lastmod>2019-11-15</lastmod>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/tag/wired</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/tag/MIT+Review</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/tag/video</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/tag/interview</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://orcaarisk.com/in-the-news/tag/slate</loc>
    <changefreq>monthly</changefreq>
    <priority>0.5</priority>
  </url>
  <url>
    <loc>https://orcaarisk.com/careers</loc>
    <changefreq>daily</changefreq>
    <priority>0.75</priority>
    <lastmod>2020-01-03</lastmod>
  </url>
</urlset>

