Interoperability in a Heterogeneous Team of Search and Rescue Robots
There are nowadays many different types of unmanned systems in use across different domains and, certainly, their number will increase significantly in the upcoming years. In general, large-scale systems aiming to solve every problem with a single type of platform have proven to be expensive and not flexible enough. Heterogeneous teams, composed of unmanned air, ground, surface and underwater systems (UxS) of different types and sizes, offer the possibility to exploit the best features of each kind and combine them into compound capabilities, which have proven to be more cost-efficient and adaptable to new scenarios. Recent research efforts have focused on developing the autonomy of the team by increasing the interactions between these systems, making them aware of each other, executing tasks that require cooperation, and finally implementing flock or swarm coordinated behaviours.
The ICARUS project involves a team of assistive unmanned air, ground and sea vehicles for search and rescue operations. In order to effectively support the on-site person responsible for the operations, these systems must be able to collaborate as a seamlessly integrated team, coordinated from the ICARUS Robot Command and Control station (RC2) in the field.
A heterogeneous fleet is one composed of elements of different kinds, such as the ICARUS team, which includes up to ten different vehicles (long-endurance fixed-wing, outdoor multi-rotor, indoor multi-rotor, large UGV, small UGV, Teodor UGV, U-ranger USV, ROAZ USV, MARES AUV and several rescue capsules). Each robot has been developed by a different provider or partner, using its own design, framework and middleware. Thus, a strong effort had to be devoted to their integration as a team, and this is the work described in this chapter. Although many standards have been proposed by the community, most field robotic systems have their own command and reporting protocols and consequently require their own ground control stations. This profusion of protocols makes cooperation between systems difficult. The lack of unified standards poses an unnecessary burden on the operation and maintenance of multi-vehicle systems. The work described in this chapter aims to contribute to the harmonization of the multiple standardization initiatives for the coordination of heterogeneous teams.
The ultimate objective of the ICARUS project is to achieve robot interoperability, which can be understood as the ability of robots to operate in synergy in the execution of assigned missions. Interoperability enables diverse teams to work together, sharing data, intelligence and resources.
Interoperability acts as the glue among the different units within the team, enabling efficient multi-robot cooperation. Seamless and non-ambiguous interaction between robots from any provider and domain demands a common, well-defined interface.
ICARUS proposes the adaptation of all the vehicles to a single standard external interface as a method to ensure interoperability. Each robot development team is free to use its own tools inside its systems as long as the interaction with the rest of the team follows a set of definitions and rules referred to as the interoperability standard. This follows the façade pattern [1], frequently used in software engineering: it hides the complexities of the implementation and provides the outer components with a simpler interface. It is typically deployed as a software library implementing a wrapper or adapter template. On one side, this library implements the interoperability standard interface; on the other side, it provides a set of classes and functions (an API) for its integration with the specific middleware or software provided by each platform.
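To illustrate the adapter idea, the minimal sketch below shows how a platform-specific driver could be hidden behind a common interoperability interface. The class and method names are hypothetical and are not taken from the actual ICARUS library; the snippet only sketches the façade/wrapper pattern described above.

```python
# Minimal sketch of the facade/adapter idea (hypothetical names, not the ICARUS API).
from abc import ABC, abstractmethod


class InteroperabilityInterface(ABC):
    """Common external interface that every platform exposes to the team."""

    @abstractmethod
    def report_pose(self) -> dict:
        """Return the latest pose in the agreed reference frame."""

    @abstractmethod
    def execute_waypoints(self, waypoints: list) -> None:
        """Accept a waypoint list expressed in the standard message format."""


class ExampleUsvAdapter(InteroperabilityInterface):
    """Adapter wrapping a vendor-specific driver behind the common interface."""

    def __init__(self, native_driver):
        self._driver = native_driver  # handle to the vendor middleware (assumed API)

    def report_pose(self) -> dict:
        # Translate the native telemetry structure into the standard report.
        x, y, heading = self._driver.get_local_position()  # assumed vendor call
        return {"x": x, "y": y, "heading": heading}

    def execute_waypoints(self, waypoints: list) -> None:
        # Translate standard waypoints into vendor-specific navigation commands.
        for wp in waypoints:
            self._driver.goto(wp["x"], wp["y"])  # assumed vendor call
```

In this arrangement the ground station only ever talks to the common interface, while each provider implements the adapter against its own middleware.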
This standard-interface approach may initially seem to reduce the level of integration among the agents when compared with natively sharing an internal protocol across all systems, but it promotes maximum decoupling between the custom implementations, with their particularities, and the definition of the common interface. In the long term, this has been shown to improve the seamless integration of the maximum number of systems and domains at a lower cost. The integration of new platforms into the team has literally been done in a matter of a few hours during the project, provided that on-board hardware resources and communications are made available by the robot provider.
Therefore, the ultimate goal of the work on the heterogeneous team is to consolidate a common command, control and payload interface to be agreed and adopted by all robotic platforms and control ground stations (CGS) involved in an ICARUS operation. This approach provides a common framework for the development of collaborative unmanned assets, minimizing integration time and costs by avoiding ad-hoc implementations.
There are other advantages to using interoperability standards. The use of a widely accepted interface helps to integrate new technologies easily, with minor modifications to the existing systems. This facilitates the insertion of new technology for operational use in the field, as end-users rely on proven technology and the preliminary validation will focus only on de-risking the new developments. Another advantage of the use of standards is that it facilitates backwards and forwards compatibility between existing and future vehicles and CGS provided by different providers. This can help companies maximize the revenue from a specific product.
Our strategy in terms of interoperability is to build upon the existing body of work in the field, avoiding duplicating and re-inventing proven technology. During the initial steps of the work, the most relevant multi-domain interoperability protocols for unmanned systems were identified and evaluated against the ICARUS end-user requirements and foreseen scenarios. During this phase, several collaborations with other European [2] and NATO [3] initiatives, together with the organization of workshops involving end-users and stakeholders, were extremely relevant to gather good-quality information on the state of the art in the field.
One of the challenges in multi-robot, multi-domain interface standardization is to be able to embrace all types of systems, independently of their domain, particularities (i.e. size, operational modes, etc.) or constraints (i.e. computational resources, communication bandwidth, etc.). Therefore, in order to methodically evaluate the existing initiatives, an analysis of the ICARUS robot-specific interface control documents (ICDs) and functional specification documents (FSDs) was performed to generate what we refer to as the project interoperability needs. Any information that was domain- or platform-specific was removed from the analysis to ensure the level of abstraction required for standardization. Likewise, the needs were further developed through an analysis of other potential vehicles that could be integrated into the system in the future.
This set of needs has been formalized as an ontology. An ontology is 'an explicit, formal specification of a shared conceptualization' [4]. We use it to describe the set of concepts required to coordinate a multi-robot search and rescue operation. This includes concepts at different levels, from robots to systems, capabilities and sensors, and their relationships and assumptions. There have been previous and parallel efforts in this field. Namely, the IEEE Robotics and Automation Society (IEEE-RAS) created a working group named Ontologies for Robotics and Automation that aims at the definition of a core ontology for robotics and automation [5]. The work performed in ICARUS has a strict focus on heterogeneous multi-robot operations in search and rescue and, as such, it proposes an application-specific ontology, addressing the tasks and platforms involved in search and rescue missions.
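As a purely illustrative sketch of what such a formalization looks like, the fragment below encodes a few robot, capability and sensor concepts as typed relationships. The concept names are hypothetical examples, not an excerpt of the actual ICARUS ontology.

```python
# Illustrative ontology fragment as (subject, relation, object) triples.
# Concept names are hypothetical; they only mirror the kinds of concepts described in the text.
triples = [
    ("FixedWingUAV", "is_a", "AerialRobot"),
    ("AerialRobot", "is_a", "Robot"),
    ("FixedWingUAV", "has_capability", "AreaMapping"),
    ("FixedWingUAV", "has_capability", "VictimDetection"),
    ("VictimDetection", "requires_sensor", "ThermalCamera"),
    ("AreaMapping", "produces", "GeoReferencedMap"),
]


def capabilities_of(concept: str) -> set:
    """Collect the capabilities directly attached to a concept."""
    return {obj for subj, rel, obj in triples if subj == concept and rel == "has_capability"}


print(capabilities_of("FixedWingUAV"))  # e.g. {'AreaMapping', 'VictimDetection'}
```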
This analysis resulted in a description of the set of multi-domain concepts and relationships or messages commonly found in unmanned systems. Table 1 summarizes the key categories and provides some examples of interactions between systems.
| Category | Description and examples |
|---|---|
| Transport | Inter-process communication such as send, receive, broadcast, etc. |
| Commands | Generic accessors such as set, get, etc. for any standard concept |
| Management | Heartbeat, system status, clock synchronization, alarms, etc. |
| Telemetry | Pose and velocity reports in appropriate system coordinates, etc. |
| Telecontrol | Teleoperation, waypoint and mission management, etc. |
| Perception | Imagery, ranging, audio, etc. |
| Manipulation | Joint and end-effector control of robotic arms |
| Mapping | Maps, digital elevation models, point clouds |
| S&R intelligence | Sectors, disaster alerts, humanitarian information |

Table 1. Examples of the ICARUS ontology concepts organized by categories.
The complete ontology was used in a gap analysis for the evaluation of the existing standards, as described in Section 3.
A key concept that enables interoperability among the largest possible number of unmanned systems is the levels of interoperability (LoI). This concept is introduced by STANAG 4586 [6] and has been adopted in ICARUS and adapted for the purposes of our project. LoI defines the different degrees of compliance with the standard interface. It proposes a mechanism to account for the large variety of approaches and levels at which different systems can be integrated, accounting therefore for more integration strategies and combinations.
STANAG 4586 defines LoI as 'the platform, subsystem or sensor ability to be interoperable for basic types of functions related to unmanned systems'. These levels reflect the different degrees of control that a user has over the vehicle, the payload or both. These definitions have been adapted to our project as follows.
The levels of interoperability in ICARUS are therefore defined as shown in Table 2.
| Level of interoperability | Description |
|---|---|
| LoI 1 | Indirect receipt/transmission of telemetry, control and payload data: the UxV data are received from (or sent to) another source (another CGS, web server, etc.) |
| LoI 2 | Direct receipt/transmission of UxV telemetry and payload data, but without control authority over the vehicle |
| LoI 3 | Direct control and monitoring of the UxV, excluding launch and recovery. A dedicated control station keeps control of the safety-critical operations of the platform (i.e. take-off and landing, deployment and recovery, etc.) and hands it over to the CGS once ready for the mission |
| LoI 4 | Highest level of interoperability. The CGS has full control of the UxV |

Table 2. ICARUS definition of the levels of interoperability.
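In software, these levels lend themselves to an ordered enumeration so that a ground station can check whether a platform grants enough authority for a planned operation. The snippet below is a small sketch of that idea, not part of any standard.

```python
from enum import IntEnum


class LoI(IntEnum):
    """ICARUS levels of interoperability (higher value means more control authority)."""
    INDIRECT_DATA = 1       # LoI 1: data relayed through another source
    DIRECT_DATA = 2         # LoI 2: direct telemetry/payload, no control authority
    CONTROL_NO_LAUNCH = 3   # LoI 3: control except launch and recovery
    FULL_CONTROL = 4        # LoI 4: full control from the CGS


def can_task_directly(platform_loi: LoI) -> bool:
    # Direct mission tasking from the CGS requires at least LoI 3.
    return platform_loi >= LoI.CONTROL_NO_LAUNCH


print(can_task_directly(LoI.DIRECT_DATA))   # False
print(can_task_directly(LoI.FULL_CONTROL))  # True
```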
The levels of interoperability for each of the ICARUS systems are shown in Table 3.

| Robot | LoI 1 | LoI 2 | LoI 3 | LoI 4 | Notes |
|---|---|---|---|---|---|
| Long-endurance fixed-wing | | | X | | Take-off and landing procedures for the UAVs are handled by the proprietary control stations. The system is handed over to the ICARUS C2I once in the air |
| Outdoor multi-rotor | | | X | | |
| Indoor multi-rotor | | | X | | |
| Large UGV | | | | X | |
| Small UGV | | | | X | |
| U-ranger USV | X | | | | U-ranger is a highly equipped and extremely fast USV. Integration is done through its proprietary CGS for safety purposes |
| ROAZ USV | | | X | | ROAZ USV is primarily operated from a proprietary CGS. When the ICARUS mission starts, control is handed over to the ICARUS C2I |
| Rescue capsule | | | | X | |

Table 3. LoI for each of the robots in ICARUS.
ICARUS is, by design, a human-centred system. One of the most critical end-user requirements is to ensure that a member of the search and rescue team in the field always supervises the robot operations to guarantee safety and effectiveness. ICARUS robotic assets can generally be remotely controlled. Most of these systems also provide on-board autonomy modules that allow the operator to plan a mission to be autonomously executed by the system. This should help reduce the workload of the operator. However, in a realistic scenario, unexpected events are highly likely to occur, and intervention from the operator, such as manually overriding the mission execution, is often required. This increases the cognitive workload of the operator, leading to stress and potential mistakes, which are even more critical in the context of multi-robot operations.
Adjustable automation (AA) is the ability of a robot to behave autonomously and dynamically change its level of independence, intelligence and controllability to adapt to different tasks and scenarios [7]. AA presents advantages when dealing with communication delays, human workload and safety [8]. Having systems that can dynamically reduce or increase the level of automation running on board provides a more flexible and reliable system.
In ICARUS, AA is achieved by supporting multiple levels of automation in the robots, e.g. fully autonomous, guided by the operator, and fully controlled by the operator. The C2I also supports adjustable automation by automatically changing its display and control functions based on the relevance of the information, the current situation the robots encounter, and user preferences.
The level of automation of a robot is related to the degree of intervention of the human operator and other robots in the decision process. However, the fact that a robot is autonomous does not imply that it has to make all its decisions by itself. Different levels of automation and classifications have been described in the literature [9]. Specifically, Lacroix et al. [10] define five levels of automation according to the robot's responsibilities towards a fleet of robots (task allocation, mission coordination, etc.), which are mostly relevant for tightly coupled coordination. In ICARUS, the levels of automation are understood in terms of task execution and are reduced to essentially three modes, as shown in Table 4.
| Level of automation | Description |
|---|---|
| Level 1 | Teleoperation. No automation on board the robot. The robot is directly controlled by the operator |
| Level 2 | Semi-autonomous. Execution capabilities. The robot is able to manage partially ordered sequences of elementary tasks and to return the execution status of the tasks. An operator supervises the mission from the RC2 |
| Level 3 | Fully autonomous. Deliberative capabilities. Complex task requests are managed (task planning and scheduling) |

Table 4. ICARUS definition of the levels of automation.
An ICARUS platform can seamlessly carry out a given task at different automation levels, depending on the robot operator's choice, the mission plan priorities, and the workload and constraints of the mission and platform. As mentioned before, the concept of adjustable autonomy implies the ability to adapt and dynamically change between these levels of autonomy depending on situational changes. Some examples of adjustable autonomy within the context of ICARUS are:
A UAV may provide fully autonomous navigation in nominal conditions, but may fall back to semi-autonomous navigation in the presence of victims detected in the sensor stream.
The RC2 operator may have initially planned the mission to manually operate the outdoor multi-rotor to inspect a building, but the operation enters a highly complex area and the operator decides to enable the semi-autonomous mode to ensure that all corners are correctly surveyed.
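The examples above amount to a simple supervisory rule that lowers or raises the automation level in response to events. The sketch below illustrates that idea with hypothetical event names; it is not the ICARUS implementation.

```python
from enum import IntEnum


class AutomationLevel(IntEnum):
    TELEOPERATION = 1
    SEMI_AUTONOMOUS = 2
    FULLY_AUTONOMOUS = 3


def adjust_level(current: AutomationLevel, event: str) -> AutomationLevel:
    """Toy adjustable-autonomy policy: hand control back and forth as the situation changes."""
    if event == "victim_detected" and current == AutomationLevel.FULLY_AUTONOMOUS:
        # Give the operator more control so the detection can be confirmed.
        return AutomationLevel.SEMI_AUTONOMOUS
    if event == "complex_area_entered" and current == AutomationLevel.TELEOPERATION:
        # Let the on-board planner guarantee full coverage of the area.
        return AutomationLevel.SEMI_AUTONOMOUS
    return current


level = adjust_level(AutomationLevel.FULLY_AUTONOMOUS, "victim_detected")
print(level.name)  # SEMI_AUTONOMOUS
```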
Most ICARUS platforms provide all three operation modes. However, some platforms have specific constraints due to their size or domain. Namely, the large UGV is usually remotely operated or waypoint guided; such a large system should not be tasked with pre-defined missions. On the other hand, the U-ranger is such a fast maritime system that the operator should rely on the on-board autonomy, which is equipped with collision avoidance functionality. Obstacles at sea are difficult for the operator to see and, therefore, this system is better commanded at full automation. Table 5 illustrates the automation levels available in each of the ICARUS platforms.
| Level of automation | Long-endurance fixed-wing | Outdoor multi-rotor | Indoor multi-rotor | Large UGV | Small UGV | U-Ranger USV | ROAZ USV | Rescue capsule |
|---|---|---|---|---|---|---|---|---|
| Level 1 | | | | | | | | |
| Level 2 | | | | | | | | |
| Level 3 | | | | | | | | |

Table 5. Level of automation for each of the robots in ICARUS.
Effective management of a heterogeneous team requires the capability to reason about the mission goals in order to produce a task-to-robot decomposition. This task allocation must take into account the current capabilities and constraints of each asset in the team. Different strategies for cooperation are feasible and, therefore, different requirements may be placed on the interface in order to implement these strategies. A heterogeneous team usually contains a set of vehicles with diverse capabilities that can therefore play different roles in the mission. Concepts such as roles, responsibilities, modes of operation and tasks may be part of the standard interface that supports fleet interoperability.
The platforms involved in ICARUS have been carefully selected to help each other. In other words, they play complementary roles. Several ICARUS platforms grouped together form a team. Each vehicle has been designed to provide a set of specific functionalities, but together they can address more complex missions by supporting each other.
Different strategies for coordination are feasible. In the case of ICARUS, a strong end-user need is that any planning decision must be authorized by the on-site operations coordinator. Therefore, according to a traditional classification of multi-robot systems based on the coordination strategy [11], ICARUS follows a supervised, weakly coordinated, centralized approach where the cooperation and interaction between robots is negotiated during mission planning. The planning, the coordination and, therefore, the ultimate responsibility fall on the ICARUS team operator and occur at the C2I. This coordination approach relaxes, to a certain extent, the need to have multi-robot-related concepts in the interoperable interface. The C2I encapsulates this functionality and can interact with each asset individually. However, a standard for coordinated multi-robot operations remains extremely relevant and was taken into account in the analysis described in the next section.
Some of the key concepts to be unambiguously defined as the basis for efficient mission planning are goal, role and task. A mission goal refers to the overall objective that the fleet must accomplish, for instance, the assessment of a disaster area. The mission planner is responsible for coordinating the fleet and allocating specific roles to each robot. A role defines the robot's behaviour and its interactions with other members of the fleet or with humans. A task is the basic unit describing the actions requested from a robot. Typically, the role defines which tasks a robot should and should not execute. A robot is characterized by its type and its capabilities. For instance, the long-endurance fixed-wing is an aerial platform with surveillance, mapping, victim detection and communications relay capabilities. These characteristics define the set of tasks that it is able to perform and, therefore, the roles it can take. A mission plan is therefore built upon the concepts of roles, tasks and responsibilities.
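To make these definitions concrete, they could be modelled roughly as in the sketch below. The field names and the example values are hypothetical illustrations, not the ICARUS data model itself.

```python
from dataclasses import dataclass, field
from typing import Dict, List


@dataclass
class Task:
    """Basic unit describing an action requested from a robot."""
    name: str
    parameters: Dict[str, object] = field(default_factory=dict)


@dataclass
class Role:
    """Defines a robot's behaviour: which tasks it should (and should not) execute."""
    name: str
    allowed_tasks: List[str]


@dataclass
class Robot:
    """A robot is characterized by its type and its capabilities."""
    name: str
    robot_type: str
    capabilities: List[str]

    def can_take(self, role: Role) -> bool:
        # A robot can take a role if it is capable of every task the role allows.
        return all(task in self.capabilities for task in role.allowed_tasks)


scout = Role("Scout", allowed_tasks=["area_survey"])
survey = Task("area_survey", parameters={"altitude_m": 120})
uav = Robot("long_endurance", "fixed_wing_uav",
            capabilities=["area_survey", "victim_detection", "comms_relay"])
print(uav.can_take(scout))  # True
```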
The ICARUS planning flow mirrors the concept of operations of international search and rescue teams. Table 6 illustrates how the decomposition of goals into roles and tasks occurs in the field.
| System | Responsibilities |
|---|---|
| C2I | Sectorization + mission goals definition |
| | SAR team allocation to sectors |
| | Robot(s) allocation to teams |
| | Teams monitoring and control |
| RC2 | Operations scheduling |
| | Roles allocation |
| | Robot task planning |
| | Robots monitoring and control |
| Robot | Task plan execution |
| | Progress and status report |

Table 6. ICARUS goals to roles and tasks decomposition.
One of the core services required from all the platforms is the dynamic discovery of features. It allows robots to advertise their capabilities over the network, enabling dynamic planning and supervision from the C2I based on the current state of the team. A robot may take different roles during a mission depending on the responsibilities that the C2I allocates to it. The allocation of mission goals to predefined roles, the decomposition of these roles into tasks, and the configuration of these tasks for a specific robot model are the responsibility of the mission planner. Some predefined profiles are available to facilitate this work. Whereas roles influence the robots' behaviour, tasks influence the actions that robots perform and are defined as sets of actions. Each task can be decomposed into subtasks, and this subdivision can continue iteratively until a primitive task is reached. Table 7 shows some examples of the roles defined in the ICARUS concept of operations.
| Role | Description | Modes |
|---|---|---|
| Scout | Provides a quick assessment of an unexplored area or route | Overview of an entire disaster zone (fixed-wing UAS). Traversability/best route exploration (UAS) |
| Surveyor | Scans an area or building in detail to support a thorough assessment and inspection (structural integrity, victims, hazards, etc.) | 2D/3D geo-referenced map of the entire disaster zone as a basis for sectorization (fixed-wing UAS). High-resolution 2D/3D geo-referenced map of a sector (fixed-wing UAS at higher altitude, rotorcraft at lower altitude) or a structure (rotorcraft). Building indoor inspections (small rotorcraft and small ground vehicle) |
| Observer | Steady target observation and assessment, including victims and structures | Steady hover over a target (rotorcraft), including harsh weather conditions. Victim medical assessment outdoors (rotorcraft and USV) and indoors (small rotorcraft and ground vehicles) |
| Searcher | Victim search | Outdoor human detection on IR (UAS and USV), indoors (small rotorcraft and UGV) |
| Rescuer | Support to victim rescue | Helps victims escape from hazard areas (large ground vehicle) or supports human rescuers |
| Deliverer | Safety kit delivery. Robot delivery | Delivery of a survival kit to a victim, aerial (rotorcraft) or terrestrial (UGV) |
| Cruiser | Travel to a destination | All platforms when transiting to a new location where another role is enabled. The larger platforms may also act as carriers of tools, debris and smaller robotic assets |

Table 7. Examples of ICARUS roles.
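The dynamic discovery of features mentioned above can be pictured as robots advertising their capabilities and the planner matching role requirements against those advertisements. The following sketch is purely illustrative; the capability names and the matching rule are assumptions, not the JAUS Discovery service or the ICARUS planner.

```python
# Toy capability advertisement and role matching (illustrative assumptions only).
advertisements = {
    "outdoor_multirotor_1": {"hover", "rgb_camera", "thermal_camera", "kit_delivery"},
    "large_ugv_1": {"traverse_rubble", "manipulator", "carry_payload"},
}

role_requirements = {
    "Observer": {"hover", "thermal_camera"},
    "Deliverer": {"kit_delivery"},
    "Rescuer": {"traverse_rubble", "manipulator"},
}


def candidates_for(role: str) -> list:
    """Return the robots whose advertised capabilities satisfy a role's requirements."""
    needed = role_requirements[role]
    return [robot for robot, caps in advertisements.items() if needed <= caps]


print(candidates_for("Observer"))  # ['outdoor_multirotor_1']
print(candidates_for("Rescuer"))   # ['large_ugv_1']
```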
ISO defines a standard as a set of 'requirements, specifications, guidelines or characteristics that can be used consistently to ensure that materials, products, processes and services are fit for their purpose' [12]. In the context of interoperability, a standard shall unambiguously define the data types, messages and rules needed to implement the protocol. The analysis of the existing standardization initiatives shows that several predominant initiatives exist for the interoperability of unmanned systems [3]. However, harmonization among them is not yet a fact.
In the context of this analysis, we divided the different initiatives into two groups:
Fully operational standards and
Partially operational resources.
The first group focuses on systems interoperability, providing a common communication framework between different agents. These initiatives provide all the basic functionality required for a multi-platform system. The second group includes initiatives that are either very popular in specific fields or are designed specifically for particular tasks or domains. Most of them offer relevant contributions, but they do not provide interoperability for all the possible types of platforms, systems and ranges of application.
\nThe lack of a single standard of reference for interoperability of unmanned systems makes any choice difficult since it will have an impact one way or another on legacy platforms. However, some alternatives may fit better for a given set of requirements. Harmonizing the existing standards, by combining them into one, or by proposing a brand new standard, would obviously solve most of the problems, but it would have serious implications both in industry and other programs that have adopted them as their standard [13]. This is clearly beyond the possibilities of the ICARUS project on itself.
\nAlong the studies, two candidates stood out from the rest, STANAG 4586 [14] and others related, and the Joint Architecture for Unmanned Systems—JAUS [16]. They are both stable, widely used and complete. STANAG pays a strong attention to the intelligence, surveillance and reconnaissance (ISR) data, while JAUS is instead more devoted to command and control interfaces of the platforms, robot navigation and perception.
\nIn this context, both were created to address specific requirements in different domains. STANAG related standards are predominantly military and, even though they have been promoted for civil applications, their requirements are heavily demanding in terms of compliance. STANAG 4586 is mostly focused on UAVs, even though some other types of unmanned systems have been developed to meet this standard. It is perhaps very relevant for the interoperability of military assets across the different NATO members, but it is hard to be adopted by civil or research platforms without a strong investment. For instance, certifying a small multi‐rotor UAV for the STANAG 7085 Interoperable Data Links for Imaging Systems is costly and probably a barrier for small platforms providers. Furthermore, the geographical constraints (NATO only), the focus on the bigger systems and the absence of open available implementation make this option less convenient. Likewise, JAUS was originally designed for UGVs. It is fair to say that JAUS has done great efforts to extend the coverage to any type of platform, and it currently considers any unmanned system as a generic asset in order to become truly multi‐domain. Its root is also military, but it was soon transferred to the Society of Automotive Engineers (SAE International) where it is currently hosted.
\nAccording to our analysis, JAUS is fairly aligned with the needs of small unmanned platforms in terms of the interoperability described in Section 2. Also, JAUS has been successfully demonstrated in recent years for collaborative UAV‐USV cooperative missions [15]. A quite direct traceability between ICARUS needs and the JAUS service sets is easily derived. It is already compatible with popular transport protocols (TCP, UDP, serial) independent of the communication link beneath it, which makes it more flexible. And it is already multi‐environment (air, ground and maritime). There exist both commercial and open source implementations. Unfortunately, there is a fee to access the JAUS documentation which may prevent some providers from using it. Nevertheless, the cost is deemed reasonable.
There are many other initiatives with strong support in different communities. According to the principles and needs for standardization defined above, these are considered software frameworks and middleware rather than full standards. For instance, the Robot Operating System (ROS) is nowadays used in many multi‐robot systems. However, open‐source initiatives are open and flexible by definition, which means they may not provide the expected reference specification for future developments. These initiatives definitely add a lot of value to the development of small unmanned systems, but they do not formally satisfy the interoperability requirements like the standards mentioned previously. They should remain at the platform level, and the platforms should comply with an external interoperability standard. It is the scope of the interoperability work to harmonize this heterogeneity into a single standardized protocol.
The ICARUS standard interface for interoperability of heterogeneous fleets is based on the Joint Architecture for Unmanned Systems (JAUS) [16]. JAUS is a service‐oriented architecture (SOA) that specifies a list of services commonly found in robotics. The ICARUS interface describes the subset of standard messages that is used in the ICARUS scenario and specifies all the details required to comply with the ICARUS interface.
The interoperability interface follows this service‐oriented approach. The most common services for unmanned systems interoperability are already defined in JAUS as a set of associated standards, grouped into 'service sets'. The following ones are used in ICARUS:
Core Service Set (SAE AS5710 [17]): essential services such as transport, events, discovery, etc.
Mobility Service Set (SAE AS6009 [18]): mobile platforms services.
Environment Sensing Service Set (SAE AS6060 [19]): platform‐independent sensor capabilities.
Manipulator Service Set (SAE AS6057 [20]): platform‐independent capabilities common across all serial manipulator types.
The concepts defined in the ICARUS data model can be matched against specific services in this architecture. Figure 1 shows the specific services used in a real ICARUS operation.
\nRelevant JAUS services (source: ICARUS).
However, as we progressed with all the integrations in the project, we discovered that some of the functionalities provided by some of the platforms were not supported by these standard services. We refer to this as the gap analysis. Table 8 shows some of these gaps.
Gaps analysis
Gap | Outdoor quadrotors | Indoor quadrotors | Ground robots | Large sea vehicles
---|---|---|---|---
Survival kit deployment | | | |
Rescue capsule deployment | | | |
Platform‐specific components enable/disable | | | |
Platform extended status | | | |
Manipulator tool selection | | | |
Voice transmission | | | |
ICARUS interface gaps.
Given this, a new set of non‐standard services has been defined to fill the gaps in the standard. This non‐standard service set is shown in Figure 2.
Additional non‐standard JAUS services (source: ICARUS).
For each service, a strictly defined message‐passing interface (vocabulary) and protocol (rules) for data exchange are available. There are generally three types of messages: query, report and command. Furthermore, the transport service (from the core service set) acts as an interface to the transport layer. Therefore, the ICARUS interface is, in principle, independent of the physical transport layer. However, the current ICARUS implementation is only available for the UDP protocol.
\nJAUS also defines a hierarchical and flexible topology built up of subsystems, nodes and components. For the implementation of JAUS within ICARUS, the following assumptions have been made:
\nAn ICARUS team is considered a system,
Each platform is defined as a subsystem with a single node. Therefore, all components within the same platform will share the subsystem and node identifiers.
As described later, a node may contain several components. But a component will implement only one service, plus the core services, which are always present. This restriction allows the C2I to dynamically discover each of the services available on each robot.
Therefore, an ICARUS system is depicted in Figure 3.
\nICARUS JAUS topology (source: ICARUS).
All this functionality is provided to the ICARUS robotics partners as a software library referred to as the ICARUS interoperability layer. This module acts as a bridge between each partner's internal development framework and the external ICARUS framework. The interoperability layer is also responsible for integrating the ICARUS communication network and the command and control station on each individual platform.
\nA set of C++ classes has been designed to integrate the vehicles into the ICARUS network. The JAUS‐specific functionality has been encapsulated within them. To comply with the ICARUS interface, a system may directly integrate this library (native integration). However, most robotics systems nowadays are based on either proprietary or open‐source middleware (such as ROS). To accommodate these systems into an ICARUS compliant network, an alternative is to implement an adapter to the robot‐specific middleware (translator). The diagram in Figure 4 illustrates both cases, native integration (Robot C) and through an adapter (Robot A using ROS, and Robot B using MOOS).
\nRobot adaptation strategy (source: ICARUS).
The following sections describe the software classes encapsulated within the library and depicted in the previous diagram.
The JAUSRobot class encapsulates all the functionality required on‐board the vehicle. It represents a subsystem in the JAUS topology (see Figure 3). In the ICARUS JAUS interface, a subsystem contains only one node. This approach allows us to provide a component name for each service. Therefore, all components within the same robot share the subsystem and node identifiers.
There are two types of services available on a JAUS robot in addition to the core services: sensors and drivers.
Sensors provide access to information generated on the robot (e.g. global pose, images). There are essentially two types of C++ functions required to integrate this functionality:
Add sensor: a JAUS component is added to the robot subsystem. For example, if the instance of JAUSRobot is myRobot, the following statement adds a GlobalPoseSensor service to our robot:
JAUSRobot myRobot;
myRobot.AddGlobalPoseSensor("OEMStar_GPS", AT_1_HZ);
Set data: updates the data associated with a service. For the example above, the following lines update the current global pose of myRobot:
JAUS::GlobalPose globalPose;
myRobot.globalPoseSensor->SetGlobalPose(globalPose);
Drivers, on the other hand, provide access to the actuation capabilities of each robot (e.g. go to waypoint). There is an equivalent function required to integrate this functionality:
Add driver: a JAUS component is added to the robot subsystem. For example, if the instance of JAUSRobot is myRobot, the following line adds a GlobalWaypointDriver service to receive waypoint requests in the global coordinate frame:
JAUSRobot myRobot;
myRobot.AddGlobalWaypointDriver("global_waypoints", authority_code);
The authority code parameter of these services is used for pre‐emption and needs to be set lower than that of the client accessing the driver. Otherwise, commands from the client are ignored.
Therefore, JAUSRobot creates a JAUS component for every new sensor and driver. This allows the JAUSFleetHandler class to discover and manage each of them independently.
The ICARUS JAUS interface is based on callbacks for message reception. One more function registers a local callback in order to receive any message coming from the JAUS network:
void localProcessMessage(const JAUS::Message* message) { /* handle incoming JAUS messages here */ }
myRobot.RegisterJAUSMessageCallback(localProcessMessage);
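The snippets above can be combined into a minimal on‐board integration sketch. This is illustrative only, not project code: the header name icarus_interop.h, the getCurrentPose() helper and the main loop structure are assumptions, whereas JAUSRobot, AddGlobalPoseSensor, AddGlobalWaypointDriver, SetGlobalPose and RegisterJAUSMessageCallback are the calls introduced above.
// Minimal on-board ICARUS integration sketch (illustrative only).
#include "icarus_interop.h"   // assumed header exposing JAUSRobot and the JAUS types

// Assumed to be provided by the platform's own navigation stack.
JAUS::GlobalPose getCurrentPose();

// Callback receiving every message arriving from the JAUS network.
void localProcessMessage(const JAUS::Message* message) {
    // Dispatch on message type here (e.g. incoming waypoint commands).
}

int main() {
    JAUSRobot myRobot;

    // Declare the services offered by this platform
    // (AT_1_HZ and authority_code as in the examples above).
    myRobot.AddGlobalPoseSensor("OEMStar_GPS", AT_1_HZ);
    myRobot.AddGlobalWaypointDriver("global_waypoints", authority_code);
    myRobot.RegisterJAUSMessageCallback(localProcessMessage);

    // Main loop: push the latest navigation solution to the JAUS side.
    while (true) {
        JAUS::GlobalPose globalPose = getCurrentPose();
        myRobot.globalPoseSensor->SetGlobalPose(globalPose);
    }
    return 0;
}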
On the C2I side, two classes have been designed:
The JAUSFleetHandler class encapsulates all the functionality related to fleet management. It includes the functionality to discover subsystems and services on the JAUS network and retrieve their names and current status. For example, if the instance of JAUSFleetHandler is myFleet, the following line discovers all subsystems on the JAUS network and retrieves their service names:
myFleet.DiscoverFleet();
The following line then checks for system updates:
myFleet.RefreshFleet();
In terms of JAUS, it represents a basic JAUS component implementing the discovery service to retrieve the subsystems available on the network and their services.
The JAUSRobotHandler class is responsible for managing a single robot. After the discovery process, an instance of this class must be created and configured. This class interfaces directly with the JAUS robot.
In terms of JAUS, it represents a basic JAUS component. For each sensor service available on the real robot, it creates an event of type 'every change'; this is the JAUS mechanism that configures the sensor service on‐board the robot to send data whenever a new value is set. Periodic events will also be available in the future.
For each driver service available on the real robot, it configures the access control service needed to send commands.
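A purely illustrative companion sketch of the C2I side follows. DiscoverFleet() and RefreshFleet() are the calls described above, while the header name, the GetDiscoveredRobots() accessor and the Configure() call on JAUSRobotHandler are assumptions made for this example.
// Illustrative C2I-side fleet management sketch (not project code).
#include "icarus_interop.h"   // assumed header exposing JAUSFleetHandler and JAUSRobotHandler
#include <vector>

int main() {
    JAUSFleetHandler myFleet;

    // Discover all subsystems currently present on the JAUS network
    // and retrieve their service names.
    myFleet.DiscoverFleet();

    // One handler per discovered robot (accessor and Configure() are hypothetical).
    std::vector<JAUSRobotHandler> handlers;
    for (const auto& robot : myFleet.GetDiscoveredRobots()) {
        JAUSRobotHandler handler;
        handler.Configure(robot);   // e.g. subscribe to sensor events, take access control
        handlers.push_back(handler);
    }

    // Periodically check for robots joining or leaving the team.
    while (true) {
        myFleet.RefreshFleet();
    }
    return 0;
}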
ICARUS tackles interoperability at different levels. This chapter focuses on the interoperability layer, a software‐defined protocol (SDP) that can run over any compatible communication layer underneath. However, ICARUS also addresses interoperability at the communications level, which is further detailed in Chapter 6 of this book.
Tight cooperation between the interoperability and communications layers allows for smart management of the network. The ICARUS communication system is a cognitive, self‐organizing multi‐node network. This layer exposes an interface to configure the required data flows, their priorities and other details. Given the current set of robots and sensors and the priorities assigned for the mission, the communications layer can assure a quality of service for each data stream.
This is traditionally preconfigured manually for each mission. In ICARUS, the tight integration between the interoperability and communication layers has enabled the dynamic self‐configuration of the team. A team management node runs within the C2I and exploits the discovery mechanism to retrieve all robots and their capabilities. This information is transferred to the communication layer, which organizes the data flows based on a‐priori defined priorities. For instance, telemetry and telecontrol streams are given the highest priority since they are safety critical; for each robot, one camera is given medium priority and all other sensors a lower priority. This a‐priori priority allocation depends on the number of robots and their characteristics.
These configuration capabilities are also exposed to the C2I and therefore to the operator. The coordinator can at any time change the priority levels, enable new sensors, disable sensors that are not required, etc. The user is also informed of the current status of the network and is therefore able to change the configuration if the network is overloaded.
Eight messages have been defined to exchange all the information needed between the two interfaces (COMMS and JAUS); a minimal enumeration sketch follows the list:
Register message.
Activation of a stream.
Deactivation of a stream.
Deactivation warning.
Network status notification.
Robot’s register notification. This is a notification sent from the COMMS interface to the JAUS interface. It is used to ask for the robot’s register message presented before. It is needed when, for instance, the COMMS interface starts running later than the JAUS interface.
Robot’s unregister notification (COMMS to JAUS). It is sent when the COMMS interface loses a robot.
Robot’s unregister notification (JAUS to COMMS). It is sent when the JAUS interface loses a robot.
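The eight message types above could be represented, for illustration only, as a simple C++ enumeration; the type and value names below are assumptions for readability, not the actual ICARUS wire format.
// Hypothetical enumeration of the eight COMMS/JAUS interface messages.
enum class CommsJausMessageType {
    Register,                    // robot registration message
    StreamActivation,            // activation of a data stream
    StreamDeactivation,          // deactivation of a data stream
    DeactivationWarning,         // warning before a stream is dropped
    NetworkStatusNotification,   // current status of the network
    RobotRegisterNotification,   // COMMS asks JAUS to resend the register message
    RobotUnregisterFromComms,    // COMMS interface lost a robot
    RobotUnregisterFromJaus      // JAUS interface lost a robot
};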
All ICARUS platforms have been adapted to the ICARUS interface. This automatically ensures the compatibility with the ICARUS C2I, enabling the multi‐robot coordination and combination of data as later described in this chapter.
All aerial platforms within the ICARUS project share a similar approach to software and hardware design. The hardware setup comprises a low‐level board responsible for the flight control of the vehicle (i.e. the autopilot) and, optionally, a high‐level board (i.e. an on‐board PC) responsible for autonomous navigation and payload data. The autopilots communicate with the vehicle‐specific ground station through the MAVLink protocol [21]. The on‐board PCs, instead, run the Robot Operating System (ROS). A template has been developed to integrate ROS‐based platforms, and all of them have been adapted using this template.
This template, however, must be configured for the specific characteristics of each platform, particularly in terms of sensor equipment. The proposed strategy is to implement a ROS‐based wrapper that subscribes to ROS topics and interfaces with the ICARUS protocol. This node is intended to run on‐board the robot and provides a wrapper for the ICARUS interface; it is implemented within the robot.cpp file.
All the services described in Figure 1 are available to ROS‐based systems through this template and can be enabled by configuring the template ROS launch file. The XML excerpt below shows an example for the specific case of a global pose service:
<?xml version="1.0"?>
<launch>
<node pkg="ros2jaus_node" type="robot" name="robot" output="screen">
<!-- ROBOT CONFIG -->
<param name="subsystemName" type="string" value="ctae_robot"/>
<param name="subsystemID" type="int" value="99"/>
<param name="nodeID" type="int" value="1"/>
<!-- GLOBAL POSE -->
<param name="globalPoseEnable" type="bool" value="true"/>
<param name="globalPoseSensorName" type="string" value="global_pose"/>
<param name="globalPoseUpdateRate" type="double" value="25"/>
<param name="globalPoseTopicName" value="/EURECAT_robot/global_pose"/>
</node>
</launch>
Therefore, a ROS‐based robot simply subscribes to a set of topics with predefined message types. An analysis of the existing ROS messages was performed to select the most appropriate interface definitions. When an existing ROS message was deemed both valid and correct, that message was used. However, in certain cases the corresponding ROS message definition was missing, ambiguous or not valid; in those cases, a new message type was defined under the icarus_msgs package. The topic names and update rates can be configured from the ROS launch file. The wrapper node subscribes to the following topics (a minimal subscriber sketch is given after the lists):
\n/global_pose (icarus_msgs/GlobalPoseWithCovarianceStamped)
/local_pose (geometry_msgs/PoseWithCovarianceStamped)
/velocity_state (geometry_msgs/TwistWithCovarianceStamped)
And publishes:
\n/local_waypoint (icarus_msgs/LocalWaypointStamped)
/global_waypoint (icarus_msgs/GlobalWaypointStamped)
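To make the wrapper concrete, the sketch below shows the general shape of such a node; it is not the actual robot.cpp. The field names of icarus_msgs/GlobalPoseWithCovarianceStamped and the setter names on JAUS::GlobalPose are assumptions, while the topic parameter, AddGlobalPoseSensor and SetGlobalPose follow the interface described above.
// Sketch of a ROS-to-ICARUS wrapper node (illustrative, not the project robot.cpp).
#include <ros/ros.h>
#include <string>
#include <icarus_msgs/GlobalPoseWithCovarianceStamped.h>  // custom ICARUS message (assumed header)
#include "icarus_interop.h"                                // assumed ICARUS/JAUS header

JAUSRobot myRobot;

// Forward each ROS global pose sample to the JAUS global pose sensor.
void globalPoseCallback(const icarus_msgs::GlobalPoseWithCovarianceStamped::ConstPtr& msg) {
    JAUS::GlobalPose globalPose;
    globalPose.SetLatitude(msg->latitude);    // field and setter names assumed
    globalPose.SetLongitude(msg->longitude);
    globalPose.SetAltitude(msg->altitude);
    myRobot.globalPoseSensor->SetGlobalPose(globalPose);
}

int main(int argc, char** argv) {
    ros::init(argc, argv, "robot");
    ros::NodeHandle nh("~");   // private namespace, matching the launch file parameters

    std::string topic;
    nh.param<std::string>("globalPoseTopicName", topic, "/global_pose");

    myRobot.AddGlobalPoseSensor("global_pose", AT_1_HZ);
    ros::Subscriber sub = nh.subscribe(topic, 10, globalPoseCallback);

    ros::spin();
    return 0;
}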
The control systems of the ground platforms are implemented using FINROC, a middleware developed by the Robotics Research Lab at the University of Kaiserslautern. The rescue capsule runs OceanSys and exposes a data repository to which any software on the same network can subscribe in order to receive custom messages. ROAZ implements a similar system but is also ROS capable. The U‐ranger on‐board autonomy is developed in MOOS and implements a behavioural control strategy.
A template example was provided to each of the partners, together with the ROS use case and some documentation. Each partner was able to quickly adapt its existing framework to the standard interoperable interface and become compliant with the rest of the ICARUS team technology, such as the communication infrastructure and the C2I. Tables 9 and 10 summarize the ICARUS services provided by each system.
ICARUS services per robot
SERVICE | AROT | ASOLAR | U‐RANGER | ROAZ II | UCAP | FIREFLY | SUGV | LUGV
---|---|---|---|---|---|---|---|---
Global pose sensor | Global pose (20 Hz) | Global pose (20 Hz) | Global pose (20 Hz) | Global pose (20 Hz) | Global pose (20 Hz) | Global pose (20 Hz) | Global pose (20 Hz) | Global pose (20 Hz)
Velocity state sensor | | Velocity state (20 Hz) | | | | | |
First camera | Left camera (20 Hz) | Visual camera (20 Hz) | | Visual camera (20 Hz) | Visual camera (20 Hz) | Left camera (20 Hz) | Front camera (20 Hz) | Front camera (20 Hz)
Second camera | Right camera (20 Hz) | Thermal camera (20 Hz) | | Thermal camera (20 Hz) | | Right camera (20 Hz) | Manipulator camera (20 Hz) | Manipulator camera (20 Hz)
Third camera | Thermal camera (20 Hz) | | | | | Thermal camera (20 Hz) | Rear camera (20 Hz) | Gripper camera (20 Hz)
Fourth camera | Visual camera (20 Hz) | | | | | | |
Range sensor (point clouds) | | | Laser | Radar | | Point cloud | Point cloud | Point cloud
Global waypoint list driver | Global waypoints | Global waypoints | Global waypoints | Global waypoints | Global waypoints | | Global waypoints | Global waypoints
Primitive driver | Cmd velocity | | Cmd velocity | | Cmd velocity | Cmd velocity | Cmd velocity | Cmd velocity
Manipulator end effector pose sensor | | | | | | | End effector pose | End effector pose
Manipulator joint sensor | | | | | | | Joint position | Joint position
Manipulator end effector driver | | | | | | | End effector pose control | End effector pose control
Manipulator joint position driver | | | | | | | Joint position control | Joint position control
ICARUS services provided by each vehicle.
ICARUS custom services per robot
SERVICE | AROT | ASOLAR | U‐RANGER | ROAZ II | UCAP | FIREFLY | SUGV | LUGV
---|---|---|---|---|---|---|---|---
First switch driver | Delivery Kit | | | | Deploy UCAP | | Lights | Lights
Second switch driver | | | | | Inflate‐raft | | Manipulator | Manipulator
Third switch driver | | | | | | | Reset | Reset
Fourth switch driver | | | | | | | Audio | Engine
Fifth switch driver | | | | | | | Speech | Tool‐Lock
First 3‐state switch driver | | | | | | | Gripper control | Gripper control
Second 3‐state switch driver | | | | | | | | Tool selector
Text sensor | | | | | | | Text |
Text driver | | | | | | | Text Cmd |
CO2 sensor | | | | | | | CO2 sensor |
Robot extended status | Battery status | Battery status | | | Battery status | | Battery status | Battery status
Video stream | | | Video stream | | | | |
Target detection | Victims | Victims | | Victims | | | Victims | Victims
ICARUS custom services provided by each vehicle.
The ICARUS approach to interoperability was initially verified with a set of in‐lab integrations and simulated tests. Once the robotic platforms were finalized and ready for field tests, a series of field operations involving different combinations of pairs of air, ground and sea vehicles was organized during the integration trials carried out between July and September 2014. The purpose was two‐fold: (i) verification of the completeness, correctness and feasibility of the ICARUS interoperability interface; and (ii) experimentation with the possibilities of multi‐robot cooperative search and rescue missions. The results from one of these trials showed that the work on interoperability enabled large‐scale cooperative mapping with multiple aerial and ground robots in urban search and rescue [22].
\nThe final field validation was carried out together with the final integration and demonstration exercise of the ICARUS project described in Chapter 10. Three full‐team validations were performed during the final project demonstrations:
\nthe maritime trials and demonstration in Alfeite, Lisbon (Portugal) in July 2015,
the land trials and demonstration in Marche‐en‐Famenne (Belgium) in August 2015 and
the participation in the euRathlon competition in September 2015 where the project received the Best Multi‐Robot Coordination Award by the IEEE Robotics and Automation Society (RAS).
At this stage, all platforms had been integrated into the ICARUS system. After start‐up, the current capabilities of the team could be dynamically discovered and the ICARUS C2I automatically configured itself, allowing an ICARUS team operator to plan a mission and assign roles and tasks to each system. During the mission, all the information flows and the current network status are displayed. The operator can follow the progress of the mission and enable, disable or change the update rate of each of the ICARUS services. The operator can, at any time, request new missions, take manual control of the platforms that provide this service, resume previous missions, etc. All this functionality was exercised and demonstrated during the validations.
\nTogether, these large‐scale operational exercises completed the validation of the ICARUS interoperability standard interface. Therefore, ICARUS as a project has demonstrated multi‐domain multi‐robot heterogeneous interoperability in realistic search and rescue operations.
\nSome examples of the multi‐robot collaboration experimented during ICARUS are described in the subsections below to illustrate the possibilities of multi‐robot cooperation provided by an interoperable team.
\nIn this multi‐robot collaboration concept, the overall mission imposed by the human commanding officer is to provide a general assessment of a predefined sector. This is a typical scenario to be performed when relief agencies arrive on a crisis site. Assets to be deployed for this task are the fixed‐wing UAV and the outdoor multi‐rotor, both with clearly distinctive roles:
The fixed wing aircraft acts as a surveyor system which covers the entire area quickly, flying at an altitude of around 100 m. Note that the altitude limitation is deliberate, as many countries impose a maximum flight altitude of 400 feet (approximately 122 m) for unmanned systems, and it was our specific target to test the operational capabilities within realistic legislative bounds. Flying at this altitude, the aircraft quickly provides a general low‐resolution assessment of the sector, such that areas of interest can be selected.
The multi‐rotor aircraft also acts as a surveyor system, but as it has a lower flight altitude of typically 40 m, it covers only a much smaller area. It is therefore used to provide a high‐resolution assessment of an area of interest, as identified by the fixed‐wing assessment.
The multi‐rotor aircraft also acts as observer system to provide high‐resolution multi‐view observation of points of interest (victims, buildings, etc.).
Operating multiple unmanned aerial systems in the same airspace is not easy from a safety perspective. In this case, vertical separation of the airspace was used for segregating the operations with the fixed wing and multi‐rotor aircraft. The operators were also in constant contact with one another to synchronize the landing operations.
Figure 5 shows the different aerial systems in the air at the same time, whereas Figure 6 shows the outcomes: the low‐resolution assessment by the fixed wing aircraft and the high‐resolution assessment by the multi‐rotor aircraft. As all data is geo‐referenced, the two assessments can be perfectly superimposed on one another.
\nMultiple ICARUS aircraft in the air at the same time (source: ICARUS).
Low‐resolution fixed wing assessment and high‐resolution multi‐rotor assessment (source: ICARUS).
A common problem for relief teams is that their designated destination cannot be reached using the 'normal' routes, as roads are blocked by debris or floods. The mission resulting from this use case is to use a multi‐robot team to ensure the traversability of a route and provide early identification of threats. The assets deployed for this mission are the fixed‐wing and outdoor multi‐rotor aircraft and the relief team on the ground, including ICARUS unmanned ground vehicles. The aircraft scan the area to detect blocked and cleared routes to the destination point and send updated navigation information to the ground team, such that the ground team can travel to the destination as quickly as possible. Figure 7 shows the ICARUS multi‐rotor aircraft flying ahead of the ground team, searching for obstacles on the way to the destination.
\nICARUS outdoor rotorcraft ensuring optimal routing of ground team (source: ICARUS).
Victim search is a primary mission in any rescue operation. Following the typical mission profile, a search area is defined and a multi‐robot collaborative search is ordered by the human commanding officer in this predefined area. Multiple collaboration modalities are possible, depending on the search and rescue context:
\nIn outdoor search and rescue scenarios, the fixed‐wing and outdoor multi‐rotor aircraft are deployed. The fixed wing aircraft can quickly provide a scan of a large area, either clearing the area or indicating preliminary detections, which then need to be confirmed by the outdoor multi‐rotor aircraft. The latter then confirms the location and status of the detected victims and can count the number of victims. This operation can take place in an urban search and rescue context, where victims are to be sought in rubble fields after an earthquake (as shown in Figure 8), or in a maritime search and rescue context, where victims have to be found in the water (as shown in Figure 9).
In indoor urban search and rescue scenarios, the indoor multi‐rotor aircraft and the small unmanned ground vehicle are deployed to collaboratively search for surviving victims inside semi‐demolished buildings, as shown in Figure 10.
In maritime search and rescue scenarios, survival times in the water are often very short. Therefore, in an attempt to limit the time delay between the search phase and the rescue phase of the relief operation, the fixed‐wing and outdoor multi‐rotor aircraft are deployed together with the ICARUS unmanned surface vehicles and the unmanned capsules. This enables the unmanned aircraft to immediately steer the maritime vehicles towards the victims detected and localized by the aircraft. The unmanned surface vehicles also carry sensors (infrared cameras) enabling on‐board victim detection, but as these are relatively small platforms, their field of view in rough sea conditions with high waves is limited. Collaborative victim search between aerial and marine platforms is therefore possible, but the greatest benefit of a mutual deployment lies in combining the search and the rescue aspects, as illustrated by Figure 11: a victim in the water is tracked and localized by the outdoor multi‐rotor aircraft, which guides the unmanned surface vehicle to a position in the neighbourhood of the victim, such that an unmanned capsule can be deployed and inflate a life raft close to the victim in order to save the person.
Automated victim detection on a rubble field in an urban search and rescue operation (source: ICARUS).
Victim in the water being localized by outdoor multi‐rotor aircraft.
Collaborative indoor victim search (source: ICARUS).
Collaborative maritime victim search and rescue operation involving aerial and maritime platforms (source: ICARUS).
During any search and rescue operation, many assets need to be deployed as quickly as possible. This mission profile shows how collaborative robotic agents can help by acting as carrier platforms for small assets and equipment and also for other unmanned systems. As an example, the large unmanned ground vehicle acts as a carrier platform for the small‐unmanned ground vehicle and both the ROAZ II and the U‐ranger act as a carrier for the rescue capsules. They not only enable the cargo to be transported to the destination, without any extra burden to the human relief workers, but also act as deployment systems for the smaller unmanned systems.
\nAs an example, Figure 12 shows how the large unmanned ground vehicle deploys the small‐unmanned ground vehicle on the top of a building, whereas Figure 13 shows how the rescue capsules are deployed from an ICARUS unmanned surface vehicle.
\nICARUS large unmanned ground vehicle deploying the small unmanned ground vehicle on the top of a building (source: ICARUS).
ICARUS unmanned surface vehicle deploying the unmanned rescue capsule (source: ICARUS).
In the event of a large crisis, previously existing communication infrastructure is often broken or at least severely damaged. However, communication is crucial for coordinated response operations. Collaborative unmanned systems can act as communication relay tools to extend the communication range over large distances. The assets most useful for this are, of course, the aerial platforms, as they can provide line‐of‐sight communication relay over large distances. In the ICARUS project, an ad‐hoc link‐hopping network was developed, as detailed in Chapter 7 of this book, which allows any communication link to be extended while the ICARUS aerial platforms are in the air. This allows the fixed wing aircraft and the outdoor multi‐rotor aircraft to act as communication relays for the ground and marine rescue teams.
Rescue operations that span three domains (air, ground, marine) are seldom required. However, the Tohoku earthquake and the subsequent Fukushima disaster showed that response protocols were ill prepared for such multi‐domain crises. Therefore, the euRathlon challenge focused specifically on this problem. In brief, the mission imposed by the euRathlon challenge consists of detecting, after a Fukushima‐like incident, missing workers under water, outdoors on the ground and inside a semi‐demolished reactor building. Full information on the concept of operation is available online [23]. These search and rescue operations require the simultaneous and coordinated deployment of unmanned aerial, ground and underwater vehicles, gathering environmental data and performing real‐time identification of critical hazards in a nuclear accident.
\nThe ICARUS team deployed for this purpose five robotic systems:
\nThe outdoor multi‐rotor was first deployed to search for the best route for the ground robots to reach the open entrance to the building. Then, it mapped the area in the RGB, gray and thermal spectrum. Finally, it performed real‐time detection and localization of missing workers, leaks, etc.
The Teodor UGV was used as carrier platform for the small UGV and for outdoor 3D mapping.
The small UGV was used for indoor detection and localization of missing workers, leaks, etc.
The ROAZ II vehicle was used as a carrier platform for the MARES unmanned underwater vehicle.
The MARES unmanned underwater vehicle was used for underwater detection and localization of missing workers, leaks, etc.
Even though behaviours specific to euRathlon, such as opening valves, were not originally considered in the ICARUS concept of operations, they were easily integrated, proving the flexibility of the adopted approach towards interoperability. Thanks to the different levels of interoperability and automation, the specialized operator could take over at this point and tele‐operate the system to finish the mission.
The following figures show the ICARUS team operating in the euRathlon scenario. Figure 14 shows the ICARUS multi‐rotor during its flight around the building, while Figure 15 illustrates the Teodor UGV carrying the small UGV during the euRathlon challenge.
\nICARUS rotorcraft during the euRathlon challenge (source: ICARUS).
Teodor and small UGV during the euRathlon challenge (source: ICARUS).
Figure 16 shows the outcome of combining 3D maps obtained from the outdoor multi‐rotor and ground platforms during the euRathlon challenge.
\n3D map of the crisis area, obtained by combining 3D maps from the aerial and ground platforms (source: ICARUS).
The work described in this chapter aimed to integrate the unmanned air, ground and sea vehicles developed by the different ICARUS partners into a heterogeneous fleet, collaborating as a coordinated, seamlessly integrated team. A strong effort was devoted to appraising the existing body of work on the standardization of multi‐robot systems. Given the particular requirements of ICARUS, emphasis was placed on initiatives considering multiple domains (air, ground and sea). Likewise, given the platforms used in ICARUS, standards and methods applicable to smaller and lightweight platforms were prioritized. Several initiatives address both issues; however, they have not yet been harmonized. There is still a need for a single multi‐domain interoperability standard that is easily adaptable to both large and small systems. The contribution of the ICARUS project focused on the selection of the most appropriate existing initiative (JAUS), the evaluation of its application to multi‐robot search and rescue missions, the elaboration of recommendations for improvements, the adaptation of all ICARUS robots and the demonstration of the ICARUS interoperable and heterogeneous team in three large‐scale demonstrations, exploring multi‐robot cooperation and real‐time centralized supervision and planning of a heterogeneous team.
\nThe research leading to these results has received funding from the European Community’s Seventh Framework Programme (FP7/2007-2013) under grant agreement number 285417.
Human movement and behavior during daily activities have an inherent hierarchical structure. As an enabling technology, real-time human activity monitoring plays an important role in many human-centric applications in areas such as healthcare, security, surveillance and smart buildings, particularly to protect elderly people and children from harmful incidents. Due to advances in medicine, science and technology, the average human lifespan has increased rapidly; people are healthier and live longer, which has increased the aging population worldwide. The United States' medical research agency, the National Institutes of Health (NIH), reported that in 2012, 8.0% (or 562 million) of the 7 billion global human population were aged 65 and over, and that the percentage had increased by 0.5% (or 55 million) by 2015 [1]. Based on the aging trends, NIH has projected that by 2050 the older population will grow substantially, up to 17% (or 1.6 billion) worldwide [1]. However, many elderly people spend their additional years in poor health, often with debilitating illness and disability due to the deterioration of physical or mental functions caused by age-related diseases. In fact, the increase in the older population has a slight impact on the increase in the disability rate of the world's population [2].
With the increase of the older and disabled population in most countries and regions across the world, human and activity monitoring has gained substantial attention from the research community for ambient assisted living and elderly care applications. As reported in [3], the majority of elderly people are more comfortable living independently in their own home and community. Nevertheless, in modern society, the conventional ways of taking care of elders within the family are no longer effective. As a result, there is a higher demand for assistive technologies, such as intelligent monitoring systems that record elders' daily activities so that families can respectfully monitor their loved ones who live alone at home. Due to lower income after retirement and higher standards of living, many elders cannot afford their healthcare costs, expensive healthcare systems or private nursing home care services. Nonetheless, various human monitoring technologies have been developed that help elderly people age in place.
Traditionally, human activity monitoring technology is vision-based [4], requiring a video camera to monitor the human activity. Although the vision-based approach is an effective security measure, as it retains high-resolution records, it also has several drawbacks: cost-inefficiency for large-scale deployments, high energy consumption, and serious user privacy concerns when used in inappropriate places such as lavatories or bathrooms, bedrooms, and even nursing rooms. However, in several applications such as elderly care and assisted living, monitoring human activities in these private areas is crucial and necessary. For instance, the lavatory or bathroom is one of the most likely places for falls due to its slippery conditions, so activity monitoring in this place is very important for an elderly fall detection system [5]. Meanwhile, activity monitoring in the bedroom is very important for a patient sleep monitoring system that detects unusual sleeping behavior. In addition, a video camera requires good lighting, is ineffective in the dark and has limited viewing angles.
In recent years, thousands of research studies on human activity monitoring have been conducted, replacing the traditional vision-based approach with various technologies such as acoustic-based [6, 7], motion-based [8, 9], body-worn sensors [10, 11], gyroscopes [12], as well as smartphones [13, 14]. While such approaches address the privacy issue, they are sensor-based; in other words, they require special sensors to be attached to, carried or worn by the subject for effective activity monitoring. This is inconvenient and inappropriate for many users, especially elders or people with brain-related diseases (Alzheimer's, amnesia, dementia, etc.), who must remember each day to wear or activate those sensors. Furthermore, the whole monitoring process is ineffective and futile if the subject forgets to carry the sensor. Besides, the acoustic-based approach is range limited and prone to false detections, since it can only be used at short range and is easily influenced by other audio signals [15]. A motion-based sensor such as a single accelerometer cannot provide sufficient information to the system if used alone and hence needs to be combined with other sensors for more efficient activity monitoring [16]. Nevertheless, both vision- and sensor-based approaches incur high costs due to expensive equipment, installation, and maintenance. The advantages and disadvantages of the above approaches are summarized in Table 1.
Category | Sensor technology | (+) Advantages / (−) Disadvantages
---|---|---
Vision-based | Video camera | (+) Effective security measure; maintains records. (−) Interferes with privacy; ineffective in the dark; high computational cost
Motion-based | Accelerometer, gyroscope, PIR | (+) No privacy issue; lower cost (PIR); high detection accuracy. (−) Physical discomfort (accelerometer and gyroscope); no direct linear or angular position information; low range and line-of-sight restriction (PIR); prone to false detection; insensitive to very slow motions
Sound-based | Ultrasonic, acoustic, audio (microphone) | (+) Very sensitive to motion; objects and distances typically determined precisely; inexpensive (audio). (−) Works only directionally (ultrasonic); sensitive to temperature and angle of the target (ultrasonic); easily influenced by other audio signals/noise; prone to false detections; range limited
Sensor-based | Body-worn sensors (body sensor networks) | (+) High detection accuracy; no privacy issue. (−) Expensive devices (sensors); disturb or limit the activities of the users; require sensor installation and calibration
Advantages and disadvantages of existing sensor technologies.
Recently, Radio Frequency (RF)-based approaches have received significant research attention for human presence detection and activity monitoring based on different wireless radio technologies such as RFID [17, 18], Wi-Fi [19, 20], ZigBee [21, 22], FM radio [23, 24], microwave [25], etc. Studies on the impact of human presence and activity on RF signal strength [26, 27, 28] have shown that the presence and movement of a human body in a wireless radio network environment interferes with the wireless signal profiles, either constructively or destructively, changing the RF communication pattern between the wireless transceivers. This phenomenon is called radio irregularity and is often considered a drawback in RF communication. In RF-based human detection and activity monitoring, researchers have instead treated radio irregularity as a benefit that can be exploited as a sensing tool to locate human presence in indoor environments and to discriminate human activities or gestures. Since RF-based human activity monitoring approaches only exploit the wireless communication features, there is no need for expensive physical sensing equipment and modules, which reduces cost, eases deployment, lowers energy consumption, and protects user privacy [29].
The RF-based approaches can be classified into device-bound and device-free. Like the sensor-based approach, the device-bound RF-based approach requires on-body wireless sensors or devices (such as RFID tags or cards, Bluetooth wristbands, smart watches, etc.) to be attached to the subject, which is a known drawback. Hence, the subject is required to actively participate in the activity recognition and monitoring process by always remembering to activate and carry the wearable wireless devices. This device-bound system is also known as an active monitoring system, and the subjects are usually willing to be monitored. Therefore, we refer to the subject in an active monitoring system as an active target. As an example, daily activities such as walking, sitting, lying and falling of an active target wearing a simple RFID tag can be tracked using RFID readers [30, 31]. Another example is an active target carrying a mobile phone or other Wi-Fi-embedded device, which can easily be tracked by Wi-Fi detectors or monitors [32, 33].
Although on-body wireless sensors such as RFID tags and RFID cards are commercially available and relatively low cost compared with other wireless technologies, their placement on the target's body may cause physical discomfort [34], especially for elders under long-term monitoring. Recent research works place RFID tags in the environment and on objects instead of on the target's body for activity monitoring [35, 36]. However, reading multiple RFID tags at once may cause malfunctions due to signal collisions, so anticollision algorithms are required, which incur extra cost [37]. In contrast, the device-free RF-based approach, known as device-free localization (DFL), is a passive monitoring system that can locate and monitor human position and activity without the subject's participation; the subjects do not need to carry or wear any radio devices. They are usually unaware of the system's existence and may even want to avoid being monitored [21]. The subject in this passive monitoring case is referred to as a passive target.
In this chapter, we review the recent progress of DFL for indoor environments, focusing on human activity monitoring with a particular emphasis on monitoring systems targeting personal health and assisted living applications. Our aims are to provide a comprehensive review of the topic and to quickly update researchers outside this field on the state of the art, potential, opportunities, challenges, open issues, and future directions of activity recognition using DFL technology. To the best of our knowledge, although there exist surveys on human activity monitoring and recognition using vision-based approaches [4, 38], wearable sensors [10, 39, 40] and mobile phones [41, 42], only a few surveys have been published on human activity monitoring using device-free RF-based approaches [29, 43, 44, 45], including a general architecture of existing work, especially in the context of healthcare and assisted living applications. The surveys in Refs. [46, 47] are specifically on Wi-Fi-based approaches. However, we do not focus on the classification approaches for human activity, as there are already several in-depth treatments of human activity classification methods [48, 49, 50, 51].
The organization of the chapter is as follows: Section 2, "RF-based DFL Technology," briefly discusses the concept of DFL from the perspective of human activity monitoring as understood within this study and provides an extensive review of the existing works. We decompose the taxonomy of the existing RF-based DFL technologies for human activity monitoring into measurement-based categories, regardless of the type of wireless radio technology used. Section 3, "Opportunities and Potential," presents potential applications based on the state of the art of RF-based DFL technology. Based hereon, in the last Section 4, "Challenges, Open Issues and Future Directions," we outline the challenges and their possible solutions, discuss the open issues, and comment on possible future research directions for activity recognition using DFL technology.
Historically, the DFL concept was first introduced by Youssef et al. in 2009 as device-free passive (DfP) location determination, in which the subject is not equipped with a radio device and is not required to actively participate in the localization system [52]. The concept of DFL relies on the fact that any change in the radio network environment will cause the received signal profiles to fluctuate, i.e., due to reflection, diffraction, absorption, or scattering. DFL exploits the potential of ubiquitously deployed Internet of Things (IoT) [53] devices for indoor localization by leveraging the RF fluctuations as an indicator of the presence of an obstruction, i.e., an object or a human body. In [54], we briefly defined the concept of DFL technology in the context of human detection and counting, together with a comprehensive review of the publications related to DFL research.
In line with the tremendous progress in DFL research, Scholz et al. expanded the area of DFL technology to activity recognition by introducing the concept of device-free, radio-based human activity monitoring as device-free radio-based activity recognition (DFAR) [55]. Beyond object detection and tracking, radio signal analysis can thus also be used in DFL technology to recognize specific human movements, and even activities and gestures. For instance, the fluctuations of ambient and local continuous signals have been exploited to detect daily activities such as walking, lying, crawling, or standing [56]. To ease the reader's understanding, we define the DFL and DFAM systems as follows:
DFL: device-free localization system: a system which detects the presence of a passive target and locates the target’s position using radio signal information while the target is not equipped with a wireless device, nor required to actively participate in the localization system.
DFAM: device-free activity monitoring system: a system which monitors and recognizes the activity performed by a passive target using radio signal information while the target is not equipped with a wireless device, nor required to actively participate in the localization system.
We illustrate the overall conceptual framework of RF-based DFL technology for human activity monitoring in Figure 1, including its three important modules: the wireless radio sensor network (WRSN), human detection (HD), and human monitoring (HM). The WRSN is a self-configured wireless network consisting of wirelessly connected radio devices that act as sensors, monitoring and recording physical or environmental conditions and forwarding the collected information to a predefined central location for processing. The WRSN module detects the available radio-embedded devices (sensors) for human presence detection, localization, and activity monitoring, and handles the deployment of the radio sensor network. A WRSN can be deployed in real-world environments using any radio devices that share the same technology or IEEE standard. For instance, a Wi-Fi-based sensor network can be deployed using any devices that implement the IEEE 802.11 wireless local area network (WLAN) standard, while a ZigBee-based sensor network can be deployed using devices that implement the IEEE 802.15.4 wireless personal area network (WPAN) standard.
The overall conceptual framework of the RF-based DFL system.
The sensors in the WRSN collect information about the current environment and forward it to the HD module for processing. The HD module consists of detection and localization algorithms which analyze the information and automatically discover the presence of a target, the number of targets, the location of a target, the body temperature of a target, the activities performed by a target, the humidity of the environment, etc. The HM module consists of activity classification algorithms connected to designated context-aware activity reasoning engines, depending on the application. Once an activity is detected, the HM module observes, retrieves, and recognizes the activities and alerts the designated context-aware activity reasoning engine to interpret ongoing events or initiate actions as needed. For example, a ZigBee-based sensor network deployed in a single-bedroom apartment for an elderly care application is depicted in Figure 2.
ZigBee-based sensor network deployed for elderly care application with the integration of mobile apps visualization.
Following the DFAR research in [55], many research works on human motion detection and activity monitoring have been presented utilizing different radio technologies such as RFID [35, 36, 37], Wi-Fi [5, 19, 20], ZigBee [21, 22], FM radio [23, 24], microwave [25], etc., and adopting different signal descriptors such as Receive Signal Strength (RSS) [57, 58, 59, 60, 61], Channel State Information (CSI) [5, 20, 62, 63, 64], the Doppler effect [25, 65], and Packet Received Rate (PRR) [66], without neglecting the ease-of-use and physical discomfort issues. In the following subsections, we decompose the taxonomy of the existing RF-based DFL technologies for human activity monitoring into signal descriptor categories, namely RSS-based, CSI-based, amplitude-based, Doppler-based, and PRR-based, regardless of the type of wireless radio technology used.
Similar to human presence detection, activity monitoring using RSSI-based DFL technology exploits the RF-signal fluctuation features: components of the received signal are blocked, absorbed, and reflected by the human while performing an activity, inducing a characteristic pattern in the RF signal in the vicinity of the receivers. Such patterns can be identified and classified for the corresponding activity by exploiting the changes in the RSS of the affected wireless links.
In [57], Sigg et al. introduced three types of RF-based DFAR systems: active continuous signal-based, active RSSI-based, and passive continuous signal-based DFAR, which exploit the fluctuation of RSS due to human movement and activities. Both the active and passive continuous signal-based systems are USRP software-defined radio (SDR) systems, deployed using specialized SDR devices, while the RSSI-based DFAR system uses 2.4 GHz INGA sensor nodes [57]. The performance of the proposed DFAR systems was then compared with that of a motion-based recognition system, in which accelerometers are attached to the subjects while performing the activities. By implementing three well-known classifiers (Naive Bayes, classification tree, and k-nearest neighbour (k-NN)), the proposed RF-based DFAR systems achieve results comparable to the motion-based system. Furthermore, the authors evaluated the performance of the proposed RF-based DFAR system in the presence of multiple subjects performing different activities and the impact of increasing the number of receiving devices. However, the proposed systems require specialized SDR devices, whose hardware availability remains an open issue [60].
Sigg et al. expanded their work by designing an RSS-based activity recognition system for mobile phones [58, 59], based on the advantage that mobile phones are personal devices that are often carried everywhere. The proposed system uses the Wi-Fi RSSI values of incoming packets at a mobile phone for activity classification. Unlike other body-worn devices, a mobile phone in an RSS-based activity monitoring system remains useful even when it is not carried by the user. By default, the firmware and operating system (OS) of a standard mobile phone do not give the user privileged access to the hardware or to the desired RSSI information. Thus, the work in [58] used a modified firmware, which allows the mobile phone to run its Wi-Fi interface in monitor mode, and developed tools to process the RSSI samples captured on the phone to monitor simple human activities such as walking and phone handling. Meanwhile, the work in [59] focused on recognizing 10 different single-handed gestures using the same modified firmware and tools developed in [58], achieving an average accuracy of 0.51 when distinguishing all gestures and 0.60 and 0.72 when the set is reduced to 7 and 4 gestures, respectively. Unfortunately, OS root access incompatibility, complicated firmware modifications, and low accuracy remain major issues for real-world applications.
The proposed RF-based DFAR systems in [57, 58, 59] used the RSS features listed in Table 2, as well as several combinations of those features, for activity classification. Assume that a wireless network environment consists of a static transmitter node or access point (AP) and a static receiver node or monitoring point (MP). Let $r_1, r_2, \ldots, r_n$ denote the RSS samples measured at the MP within a sampling window; the features in Table 2 are computed over such a window (a small feature-computation sketch is given after the table).
Feature | Description | Definition
---|---|---
Mean | Represents the static changes in RSS; provides a means to distinguish the presence of a static person as well as the exact location | $\mu = \frac{1}{n}\sum_{i=1}^{n} r_i$
Variance | Represents the volatility of the RSS; provides an estimate of changes near the receivers, such as the movement of a person | $\sigma^2 = \frac{1}{n}\sum_{i=1}^{n} (r_i - \mu)^2$
Standard deviation (SD) | Can be used instead of the variance; the interpretation of SD and variance is identical | $\sigma = \sqrt{\sigma^2}$
Median | Represents static changes in RSS; more robust to noise than the mean | Middle value of the ordered samples $r_{(1)} \le \dots \le r_{(n)}$ (mean of the two central samples for even $n$)
Normalized spectral energy | Represents a measure in the frequency domain of the RSS; can be used to capture periodic patterns such as walking, running, or cycling |
Minimum and maximum | Both represent extremal signal peaks; can be used to estimate movement and any changes in the environment | $\min_i r_i$ and $\max_i r_i$
Signal peaks within 10% of a maximum | Reflections of the obstructed signal strength at a receive antenna; peaks of similar magnitude indicate that movement is farther away; can be used to indicate near‐far relations and the activity of individuals |
Mean difference between subsequent maxima | Similar magnitudes of maximum peaks within a sample window indicate low activity in an environment or static activities; the opposite indicates dynamic activities |
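To make the first few features concrete, the following self-contained sketch (ours, not from [57, 58, 59]) computes the mean, variance, standard deviation, median, minimum and maximum of a non-empty RSS sample window.
// Basic RSS window features: mean, variance, SD, median, min, max.
#include <algorithm>
#include <cmath>
#include <iostream>
#include <vector>

struct RssFeatures {
    double mean, variance, sd, median, minimum, maximum;
};

// Assumes the window is non-empty.
RssFeatures computeFeatures(std::vector<double> window) {
    RssFeatures f{};
    const size_t n = window.size();

    double sum = 0.0;
    for (double r : window) sum += r;
    f.mean = sum / n;

    double sq = 0.0;
    for (double r : window) sq += (r - f.mean) * (r - f.mean);
    f.variance = sq / n;
    f.sd = std::sqrt(f.variance);

    std::sort(window.begin(), window.end());
    f.minimum = window.front();
    f.maximum = window.back();
    f.median = (n % 2 == 1) ? window[n / 2]
                            : 0.5 * (window[n / 2 - 1] + window[n / 2]);
    return f;
}

int main() {
    // Example RSSI window in dBm.
    std::vector<double> rss = {-47.0, -48.5, -46.0, -52.0, -49.5, -47.5};
    RssFeatures f = computeFeatures(rss);
    std::cout << "mean=" << f.mean << " var=" << f.variance
              << " sd=" << f.sd << " median=" << f.median << "\n";
    return 0;
}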
Since the works in [58, 59] focused more on hand gestures, Gu et al. [60] proposed an online Wi-Fi RSSI fingerprint-based DFAM system concentrating on whole-body activity, with a flexible architecture that can be integrated into any existing indoor WLAN, regardless of the environmental conditions. Based on the preliminary results of their study on the impact of human activities on Wi-Fi characteristics [60], the Wi-Fi RSSI fingerprint can be extracted and exploited to distinguish different activities, since each activity has its own RSSI fluctuation pattern. To reduce the difficulty of distinguishing activities with similar RSSI footprints, such as sitting and standing, the proposed system adopts a novel fusion classification tree-based algorithm. The system has been evaluated through extensive real-world experiments on six main activities (sleeping, sitting, standing, walking, falling, and running) and achieved an average accuracy of 72.47% over all activities, thus outperforming Naive Bayes, bagging, and k-NN classifiers.
Monitoring human activity using RFID technology is often associated with physical discomfort, since the user needs to wear or carry RFID devices. However, several studies have implemented RFID technology differently for device-free activity monitoring [61, 67]: the RFID devices are instead attached to walls, furniture, and everyday objects. This approach is known as passive RFID-based DFL. Rapid advances in cheap sensing and wireless technology have introduced a variety of RF-embedded devices with open-source platforms, such as TelosB [68], IRIS [69], and Waspmote [70], that can operate in real-time environments on a “plug and sense” basis, where information such as RSS can easily be captured. However, RSS measurements suffer from high uncertainty, since the signal profiles fluctuate with the environment and are therefore unpredictably affected by interference, complex multipath propagation, and noise. In addition, RSS-based systems have accuracy and coverage limitations due to the lack of frequency diversity. Thus, the RSS-based approach is only suitable for coarse-grained human activity monitoring.
Most of the research on Wi-Fi-based DFL uses CSI, one of the Wi-Fi features extracted from the physical layer of the radio system, for indoor location estimation and for monitoring human motion and activities, owing to its stability and robustness in complex environments compared with RSSI. CSI information is available in commercial wireless devices equipped with a network interface controller (NIC), also known as a network interface card, network adapter, LAN adapter, or physical network interface. Unlike the RSSI value, which is usually a single measurement per packet, CSI is measured per orthogonal frequency-division multiplexing (OFDM) subcarrier in each packet and exploits frequency diversity to reflect the multipath propagation caused by human motion and activity, making it suitable for monitoring the fine-grained signals of human activities and motions.
Based on [19, 63], consider a Wi-Fi-based DFL system whose NICs continuously measure the CSI variations in every received Wi-Fi frame over multiple wireless channels. Each received frame yields the channel frequency response (CFR) of the link at that instant. The CFR values consist of S matrices of CSI measurements, one per OFDM subcarrier, with dimensions determined by the numbers of transmit and receive antennas.
Since the radio signals travel from the transmitter to the receiver through multiple paths that depend on the surroundings, the measured CFR is the superposition of the contributions of all propagation paths, where each path is characterized by its own complex attenuation and propagation delay.
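The exact equations of [19, 63] are not reproduced here; the display below is the commonly used multipath form of the CFR on which such CSI models are built, with generic symbols (K paths, complex attenuation a_k, delay τ_k) that are not necessarily the chapter's own notation.

```latex
% Commonly used multipath model of the channel frequency response (CFR):
% H(f,t) is the CFR at carrier frequency f and time t, Delta f is the carrier
% frequency offset (CFO) between transmitter and receiver, and each of the K
% propagation paths contributes a term with complex attenuation a_k(f,t) and
% propagation delay tau_k(t).
\[
  H(f,t) \;=\; e^{-j 2\pi \Delta f\, t} \sum_{k=1}^{K} a_k(f,t)\, e^{-j 2\pi f \tau_k(t)}
\]
% A path reflected by a moving human body has a time-varying delay tau_k(t),
% so its phase term rotates as the person moves, which is what CSI-based
% systems try to capture.
```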
Figure 3 shows a scenario in which Wi-Fi signals transmitted from an AP (Tx) to an MP (Rx) travel along different paths: the line-of-sight (LoS) path and paths reflected by the wall and by the human body. Let the signal reflected by the human body correspond to one particular path; as the person moves, the length of this path, and hence the delay and phase of its contribution to the CFR, changes over time.
Multipath scenario experienced by the Wi-Fi signals due to human movement.
The phase of each path can be measured precisely only if the transmitter is synchronized with the receiver. Unfortunately, due to hardware limitations and environmental variations, the carrier frequency offset (CFO) of commercial Wi-Fi devices, denoted as ∆f in Eq. (3), cannot be ignored. Although the IEEE 802.11n standard allows devices to transmit Wi-Fi frames continuously through its frame aggregation mechanism, the CFO causes random variations in the CSI phase across frames, creating a phase interference scenario. As a result, it is difficult to precisely measure even the small phase shift of the body-reflected path caused by human movement.
To avoid the phase interference introduced by the CFO, Wang et al. [63] introduced a CSI-speed model into their activity recognition and monitoring system (CARM), which relates the variations of CFR power, rather than CFR phase, to human movement speed. Since the CSI streams affected by human movement are correlated and noisy, it is hard to extract the real trend of the CSI caused by the movement for feature classification. Therefore, the works in [5, 19, 63] applied principal component analysis (PCA) to discover the principal components of the CSI fluctuation pattern caused by human motion, which are then used as features for activity classification. In [5], Li et al. analyzed five features of the CSI principal component, namely the normalized standard deviation (STD), median absolute deviation (MAD), interquartile range (IR), signal entropy, and duration of human motion, to recognize seven different daily activities. Using a random forest-based classification algorithm, the work in [5] validated the proposed monitoring system in both the LoS and non-LoS (NLoS) scenarios, with accuracies of 95.43% and 91.4%, respectively. Meanwhile, the activity monitoring system based on a hidden Markov model (HMM) classifier proposed by Wang et al. [19, 63] achieved an average recognition accuracy of 96%.
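As a sketch of the PCA step only (not the actual CARM or [5] pipeline), the code below assumes `csi_amp` is a NumPy array of shape (time samples, subcarrier streams) holding CSI amplitudes, and computes the principal components together with a few of the statistics listed in [5]; the bin count and number of components are assumptions.

```python
import numpy as np
from sklearn.decomposition import PCA

def csi_pca_features(csi_amp, n_components=3):
    """csi_amp: (n_samples, n_streams) CSI amplitudes from one activity window."""
    # Remove the per-stream mean so PCA captures fluctuations rather than offsets.
    centered = csi_amp - csi_amp.mean(axis=0, keepdims=True)
    pcs = PCA(n_components=n_components).fit_transform(centered)

    first = pcs[:, 0]                      # dominant CSI fluctuation pattern
    feats = {
        "normalized_std": float(first.std() / (np.ptp(first) + 1e-9)),
        "median_abs_dev": float(np.median(np.abs(first - np.median(first)))),
        "interquartile_range": float(np.subtract(*np.percentile(first, [75, 25]))),
    }
    # Histogram-based estimate of the signal entropy of the first component.
    hist, _ = np.histogram(first, bins=32)
    p = hist[hist > 0] / hist.sum()
    feats["entropy"] = float(-(p * np.log2(p)).sum())
    return pcs, feats
```

Features of this kind could then be fed to a random forest or HMM classifier, as in [5, 19, 63].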
Although undesired noise from the environment may disturb some of the streams, CSI is measured using the OFDM method, so other streams that are not affected by the noise can still provide the real trend of the CSI. Since CSI contains more information than RSSI, it is suitable for fine-grained activity monitoring. However, unlike RSSI, which is available in almost all wireless devices, CSI can only be obtained from devices with specific NICs, such as the Intel 5300 and Atheros 9390 [19].
When a wave, such as an ultrasonic or radio wave, is transmitted toward a moving target, the wavelength of the reflected wave shifts depending on the direction and velocity of the movement. This is known as the Doppler effect or Doppler shift. Recently, the Doppler effect has been exploited by researchers in device-free radio sensor networks for human activity monitoring and data gathering in real-world environments [25, 65], since Doppler-based technology can accurately detect movement and eliminate the stationary noise of the environment [66]. The same principle is applied in a Doppler sensor, whose output is a beat signal with a frequency equal to the difference between the frequencies of the transmitted and received waves. Owing to its high detection accuracy, the work in [25] deployed a 24-GHz microwave Doppler sensor in a device-free activity monitoring system that recognized the daily activities of three passive targets with an average recognition rate of 90.6% over eight different activities.
Based on the Doppler feasibility study in [25], assume that a radio-wave source at a fixed position transmits a wave of a given frequency toward a moving object. The wave reflected back to the source is shifted in frequency in proportion to the velocity of the object along the line of sight, and mixing the transmitted and received waves produces a beat signal whose frequency equals this Doppler shift. From Eq. (8), the received signal depends on the object size and its distance from the source, and the beat signal is obtained as in Eq. (9).
From Eq. (9), the amplitude and frequency of the Doppler shift are highly correlated with the range of the object and its motion speed. Thus, human movements and activities performed at different speeds produce different Doppler shifts, and these activities can be estimated and analyzed by extracting features of the Doppler signature in the frequency and time domains.
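Equations (4)–(9) of [25] are not reproduced above, but the underlying relation is the standard monostatic Doppler shift; the display below is a textbook form, not the chapter's own derivation.

```latex
% Standard monostatic Doppler shift for a target moving with radial velocity v
% towards a source transmitting at carrier frequency f_0, with propagation
% speed c and v << c:
\[
  f_d \;\approx\; \frac{2 v}{c}\, f_0
\]
% The beat signal obtained by mixing the transmitted and received waves
% oscillates at f_d, so different movement speeds produce different beat
% frequencies, as discussed for Eq. (9) above.
```

For example, with a 24-GHz carrier such as the sensor used in [25] and a walking speed of about 1.5 m/s, this relation gives a beat frequency of roughly 240 Hz.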
The work in [65] proposed an in-home Wi-Fi signal-based activity recognition framework for e-healthcare applications that uses passive micro-Doppler (m-D) signature classification. A fast Fourier transform (FFT) is applied to the cross-correlation product of the baseline (reference) signal and the monitored (surveillance) signal to find the exact delay between the two signals before the m-D signature is extracted.
Although constant false alarm rate (CFAR) detection is not suitable for the indoor environment because of ambiguity peaks and the direct signal interference (DSI) problem [65], DSI is an important feature in Doppler-based sensing, as it can be used to distinguish different signatures. Instead of CFAR, a weighted standard deviation is proposed as the indicator for detecting the m-D signature without eliminating the ambiguity peaks and the DSI. PCA can then be applied to reduce the dimensionality of the dataset and to eliminate undesired noise. Finally, the Doppler signature is classified using a sparse representation classifier (SRC) with the subspace pursuit (SP) technique, which outperforms the well-known support vector machine (SVM) in terms of classification accuracy and coverage. The sparsity level in the SRC can easily be controlled and adjusted, making the proposed activity recognition framework a feasible tool that is well suited to real-time healthcare applications, especially for new users, since the system does not need to be retrained.
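As a rough illustration of the cross-correlation step only (the full passive m-D chain of [65], including the weighted standard deviation detector and the SRC, is more involved), the sketch below estimates the delay between a baseline and a monitored signal; the signal names, lengths, and sampling rate are assumptions.

```python
import numpy as np

def estimate_delay(baseline, monitored, fs):
    """Delay (in samples and seconds) of `monitored` relative to `baseline`,
    taken from the peak of their cross-correlation (computed via FFT)."""
    n = len(baseline)
    nfft = 2 * n                              # zero-pad so the circular correlation
    X = np.fft.fft(baseline, nfft)            # behaves like a linear one
    Y = np.fft.fft(monitored, nfft)
    xcorr = np.fft.ifft(Y * np.conj(X))
    xcorr = np.concatenate((xcorr[-(n - 1):], xcorr[:n]))   # lags -(n-1) .. (n-1)
    lag = int(np.argmax(np.abs(xcorr))) - (n - 1)
    return lag, lag / fs

# Example: the monitored channel is the baseline delayed by 25 samples plus noise.
fs = 20e6
rng = np.random.default_rng(1)
ref = rng.standard_normal(4096)
mon = np.concatenate((np.zeros(25), ref[:-25])) + 0.1 * rng.standard_normal(4096)
print(estimate_delay(ref, mon, fs))           # expected lag: 25 samples
```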
It has been shown that RF signal features extracted from the RSS and CSI information discussed in Sections 2.1 and 2.2 can be used to distinguish the type of movement as well as to recognize the activities performed. However, RSS is sensitive to shadowing and exhibits complex multipath propagation behavior, which makes it suitable only for monitoring coarse-grained activity. Meanwhile, CSI, which provides rich information suitable for fine-grained activity monitoring, faces hardware constraints, since the information is only available from devices with specific NICs.
In [66], Huang and Dai presented a novel packet reception rate (PRR)-based DFL system for human movement recognition under the NLoS scenario, based on the packet state characteristics contained in the link state information (LSI). The LSI, which contains additional physical information such as the RSSI, packet delivery rate, packet state, packet delay, packet loss, and the arrival time and inter-arrival interval of received packets, can be accessed from the network layer. Human movement in the radio network environment blocks or reflects the signal and causes significant changes in the signal propagation path. This results in fluctuations of the channel link quality as well as a slow fading effect.
By exploring LSI features such as the packet state and packet arrival time, different activities performed by a person in the monitoring area can be identified. The work in [66] exploits the PRR measurement to identify the link state, where the PRR is obtained as the fraction of transmitted packets that are correctly received over a measurement window.
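As an illustration of how a PRR trace might be obtained (the exact windowing of [66] is not specified here), the sketch below computes the per-window PRR from the sequence numbers of the packets that were received; the window length and packet count are assumptions.

```python
import numpy as np

def prr_trace(received_seq_nums, window=50):
    """PRR per window, given the sequence numbers of correctly received packets.

    received_seq_nums : 1-D array of integer sequence numbers that arrived
    window            : number of consecutively transmitted packets per window
    """
    received_seq_nums = np.asarray(received_seq_nums)
    last = int(received_seq_nums.max())
    got = np.zeros(last + 1, dtype=bool)
    got[received_seq_nums] = True
    n_windows = (last + 1) // window
    return np.array([got[i * window:(i + 1) * window].mean()
                     for i in range(n_windows)])

# Example: 1000 transmitted packets, of which 850 arrive while a person walks by.
rng = np.random.default_rng(3)
received = np.sort(rng.choice(1000, size=850, replace=False))
print(prr_trace(received)[:5])                # PRR of the first five windows
```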
Consider a wireless network environment in a hallway consisting of a transmitter Tx and a receiver Rx, as shown in Figure 4. When a person moves into the hallway area, there are four possible trajectories: walking from Tx to Rx, from Rx to Tx, from Tx to Tx, and from Rx to Rx. As the person moves through the hallway, the link quality fluctuates in terms of the PRR, and different trajectories generate different PRR fluctuation patterns with respect to the person's position, so the walking direction can be identified. The distance between the PRR trace of a test movement and the PRR traces of labeled training movements is calculated with the Euclidean distance equation in (12), and the walking direction is then identified using the K-nearest neighbors (KNN) algorithm (a brief sketch of this step is given below).
Node deployment in the hallway area.
Since the PRR alone cannot distinguish the speed of the movement, another piece of link state information, the received-packet arrival time, is used to measure speed, because the inter-arrival time of received packets is highly correlated with the moving speed. Several parameters, such as the autocorrelation function of the packet inter-arrival times, can therefore be extracted to estimate the speed of the movement.
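A minimal sketch of the distance-plus-KNN direction classification described above follows; the trace length, number of neighbors, trajectory labels, and synthetic data are assumptions, and the helper `euclidean_distance` only mirrors the role of Eq. (12).

```python
import numpy as np
from sklearn.neighbors import KNeighborsClassifier

TRAJECTORIES = ["Tx->Rx", "Rx->Tx", "Tx->Tx", "Rx->Rx"]

def euclidean_distance(prr_a, prr_b):
    """Euclidean distance between two PRR traces of equal length (role of Eq. (12))."""
    return float(np.sqrt(np.sum((np.asarray(prr_a) - np.asarray(prr_b)) ** 2)))

# Each training trace is the PRR measured in consecutive windows while a person
# walks one of the four trajectories (synthetic stand-in data here).
rng = np.random.default_rng(2)
train_traces = rng.uniform(0.5, 1.0, size=(40, 20))
train_labels = rng.integers(0, len(TRAJECTORIES), size=40)

knn = KNeighborsClassifier(n_neighbors=3, metric="euclidean")
knn.fit(train_traces, train_labels)

test_trace = rng.uniform(0.5, 1.0, size=(1, 20))
print(TRAJECTORIES[knn.predict(test_trace)[0]])   # predicted walking direction
```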
DFL for human and activity monitoring is a promising technology for collecting data about human presence and activity patterns, and it is much cheaper than traditional monitoring systems based on video cameras. It consists of radio nodes comprising an appropriate sensor array together with computational devices that transmit and receive data wirelessly, and it is capable of providing information on an unprecedented temporal and spatial scale. The DFL system is an easy-to-install motion tracking system, developed on top of the IoT, that improves the quality of life and provides intelligence and comfort to users, especially the disabled. Family members can respectfully monitor loved ones who live alone at home, without requiring them to wear devices or change their habits. The system can be integrated with mobile and web apps that allow users to monitor their home or office from anywhere, in real time. It can also replace existing RFID monitoring systems, which cause physical discomfort and are less reliable because more than one tag can respond at the same time.
In recent years, the number of patient admissions in hospitals worldwide, whether federal or nonfederal, has increased, especially in developed countries, because of the growing older and disabled population [71, 72]. In England, for instance, the older population (aged 65–69) grew by 34% in the decade to 2016, from 2.2 million in 2006, while hospital admissions for this group rose by 57% from 0.8 million over the same period [71]. As a result, many hospitals do not have enough beds to admit patients, which slows down the work of medical staff, especially in the casualty or emergency department (ED), and complaints about slow service damage the hospital's reputation. By implementing a DFL system, federal and nonfederal bodies can introduce remote home healthcare services in which patients are monitored and advised from anywhere. These services help patients improve their function and live with greater independence. With such a system, existing patients are taught to manage their own wellness and to safely manage their medication regimens, while medical staff can remotely monitor and estimate the health condition of patients by interpreting their daily routines. Patients can thus remain at home, avoiding hospitalization or admission to long-term care institutions. If a patient's daily routine deviates from what is expected, for example sleeping or resting in bed for too long, the patient might be unwell and should be visited soon for a closer examination.
Recent advances in medicine allow people to live longer and healthier lives than previous generations, which leads to an increase in the number of elderly people. Aging brings many challenges due to cognitive decline, chronic age-related diseases, and limitations in physical activity, vision, and hearing. With the rise in age-related diseases, more individuals will be unable to live independently. At the same time, because of the higher cost of living, adult children are often too busy working to care for their parents, which increases the number of elderly people in federal and nonfederal welfare or nursing homes, while there is a shortage of trained professionals and caregivers to work with the aging population. Given that most elderly people prefer to stay in the comfort of their own homes, and given the costs of private nursing home care, it is imperative to develop technologies that help elderly people age in place. By implementing DFL technology as an ambient assisted living tool, families can respectfully monitor their loved ones who live alone, without requiring them to wear devices or change their habits, and the integrated mobile and web apps mentioned above allow real-time monitoring from anywhere. This makes DFL technology well suited to monitoring the activities of the elderly, disabled people, and patients suffering from Alzheimer's disease without the physical discomfort of wearable devices or sensors, which such users may in any case find difficult to remember to wear or activate every day.
Automatic monitoring and control in “smart” buildings, i.e., homes or offices, has been developed on the basis of IoT and WSN technologies to improve the quality of life and to provide intelligence and comfort to users, especially the disabled. DFL technology can be extended beyond monitoring to act as an application server that controls and initiates actions as needed. For example, in an office where several people work together, DFL technology can enhance the existing lighting, heating, and air-conditioning systems by providing information about the current environment, such as the presence and number of people, their locations, the body temperature of the occupants, the activities being performed, and the humidity of the environment. If too many electronic devices are in use, or too many occupants raise the room temperature, the building heating system can be adjusted or automatically lowered based on the information provided by the DFL system. If no people are present in some areas of the office, for example during the lunch break, the lights and air-conditioning in those areas can be switched off automatically. Government and private bodies could implement this technology in their buildings to reduce utility costs.
In recent years, the RF-based DFL approach has made tremendous progress and has become a popular research topic in localization and activity monitoring. In the previous sections, we reviewed existing human activity monitoring systems based on different approaches. However, significant challenges and open issues remain that are worth exploring and require further in-depth research, and the performance of existing systems can be further optimized, improved, and extended. In this section, we present a list of challenges and open issues together with possible future research directions.
Unstructured approaches: Although several theories and models have been presented in existing human activity monitoring research, there is still no general technique, methodological approach, or framework for DFAM. Most existing research focuses on a particular application and a specific technology, based on empirical observations. The topic requires a more complete theoretical model, together with a general technique or framework for RF-based activity recognition, as well as additional in-depth research on how the human body interacts with radio signals, based on a human body model. One of our research directions is to deploy representation learning methods, such as deep learning, to achieve high recognition rates. Unlike traditional machine learning algorithms, which require manual feature selection and rule definition, a deep learning approach can learn suitable features automatically and predict activities accurately. In addition, although implementing unsupervised learning in a DFL human activity monitoring system is challenging, it is worth exploring.
Inconsistency and unreliability of the sensors: Most of the presented studies use common sensors such as RFID tags, MEMSIC motes, and other RF devices with a continuous sensing technique, in which the sensor continuously collects samples for activity recognition. Continuous sensing is challenging, however, because consistent operation depends on the battery lifetime. In addition, long continuous sequences of human activity with periodic variations may result in wrong activity predictions. To monitor human activity continuously without disrupting the system, energy-efficient mechanisms can be applied to the sensors, whereby, under certain conditions, sensors are turned off or put into sleep mode. This is known as the duty-cycling technique: only selected sensors are active for sampling in a given state, while the remaining sensors sleep, waiting for possible state transitions. It has been shown that duty cycling can improve battery lifetime by 75%, at the cost of increased recognition latency. Additional research is needed to compare the performance of activity recognition systems across diverse hardware and devices, and some sensors or devices could be combined for more complete human activity recognition.
Hardware, maintenance, and labor costs: In larger-scale environments, especially for systems using fingerprinting approaches, deploying hundreds of sensors across a multi-room building can yield good detection accuracy. However, such a high sensor density affects the overall energy consumption of the building and incurs additional installation, maintenance, and labor costs. If the sensors are battery-powered, the maintenance and energy requirements must be considered for long-term deployment. The hardware, feature extraction approach, and classification technique should therefore be chosen so that the system can be deployed without excessive additional cost.
Noise and interference from other appliances: Recognizing activities in a noisy environment, that is, in a communication channel affected by noise and by interference from other devices operating on the same channel, is challenging. In the presence of noise and interference, and given the inherent volatility of wireless signals, activity recognition becomes less accurate. Most studies are evaluated under controlled laboratory conditions, in which only selected devices are present in the monitoring environment. Some implementations use frequency diversity to improve accuracy; however, frequency diversity is less effective in indoor environments because of interference from nearby wireless devices. To deploy DFL systems in practical, real-world environments, the activity recognition algorithm should be evaluated in large-scale areas that contain not only the required devices but also other devices operating on the same communication channel. Further in-depth research on noise elimination techniques is therefore required to effectively remove noise from different sources in the radio channel.
Offline classification and training: Most of the presented studies use offline activity classification, in which the data collected by the receiver are trained on and classified offline by the application server, and the reported system performance is based on this offline recognition. To build reliable real-world activity monitoring applications, classification and performance evaluation should be carried out online on the application server. Offline classification is suitable for applications that do not require online recognition, such as monitoring a person's daily routine, where the data can be collected, stored on the application server, and processed offline. However, online classification and recognition are required for applications concerned with specific activities, their duration, and their sequence, such as fitness coaching, fall detection, and remote healthcare. In an ideal, reliable system, the performance and classifier accuracy can continue to improve as long as the system keeps collecting enough data, which makes the system more beneficial for human-centric applications.
Recognizing complex activity: Most of the presented studies focus on coarse-grained, basic human activities such as walking, running, standing, and falling. However, the patterns of these activities are not distinctive enough to be linked directly to more complex, fine-grained activities. Human behavior is spontaneous, and people tend to perform multiple tasks at the same time, which introduces confusion into the recognition process and can result in incorrect classification. For instance, it is fairly straightforward to detect that a user is lying on a couch, but inferring whether the user is sleeping, watching television, or has fainted is a different matter. Although several attempts have addressed this issue, further research is needed on collecting information about complex activities and mapping it to human-centric application domains, especially persuasive applications aimed at behavior or lifestyle change.
Recognizing multiuser activities: Most of the presented studies focus on recognizing the activity of a single person. In reality, real-world applications usually involve multiple users in the environment, for example people walking together, queuing in a line, watching television together, or having a family dinner; however, none of the presented methods are applicable to such situations. This open issue should be investigated further for different application domains.
In this chapter, we provided an extensive review of human activity recognition using RF-based DFL technology, targeting human-centric applications such as healthcare, well-being, and assisted living. We provided detailed information on the concepts of DFL and DFAM, together with feature selection approaches based on different signal descriptors and the potential applications. We qualitatively reviewed existing and ongoing work and discussed the challenges, limitations, and future research directions relevant to this field. We believe that DFL technology has great potential, can benefit humans, and will be one of the key areas of research worth exploring in the future.
Berns and Elliot Botvinick",authors:null},{id:"664",title:"Dental Patient Robot",slug:"dental_patient_robot",totalDownloads:2554,totalCrossrefCites:0,totalDimensionsCites:3,book:{slug:"medical_robotics",title:"Medical Robotics",fullTitle:"Medical Robotics"},signatures:"Hideaki Takanobu",authors:null},{id:"659",title:"Robotic Assisted Microsurgery (RAMS): Application in Plastic Surgery",slug:"robotic_assisted_microsurgery__rams___application_in_plastic_surgery",totalDownloads:5571,totalCrossrefCites:1,totalDimensionsCites:5,book:{slug:"medical_robotics",title:"Medical Robotics",fullTitle:"Medical Robotics"},signatures:"Sanjay Saraf",authors:null},{id:"660",title:"Prototypic Force Feedback Instrument for Minimally Invasive Robotic Surgery",slug:"prototypic_force_feedback_instrument_for_minimally_invasive_robotic_surgery",totalDownloads:5612,totalCrossrefCites:11,totalDimensionsCites:24,book:{slug:"medical_robotics",title:"Medical Robotics",fullTitle:"Medical Robotics"},signatures:"Ulrich Seibold, Bernhard Kuebler and Gerd Hirzinger",authors:null},{id:"647",title:"Intelligent Laparoscopic Assistant Robot through Surgery Task Model: How to Give Intelligence to Medical Robots",slug:"intelligent_laparoscopic_assistant_robot_through_surgery_task_model__how_to_give_intelligence_to_med",totalDownloads:3100,totalCrossrefCites:1,totalDimensionsCites:3,book:{slug:"medical_robotics",title:"Medical Robotics",fullTitle:"Medical Robotics"},signatures:"Dong-Soo Kwon, Seong-Young Ko and Jonathan Kim",authors:null}],onlineFirstChaptersFilter:{topicSlug:"medical-robot",limit:3,offset:0},onlineFirstChaptersCollection:[],onlineFirstChaptersTotal:0},preDownload:{success:null,errors:{}},aboutIntechopen:{},privacyPolicy:{},peerReviewing:{},howOpenAccessPublishingWithIntechopenWorks:{},sponsorshipBooks:{sponsorshipBooks:[{type:"book",id:"10176",title:"Microgrids and Local Energy Systems",subtitle:null,isOpenForSubmission:!0,hash:"c32b4a5351a88f263074b0d0ca813a9c",slug:null,bookSignature:"Prof. Nick Jenkins",coverURL:"https://cdn.intechopen.com/books/images_new/10176.jpg",editedByType:null,editors:[{id:"55219",title:"Prof.",name:"Nick",middleName:null,surname:"Jenkins",slug:"nick-jenkins",fullName:"Nick Jenkins"}],equalEditorOne:null,equalEditorTwo:null,equalEditorThree:null,productType:{id:"1",chapterContentType:"chapter"}}],offset:8,limit:8,total:1},route:{name:"profile.detail",path:"/profiles/177343/anwar-ahmed",hash:"",query:{},params:{id:"177343",slug:"anwar-ahmed"},fullPath:"/profiles/177343/anwar-ahmed",meta:{},from:{name:null,path:"/",hash:"",query:{},params:{},fullPath:"/",meta:{}}}},function(){var e;(e=document.currentScript||document.scripts[document.scripts.length-1]).parentNode.removeChild(e)}()