@inproceedings{16197,
  keywords  = {Real-time segmentation, minimally invasive surgery, surgical instruments, laparoscopy, deep learning},
  author    = {Debesh Jha and Sharib Ali and Nikhil Tomar and Michael Riegler and Dag Johansen and H{\r a}vard Johansen and P{\r a}l Halvorsen},
  title     = {Exploring Deep Learning Methods for Real-Time Surgical Instrument Segmentation in Laparoscopy},
  abstract  = {Minimally Invasive Surgery (MIS) is a surgical intervention used to examine the organs inside the abdomen and has been widely adopted due to its effectiveness compared to open surgery. Owing to hardware improvements such as high-definition cameras, the procedure has improved significantly, and new software methods have opened the potential for computer-assisted procedures. However, challenges remain, and there is a need to improve the detection and tracking of instrument positions during these surgical procedures. To this end, we evaluate and compare popular deep learning methods that can be explored for the automated segmentation of surgical instruments in laparoscopy, an important step towards tool tracking. Our experimental results demonstrate that the dual decoder attention network (DDANet) produces superior results compared to other recent deep learning methods. DDANet achieves a Dice coefficient of 0.8739 and a mean intersection over union of 0.8183 on the Robust Medical Instrument Segmentation (ROBUST-MIS) Challenge 2019 dataset, at a real-time speed of 101.36 frames per second, which is critical for such procedures.},
  year      = {2021},
  booktitle = {2021 IEEE EMBS International Conference on Biomedical and Health Informatics (BHI)},
  pages     = {1-4},
  publisher = {IEEE},
}