% ======================= 2020 =======================
% NOTE(review): non-ASCII citation key kept to avoid breaking existing \cite{};
% consider renaming to "argueso2020" for classic-BibTeX toolchain compatibility.
% Cleaned: scraped page text removed; DOI moved from url to a bare doi field;
% hyphenated capital in title brace-protected against style recasing.
@article{Argüeso2020,
  title     = {{Few-Shot} Learning approach for plant disease classification using images taken in the field},
  author    = {Argüeso, David and Picon, Artzai and Irusta, Unai and Medela, Alfonso and San-Emeterio, Miguel G. and Bereciartua, Arantza and Alvarez-Gila, Aitor},
  journal   = {Computers and Electronics in Agriculture},
  volume    = {175},
  year      = {2020},
  date      = {2020-06-20},
  doi       = {10.1016/j.compag.2020.105542},
  keywords  = {bacterial plant disease, contrastive loss, convolutional neural networks, deep learning, few shot learning, fungal plant disease, plant disease, triplet loss},
  pubstate  = {published},
  tppubtype = {article},
}
% Cleaned: scraped page text removed; entry reformatted one-field-per-line.
% NOTE(review): only a Springer share link (rdcu.be) is present — TODO add the
% article DOI when known; prefer doi over url for link rot resistance.
@article{Shahriari2020,
  title     = {A deep learning approach to the inversion of borehole resistivity measurements},
  author    = {Shahriari, M. and Pardo, D. and Picon, A. and Del Ser, J. and Torres-Verdín, C.},
  journal   = {Computational Geosciences},
  year      = {2020},
  date      = {2020-04-13},
  url       = {https://rdcu.be/b3yHn},
  keywords  = {deep learning, deep neural networks, logging-while-drilling, real-time inversion, resistivity measurements, well geosteering},
  pubstate  = {published},
  tppubtype = {article},
}
% Cleaned: page range uses -- (was single hyphen); dx.doi.org resolver URL with
% trailing space replaced by a bare doi field; scraped page text removed.
@article{Picon2020b,
  title     = {Why deep learning performs better than classical machine learning?},
  author    = {Picon, Artzai and Alvarez-Gila, Aitor and Irusta, Unai and Echazarra, Jone},
  journal   = {DYNA},
  volume    = {95},
  pages     = {119--122},
  year      = {2020},
  date      = {2020-03-01},
  doi       = {10.6036/9574},
  keywords  = {deep learning},
  pubstate  = {published},
  tppubtype = {article},
}
% ======================= 2019 =======================
% Cleaned: arXiv identifier exposed via eprint/archiveprefix instead of living
% only inside the journal string (journal kept for classic-style compatibility);
% acronyms XAI/AI brace-protected; scraped page text removed.
@article{arrieta2019explainable,
  title         = {Explainable Artificial Intelligence ({XAI}): Concepts, Taxonomies, Opportunities and Challenges toward Responsible {AI}},
  author        = {Barredo-Arrieta, Alejandro and Díaz-Rodríguez, Natalia and Del Ser, Javier and Bennetot, Adrien and Tabik, Siham and Barbado, Alberto and García, Salvador and Gil-López, Sergio and Molina, Daniel and Benjamins, Richard and Chatila, Raja and Herrera, Francisco},
  journal       = {arXiv preprint arXiv:1910.10045},
  eprint        = {1910.10045},
  archiveprefix = {arXiv},
  year          = {2019},
  date          = {2019-10-22},
  url           = {https://arxiv.org/abs/1910.10045},
  keywords      = {accountability, comprehensibility, data fusion, deep learning, explainability, Explainable Artificial Intelligence, fairness, interpretability, machine learning, privacy, responsible artificial intelligence, transparency},
  pubstate      = {published},
  tppubtype     = {article},
}
% Cleaned: ALL-CAPS author names normalised to "Last, First" mixed case; journal
% corrected to PLoS ONE; DOI extracted from the article URL query string; the
% scraped duplicate copy of the abstract after the entry removed.
@article{picon2019plos,
  title     = {Mixed convolutional and long short-term memory network for the detection of lethal ventricular arrhythmia},
  author    = {Picón-Ruiz, Artzai and Irusta, Unai and Álvarez-Gila, Aitor and Aramendi, Elisabete and Alonso-Atienza, Felipe and Figuera, Carlos and Ayala, Unai and Garrote-Contreras, Estibaliz and Wik, Lars and Kramer-Johansen, Jo and Eftestøl, Trygve},
  journal   = {PLoS ONE},
  volume    = {14},
  number    = {5},
  pages     = {e0216756},
  year      = {2019},
  date      = {2019-05-20},
  doi       = {10.1371/journal.pone.0216756},
  url       = {https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0216756},
  abstract  = {Early defibrillation by an automated external defibrillator (AED) is key for the survival of out-of-hospital cardiac arrest (OHCA) patients. ECG feature extraction and machine learning have been successfully used to detect ventricular fibrillation (VF) in AED shock decision algorithms. Recently, deep learning architectures based on 1D Convolutional Neural Networks (CNN) have been proposed for this task. This study introduces a deep learning architecture based on 1D-CNN layers and a Long Short-Term Memory (LSTM) network for the detection of VF. Two datasets were used, one from public repositories of Holter recordings captured at the onset of the arrhythmia, and a second from OHCA patients obtained minutes after the onset of the arrest. Data was partitioned patient-wise into training (80%) to design the classifiers, and test (20%) to report the results. The proposed architecture was compared to 1D-CNN only deep learners, and to a classical approach based on VF-detection features and a support vector machine (SVM) classifier. The algorithms were evaluated in terms of balanced accuracy (BAC), the unweighted mean of the sensitivity (Se) and specificity (Sp). The BAC, Se, and Sp of the architecture for 4-s ECG segments was 99.3%, 99.7%, and 98.9% for the public data, and 98.0%, 99.2%, and 96.7% for OHCA data. The proposed architecture outperformed all other classifiers by at least 0.3-points in BAC in the public data, and by 2.2-points in the OHCA data. The architecture met the 95% Sp and 90% Se requirements of the American Heart Association in both datasets for segment lengths as short as 3-s. This is, to the best of our knowledge …},
  keywords  = {deep learning},
  pubstate  = {published},
  tppubtype = {article},
}
% Cleaned: ALL-CAPS author names normalised to "Last, First" mixed case; the
% "View Full-Text" scrape artifact removed from the abstract; ECG acronym
% brace-protected in the title; duplicate rendered abstract removed.
@article{elola2019b,
  title     = {Deep Neural Networks for {ECG}-Based Pulse Detection during Out-of-Hospital Cardiac Arrest},
  author    = {Elola, Andoni and Aramendi, Elisabete and Irusta, Unai and Picón-Ruiz, Artzai and Alonso, Erik and Owens, Pamela and Idris, Ahamed},
  journal   = {Entropy},
  volume    = {21},
  number    = {3},
  pages     = {305},
  year      = {2019},
  date      = {2019-03-01},
  url       = {https://www.mdpi.com/1099-4300/21/3/305},
  abstract  = {The automatic detection of pulse during out-of-hospital cardiac arrest (OHCA) is necessary for the early recognition of the arrest and the detection of return of spontaneous circulation (end of the arrest). The only signal available in every single defibrillator and valid for the detection of pulse is the electrocardiogram (ECG). In this study we propose two deep neural network (DNN) architectures to detect pulse using short ECG segments (5 s), ie, to classify the rhythm into pulseless electrical activity (PEA) or pulse-generating rhythm (PR). A total of 3914 5-s ECG segments, 2372 PR and 1542 PEA, were extracted from 279 OHCA episodes. Data were partitioned patient-wise into training (80%) and test (20%) sets. The first DNN architecture was a fully convolutional neural network, and the second architecture added a recurrent layer to learn temporal dependencies. Both DNN architectures were tuned using Bayesian optimization, and the results for the test set were compared to state-of-the art PR/PEA discrimination algorithms based on machine learning and hand crafted features. The PR/PEA classifiers were evaluated in terms of sensitivity (Se) for PR, specificity (Sp) for PEA, and the balanced accuracy (BAC), the average of Se and Sp. The Se/Sp/BAC of the DNN architectures were 94.1%/92.9%/93.5% for the first one, and 95.5%/91.6%/93.5% for the second one. Both architectures improved the performance of state of the art methods by more than 1.5 points in BAC.},
  keywords  = {biomedical engineering, deep learning, ecg},
  pubstate  = {published},
  tppubtype = {article},
}
% Cleaned: ALL-CAPS author names normalised; "GLOVER BEN" rendered as
% "Glover, Ben" — assumed given name is Ben and surname Glover (the scraped
% header lists "BEN, GLOVER") — TODO confirm; scraped page text removed.
@inproceedings{medela2019b,
  title     = {Few shot learning in Histopathological images: Reducing the need of labeled data on biological datasets},
  author    = {Medela, Alfonso and Picón-Ruiz, Artzai and Saratxaga, Cristina L. and Belar, Oihana and Cabezon, Virginia and Cicchi, Riccardo and Bilbao, Roberto and Glover, Ben},
  booktitle = {IEEE International Symposium on Biomedical Imaging},
  year      = {2019},
  date      = {2019-01-01},
  url       = {https://computervision.tecnalia.com/wp-content/uploads/2019/06/ISBI_paper__final_version_-2.pdf},
  keywords  = {deep learning, few shot learning},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
% Cleaned: entry was split across scraped lines — abstract reassembled; the
% duplicate rendered abstract after the entry removed; fields one per line.
@article{alvarez-gila_self-supervised_2019,
  title     = {Self-Supervised Blur Detection from Synthetically Blurred Scenes},
  author    = {Alvarez-Gila, Aitor and Galdran, Adrian and Garrote, Estibaliz and van de Weijer, Joost},
  journal   = {Image and Vision Computing},
  volume    = {92},
  pages     = {103804},
  year      = {2019},
  date      = {2019-01-01},
  doi       = {10.1016/j.imavis.2019.08.008},
  issn      = {0262-8856},
  abstract  = {Blur detection aims at segmenting the blurred areas of a given image. Recent deep learning-based methods approach this problem by learning an end-to-end mapping between the blurred input and a binary mask representing the localization of its blurred areas. Nevertheless, the effectiveness of such deep models is limited due to the scarcity of datasets annotated in terms of blur segmentation, as blur annotation is labor intensive. In this work, we bypass the need for such annotated datasets for end-to-end learning, and instead rely on object proposals and a model for blur generation in order to produce a dataset of synthetically blurred images. This allows us to perform self-supervised learning over the generated image and ground truth blur mask pairs using CNNs, defining a framework that can be employed in purely self-supervised, weakly supervised or semi-supervised configurations. Interestingly, experimental results of such setups over the largest blur segmentation datasets available show that this approach achieves state of the art results in blur segmentation, even without ever observing any real blurred image.},
  keywords  = {Blur detection, deep learning, Defocus blur, Motion blur, Self-supervised learning, Synthetic},
  pubstate  = {published},
  tppubtype = {article},
}
% ======================= 2018 =======================
% Fixed: CASEIB 2018 is a conference (Congreso Anual de la Sociedad Española de
% Ingeniería Biomédica), not a journal — entry converted from @article/journal
% to @inproceedings/booktitle; ALL-CAPS author names normalised to
% "Last, First" comma form (handles multi-word surnames like Sánchez Margallo).
@inproceedings{bote2018,
  title     = {Innovative multiphotonic endoscope to address technological challenges in current colonoscopy procedure},
  author    = {Bote-Curiel, L. and Ortega Morán, J. F. and Pagador, J. B. and Sánchez Margallo, F. M. and Glover, B. and Teare, J. and Polo, F. and Arbide, N. and Saratxaga, C. L. and Solleder, P. and Alfieri, D. and Di Noia, F. and Roycroft, B. and Bain, J. and Cicchi, R. and Pavone, F. S. and Picón-Ruiz, Artzai},
  booktitle = {CASEIB 2018},
  year      = {2018},
  date      = {2018-11-13},
  url       = {https://computervision.tecnalia.com/wp-content/uploads/2019/06/CASEIB2018_PICCOLO_v9_conAutores-1.pdf},
  keywords  = {deep learning},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
% Cleaned: ALL-CAPS author names normalised; abstract reassembled from the
% scraped line split; duplicate rendered abstract removed. The url field keeps
% both links (publisher page + preprint PDF) exactly as the source stored them.
@article{picon_deep_2018,
  title     = {Deep convolutional neural networks for mobile capture device-based crop disease classification in the wild},
  author    = {Picón-Ruiz, Artzai and Alvarez-Gila, Aitor and Seitz, Maximiliam and Ortiz-Barredo, Amaia and Echazarra, Jone and Johannes, Alexander},
  journal   = {Computers and Electronics in Agriculture},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-06-26},
  doi       = {10.1016/j.compag.2018.04.002},
  issn      = {0168-1699},
  url       = {http://www.sciencedirect.com/science/article/pii/S0168169917312619 https://computervision.tecnalia.com/wp-content/uploads/2018/10/preprint_computer_and_electronics_10.1016@j.compag.2018.04.002-1.pdf},
  abstract  = {Fungal infection represents up to 50% of yield losses, making it necessary to apply effective and cost efficient fungicide treatments, whose efficacy depends on infestation type, situation and time. In these cases, a correct and early identification of the specific infection is mandatory to minimize yield losses and increase the efficacy and efficiency of the treatments. Over the last years, a number of image analysis-based methodologies have been proposed for automatic image disease identification. Among these methods, the use of Deep Convolutional Neural Networks (CNNs) has proven tremendously successful for different visual classification tasks. In this work we extend previous work by Johannes et al. (2017) with an adapted Deep Residual Neural Network-based algorithm to deal with the detection of multiple plant diseases in real acquisition conditions where different adaptions for early disease detection have been proposed. This work analyses the performance of early identification of three relevant European endemic wheat diseases: Septoria (Septoria triciti), Tan Spot (Drechslera triciti-repentis) and Rust (Puccinia striiformis & Puccinia recondita). The analysis was done using different mobile devices, and more than 8178 images were captured in two pilot sites in Spain and Germany during 2014,2015 and 2016. Obtained results reveal an overall improvement of the balanced accuracy from 0.78 (Johannes et al., 2017) up to 0.87 under exhaustive testing, and balanced accuracies greater than 0.96 on a pilot test performed in Germany.},
  keywords  = {cnn, convolutional neural networks, deep learning, disease identification, early pest, image processing, phytopathology, plant disease, precision agriculture},
  pubstate  = {published},
  tppubtype = {article},
}
% ======================= 2017 =======================
% Cleaned: ALL-CAPS author names (including the braced {CRUZ-LOPEZ}) normalised
% to "Last, First"; scraped page text removed.
% NOTE(review): address holds the event venue, not the publisher's city — kept
% as the plugin (teachPress-style fields) expects it.
@inproceedings{picon_high_2017,
  title     = {High speed quality inspection of hot long metal products surface based on Deep Convolutional Neural Networks},
  author    = {Picón-Ruiz, Artzai and Alvarez-Gila, Aitor and Duro, Gorka and Cruz-Lopez, Antonio and Linares, Miguel and Lago, Alberto and Garrote-Contreras, Estibaliz and Gutiérrez-Olabarria, Jose A.},
  booktitle = {Open session - International Summer School on Deep Learning 2017},
  address   = {Bilbao, Spain},
  year      = {2017},
  date      = {2017-01-01},
  url       = {https://computervision.tecnalia.com/wp-content/uploads/2017/06/Surfin_DeepLearn_Bilbao2017.pdf},
  keywords  = {deep learning, industry},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
% ======================= 2016 =======================
% Fixed: last author was garbled as "Jose A. Gutiérrez-Olabarria OLABARRIA"
% (surname duplicated by the scrape) — normalised to "Gutiérrez-Olabarria,
% Jose A."; ALL-CAPS names normalised; scraped page text removed.
@article{cruz-lopez_high-speed_2016,
  title     = {High-speed inspection system finds defects in steel},
  author    = {Cruz-Lopez, Antonio and Lago, Alberto and Gonzalez, Roberto and Alvarez-Gila, Aitor and Gutiérrez-Olabarria, Jose A.},
  journal   = {Vision Systems Design},
  number    = {December 2016 - 1},
  pages     = {24--27},
  year      = {2016},
  date      = {2016-01-01},
  urldate   = {2016-12-12},
  url       = {http://digital.vision-systems.com/visionsystems/201612?pg=26 https://computervision.tecnalia.com/wp-content/uploads/2016/12/visionsystemsdesign201612-dl.pdf},
  keywords  = {cnn, deep learning, neural networks, surface quality},
  pubstate  = {published},
  tppubtype = {article},
}
% Cleaned: ALL-CAPS author names normalised to "Last, First" mixed case;
% scraped page text removed; fields one per line.
@inproceedings{alvarez-gila_deep_2016b,
  title     = {Deep Convolutional Neural Networks for surface quality inspection of hot long metal products},
  author    = {Alvarez-Gila, Aitor and Cruz-Lopez, Antonio and Rodriguez-Vaamonde, Sergio and Linares, Miguel and Gutiérrez-Olabarria, José A. and Garrote-Contreras, Estibaliz},
  booktitle = {First European Machine Vision Forum},
  address   = {Heidelberg, Germany},
  year      = {2016},
  date      = {2016-01-01},
  url       = {https://computervision.tecnalia.com/wp-content/uploads/2016/09/EMVA-Deep-Convolutional-Neuronal-Networks-for-surface-quality-inspection-of-hot-long-metal-products.pdf},
  keywords  = {cnn, convolutional neural networks, deep learning, neural networks, surface quality},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
% Publicaciones Científico Tecnológicas — Computer Vision by Tecnalia — last updated 2020-02-10T16:35:43+02:00