@Article{EAAI2023,
  author   = {Krzysztof Okarma and Piotr Lech},
  journal  = {Engineering Applications of Artificial Intelligence},
  title    = {A method supporting fault-tolerant optical text recognition from video sequences recorded with handheld cameras},
  year     = {2023},
  issn     = {0952-1976},
  pages    = {106330},
  volume   = {123},
  abstract = {In this paper, a method supporting optical character recognition from video sequences recorded with cameras lacking good stabilization is proposed. Due to the presence of various distortions, such as motion blur, shadows, lossy compression artifacts, auto-focusing errors, etc., the quality of individual video frames, e.g., recorded by a smartphone camera, differs noticeably, influencing the results of text recognition and causing additional errors that may be unacceptable even in fault-tolerant applications. Although the quality of individual video frames may be assessed using state-of-the-art no-reference image quality metrics, the obtained results, particularly for the relatively fast metrics, are poorly correlated with recognition results. Therefore, a novel hybrid approach to image quality assessment supporting fault-tolerant optical character recognition is proposed, making it possible to automatically select the video frames leading to the smallest number of errors. Since data extraction is rarely perfect, the proposed method may be particularly useful in systems where users should identify faults defined as too high a number of text recognition errors. The proposed method makes it possible to eliminate low-quality fragments of video sequences from further analysis, reducing the overall computational burden. It may also be useful in industrial OCR applications based on offline video recording with memory constraints, where the use of online OCR services is not possible.},
  doi      = {10.1016/j.engappai.2023.106330},
  keywords = {Optical character recognition, Document images, No-reference image quality assessment, Video-based OCR, Hybrid image quality metrics},
  url      = {https://www.sciencedirect.com/science/article/pii/S0952197623005146},
}