Commits

Vincent Rabeux committed 3fbd1b9

Added slides from PhD to create ICDAR2013 slides.

Comments (0)

Files changed (357)

ICDAR2013/Presentation/Makefile

+all : presentation.tex
+	pdflatex presentation.tex
+	bibtex presentation
+	pdflatex presentation.tex
+	pdflatex presentation.tex
+
+
+bib : biblio.bib
+	bibtex presentation
+
+pdf : presentation.tex
+	pdflatex presentation.tex
+
+clean :
+	rm -f *.aux *.pdf *.bbl *.blg *.nav *.snm *.toc *.synctex.gz *.log *.out

ICDAR2013/Presentation/biblio.bib

+%% This BibTeX bibliography file in UTF-8 format was created using Papers.
+%% http://mekentosj.com/papers/
+
+@article{kanungo2000statistical,
+author = {T Kanungo and RM Haralick and HS Baird and W Stuetzle and D Madigan}, 
+journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
+title = {A statistical, nonparametric methodology for document degradation model validation},
+number = {11},
+pages = {1209--1223},
+volume = {22},
+year = {2000},
+date-added = {2010-06-27 16:54:22 +0200},
+date-modified = {2010-06-27 16:55:06 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2000/Kanungo/IEEE%20Transactions%20on%20Pattern%20Analysis%20and%20Machine%20Intelligence%202000%20Kanungo.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p24},
+rating = {0}
+}
+
+@conference{kanungo1994document,
+author = {T Kanungo and RM Haralick and HS Baird and W Stuetzle and D Madigan}, 
+journal = {Conference},
+title = {Document degradation models: Parameter estimation and model validation},
+pages = {552--557},
+year = {1994},
+keywords = {ALIRE, models}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p3},
+rating = {0}
+}
+
+@article{Journet:2010p1037,
+author = {N Journet and A Vialard and J Domenger}, 
+journal = {hal.archives-ouvertes.fr},
+title = {Analyse de fontes anciennes: de la g{\'e}n{\'e}ration de donn{\'e}es synth{\'e}tiques {\`a} la reconnaissance},
+abstract = { On y retrouve, entre autres, le nom de la  et les caract{\`e}res extraits auxquels sont associ{\'e}es des informations de position (points d'accroche). On hal-00488500, version 1 - 2 Jun 2010 Page 8. Nicholas , Anne Vialard, Jean-Philippe Domenger },
+year = {2010},
+month = {Jan},
+date-added = {2010-07-07 10:56:06 +0200},
+date-modified = {2010-07-07 10:56:06 +0200},
+pmid = {related:z8XaHwPL0nsJ},
+URL = {http://hal.archives-ouvertes.fr/hal-00488500/},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2010/Journet/hal.archives-ouvertes.fr%202010%20JournetAnalyse%20de%20fontes%20anciennes%20de.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1037},
+rating = {0}
+}
+
+@article{Zitova:2003p1216,
+author = {B Zitova and J Flusser}, 
+journal = {Image and vision computing},
+title = {Image registration methods: a survey},
+abstract = {This paper aims to present a review of recent as well as classic image registration methods. Image registration is the process of overlaying images (two or more) of the same scene taken at different times, from different viewpoints, and/or by different sensors. The registration geometrically },
+year = {2003},
+month = {Jan},
+date-added = {2010-07-22 10:18:49 +0200},
+date-modified = {2010-07-22 10:18:49 +0200},
+pmid = {13313639168318216493related:LZlV70iMw7gJ},
+URL = {http://linkinghub.elsevier.com/retrieve/pii/S0262885603001379},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2003/Zitova/Image%20and%20vision%20computing%202003%20ZitovaImage%20registration%20methods%20a%20survey.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1216},
+read = {Yes},
+rating = {0}
+}
+
+@article{Bienvenue:2001p1265,
+author = {Y Bienvenue}, 
+journal = {site.uottawa.ca},
+title = {Registration of two-sided documents suffering from bleed through},
+abstract = {When documents are written on both sides, the ink might bleed through the paper. This is particularly true with old documents. Reading and studying such documents is not easy when serious bleed through is present. This report proposes a solution to correct the problem and shows how },
+year = {2001},
+month = {Jan},
+date-added = {2010-07-22 10:22:32 +0200},
+date-modified = {2010-07-22 10:22:32 +0200},
+pmid = {related:cQlo1vS5vo4J},
+URL = {http://www.site.uottawa.ca/~edubois/documents/csi4900_report_bienvenue.pdf},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2001/Bienvenue/site.uottawa.ca%202001%20BienvenueRegistration%20of%20two-sided%20documents%20suffering.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1265},
+read = {Yes},
+rating = {0}
+}
+
+@article{trier1995evaluation,
+author = {OD Trier and T Taxt}, 
+journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
+title = {Evaluation of binarization methods for document images},
+number = {3},
+pages = {312--315},
+volume = {17},
+year = {1995},
+date-added = {2010-06-27 16:54:42 +0200},
+date-modified = {2010-06-27 16:55:06 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p30},
+rating = {0}
+}
+
+@article{moghaddam2009low,
+author = {RF Moghaddam and M Cheriet}, 
+journal = {International Journal on Document Analysis and Recognition},
+title = {Low quality document image modeling and enhancement},
+number = {4},
+pages = {183--201},
+volume = {11},
+year = {2009},
+keywords = {models}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2009/Moghaddam/International%20Journal%20on%20Document%20Analysis%20and%20Recognition%202009%20Moghaddam.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p9},
+read = {Yes},
+rating = {0}
+}
+
+@article{Upton:2008p720,
+author = {G Upton and I Cook}, 
+journal = {books.google.com},
+title = {A dictionary of statistics},
+abstract = { 2005). Ian Cook is a graduate of  and London Universities, and also of Hull, where he was a lecturer in mathematics. He  Zoology *forthcoming Page 5. A of  graham upton and ian cook 1 Page 6. 3 Great },
+year = {2008},
+month = {Jan},
+date-added = {2010-07-05 17:02:00 +0200},
+date-modified = {2010-07-05 17:02:00 +0200},
+pmid = {11284352546206433093related:Rb_ojNoSmpwJ},
+URL = {http://books.google.com/books?hl=en&lr=&id=u97pzxRjaCQC&oi=fnd&pg=PR6&dq=Cambridge+Dictionary+of+Statistics&ots=ixtCmcuMI4&sig=4Fhc3a9kAHdYAVY4wFbvl6OJaSc},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p720},
+rating = {0}
+}
+
+@misc{knox1997show,
+author = {KT Knox}, 
+journal = {Miscellaneous},
+title = {Show-through correction for two-sided documents},
+year = {1997},
+date-added = {2010-06-27 16:54:55 +0200},
+date-modified = {2010-07-05 17:36:17 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1997/Knox/Miscellaneous%201997%20Knox.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p27},
+rating = {0}
+}
+
+@article{Soukoreff:2001p259,
+author = {RW Soukoreff and IS MacKenzie}, 
+journal = {CHI'01 extended abstracts on Human factors in computing systems},
+title = {Measuring errors in text entry tasks: an application of the Levenshtein string distance statistic},
+pages = {320},
+year = {2001},
+date-added = {2010-07-02 12:00:48 +0200},
+date-modified = {2010-07-02 12:01:07 +0200},
+pmid = {15792408940350114027related:64hFbULoKdsJ},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2001/Soukoreff/CHI'01%20extended%20abstracts%20on%20Human%20factors%20in%20computing%20systems%202001%20SoukoreffMeasuring%20errors%20in%20text%20entry.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p259},
+read = {Yes},
+rating = {0}
+}
+
+@article{Blando:1995p64,
+author = {L Blando and J Kanai and T Nartker and J Gonzalez}, 
+journal = {Proceedings of the Third {\ldots}},
+title = {Prediction of OCR Accuracy},
+abstract = { Reliable algorithms for measuring print quality and   accuracy would be valuable in several ways.  The x's show the location of mean vectors. Page 9. 59 6 Summary A method for  the accuracy of  generated text has been presented. },
+year = {1995},
+month = {Jan},
+date-added = {2010-07-01 16:50:44 +0200},
+date-modified = {2010-07-01 16:50:44 +0200},
+pmid = {6264426154105851623related:504llWW271YJ},
+URL = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.42.5435&rep=rep1&type=pdf},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1995/Blando/Proceedings%20of%20the%20Third%20%E2%80%A6%201995%20BlandoPrediction%20of%20OCR%20Accuracy.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p64},
+read = {Yes},
+rating = {0}
+}
+
+@conference{yamashita2004shape,
+author = {A Yamashita and A Kawarago and T Kaneko and KT Miura}, 
+journal = {Conference},
+title = {Shape reconstruction and image restoration for non-flat surfaces of documents with a stereo vision system},
+pages = {482--485},
+volume = {1},
+year = {2004},
+keywords = {courbure}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p6},
+rating = {0}
+}
+
+@conference{cao2003rectifying,
+author = {H Cao and X Ding and C Liu}, 
+journal = {Conference},
+title = {Rectifying the bound document image captured by the camera: A model based approach},
+affiliation = {Citeseer},
+pages = {71--75},
+volume = {3},
+year = {2003},
+keywords = {courbure}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p13},
+rating = {0}
+}
+
+@article{Govindaraju:2010p1366,
+author = {V Govindaraju and S Srihari}, 
+journal = {spie.org},
+title = {Assessment of image quality to predict readability of documents (Proceedings Paper)},
+abstract = {Determining the  of  is an important task. Human  pertains to the scenario when a  is ultimately presented to a human to read. Machine pertains to the scenario when the  is subjected to an OCR process. In either },
+date-added = {2010-08-11 18:55:43 +0200},
+date-modified = {2010-08-11 18:55:43 +0200},
+URL = {http://www.spie.org/x648.html?product_id=234714},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1366},
+rating = {0}
+}
+
+@article{Dano:2010p1266,
+author = {Patrick Dano}, 
+title = {Joint Restoration and Compression of Document Images with Bleed-Through Distortion},
+pages = {1--72},
+year = {2010},
+month = {May},
+date-added = {2010-07-22 15:01:38 +0200},
+date-modified = {2010-07-22 15:03:08 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2010/Dano/JOINT%20RESTORATION%20AND%20COMPRESSION%20OF%20DOCUMENT%20IMAGES%20WITH%20BLEED-THROUGH%20DISTORTION%202010%20DanoPatrickDanoThesis2003_final_main.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1266},
+read = {Yes},
+rating = {0}
+}
+
+@article{Rice:1993p161,
+author = {S Rice and J Kanai and T Nartker}, 
+journal = {{\ldots} Research Institute},
+title = {An evaluation of OCR accuracy},
+abstract = {An  of  Stephen V. Rice, Junichi Kanai and Thomas A. Nartker 1 Introduction ISRI has conducted its second annual assessment of the  of devices for optical character recognition ( ) of machine-printed, English-language documents. This year's test },
+year = {1993},
+month = {Jan},
+date-added = {2010-07-01 16:53:34 +0200},
+date-modified = {2010-07-01 17:30:01 +0200},
+pmid = {13727607441596904887related:t3VkdDxCgr4J},
+URL = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.80.7878&rep=rep1&type=pdf%23page=9},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1993/Rice/%E2%80%A6%20Research%20Institute%201993%20RiceAn%20evaluation%20of%20OCR%20accuracy.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p161},
+read = {Yes},
+rating = {5}
+}
+
+@article{wolf2006document,
+author = {C Wolf}, 
+journal = {Laboratoire d'Informatique en Images et Syst{\'e}mes d'Information, INSA de Lyon, France, Tech. Rep. RR-LIRIS-2006-019},
+title = {Document Ink bleed-through removal with two hidden Markov random fields and a single observation field},
+year = {2006},
+keywords = {transparance}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2006/Wolf/Laboratoire%20d'Informatique%20en%20Images%20et%20Syst%C3%A9mes%20d'Information%20INSA%20de%20Lyon%20France%20Tech.%20Rep.%20RR-LIRIS-2006-019%202006%20Wolf.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p15},
+read = {Yes},
+rating = {0}
+}
+
+@article{Bairn:1995p1430,
+author = {V Bairn}, 
+journal = {{\ldots} 1995 Symposium on Document Image {\ldots}},
+title = {Enhancement for Imaged Document Processing},
+abstract = { for  Victor T. Tom Paul W. Bairn Atlantic Aerospace Electronics Corporation 470 Totten Pond Road Waltham, Massachusetts 02154 Abstract A set of tools for analyzing and correcting degradation in  text documents to improve },
+year = {1995},
+month = {Jan},
+date-added = {2010-08-11 19:20:43 +0200},
+date-modified = {2010-08-11 19:20:43 +0200},
+pmid = {related:9QTL03S75xkJ},
+URL = {http://books.google.com/books?hl=en&lr=&id=zlZQTzjmxwgC&oi=fnd&pg=PA154&dq=Enhancement+for+Imaged+Document+Processing&ots=wkDy4a9gwZ&sig=obxU64zcuwQzIu_8XS3c7RwdDCA},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1430},
+rating = {0}
+}
+
+@conference{dubois2001reduction,
+author = {E Dubois and A Pathak}, 
+journal = {Conference},
+title = {Reduction of bleed-through in scanned manuscript documents},
+affiliation = {SOCIETY FOR IMAGING SCIENCE \& TECHNOLOGY},
+pages = {177--180},
+year = {2001},
+date-added = {2010-06-27 16:54:55 +0200},
+date-modified = {2010-07-05 17:36:13 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2001/Dubois/Conference%202001%20Dubois.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p32},
+read = {Yes},
+rating = {3}
+}
+
+@article{VN:2007p1459,
+author = {MA VN and G Hemantha Kumar and P Shivakumara}, 
+journal = {International Journal of Information Technology},
+title = {Skew Detection Technique for Binary Document Images based on Hough Transform},
+number = {3},
+volume = {3},
+year = {2007},
+date-added = {2010-08-25 14:12:20 +0200},
+date-modified = {2010-08-25 14:13:32 +0200},
+pmid = {related:nH5Vh2xKkhEJ},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2007/VN/International%20Journal%20of%20Information%20Technology%202007%20VNSkew%20Detection%20Technique%20for%20Binary.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1459},
+read = {Yes},
+rating = {0}
+}
+
+@article{Cannon:1997p525,
+author = {M Cannon and P Kelly and S Iyengar}, 
+journal = {Proceedings 1997 {\ldots}},
+title = {An automated system for numerically rating document image quality},
+abstract = {An  system for numerically rating document image quality Michael , Judith Hochberg, Patrick Kelly, and James White Los Alamos National Laboratory Abstract As part of the Department of Energy document declassification program, we have developed a },
+year = {1997},
+month = {Jan},
+date-added = {2010-07-05 15:55:15 +0200},
+date-modified = {2010-07-05 15:55:15 +0200},
+pmid = {10086767156102202954related:SoaMV0ll-4sJ},
+URL = {http://books.google.com/books?hl=en&lr=&id=FyWjVtvYzY0C&oi=fnd&pg=PA162&dq=cannon+1997+automated&ots=CpMUEtfVmj&sig=ugdUIYVKWNhpRRiqe3yfpNmgLyk},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1997/Cannon/Proceedings%201997%20%E2%80%A6%201997%20CannonAn%20automated%20system%20for%20numerically.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p525},
+rating = {0}
+}
+
+@article{ballester2001filling,
+author = {C Ballester and M Bertalmio and V Caselles and G Sapiro and J Verdera}, 
+journal = {IEEE Transactions on Image Processing},
+title = {Filling-in by joint interpolation of vector fields and gray levels},
+number = {8},
+pages = {1200--1211},
+volume = {10},
+year = {2001},
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p21},
+rating = {0}
+}
+
+@conference{hobby1997enhancing,
+author = {JD Hobby and TK Ho}, 
+journal = {Conference},
+title = {Enhancing degraded document images via bitmap clustering and averaging},
+affiliation = {Citeseer},
+pages = {394--400},
+year = {1997},
+keywords = {char_cass{\'e}}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p11},
+rating = {0}
+}
+
+@article{Bohner:1977p1336,
+author = {M Bohner and M Sties and K Bers}, 
+journal = {Pattern Recognition},
+title = {An automatic measurement device for the evaluation of the print quality of printed characters},
+abstract = {The paper gives a review of the standardization of the character fonts OCR A and OCR B for optical character recognition. All relevant parameters are discussed which define the of . A critical review of two  methods used so far for the },
+year = {1977},
+month = {Jan},
+date-added = {2010-08-11 18:22:40 +0200},
+date-modified = {2010-08-11 18:22:40 +0200},
+pmid = {8185080642102470024related:iKVxQno_l3EJ},
+URL = {http://linkinghub.elsevier.com/retrieve/pii/0031320377900267},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1336},
+rating = {0}
+}
+
+@phdthesis{Liris-4311,
+author = {Fadoua Drira}, 
+journal = {PhD Thesis},
+title = {Contribution {\`a} la restauration des images de documents anciens d{\'e}grad{\'e}es},
+affiliation = {INSA de Lyon},
+year = {2007},
+language = {fr},
+keywords = {theses}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+URL = {http://liris.cnrs.fr/publis/?id=4311},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2007/Drira/PhD%20Thesis%202007%20Drira.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p14},
+rating = {2}
+}
+
+@article{Gupta:2007p973,
+author = {M Gupta and N Jacobson and E Garcia}, 
+journal = {Pattern Recognition},
+title = {OCR binarization and image pre-processing for searching historical documents},
+abstract = {  global threshold  [2] finds the global threshold t that minimizes the intraclass variance of the resulting black and white pixels. This is a standard  technique, and was implemented using the built-in Matlab function ``graythresh'' [3]. Then the  is },
+year = {2007},
+month = {Jan},
+date-added = {2010-07-05 17:47:58 +0200},
+date-modified = {2010-08-11 12:38:59 +0200},
+pmid = {9648509803178549687related:t-FUxotk5oUJ},
+URL = {http://linkinghub.elsevier.com/retrieve/pii/S0031320306002202},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2007/Gupta/Pattern%20Recognition%202007%20GuptaOCR%20binarization%20and%20image%20pre-processing.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p973},
+read = {Yes},
+rating = {1}
+}
+
+@article{nishida2002correcting,
+author = {H Nishida and T Suzuki}, 
+journal = {Pattern Recognition},
+title = {Correcting show-through effects on document images by multiscale analysis},
+pages = {30065},
+volume = {3},
+year = {2002},
+date-added = {2010-06-27 16:54:55 +0200},
+date-modified = {2010-07-05 17:36:09 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2002/Nishida/Pattern%20Recognition%202002%20Nishida.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p25},
+rating = {0}
+}
+
+@article{Draper:1981p797,
+author = {N Draper and H Smith}, 
+journal = {explorer.csse.uwa.edu.au},
+title = {Applied regression analysis},
+abstract = {Title:  3rd Edition Authors: Norman R. Draper and Harry Smith Publisher: Wiley Copyright: 1998 ISBN-10: 0-471-17082-8 ISBN-13: 978-0-471-17082-2 Pages: 706; hardcover Price: {\$}104.00 See a large photo of the front cover See the back },
+year = {1981},
+month = {Jan},
+date-added = {2010-07-05 17:04:24 +0200},
+date-modified = {2010-07-05 17:04:24 +0200},
+pmid = {11719907134998601592related:eF8oznF5paIJ},
+URL = {http://explorer.csse.uwa.edu.au/reference/paper/233281773.pdf},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1981/Draper/explorer.csse.uwa.edu.au%201981%20DraperApplied%20regression%20analysis.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p797},
+rating = {0}
+}
+
+@article{Tonazzini:2009p1267,
+author = {Anna Tonazzini and Gianfranco Bianco and Emanuele Salerno}, 
+title = {Registration and Enhancement of Double-Sided Degraded Manuscripts Acquired in Multispectral Modality},
+pages = {546--550},
+year = {2009},
+month = {Jul},
+date-added = {2010-07-22 15:03:42 +0200},
+date-modified = {2010-07-22 15:03:42 +0200},
+doi = {10.1109/ICDAR.2009.131},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2009/Tonazzini/2009%20TonazziniRegistration%20and%20Enhancement%20of%20Double-Sided.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1267},
+read = {Yes},
+rating = {0}
+}
+
+@article{Blando:1995p1317,
+author = {Luis R Blando}, 
+title = {Evaluation of Page Quality Using Simple Features},
+abstract = {A classifier to determine page quality from an Optical Character Recognition(OCR) perspective is developed. It classifies a given page image as either "good"(i.e., high OCR accuracy is expected) or "bad" (i.e., low OCR accuracy expected).The classifier is based upon measuring the amount of white speckle, the amount ofbroken pieces, and the overall size information in the page. Two different sets of testdata were used to evaluate the classifier: the Sample 2 dataset containing 439 pagesand the Magazines dataset containing 200 pages. The classifier recognized 85% of thepages in the Sample 2 correctly. However, approximately 40% of the low quality pageswere misclassified as "good." To solve this problem, the classifier was modified toreject pages containing tables or less than 200 connected components. The modifiedclassifier rejected 41% of the pages, correctly recognized 86% of the remaining pages,and did not misclassify any low quality page as "good". Similarly, it recognized...},
+affiliation = {University of Nevada , Las Vegas},
+year = {1995},
+month = {Oct},
+language = {eng},
+date-added = {2010-08-11 18:13:48 +0200},
+date-modified = {2010-08-11 18:15:41 +0200},
+pmid = {349243},
+URL = {http://citeseer.ist.psu.edu/349243},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1995/Blando/1995%20BlandoEvaluation%20of%20Page%20Quality%20Using.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1317},
+read = {Yes},
+rating = {1}
+}
+
+@article{wu2002document,
+author = {C Wu and G Agam}, 
+journal = {Lecture notes in computer science},
+title = {Document image de-warping for text/graphics recognition},
+pages = {348--357},
+year = {2002},
+keywords = {courbure}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p19},
+rating = {0}
+}
+
+@conference{obafemi-ensemble,
+author = {T Obafemi-Ajayi and G Agam and O Frieder}, 
+journal = {Conference},
+title = {Ensemble LUT classification for degraded document enhancement},
+affiliation = {Citeseer},
+keywords = {ALIRE}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/Unknown/Obafemi-Ajayi/Conference%20%20Obafemi-Ajayi.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p16},
+rating = {0}
+}
+
+@conference{kanungo1993global,
+author = {T Kanungo and RM Haralick and I Phillips}, 
+journal = {Conference},
+title = {Global and local document degradation models},
+pages = {730--734},
+year = {1993},
+date-added = {2010-06-27 16:54:22 +0200},
+date-modified = {2010-06-27 16:55:06 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1993/Kanungo/Conference%201993%20Kanungo.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p31},
+rating = {0}
+}
+
+@article{allier2003contribution,
+author = {B ALLIER and M Anne}, 
+title = {Contribution {\`a} la num{\'e}risation des collections: apports des contours actifs},
+year = {2003},
+keywords = {theses}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p4},
+rating = {0}
+}
+
+@article{Thompson:1978p550,
+author = {M Thompson}, 
+journal = {{\ldots} Statistical Review/Revue Internationale de Statistique},
+title = {Selection of variables in multiple regression: Part I. A review and evaluation},
+abstract = { Mary L.  University of Gottingen, Nikolausberger Weg 9c, 3400 Gottingen, West Germany  3.3.5  :   is an extension of the forward selection pro- cedure of  as for forward selection with the addition that at each , after the },
+year = {1978},
+month = {Jan},
+date-added = {2010-07-05 16:48:46 +0200},
+date-modified = {2010-07-05 16:49:02 +0200},
+pmid = {17859753738665127224related:OO0WeWaX2vcJ},
+URL = {http://www.jstor.org/stable/1402505},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p550},
+rating = {2}
+}
+
+@article{tonazzini2007fast,
+author = {A Tonazzini and E Salerno and L Bedini}, 
+journal = {International Journal on Document Analysis and Recognition},
+title = {Fast correction of bleed-through distortion in grayscale documents by a blind source separation technique},
+number = {1},
+pages = {17--25},
+volume = {10},
+year = {2007},
+date-added = {2010-06-27 16:54:42 +0200},
+date-modified = {2010-07-05 17:36:05 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2007/Tonazzini/International%20Journal%20on%20Document%20Analysis%20and%20Recognition%202007%20Tonazzini.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p28},
+rating = {2}
+}
+
+@article{obafemi-historical,
+author = {T Obafemi-Ajayi and G Agam and O Frieder}, 
+journal = {International Journal on Document Analysis and Recognition},
+title = {Historical document enhancement using LUT classification},
+pages = {1--17},
+keywords = {ALIRE}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/Unknown/Obafemi-Ajayi/International%20Journal%20on%20Document%20Analysis%20and%20Recognition%20%20Obafemi-Ajayi.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p10},
+rating = {3}
+}
+
+@article{Garain:2006p1485,
+author = {U Garain and T Paquet and L Heutte}, 
+journal = {International Journal on Document {\ldots}},
+title = {On foreground---background separation in low quality document images},
+abstract = {Abstract This paper deals with effective  of fore- ground and  in  suf- fering from various types of degradations including scan- ning noise, aging effects, uneven , or , etc. The proposed algorithm shows an },
+year = {2006},
+month = {Jan},
+date-added = {2010-08-30 10:52:45 +0200},
+date-modified = {2010-08-30 10:52:45 +0200},
+pmid = {6663926047223670449related:sWY91m4Fe1wJ},
+URL = {http://www.springerlink.com/index/M588697871V9045X.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1485},
+rating = {0}
+}
+
+@article{sharma2001show,
+author = {G Sharma}, 
+journal = {IEEE Transactions on Image Processing},
+title = {Show-through cancellation in scans of duplex printed documents},
+number = {5},
+pages = {736--754},
+volume = {10},
+year = {2001},
+keywords = {transparance}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-07-05 17:35:56 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/2001/Sharma/IEEE%20Transactions%20on%20Image%20Processing%202001%20SharmaShow-through%20cancellation%20in%20scans%20of.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p22},
+read = {Yes},
+rating = {4}
+}
+
+@article{chan2001nontexture,
+author = {TF Chan and J Shen}, 
+journal = {Journal of Visual Communication and Image Representation},
+title = {Nontexture inpainting by curvature-driven diffusions},
+number = {4},
+pages = {436--449},
+volume = {12},
+year = {2001},
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p7},
+rating = {0}
+}
+
+@article{Junichi:1992p1345,
+author = {L Junichi and J Kanai and T Nartker and J Gonzalez}, 
+journal = {Citeseer},
+title = {Prediction of OCR Accuracy},
+abstract = { 6, Operational Factors in the Creation of Large Full-Text Databases -- Dickey - 1991. 2, An   for the  of the   of  Character -- al - 1977. 1,  of Page  Using Simple Features -- Blando - 1995. },
+year = {1992},
+month = {Jan},
+date-added = {2010-08-11 18:19:30 +0200},
+date-modified = {2010-08-11 18:19:30 +0200},
+URL = {http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.42.5435},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1992/Junichi/Citeseer%201992%20JunichiPrediction%20of%20OCR%20Accuracy.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1345},
+rating = {0}
+}
+
+@conference{wang2003directional,
+author = {Q Wang and T Xia and CL Tan and L Li}, 
+journal = {Conference},
+title = {Directional wavelet approach to remove document image interference},
+affiliation = {Citeseer},
+pages = {736--740},
+year = {2003},
+keywords = {transparance}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1},
+rating = {0}
+}
+
+@article{Cannon:1999ly,
+author = {Michael Cannon and Judith Hochberg and Patrick Kelly}, 
+journal = {International Journal on Document Analysis and Recognition},
+title = {Quality assessment and restoration of typewritten document images},
+abstract = {Abstract.~~ We present a useful method for assessing the quality of a typewritten document image and automatically selecting an optimal restoration method based on that assessment. We use five quality measures that assess the severity of background speckle, touching characters, and broken characters. A linear classifier uses these measures to select a restoration method. On a 139-document corpus, our methodology reduced the corpus OCR character error rate from 20.27% to 12.60%.},
+number = {2},
+pages = {80--89},
+volume = {2},
+year = {1999},
+keywords = {qualite}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+URL = {http://dx.doi.org/10.1007/s100320050039},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1999/Cannon/International%20Journal%20on%20Document%20Analysis%20and%20Recognition%201999%20CannonQuality%20assessment%20and%20restoration%20of.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p12},
+read = {Yes},
+rating = {2}
+}
+
+@article{Brown:1992p1177,
+author = {L Brown}, 
+journal = {ACM computing surveys (CSUR)},
+title = {A survey of image registration techniques},
+abstract = {Registration M a fundamental task in image processing used to match two or more pictures taken, for example, at different times, from different sensors,or from different viewpoints. Virtually all large systems which evaluate images require the registration of images, or a closely },
+year = {1992},
+month = {Jan},
+date-added = {2010-07-22 10:16:01 +0200},
+date-modified = {2010-07-22 10:16:01 +0200},
+pmid = {5180153292716598542related:DmVxXvmZ40cJ},
+URL = {http://portal.acm.org/citation.cfm?id=146374},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1177},
+rating = {0}
+}
+
+@conference{ittner1995text,
+author = {DJ Ittner and DD Lewis and DD Ahn}, 
+journal = {Conference},
+title = {Text categorization of low quality images},
+affiliation = {Citeseer},
+pages = {301--315},
+year = {1995},
+date-added = {2010-06-27 16:54:42 +0200},
+date-modified = {2010-06-27 16:55:06 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1995/Ittner/Conference%201995%20Ittner.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p23},
+read = {Yes},
+rating = {0}
+}
+
+@article{baird1995document,
+author = {HS Baird}, 
+title = {Document image defect models},
+year = {1995},
+keywords = {models}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p17},
+rating = {0}
+}
+
+@article{leydier2004serialized,
+author = {Y Leydier and F Le Bourgeois and H Emptoz}, 
+journal = {Document Analysis Systems VI},
+title = {Serialized k-means for adaptative color image segmentation},
+pages = {252--263},
+year = {2004},
+keywords = {transparance}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p2},
+rating = {0}
+}
+
+@conference{baird1993document,
+author = {HS Baird}, 
+journal = {Conference},
+title = {Document image defect models and their uses},
+pages = {62--67},
+year = {1993},
+date-added = {2010-06-27 16:54:22 +0200},
+date-modified = {2010-06-27 16:55:06 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p29},
+rating = {0}
+}
+
+@article{baird-state,
+author = {H Baird}, 
+journal = {Digital Document Processing},
+title = {The state of the art of document image degradation modelling},
+pages = {261--279},
+date-added = {2010-06-27 16:54:22 +0200},
+date-modified = {2010-06-27 16:55:06 +0200},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/Unknown/Baird/Digital%20Document%20Processing%20%20Baird.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p26},
+rating = {0}
+}
+
+@misc{kim2001image,
+author = {H Takahashi and H Hata and K Noguchi}, 
+journal = {Miscellaneous},
+title = {Image processing device for providing correction of a curvature distortion of a bound portion of a spread book},
+year = {2001},
+keywords = {courbure}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-07-05 17:36:22 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p8},
+rating = {0}
+}
+
+@conference{bertalmio2000image,
+author = {M Bertalmio and G Sapiro and V Caselles and C Ballester}, 
+journal = {Conference},
+title = {Image inpainting},
+affiliation = {ACM Press/Addison-Wesley Publishing Co. New York, NY, USA},
+pages = {417--424},
+year = {2000},
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p20},
+rating = {0}
+}
+
+@article{SRINIVASA:1996p1088,
+author = {BS Reddy and BN Chatterji}, 
+journal = {IEEE Transactions on Image Processing},
+title = {An FFT-based technique for translation, rotation, and scale-invariant image registration},
+abstract = { Titre du document / Document title.  -    , , -   Auteur(s) / Author(s). SRINIVASA REDDY B. (1) ; CHATTERJI BN (1) ; Affiliation(s) du ou des auteurs / Author(s) Affiliation(s). },
+year = {1996},
+month = {Jan},
+date-added = {2010-07-22 09:43:44 +0200},
+date-modified = {2010-07-22 09:43:44 +0200},
+pmid = {7658845033672087277related:7RbK1fquSWoJ},
+URL = {http://cat.inist.fr/?aModele=afficheN&cpsidt=3181545},
+local-url = {file://localhost/Users/zoolonly/Dropbox/Documents/Papers/1996/SRINIVASA%E2%80%A6/IEEE%20Transactions%20on%20Image%20Processing%201996%20SRINIVASA%E2%80%A6An%20FFT-based%20technique%20for%20translation.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p1088},
+rating = {0}
+}
+
+@article{smigiel2004self,
+author = {E Smigiel and A Bela{\"i}d and H Hamza}, 
+journal = {Lecture notes in computer science},
+title = {Self-organizing maps and ancient documents},
+pages = {125--134},
+year = {2004},
+keywords = {transparance}, 
+date-added = {2010-06-27 16:52:50 +0200},
+date-modified = {2010-06-27 16:52:50 +0200},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p5},
+rating = {0}
+}
+
+@article{Nahler:2009p923,
+author = {G Nahler}, 
+journal = {Dictionary of Pharmaceutical Medicine},
+title = {Pearson correlation coefficient},
+abstract = { peak to trough concentration Ratio of peak concentration of a drug to it's average concentration; used for characterising properties of slow release formulations; → see formulation.    → see  . },
+year = {2009},
+month = {Jan},
+date-added = {2010-07-05 17:14:48 +0200},
+date-modified = {2010-07-05 17:14:49 +0200},
+URL = {http://www.springerlink.com/index/W3044085N92504R4.pdf},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p923},
+rating = {0}
+}
+
+@article{FarrahiMoghaddam:2009p351,
+author = {R Farrahi Moghaddam and M Cheriet}, 
+journal = {Pattern Recognition},
+title = {RSLDI: Restoration of single-sided low-quality document images},
+abstract = {This paper addresses the problem of enhancing and restoring single-sided low-quality single-sided document images. Initially, a series of multi-level classifiers is introduced covering several levels, including the regional and content levels. These classifiers can then be },
+year = {2009},
+month = {Jan},
+date-added = {2010-07-05 15:40:50 +0200},
+date-modified = {2010-07-05 15:40:55 +0200},
+pmid = {8268100486839085125related:RShHJZYxvnIJ},
+URL = {http://linkinghub.elsevier.com/retrieve/pii/S0031320308004500},
+uri = {papers://FB742A7C-551E-4763-8BA8-F9CA705158F1/Paper/p351},
+rating = {4}
+}
+

ICDAR2013/Presentation/bin/createFrame.sh

+#!/bin/bash
+# Usage: ./createFrame.sh <frame-name>
+# Creates ./frames/<frame-name>.tex containing an empty beamer frame and
+# appends the corresponding \input line to selectedslides.tex.
+
+touch ./frames/"$1".tex
+echo "\begin{frame}" >> ./frames/"$1".tex
+echo "   \frametitle{$1}" >> ./frames/"$1".tex
+echo "	 \framesubtitle{Subtitle Frame}" >> ./frames/"$1".tex
+echo "\end{frame}" >> ./frames/"$1".tex
+
+echo "\input{./frames/$1}" >> selectedslides.tex

ICDAR2013/Presentation/frames/conclusion.tex

+
+\section*{Conclusion et Perspectives}
+
+\begin{frame}
+   \frametitle{Conclusion}
+
+	\begin{exampleblock}{Apports scientifiques suivants \cite{rabeux2011ancient,rabeux2013ijdar}: }
+	\begin{itemize}
+		\item \textbf{Nouveaux descripteurs} caractérisant la qualité d'une image de document.
+		\item Modèles de prédiction de \textbf{11 algorithmes de binarisation}.
+		\item Méthode de \textbf{sélection de l'algorithme de binarisation} optimal (pour chaque image).
+		\item \textbf{Modèles de prédiction de 2 OCRs} en fonction de la qualité d'une image.
+	\end{itemize}
+	\end{exampleblock}
+	
+\end{frame}
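The frame above lists the prediction models without giving their form. The regression references collected in biblio.bib (Draper and Smith's Applied Regression Analysis, Thompson's review of variable selection) suggest multiple linear regression over the quality descriptors; under that assumption, and only as an assumption not stated in the slides, the predicted performance of algorithm k on an image with descriptor values d_1, ..., d_n would read

\[ \hat{p}_k = \beta_{k,0} + \sum_{i=1}^{n} \beta_{k,i}\, d_i \]

with one coefficient vector \(\beta_k\) fitted per binarization method or OCR engine.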
+
+\begin{frame}[shrink=1.2]
+   \frametitle{Autres apports}
+
+   	\begin{exampleblock}{Extraction du défaut de transparence \cite{vincent2011document}}
+			\textbf{Extraction de la transparence} (visibilité de l'encre du verso à travers le recto).
+			\begin{itemize}
+				\item Recalage recto-verso basé sur les informations structurelles.
+				\item 50 fois plus rapide que la méthode de l'état de l'art.
+				\item Précision en moyenne similaire.
+		\end{itemize}
+	\end{exampleblock}
+   
+   	\begin{exampleblock}{Création de vérité-terrain \cite{rabeux2011das}.}
+	\textbf{Vérité-terrain} sur plusieurs niveaux.
+			\begin{itemize}
+				\item Semi-synthétiques.
+				\item Annotations par des experts.
+				\item Informations perceptuelles.
+		\end{itemize}
+	\end{exampleblock}
+	
+	\note{
+		\begin{itemize}
+			\item Isoler la transparence des perturbations fond-encre.
+			\item Pour cela il faut recaler le verso et le recto à cause des transformations faites lors de la numérisation page par page.
+			\item Les méthodes de l'état de l'art sont complexes en temps.
+			\item Création d'une nouvelle méthode.
+		\end{itemize}
+	}
+\end{frame}
+
+
+
+\begin{frame}
+   \frametitle{Perspectives}
+   
+   	\begin{alertblock}{Prédiction de traitements de \textbf{haut niveau}}
+	\textbf{Descripteurs} pour caractériser la complexité d'une image de document.
+			\begin{itemize}
+				\item Bas-niveaux : reconnaissance de symbole \cite{llados2002symbol}, pertinence des partitions \cite{davies1979cluster,dunn1973fuzzy,rand1971objective,jaccard1901etude,fowlkes1983method}, profils verticaux et horizontaux.
+				\item Moyen-niveaux : caractérisation de la distribution des tailles des caractères \cite{blando1995prediction,cannon1999quality,cannon1997automated}. Structure physique \cite{ChevalierDD07}. Adaptation en niveaux de gris ?
+				\item Haut-niveaux : utilisation de la structure logique \cite{robadey20012}.
+		\end{itemize}
+	\end{alertblock}
+
+	
+\end{frame}
+
+
+%
+%\begin{frame}
+%   \frametitle{Perspectives}
+%   
+%   	\begin{alertblock}{Automatisation du \textbf{contrôle qualité}}
+%	\begin{itemize}
+%		\item Descripteurs des dégradations provenant de la numérisation.
+%		\item Utilisation de notions perceptuelles :
+%		\begin{itemize}
+%			\item l'apprentissage supervisé, semi-supervisé ou non supervisé \cite{mitchell1997machine}, incrémental ou statique \cite{boukharouba2009incremental}.
+%			\item Pendant la numérisation ou après la numérisation.
+%		\end{itemize}
+%		\item Projet ANR DIGIDOC.
+%	\end{itemize}
+%	\end{alertblock}
+%	
+%\end{frame}
+
+
+
+	
+%	\begin{itemize}
+%		\item Création de \textbf{nouveaux descripteurs} caractérisant la qualité d'une image de document.
+%		\item Un nouvel algorithme de \textbf{recalage recto verso}.
+%		\item Des modèles de \textbf{prédiction des performances de 11 méthodes de binarisation}.
+%		\item Une méthode de \textbf{sélection automatique de la méthode de binarisation} proposant les meilleurs résultats pour chaque image.
+%		\item Deux modèles de \textbf{prédiction des performances de deux OCR} (Abbyy FineReader et OCRopus) mettant en avant l'influence du défaut de transparence sur ces derniers.
+%		\item Un logiciel de \textbf{création de documents anciens semi-synthétique}s.
+%		\item Un \textbf{logiciel d'annotation dédié à la création de vérités-terrains} pour la qualité des images  de documents.
+%		\item Une nouvelle méthode d'\textbf{acquisition d'informations perceptuelle}.
+%		\item Une nouvelle \textbf{plateforme collaborative} permettant la création, l'utilisation et la diffusion de vérités-terrains.
+%	\end{itemize}
+
+
+
+%\begin{frame}[shrink=2]
+%   \frametitle{Perspectives}
+%
+%	Vers des mesures de la \textbf{complexité d'analyse} d'une image de documents :
+%	\begin{itemize}
+%		\item La qualité n'est pas la seule caractéristique ayant une influence sur les algorithmes.
+%		\item Créer des descripteurs caractérisant la complexité d'une image.
+%		\begin{itemize}
+%			\item Descripteurs utilisés en reconnaissance de symbole pour caractériser la fonte d'une page.
+%			\item Les profiles (verticaux ou horizontaux) peuvent être utilisés pour caractériser la complexité de la mise en page.
+%			\item Informations sur le document (nombre de paragraphes, de caractères, de colonnes, etc.).
+%			\item Utilisation de graphes pour représenter la structure logique du document.
+%		\end{itemize}
+%	\end{itemize}
+%	
+%	Vers l'\textbf{automatisation du contrôle qualité} :
+%	\begin{itemize}
+%		\item Notions perceptuelles.
+%		\item Quelle méthode d'apprentissage supervisé ou non, incrémental ou statique ?
+%		\item Pendant la phase de numérisation ou une fois l'ouvrage numérisé entièrement ?
+%	\end{itemize}
+%
+%\end{frame}
+
+
+\begin{frame}
+	\begin{center}
+		\huge Merci !
+	\end{center}
+\end{frame}

ICDAR2013/Presentation/frames/conclusion.texshop

+../presentation.tex

ICDAR2013/Presentation/frames/context_motivations.tex

+
+\section{Contexte et motivations}
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Chaîne de numérisation des documents}
+   
+   L'avènement de l'informatique engendre la création de nombreuses campagnes de numérisation dont les enjeux principaux sont :
+   \begin{itemize}
+	   \item Archivage et conservation 
+	   \item Valorisation du patrimoine   	
+   \end{itemize}
+
+   \begin{alertblock}{Numérisation massive}
+	   	\begin{itemize}
+			\item 2500 documents sont numérisés et mis en ligne par semaine sur le site de Gallica.
+			\item En 2010, Google Books propose quinze millions d'ouvrages. 
+	  \end{itemize}
+   \end{alertblock}
+      
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Chaîne de numérisation des documents}
+	
+	Plusieurs étapes :
+	\begin{enumerate}
+		\item Établissement du cahier des charges.
+		\item La numérisation.
+		\item Le contrôle qualité (réalisé par le prestataire, puis par échantillonnage chez le client).
+		\item Analyses et traitements de l'image de document pour sa valorisation.
+	\end{enumerate}   
+      
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Les dégradations provenant de l'ouvrage}
+   \begin{figure}[h]
+\centering
+
+\subfloat[][]{
+\label{defautExempleOndulation}
+\includegraphics[height=50px]{./imgs/intros/defauts/doc/FRAC044143_0001W019_1977_03_04_25.jpeg}
+} 
+\subfloat[][]{
+\label{defautExempleTransparence1}
+\includegraphics[height=50px]{./imgs/intros/defauts/doc/FRB340325101_Ms007_00006_B.JPG}
+}
+\subfloat[][]{
+\includegraphics[height=50px]{./imgs/intros/defauts/doc/tacheGras2.png}
+}
+\subfloat[][]{
+\includegraphics[height=50px]{./imgs/intros/defauts/doc/trou1.png}
+}
+
+\subfloat[][]{
+\label{defautExempleTaches}
+\includegraphics[height=50px]{./imgs/intros/defauts/doc/taches.png}
+} 
+
+\caption{Exemples de défauts présents sur les documents avant même leur numérisation. Les images ont été numérisées par la société Arkhénum.}
+\label{examplesPreNum}
+
+\end{figure}
+	
+
+
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Les dégradations provenant de la numérisation}
+  \begin{figure}[h!]
+\centering
+
+\subfloat[][]{
+\label{marquepage}
+ \includegraphics[height=80px]{./imgs/intros/defauts/num/41ETP0294_009624_0026_marque_page.jpg}
+}
+\subfloat[][]{
+\label{orientationDoc}
+\includegraphics[height=80px]{./imgs/intros/defauts/num/orientationDocument.jpeg}
+}
+
+\subfloat[][]{
+\label{possibiliteDepli}
+\includegraphics[width=100px]{./imgs/intros/defauts/doc/pli2.png}
+}
+\subfloat[][]{
+\label{corne}
+\includegraphics[width=100px]{./imgs/intros/defauts/num/corne.jpeg}
+} 
+
+\caption{Exemples de défauts engendrés par la numérisation.}
+\label{default}
+\end{figure}
+	
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Influences des dégradations sur les algorithmes}
+
+	\begin{alertblock}{Problématique}
+		Comment étudier l'influence des dégradations sur les algorithmes de traitements ?
+	\end{alertblock}
+	\begin{itemize}
+		\item Influence sur une chaîne globale de traitements.
+		\item Influence sur chaque maillon de fa\c con individuelle.
+	\end{itemize}
+
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Les études de l'influence sur une chaîne globale de traitements}
+   
+   Exemples sur la reconnaissance de caractères (\cite{rice1993evaluation}).
+
+   \begin{figure}
+\begin{center}
+      \subfloat[\tiny Groupe 1]{
+      \includegraphics[height=20px]{./imgs/intros/group1.png}
+      } 
+      \subfloat[\tiny Groupe 2]{
+      \includegraphics[height=20px]{./imgs/intros/group2.png}
+      } 
+       \subfloat[\tiny Groupe 3]{
+      \includegraphics[height=20px]{./imgs/intros/group3.png}
+      } \\
+       \subfloat[\tiny Groupe 4]{
+       \includegraphics[height=20px]{./imgs/intros/group4.png}
+       }
+        \subfloat[\tiny Groupe 5]{
+        \includegraphics[height=20px]{./imgs/intros/group5.png} 
+        } 
+\caption{Exemples d'images des cinq groupes de qualité définis par \cite{rice1993evaluation}.}
+\label{groupesOCRRice}
+\end{center}
+\end{figure}    
+   	
+\end{frame}
+
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Les études de l'influence sur une chaîne globale de traitements}
+
+	Exemples sur la reconnaissance de caractères (\cite{rice1993evaluation}).
+	
+	\begin{figure}
+  \begin{minipage}[c]{0.5\textwidth}
+		\includegraphics[height=170px]{./imgs/intros/ocrVsQualiteRice.png}
+  \end{minipage}\hfill
+  \begin{minipage}[c]{0.5\textwidth}
+		\caption{\it  En ordonnée, la précision de l'OCR. En abscisse, le groupe de qualité de l'image (du meilleur groupe au plus mauvais  - figure \ref{groupesOCRRice}). On constate de façon globale que la précision de l'OCR baisse en fonction de la qualité de la page. Néanmoins, la classification sous forme de groupe ne nous permet pas de conclure sur la nature de leurs relations (linéaires ou polynomiales). }
+  \end{minipage}
+\end{figure}
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Les études de l'influence sur une chaîne globale de traitements}
+
+	Exemples sur la reconnaissance de caractères (étude réalisée par la BNF).
+\begin{figure}[h!]
+\begin{center}
+\includegraphics[width=300px]{./imgs/intros/dateEditionVSOCR.png}
+\caption{Taux de reconnaissance d'OCRs en fonction de la date d'édition.}
+\label{dateEditionVSOCR}
+\end{center}
+\end{figure}
+   	
+\end{frame}
+
+
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Les études de l'influence sur chaque algorithme}
+
+\begin{figure}[h]
+\begin{center}
+\it
+\subfloat[]{
+ \includegraphics[width=50px,height=55px]{imgs/intros/IRHT_zoom.jpg} 
+}
+\subfloat[]{
+ \includegraphics[width=50px,height=55px]{imgs/intros/IRHT_otsu_zoom.png}
+}
+\subfloat[]{
+\includegraphics[width=50px,height=55px]{imgs/intros/IRHT_sauvola_zoom.png} 
+}
+\subfloat[]{
+ \includegraphics[width=50px,height=55px]{imgs/intros/IRHT_Bernsen_zoom.png}
+}\\
+\subfloat[]{
+\includegraphics[width=50px,height=55px]{./imgs/intros/IRHT.jpg}
+}
+\subfloat[]{
+ \includegraphics[width=50px,height=55px]{imgs/intros/Otsu.png}
+}
+\subfloat[]{
+ \includegraphics[width=50px,height=55px]{imgs/intros/Sauvola.png}
+}
+\subfloat[]{
+ \includegraphics[width=50px,height=55px]{imgs/intros/Bernsen.png}
+}
+\caption{Impact de la binarisation sur un algorithme d'extraction de structure physique (algorithme de R. Vieux \cite{vieux2012segmentation}) : a, un zoom sur l'image originale (en e) ; b, binarisation Otsu ; c, binarisation Sauvola ; d, binarisation Bernsen ; e, l'image originale ; f, segmentation sur la binarisation d'Otsu ; g, segmentation sur la binarisation de Sauvola ; h, segmentation sur la binarisation de Bernsen.}
+\label{figure-binarisation-versus-segmentation}
+\end{center}
+\end{figure}   	
+\end{frame}
+
+
+%\begin{frame}
+%   \frametitle{Contextes et motivations}
+%   \framesubtitle{Influence sur la binarisation}
+%   
+%\begin{figure}[htbp]
+%\begin{center}
+%\includegraphics[width=200px]{./imgs/intros/docAncienBinar.png} \\
+%\includegraphics[width=200px]{./imgs/intros/docAncienOtsu.png} \\
+%\includegraphics[width=200px]{./imgs/intros/docAncienSauvola.png}
+%\caption{Exemples de résultats de binarisation sur la même image (image extraite du corpus DIBCO \cite{gatos2009icdar})}
+%\label{exemplesBinar}
+%\end{center}
+%\end{figure}
+%
+%\end{frame}
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Influence sur la binarisation}
+   	
+	\begin{columns}[t]
+	
+	  \begin{column}{5cm}
+	\textbf{Méthodes globales :} \\
+   	Sensibles aux dégradations modifiant la distribution des niveaux de gris. Par exemple :
+	\begin{itemize}
+		\item taches d'eau,
+		\item problèmes d'illumination.
+	\end{itemize} 
+  \end{column}
+
+  \begin{column}{5cm}
+	\textbf{Méthodes adaptatives : \\}
+	Sensibles aux dégradations formant des composantes grises de petites ou moyennes tailles dont les caractéristiques sont proches de celles du texte. 
+  \end{column}
+  
+ \end{columns}  
+ 
+% \begin{exampleblock}{Proposition}
+% 		Sélectionner un algorithme en fonction du type de dégradations présentent sur l'image de document.
+% \end{exampleblock}
+	
+	
+\end{frame}
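The two columns above contrast the global and adaptive binarization families; the figures elsewhere in the deck use Otsu (global) and Sauvola (adaptive) as representatives. A minimal scikit-image sketch of the two families follows, purely illustrative and not the experimental code behind these slides; the file name page.png is a placeholder.

import numpy as np
from skimage import io
from skimage.filters import threshold_otsu, threshold_sauvola

# Grayscale document image; "page.png" is a placeholder name.
gray = io.imread("page.png", as_gray=True)

# Global family: a single threshold computed from the whole gray-level histogram,
# hence its sensitivity to stains or uneven illumination that shift that histogram.
t_global = threshold_otsu(gray)
ink_global = gray < t_global

# Adaptive family: one threshold per pixel, estimated on a local window,
# hence its sensitivity to small dark components that locally resemble text.
t_local = threshold_sauvola(gray, window_size=25)
ink_local = gray < t_local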
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Garantir les performances d'un type de traitement}
+   	
+	\textbf{Comment obtenir les meilleures performances pour un type de traitement donné ?}
+    
+    Première approche : 
+    \begin{itemize}
+		 \item Proposer à un algorithme une version de l'image sans dégradations. 
+	    	 \item Un grand nombre d'algorithmes de restauration existent.
+	 \end{itemize}
+	 
+	\begin{alertblock}{Problèmes}
+		\begin{itemize}
+			\item Les algorithmes de restauration se concentrent sur un type de dégradation.
+			\item Sensibilité aux autres dégradations.
+			\item Quels algorithmes de restauration choisir ?
+			\item Comment les enchaîner ?
+		\end{itemize}
+	\end{alertblock}
+    
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Garantir les performances d'un type de traitement}
+   	
+	\begin{exampleblock}{Notre proposition}
+		Évaluation de la qualité pour prédire les performances des algorithmes.
+	\end{exampleblock}
+	
+	
+	\begin{itemize}
+		\item Identifier les types de dégradations présentes.
+		\item Prédire le résultat final d'un ensemble d'algorithmes.
+		\item Construire une chaîne de traitement en sélectionnant à chaque étape l'algorithme qui proposera, pour chaque image, les meilleures performances.  
+	\end{itemize}
+    
+    
+\end{frame}
+
+\begin{frame}
+   \frametitle{Contextes et motivations}
+   \framesubtitle{Les étapes principales}
+   	
+	\begin{itemize}    
+		\item Création de descripteurs caractérisant la qualité d'une image de document.
+		\item Utilisation des descripteurs pour créer des modèles de prédiction des performances de plusieurs algorithmes de même type.
+		\item Utilisation des modèles de prédiction pour sélectionner automatiquement l'algorithme proposant les meilleures performances. 
+	\end{itemize}
+    
+\end{frame}
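The three steps listed in this frame (quality descriptors, per-algorithm prediction models, automatic selection) can be summarised in a short sketch. It assumes linear regression models, as suggested by the regression references in biblio.bib; the function names and data shapes are illustrative and not taken from the authors' code.

import numpy as np
from sklearn.linear_model import LinearRegression

def train_models(descriptors, scores_per_algorithm):
    """descriptors: (n_images, n_features) quality descriptors.
    scores_per_algorithm: dict mapping an algorithm name to its measured
    performance on each training image, shape (n_images,)."""
    return {name: LinearRegression().fit(descriptors, scores)
            for name, scores in scores_per_algorithm.items()}

def select_algorithm(models, image_descriptors):
    """Pick the algorithm whose predicted performance is highest for one image."""
    x = np.asarray(image_descriptors).reshape(1, -1)
    predictions = {name: float(model.predict(x)[0]) for name, model in models.items()}
    return max(predictions, key=predictions.get)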
+
+
+
+

ICDAR2013/Presentation/frames/context_motivations.texshop

+../presentation.tex

ICDAR2013/Presentation/frames/features.tex

+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% fond-encre
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\section{Caractérisation des dégradations}
+\subsection{Descripteurs pour les perturbations fond-encre.}
+
+\begin{frame}
+	\tableofcontents[currentsection]
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Étape 1 : Étude de la sensibilité d'un algorithme.}
+  	\framesubtitle{Sensibilité des algorithmes de binarisation}
+
+\begin{center}
+\includegraphics<1->[width=150px]{./imgs/mesures/H04-2.png} 
+\end{center}
+
+\begin{columns}
+\begin{column}[l]{5cm}
+Binarisation globale :
+\includegraphics<1->[width=150px]{./imgs/annalysePerturbationFondEncre/grandTacheBinGlobal.png}
+\end{column}
+\begin{column}[r]{5cm}
+Binarisation locale :
+\includegraphics<1->[width=150px]{./imgs/annalysePerturbationFondEncre/grandeTacheBinSauvola.png} 
+\end{column}
+\end{columns}
+
+\note{
+\begin{enumerate}
+	\item On étudie la sensibilité des algos de bin.
+	\item Décrit l'image.
+	\item Première famille de méthodes : globale.
+	\item Calcul d'un seuil de séparation entre l'encre et le fond grâce aux infos globales.
+	\item Seconde famille : locale.
+	\item Utilisation d'un voisinage pour binariser un pixel.
+\end{enumerate}
+
+\begin{enumerate}
+	\item Difficulté : Sensibilité différente à une même dégradation.
+\end{enumerate}
+}
+
+\end{frame}
+
+
+\begin{frame}
+   \frametitle{Étape 1 : Étude de la sensibilité d'un algorithme.}
+  	\framesubtitle{Sensibilité des algorithmes de binarisation}
+
+\begin{center}
+\includegraphics<1->[width=150px]{./imgs/mesures/H03.png} 
+\end{center}
+
+\begin{columns}
+\begin{column}[l]{5cm}
+Binarisation globale :
+\includegraphics<1->[width=150px]{./imgs/annalysePerturbationFondEncre/bruitsBinLocalOtsu.png}
+\end{column}
+\begin{column}[r]{5cm}
+Binarisation locale :
+\includegraphics<1->[width=150px]{./imgs/annalysePerturbationFondEncre/bruitsBinLocalSauvola.png} 
+\end{column}
+\end{columns}
+
+
+\end{frame}
+
+
+
+\begin{frame}
+
+   \frametitle{Étape 2 : Caractérisation de la dégradation en fonction de son influence.}
+  	\framesubtitle{Les perturbations fond-encre}   
+
+   
+	\gtwosides{1}{./imgs/annalysePerturbationFondEncre/grandTache.png}{./imgs/annalysePerturbationFondEncre/H03.png}
+
+	
+%	\textbf{Caractérisation des perturbations fond-encre après l'analyse des résultats de plusieurs méthodes de binarisation.} \\
+	Sensibilité aux caractéristiques suivantes :
+	\begin{itemize}
+		\item Quantité.
+		\item Intensité.
+		\item Taille. 
+		\item Localisation et positionnement relatif.
+	\end{itemize} 
+	
+	\note{
+		\begin{itemize}
+			\item La binarisation est sensible aux dégradations perturbant la distribution de l'encre (taches, bruits, effacement de l'encre). 
+			\item Il faut considérer plusieurs caractéristiques.
+		\end{itemize}
+	}
+	
+\end{frame}
+
+\begin{frame}
+
+   \frametitle{Étape 3 : Extraction des pixels dégradés}
+  	\framesubtitle{Extraction de l'encre, des perturbations et du fond.}   
+
+\textbf{	Extraction de 3 couches \cite{moghaddam2009low} de l'histogramme global : encre, dégradation, fond.}
+	
+\begin{center}
+
+\includegraphics<1>[width=\gtwosidewidth, height=0.2\textheight,keepaspectratio]{./imgs/mesures/grayDegradation.png} 
+\hspace{0.5cm}
+\includegraphics<1>[width=\gtwosidewidth, height=0.2\textheight,keepaspectratio]{./imgs/mesures/grayComponentsHistogram.png} 
+\\
+\includegraphics[width=200px]{./imgs/mesures/grayComponents.png}
+\end{center}
+
+\end{frame}		
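The layer extraction above cites Moghaddam and Cheriet (moghaddam2009low); the sketch below is not that method but a simple stand-in that splits the global gray-level histogram into three classes with a multi-level Otsu threshold, just to make the ink / degradation / background separation concrete. The file name page.png is a placeholder.

import numpy as np
from skimage import io
from skimage.filters import threshold_multiotsu

gray = io.imread("page.png", as_gray=True)   # placeholder input image

# Two cut points on the global histogram give three gray-level classes.
t_ink, t_background = threshold_multiotsu(gray, classes=3)

ink = gray <= t_ink                                    # darkest pixels
degradation = (gray > t_ink) & (gray <= t_background)  # intermediate gray levels
background = gray > t_background                       # lightest pixels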
+
+
+
+%\begin{frame}
+%   \frametitle{Définition des descripteurs}
+%
+%
+%	On considèrera deux niveaux de descripteurs :
+%	\begin{enumerate}
+%		\item les descripteurs globaux : intensité et quantité.
+%		\item les descripteurs locaux : position et taille.
+%	\end{enumerate}	
+%
+%\end{frame}		
+
+
+\begin{frame}
+
+   \frametitle{Étape 4 : Définition des descripteurs}
+  	\framesubtitle{Les descripteurs globaux}   
+
+\textbf{Caractérisation de la distribution globale des différentes couches.}
+	
+\begin{center}
+\gtwosides{1}{./imgs/mesures/H03.png}{./imgs/mesures/H03-histo_explications.png} \\
+\gtwosides{1}{./imgs/mesures/H04-2.png}{./imgs/mesures/H04-2-histo_explications.png} 
+\end{center}
+
+\note{
+	Caractérise la distribution des couches.
+	\begin{enumerate}
+		\item On se sert de l'extraction précédente pour construire les descripteurs.
+		\item En 1, on étudie les distributions.
+		\item Expliquer chaque couche une à une.
+		\item Un premier ensemble de descripteurs caractérise les distributions.
+	\end{enumerate}
+}
+
+\end{frame}		
+
+
+%\begin{frame}
+%   \frametitle{Les descripteurs globaux}
+%
+%	Utilisation des moments colorimétriques (moyenne, variance, skewness) sur les histogrammes des niveaux de gris suivants :
+%	\begin{itemize}
+%		\item de l'image,
+%		\item de l'encre,
+%		\item de la couche de dégradations,
+%		\item du fond.
+%	\end{itemize}
+%	
+%	
+%	\begin{exampleblock}{Descripteurs des histogrammes des niveaux de gris}
+%		Nous avons à ce stade 12 valeurs décrivant la distribution des niveaux de gris de l'image.
+%	\end{exampleblock}
+%\end{frame}	
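The commented-out frame above spells the global descriptors out: colorimetric moments (mean, variance, skewness) of the gray levels of the whole image and of each extracted layer, i.e. 12 values. A small sketch of that computation, assuming boolean masks for the three layers such as those produced in the step-3 sketch; it is an illustration, not the authors' implementation.

import numpy as np
from scipy.stats import skew

def layer_moments(gray, mask=None):
    """Mean, variance and skewness of the gray levels selected by mask."""
    values = gray[mask] if mask is not None else gray.ravel()
    return [float(np.mean(values)), float(np.var(values)), float(skew(values))]

def global_descriptors(gray, ink, degradation, background):
    """12 global descriptors: 3 moments for the whole image and for each layer."""
    features = []
    for mask in (None, ink, degradation, background):
        features.extend(layer_moments(gray, mask))
    return np.array(features)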
+
+
+\begin{frame}
+   \frametitle{Étape 4 : Définition des descripteurs}
+  	\framesubtitle{Les descripteurs globaux}   
+
+\only<1-2>{
+\begin{center}
+\includegraphics<1>[width=170px]{./imgs/histosMII/mII-fort.png}
+\includegraphics<2>[width=170px]{./imgs/histosMII/mII-faible.png}
+\label{default}
+\end{center}
+}
+	
+\note{
+	\begin{itemize}
+			\item Nécessaire de mettre en relation les distributions.
+		\item La proximité entre les moyennes des distributions a aussi son importance.
+
+		\item Expliquer l'exemple
+	\end{itemize}
+}	
+
+\end{frame}	
+
+
+\begin{frame}
+   \frametitle{Étape 4 : Définition des descripteurs}