@inproceedings{879,
  title     = {Parallel Thinning Based on Isthmuses},
  booktitle = {A K{\'e}pfeldolgoz{\'o}k {\'e}s Alakfelismer{\H o}k T{\'a}rsas{\'a}g{\'a}nak konferenci{\'a}ja - K{\'E}PAF 2013},
  year      = {2013},
  month     = jan,
  pages     = {512--525},
  publisher = {NJSZT-K{\'E}PAF},
  type      = {Conference paper},
  address   = {Veszpr{\'e}m},
  author    = {N{\'e}meth, G{\'a}bor and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Cz{\'u}ni, L{\'a}szl{\'o}},
}

@article{880,
  title     = {Spectrum Skeletonization: A New Method for Acoustic Signal Feature Extraction},
  journal   = {ACTA CYBERNETICA-SZEGED},
  volume    = {21},
  year      = {2013},
  note      = {$\#$Szerz{\H o} vagy Forr{\'a}skiad{\'a}s k{\'e}sz{\'\i}t{\H o}je vagy Kritikai kiad{\'a}s k{\'e}sz{\'\i}t{\H o}je ismeretlen},
  pages     = {89--103},
  publisher = {University of Szeged, Institute of Informatics},
  type      = {Journal article},
  address   = {Szeged},
  abstract  = {Vibration Analysis Tests (VAT) and Acoustic Emission tests (AE) are used in several industrial applications. Many of them perform analysis in the frequency domain. Peaks in the power density spectrum hold relevant information about acoustic events. In this paper we propose a novel method for feature extraction of vibration samples by analyzing the shape of their auto power spectrum density function. The approach uses skeletonization techniques in order to find the hierarchical structure of the spectral peaks. The proposed method can be applied as a preprocessing step for spectrum analysis of vibration signals.},
  issn      = {0324-721X},
  author    = {Dobj{\'a}n, Tibor and N{\'e}meth, G{\'a}bor},
}

@incollection{875,
  title     = {Topology Preserving Parallel 3D Thinning Algorithms},
  booktitle = {Digital Geometry Algorithms},
  series    = {Lecture Notes in Computational Vision and Biomechanics},
  number    = {2},
  year      = {2012},
  pages     = {165--188},
  publisher = {Springer-Verlag},
  type      = {Book chapter},
  chapter   = {6},
  abstract  = {A widely used technique to obtain skeletons of binary objects is thinning, which is an iterative layer-by-layer erosion in a topology preserving way. Thinning in 3D is capable of extracting various skeleton-like shape descriptors (i.e., centerlines, medial surfaces, and topological kernels). This chapter describes a family of new parallel 3D thinning algorithms for (26, 6) binary pictures. The reported algorithms are derived from some sufficient conditions for topology preserving parallel reduction operations, hence their topological correctness is guaranteed.},
  isbn      = {978-94-007-4173-7},
  doi       = {10.1007/978-94-007-4174-4_6},
  author    = {Pal{\'a}gyi, K{\'a}lm{\'a}n and N{\'e}meth, G{\'a}bor and Kardos, P{\'e}ter},
  editor    = {Brimkov, Valentin E and Barneva, Reneta P},
}

@inproceedings{874,
  title     = {2D Parallel Thinning Algorithms Based on Isthmus-Preservation},
  booktitle = {Proceedings of the International Symposium on Image and Signal Processing and Analysis (ISPA)},
  year      = {2011},
  note      = {ScopusID: 83455172782},
  month     = sep,
  pages     = {585--590},
  publisher = {IEEE},
  type      = {Conference paper},
  address   = {Dubrovnik, Croatia},
  abstract  = {Skeletons are widely used shape descriptors which summarize the general form of binary objects. A technique to obtain skeletons is the thinning, that is an iterative layer-by-layer erosion in a topology-preserving way. Conventional thinning algorithms preserve line endpoints to provide important geometric information relative to the object to be represented. Bertrand and Couprie proposed an alternative strategy by accumulating isthmus points that are line interior points. In this paper we present six new 2D parallel thinning algorithms that are derived from some sufficient conditions for topology preserving reductions and based on isthmus-preservation.},
  isbn      = {978-1-4577-0841-1},
  author    = {N{\'e}meth, G{\'a}bor and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Lon{\v c}ari{\'c}, Sven and Ramponi, Giovanni and Sersic, D.},
}

@article{870,
  title     = {2D parallel thinning and shrinking based on sufficient conditions for topology preservation},
  journal   = {ACTA CYBERNETICA-SZEGED},
  volume    = {20},
  year      = {2011},
  note      = {ScopusID: 79960666919},
  pages     = {125--144},
  publisher = {University of Szeged, Institute of Informatics},
  type      = {Journal article},
  address   = {Szeged},
  abstract  = {Thinning and shrinking algorithms, respectively, are capable of extracting medial lines and topological kernels from digital binary objects in a topology preserving way. These topological algorithms are composed of reduction operations: object points that satisfy some topological and geometrical constraints are removed until stability is reached. In this work we present some new sufficient conditions for topology preserving parallel reductions and fiftyfour new 2D parallel thinning and shrinking algorithms that are based on our conditions. The proposed thinning algorithms use five characterizations of endpoints.},
  issn      = {0324-721X},
  author    = {N{\'e}meth, G{\'a}bor and Kardos, P{\'e}ter and Pal{\'a}gyi, K{\'a}lm{\'a}n},
}

@inproceedings{864,
  title     = {A family of topology-preserving 3d parallel 6-subiteration thinning algorithms},
  booktitle = {Combinatorial Image Analysis (IWCIA)},
  series    = {Lecture Notes in Computer Science},
  number    = {6636},
  year      = {2011},
  note      = {ScopusID: 79957651399},
  month     = may,
  pages     = {17--30},
  publisher = {Springer Verlag},
  type      = {Conference paper},
  address   = {Madrid, Spain},
  abstract  = {Thinning is an iterative layer-by-layer erosion until only the skeleton-like shape features of the objects are left. This paper presents a family of new 3D parallel thinning algorithms that are based on our new sufficient conditions for 3D parallel reduction operators to preserve topology. The strategy which is used is called subiteration-based: each iteration step is composed of six parallel reduction operators according to the six main directions in 3D. The major contributions of this paper are: 1) Some new sufficient conditions for topology preserving parallel reductions are introduced. 2) A new 6-subiteration thinning scheme is proposed. Its topological correctness is guaranteed, since its deletion rules are derived from our sufficient conditions for topology preservation. 3) The proposed thinning scheme with different characterizations of endpoints yields various new algorithms for extracting centerlines and medial surfaces from 3D binary pictures. {\textcopyright} 2011 Springer-Verlag Berlin Heidelberg.},
  isbn      = {978-3-642-21072-3},
  doi       = {10.1007/978-3-642-21073-0_5},
  author    = {N{\'e}meth, G{\'a}bor and Kardos, P{\'e}ter and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Aggarwal, Jake K and Barneva, Reneta P and Brimkov, Valentin E and Koroutchev, Kostadin N and Korutcheva, Elka R},
}

@inproceedings{876,
  title     = {Iter{\'a}ci{\'o}nk{\'e}nti sim{\'\i}t{\'a}ssal kombin{\'a}lt v{\'e}kony{\'\i}t{\'a}s},
  booktitle = {A K{\'e}pfeldolgoz{\'o}k {\'e}s Alakfelismer{\H o}k T{\'a}rsas{\'a}g{\'a}nak konferenci{\'a}ja - K{\'E}PAF 2011},
  year      = {2011},
  month     = jan,
  pages     = {174--189},
  publisher = {NJSZT},
  type      = {Conference paper},
  address   = {Szeged},
  url       = {http://www.inf.u-szeged.hu/kepaf2011/pdfs/S05_01.pdf},
  author    = {Kardos, P{\'e}ter and N{\'e}meth, G{\'a}bor and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Kato, Zoltan and Pal{\'a}gyi, K{\'a}lm{\'a}n},
}

@article{871,
  title     = {Thinning combined with iteration-by-iteration smoothing for 3D binary images},
  journal   = {GRAPHICAL MODELS},
  volume    = {73},
  year      = {2011},
  note      = {ScopusID: 79952613010},
  month     = nov,
  pages     = {335--345},
  type      = {Journal article},
  abstract  = {In this work we present a new thinning scheme for reducing the noise sensitivity of 3D thinning algorithms. It uses iteration-by-iteration smoothing that removes some border points that are considered as extremities. The proposed smoothing algorithm is composed of two parallel topology preserving reduction operators. An efficient implementation of our algorithm is sketched and its topological correctness for (26, 6) pictures is proved. {\textcopyright} 2011 Elsevier Inc. All rights reserved.},
  issn      = {1524-0703},
  doi       = {10.1016/j.gmod.2011.02.001},
  author    = {N{\'e}meth, G{\'a}bor and Kardos, P{\'e}ter and Pal{\'a}gyi, K{\'a}lm{\'a}n},
}

@inproceedings{877,
  title     = {A topol{\'o}gia-meg{\H o}rz{\'e}s elegend{\H o} felt{\'e}telein alapul{\'o} 3D p{\'a}rhuzamos v{\'e}kony{\'\i}t{\'o} algoritmusok},
  booktitle = {A K{\'e}pfeldolgoz{\'o}k {\'e}s Alakfelismer{\H o}k T{\'a}rsas{\'a}g{\'a}nak konferenci{\'a}ja - K{\'E}PAF 2011},
  year      = {2011},
  month     = jan,
  pages     = {190--205},
  publisher = {NJSZT},
  type      = {Conference paper},
  address   = {Szeged},
  url       = {http://www.inf.u-szeged.hu/kepaf2011/pdfs/S05_02.pdf},
  author    = {N{\'e}meth, G{\'a}bor and Kardos, P{\'e}ter and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Kato, Zoltan and Pal{\'a}gyi, K{\'a}lm{\'a}n},
}

@article{863,
  title     = {Topology Preserving Parallel Thinning Algorithms},
  journal   = {INTERNATIONAL JOURNAL OF IMAGING SYSTEMS AND TECHNOLOGY},
  volume    = {21},
  year      = {2011},
  note      = {UT: 000287789100005; ScopusID: 79951782238},
  month     = feb,
  pages     = {37--44},
  publisher = {Wiley Periodicals, Inc.},
  type      = {Journal article},
  abstract  = {Thinning is an iterative object reduction technique for extracting medial curves from binary objects. During a thinning process, some border points that satisfy certain topological and geometric constraints are deleted in iteration steps. Parallel thinning algorithms are composed of parallel reduction operators that delete a set of object points simultaneously. This article presents 21 parallel thinning algorithms for (8,4) binary pictures that are derived from the sufficient conditions for topology preservation accommodated to the three parallel thinning approaches. {\textcopyright} 2011 Wiley Periodicals, Inc.},
  issn      = {0899-9457},
  doi       = {10.1002/ima.20272},
  author    = {N{\'e}meth, G{\'a}bor and Pal{\'a}gyi, K{\'a}lm{\'a}n},
}

@article{862,
  title     = {Bej{\'a}r{\'a}sf{\"u}ggetlen szekvenci{\'a}lis v{\'e}kony{\'\i}t{\'a}s},
  journal   = {ALKALMAZOTT MATEMATIKAI LAPOK},
  volume    = {27},
  year      = {2010},
  pages     = {17--40},
  type      = {Journal article},
  issn      = {0133-3399},
  author    = {Kardos, P{\'e}ter and N{\'e}meth, G{\'a}bor and Pal{\'a}gyi, K{\'a}lm{\'a}n},
}

@inproceedings{872,
  title     = {Topology preserving 2-subfield 3D thinning algorithms},
  booktitle = {Proceedings of the International Conference on Signal Processing, Pattern Recognition and Applications (SPPRA)},
  year      = {2010},
  note      = {ScopusID: 77954590365},
  month     = feb,
  pages     = {310--316},
  publisher = {IASTED ACTA Press},
  type      = {Conference paper},
  address   = {Innsbruck, Austria},
  abstract  = {This paper presents a new family of 3D thinning algorithms for extracting skeleton-like shape features (i.e, centerline, medial surface, and topological kernel) from volumetric images. A 2-subfield strategy is applied: all points in a 3D picture are partitioned into two subsets which are alternatively activated. At each iteration, a parallel operator is applied for deleting some border points in the active subfield. The proposed algorithms are derived from Ma{\textquoteright}s sufficient conditions for topology preservation, and they use various endpoint characterizations.},
  author    = {N{\'e}meth, G{\'a}bor and Kardos, P{\'e}ter and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Zagar, B and Kuijper, A and Sahbi, H},
}

@inproceedings{865,
  title     = {Topology Preserving 3D Thinning Algorithms using Four and Eight Subfields},
  booktitle = {Proceedings of the International Conference on Image Analysis and Recognition (ICIAR)},
  series    = {Lecture Notes in Computer Science},
  volume    = {6111},
  year      = {2010},
  note      = {ScopusID: 77955432947},
  month     = jun,
  pages     = {316--325},
  publisher = {Springer Verlag},
  type      = {Conference paper},
  address   = {P{\'o}voa de Varzim, Portugal},
  abstract  = {Thinning is a frequently applied technique for extracting skeleton-like shape features (i.e., centerline, medial surface, and topological kernel) from volumetric binary images. Subfield-based thinning algorithms partition the image into some subsets which are alternatively activated, and some points in the active subfield are deleted. This paper presents a set of new 3D parallel subfield-based thinning algorithms that use four and eight subfields. The three major contributions of this paper are: 1) The deletion rules of the presented algorithms are derived from some sufficient conditions for topology preservation. 2) A novel thinning scheme is proposed that uses iteration-level endpoint checking. 3) Various characterizations of endpoints yield different algorithms. {\textcopyright} 2010 Springer-Verlag.},
  doi       = {10.1007/978-3-642-13772-3_32},
  author    = {N{\'e}meth, G{\'a}bor and Kardos, P{\'e}ter and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Campilho, Aur{\'e}lio and Kamel, Mohamed},
}

@inproceedings{866,
  title     = {Topology Preserving Parallel Smoothing for 3D Binary Images},
  booktitle = {Proceedings of the Computational Modeling of Objects Represented in Images (CMORI)},
  volume    = {6026},
  year      = {2010},
  note      = {ScopusID: 77952401887},
  month     = may,
  pages     = {287--298},
  publisher = {Springer Verlag},
  type      = {Conference paper},
  address   = {Buffalo, USA},
  abstract  = {This paper presents a new algorithm for smoothing 3D binary images in a topology preserving way. Our algorithm is a reduction operator: some border points that are considered as extremities are removed. The proposed method is composed of two parallel reduction operators. We are to apply our smoothing algorithm as an iteration-by-iteration pruning for reducing the noise sensitivity of 3D parallel surface-thinning algorithms. An efficient implementation of our algorithm is sketched and its topological correctness for (26,6) pictures is proved. {\textcopyright} 2010 Springer-Verlag.},
  doi       = {10.1007/978-3-642-12712-0_26},
  author    = {N{\'e}meth, G{\'a}bor and Kardos, P{\'e}ter and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Barneva, Reneta P and Brimkov, Valentin E and Hauptman, Herbert A and Natal Jorge, Renato M and Tavares, Jo{\~a}o Manuel R S},
}

@article{873,
  title     = {Comparison and evaluation of methods for liver segmentation from CT datasets},
  journal   = {IEEE TRANSACTIONS ON MEDICAL IMAGING},
  volume    = {28},
  year      = {2009},
  note      = {ScopusID: 68249121543},
  month     = aug,
  pages     = {1251--1265},
  type      = {Journal article},
  abstract  = {This paper presents a comparison study between 10 automatic and six interactive methods for liver segmentation from contrast-enhanced CT images. It is based on results from the "MICCAI 2007 Grand Challenge" workshop, where 16 teams evaluated their algorithms on a common database. A collection of 20 clinical images with reference segmentations was provided to train and tune algorithms in advance. Participants were also allowed to use additional proprietary training data for that purpose. All teams then had to apply their methods to 10 test datasets and submit the obtained results. Employed algorithms include statistical shape models, atlas registration, level-sets, graph-cuts and rule-based systems. All results were compared to reference segmentations five error measures that highlight different aspects of segmentation accuracy. All measures were combined according to a specific scoring system relating the obtained values to human expert variability. In general, interactive methods reached higher average scores than automatic approaches and featured a better consistency of segmentation quality. However, the best automatic methods (mainly based on statistical shape models with some additional free deformation) could compete well on the majority of test images. The study provides an insight in performance of different segmentation approaches under real-world conditions and highlights achievements and limitations of current image analysis techniques. {\textcopyright} 2009 IEEE.},
  issn      = {0278-0062},
  doi       = {10.1109/TMI.2009.2013851},
  author    = {Heimann, Tobias and Van Ginneken, Brahm and Styner, Martin A and Arzhaeva, Yulia and Aurich, Volker and Bauer, Christian and Beck, Andreas and Becker, Christoph and Beichel, Reinhardt and Bekes, Gy{\"o}rgy and Bello, Fernando and Binnig, Gerd and Bischof, Horst and Bornik, Alexander and Cashman, Peter MM and Chi, Ying and C{\'o}rdova, Andres and Dawant, Benoit M and Fidrich, M{\'a}rta and Furst, Jacob D and Furukawa, Daisuke and Grenacher, Lars and Hornegger, Joachim and Kainm{\"u}ller, Dagmar and Kitney, Richard I and Kobatake, Hidefumi and Lamecker, Hans and Lange, Thomas and Lee, Jeongjin and Lennon, Brian and Li, Rui and Li, Senhu and Meinzer, Hans-Peter and N{\'e}meth, G{\'a}bor and Raicu, Daniela S and Rau, Anne-Mareike and Van Rikxoort, Eva M and Rousson, Mikael and Rusk{\'o}, L{\'a}szl{\'o} and Saddi, Kinda A and Schmidt, G{\"u}nter and Seghers, Dieter and Shimizu, Akinobi and Slagmolen, Pieter and Sorantin, Erich and Soza, Grzegorz and Susomboon, Ruchaneewan and Waite, Jonathan M and Wimmer, Andreas and Wolf, Ivo},
}

@inproceedings{867,
  title     = {Fully Parallel 3D Thinning Algorithms based on Sufficient Conditions for Topology Preservation},
  booktitle = {Proceedings of Discrete Geometry for Computer Imagery (DGCI)},
  number    = {5810},
  year      = {2009},
  note      = {ScopusID: 77952414581},
  month     = sep,
  pages     = {481--492},
  publisher = {Springer Verlag},
  type      = {Conference paper},
  address   = {Montreal, Quebec, Canada},
  abstract  = {This paper presents a family of parallel thinning algorithms for extracting medial surfaces from 3D binary pictures. The proposed algorithms are based on sufficient conditions for 3D parallel reduction operators to preserve topology for (26,6) pictures. Hence it is self-evident that our algorithms are topology preserving. Their efficient implementation on conventional sequential computers is also presented. {\textcopyright} 2009 Springer Berlin Heidelberg.},
  isbn      = {978-3-642-04396-3},
  doi       = {10.1007/978-3-642-04397-0_41},
  author    = {Pal{\'a}gyi, K{\'a}lm{\'a}n and N{\'e}meth, G{\'a}bor},
  editor    = {Brlek, Srecko and Reutenauer, Christophe and Proven{\c c}al, Xavier},
}

@inproceedings{973,
  title     = {Kritikus p{\'a}rokat vizsg{\'a}l{\'o} bej{\'a}r{\'a}sf{\"u}ggetlen szekvenci{\'a}lis v{\'e}kony{\'\i}t{\'o} algoritmus},
  booktitle = {A K{\'e}pfeldolgoz{\'o}k {\'e}s Alakfelismer{\H o}k T{\'a}rsas{\'a}g{\'a}nak konferenci{\'a}ja - K{\'E}PAF 2009},
  year      = {2009},
  month     = jan,
  pages     = {1--8},
  publisher = {Akaprint},
  type      = {Conference paper},
  address   = {Budapest},
}

@inproceedings{972,
  title     = {A morfol{\'o}giai v{\'a}z {\'a}ltal{\'a}nos{\'\i}t{\'a}sa szomsz{\'e}ds{\'a}gi szekvenci{\'a}kkal},
  booktitle = {A K{\'e}pfeldolgoz{\'o}k {\'e}s Alakfelismer{\H o}k T{\'a}rsas{\'a}g{\'a}nak konferenci{\'a}ja - K{\'E}PAF 2009},
  year      = {2009},
  month     = jan,
  pages     = {1--10},
  publisher = {Akaprint},
  type      = {Conference paper},
  address   = {Budapest},
}

@inproceedings{868,
  title     = {An order-independent sequential thinning algorithm},
  booktitle = {Proceedings of the International Workshop on Combinatorial Image Analysis (IWCIA)},
  number    = {5852},
  year      = {2009},
  note      = {ScopusID: 78650496028},
  month     = nov,
  pages     = {162--175},
  publisher = {Springer Verlag},
  type      = {Conference paper},
  address   = {Playa del Carmen, Mexico},
  abstract  = {Thinning is a widely used approach for skeletonization. Sequential thinning algorithms use contour tracking: they scan border points and remove the actual one if it is not designated a skeletal point. They may produce various skeletons for different visiting orders. In this paper, we present a new 2-dimensional sequential thinning algorithm, which produces the same result for arbitrary visiting orders and it is capable of extracting maximally thinned skeletons. {\textcopyright} Springer-Verlag Berlin Heidelberg 2009.},
  isbn      = {978-3-642-10208-0},
  doi       = {10.1007/978-3-642-10210-3_13},
  url       = {http://link.springer.com/chapter/10.1007/978-3-642-10210-3_13},
  author    = {Kardos, P{\'e}ter and N{\'e}meth, G{\'a}bor and Pal{\'a}gyi, K{\'a}lm{\'a}n},
  editor    = {Wiederhold, Petra and Barneva, Reneta P},
}

@inproceedings{869,
  title     = {Skeletonization based on metrical neighborhood sequences},
  booktitle = {Computer Vision Systems},
  number    = {5008},
  year      = {2008},
  note      = {ScopusID: 44649159529},
  month     = may,
  pages     = {333--342},
  publisher = {Springer Verlag},
  type      = {Conference paper},
  address   = {Santorini, Greece},
  abstract  = {Skeleton is a shape descriptor which summarizes the general form of objects. It can be expressed in terms of the fundamental morphological operations. The limitation of that characterization is that its construction based on digital disks such that cannot provide good approximation to the Euclidean disks. In this paper we define a new type of skeleton based on neighborhood sequences that is much closer to the Euclidean skeleton. A novel method for quantitative comparison of skeletonization algorithms is also proposed. {\textcopyright} 2008 Springer-Verlag Berlin Heidelberg.},
  isbn      = {978-3-540-79546-9},
  issn      = {0302-9743},
  doi       = {10.1007/978-3-540-79547-6_32},
  author    = {Fazekas, Attila and Pal{\'a}gyi, K{\'a}lm{\'a}n and Kov{\'a}cs, Gy{\"o}rgy and N{\'e}meth, G{\'a}bor},
  editor    = {Gasteratos, Antonios and Vincze, Markus and Tsotsos, John K},
}