
@article{cmc.2023.039818,
  author   = {Ogla, Raheem and Mahmood, Eman Shakar and Ahmed, Rasha I. and Rahma, Abdul Monem S.},
  title    = {New Fragile Watermarking Technique to Identify Inserted Video Objects Using {H.264} and Color Features},
  journal  = {Computers, Materials \& Continua},
  volume   = {76},
  number   = {3},
  pages    = {3075--3096},
  year     = {2023},
  issn     = {1546-2226},
  doi      = {10.32604/cmc.2023.039818},
  url      = {http://www.techscience.com/cmc/v76n3/54316},
  abstract = {The transmission of video content over a network raises various issues relating to copyright authenticity, ethics,
legality, and privacy. The protection of copyrighted video content is a significant issue in the video industry, and
it is essential to find effective solutions to prevent tampering and modification of digital video content during its
transmission through digital media. However, there are still many unresolved challenges. This paper aims to address
those challenges by proposing a new technique for detecting moving objects in digital videos, which can help prove
the credibility of video content by detecting any fake objects inserted by hackers. The proposed technique involves
using two methods, the H.264 and the extraction color features methods, to embed and extract watermarks in
video frames. The study tested the performance of the system against various attacks and found it to be robust.
The evaluation was done using different metrics such as Peak-Signal-to-Noise Ratio (PSNR), Mean Squared Error
(MSE), Structural Similarity Index Measure (SSIM), Bit Correction Ratio (BCR), and Normalized Correlation.
The accuracy of identifying moving objects was high, ranging from 96.3% to 98.7%. The system was also able to
embed a fragile watermark with a success rate of over 93.65% and had an average capacity of hiding of 78.67. The
reconstructed video frames had high quality with a PSNR of at least 65.45 dB and SSIM of over 0.97, making them
imperceptible to the human eye. The system also had an acceptable average time difference (T = 1.227/s) compared
with other state-of-the-art methods.},
}



