版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/wuzh1230/article/details/52504963
FileSystem: remove duplicate files (files with identical MD5 hashes) from a directory tree.
#!/bin/bash
# Remove duplicate regular files under a directory: any file whose MD5
# hash matches an earlier file (in sorted-hash order) is deleted, so
# exactly one copy of each distinct content survives.
# Usage: script [dir]   (dir defaults to ./testdir, as before)
set -u

#######################################
# Scan a directory for duplicate files by MD5 and delete all but the
# first member of each duplicate group. Writes hash.txt / order.txt
# scratch files into the current directory (kept for inspection, as
# the original did).
# Arguments: $1 - directory to scan (default: testdir)
# Outputs:   progress log on stdout
# Returns:   0 on success; exits 1/2 on a malformed md5sum line
#######################################
remove_dups() {
  local dir=${1:-testdir}
  local count=0 dup=0 prev="" line hash path

  # One "HASH  PATH" line per regular file.
  find "$dir" -type f -exec md5sum {} \; > hash.txt
  # e.g. 65xxxxxxxxxxxxxxxxxxxxxxxxxxxx3f *testdir/abc.MOV

  # Sorting makes equal hashes adjacent, so a single pass finds dups.
  sort hash.txt > order.txt

  # IFS= read -r keeps whitespace/backslashes intact and — unlike the
  # former 'while true; read' loop, which fell into the "error Hash"
  # exit at EOF — terminates cleanly at end of file.
  while IFS= read -r line; do
    count=$((count + 1))
    # An md5sum line is: 32 hex digits, a 2-char separator ("  " or
    # " *"), then the path. Slice by position rather than cut -d'*',
    # which broke on paths containing '*' and forked twice per line.
    hash=${line:0:32}
    path=${line:34}
    if [ -z "$hash" ]; then
      echo "error Hash, exit"
      exit 1
    fi
    if [ -z "$path" ]; then
      echo "error Path, exit"
      exit 2
    fi
    if [ -z "$prev" ]; then   # very first line: nothing to compare yet
      prev=$hash
      continue
    fi
    echo "[$count] $prev -> $hash"
    if [ "$prev" == "$hash" ]; then
      dup=$((dup + 1))
      echo " Dup [$dup]"
      echo " Prev Hash: $prev"
      echo " Hash [$hash]"
      echo " Remove [$path]"
      # Plain files: -f not -rf; quote and use -- to survive spaces
      # and paths that begin with '-'.
      rm -f -- "$path"
    fi
    prev=$hash
  done < order.txt
}

remove_dups "$@"