#!/usr/bin/python2
#
# Copyright (c) 2011 Matthias Matousek <matou@taunusstein.net>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
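#
# duplicatefiles.py -- find files with identical content below the current
# directory: group files by size first, md5-hash only files that share a
# size, and report every group whose "size:md5" tag occurs more than once.
#
# usage: duplicatefiles.py [-l debug|info|warning|error|fatal|spam]
#                          [-t THRESHOLD_BYTES] [-d DATABASE_FILE]
#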
import os
import sys
import hashlib
import logging
import sqlite3
import random
# default log level
level = logging.INFO
# very verbose output?
SPAM = False
# a database file to store the huge amount of data that will be gathered
database = "/tmp/dupfdb.%d" % random.randint(0, 2**32)
# files that are smaller than the threshold will be ignored
threshold = 1024
loglevel = {"debug": logging.DEBUG, "info": logging.INFO,
            "warning": logging.WARNING, "error": logging.ERROR,
            "fatal": logging.FATAL, "spam": logging.DEBUG}
# parse arguments (each option expects its value as the next argument,
# so stop one short of the end to avoid an IndexError):
for i in range(1, len(sys.argv) - 1):
    if sys.argv[i] == "-l":
        level = loglevel[sys.argv[i+1].lower()]
        if sys.argv[i+1].lower() == "spam":
            SPAM = True
    elif sys.argv[i] == "-t":
        threshold = int(sys.argv[i+1])
    elif sys.argv[i] == "-d":
        database = sys.argv[i+1]
logging.basicConfig(level=level)
# connect to the database
dbconnection = sqlite3.connect(database)
db = dbconnection.cursor()
# create tables for the data
db.execute("CREATE TABLE files (size INTEGER, path TEXT)")
db.execute("CREATE TABLE same (tag TEXT, path TEXT)")
def spam(msg):
    # extra-verbose output, only emitted when the "spam" level was requested
    if SPAM:
        logging.debug("SPAM:%s" % msg)

logging.debug("threshold is %d" % threshold)
def hash_file(path):
    "return the md5 hexdigest of the file at path"
    spam("hashing %s" % path)
    # open in binary mode so we hash the raw bytes
    f = open(path, "rb")
    md5 = hashlib.md5()
    while True:
        # read in 10 KiB chunks, do NOT load the whole file into memory
        # whole files in memory aren't kewl
        chunk = f.read(10*1024)
        if not chunk:
            break
        md5.update(chunk)
    f.close()
    return md5.hexdigest()
# first pass: collect all regular files, skipping directories and symlinks
logging.info("searching for files in current directory ('%s')"
             % os.path.abspath(os.curdir))
# don't store this in the database. hopefully we won't have so many
# directories that the program will run out of memory
dirs = [os.curdir]
filecounter = 0
# walk the tree iteratively with an explicit directory stack
while len(dirs) > 0:
    curdir = dirs.pop()
    for f in os.listdir(curdir):
        f = os.path.join(curdir, f)
        if os.path.islink(f):
            # don't bother us with links *grrr*
            spam("ignoring link: %s" % f)
            continue
        if os.path.isfile(f):
            size = os.path.getsize(f)
            if size <= threshold:
                spam("ignored %s" % f)
                continue
            try:
                db.execute("INSERT INTO files VALUES(?, ?)",
                           (size, unicode(f, "UTF-8")))
            except UnicodeDecodeError:
                logging.error("%s caused a UnicodeDecodeError. "
                              "Ignoring and moving on." % f)
                continue
            filecounter += 1
            spam("found %d files" % filecounter)
            if filecounter % 10000 == 0:
                # commit in batches so a crash doesn't lose everything
                dbconnection.commit()
                logging.debug("found %d files" % filecounter)
        elif os.path.isdir(f):
            dirs.append(f)
        # else ignore (neither file nor directory, e.g. a socket or fifo)
dbconnection.commit()
logging.info("found %d files bigger than %d bytes" % (filecounter, threshold))
logging.info("starting hashing of files")
count = 0
cur = dbconnection.cursor()
cur.execute("SELECT DISTINCT size FROM files")
while True:
    row = cur.fetchone()
    if not row:
        break
    size = row[0]
    db.execute("SELECT * FROM files WHERE size=?", (size,))
    entries = db.fetchall()
    if len(entries) < 2:
        # a size that occurs only once can't have a duplicate, skip the hashing
        continue
    for entry in entries:
        try:
            db.execute("INSERT INTO same VALUES (?, ?)",
                       (unicode("%d:%s" % (size, hash_file(entry[1])), "UTF-8"),
                        entry[1]))
        except UnicodeEncodeError:
            logging.error("%s caused a UnicodeEncodeError. That sucks! "
                          "Trying to continue anyway." % entry[1])
        except IOError:
            # a file may have vanished or become unreadable since pass one
            logging.error("could not read %s. Skipping it." % entry[1])
        count += 1
        spam("processed %d files" % count)
        if count % 1000 == 0:
            logging.debug("processed %d files" % count)
            dbconnection.commit()
dbconnection.commit()
logging.info("done hashing")
logging.info("looking for duplicates")
db.execute("SELECT DISTINCT tag FROM same AS s WHERE (SELECT COUNT(tag) FROM same as s2 where s2.tag=s.tag)>1")
tags = db.fetchall()
for tag in tags:
    db.execute("SELECT path FROM same WHERE tag=?", (tag[0],))
    print "these files are the same:",
    for path in db:
        print "%s," % path[0],
    print ""
dbconnection.close()
logging.info("END OF LINE")