3 Commits

Author SHA1 Message Date
gechandesu
172c7b920e Bump version 2025-09-24 21:01:49 +03:00
gechandesu
7d754eba79 fix duplicates report (#1) 2025-09-24 20:51:20 +03:00
gechandesu
07d0513958 Update README.md 2025-08-26 13:02:27 +03:00
3 changed files with 14 additions and 6 deletions

View File

@@ -1,5 +1,10 @@
# fdup
> [!CAUTION]
> Recently I've noticed wrong hashsum calculations in some cases (unknown reason for now).
>
> Different files may be falsely marked as having the same hash! Be careful and do not use the `-remove` option until issue https://github.com/gechandesu/fdup/issues/1 is resolved.
The dumb tool for finding duplicate files by their hash sums.
Compile it with `-prod` for better performance:

11
fdup.v
View File

@@ -35,7 +35,7 @@ fn main() {
mut app := cli.Command{
name: 'fdup'
description: 'File duplicates finder'
version: '0.2.0'
version: '0.2.1'
usage: '[DIR...]'
execute: find
defaults: struct {
@@ -286,8 +286,8 @@ enum HashFn {
md5
}
fn hashsum(file string, hash_fn HashFn) string {
file_bytes := os.read_bytes(file) or { []u8{len: 1} }
fn hashsum(file string, hash_fn HashFn) !string {
file_bytes := os.read_bytes(file)!
defer {
unsafe { file_bytes.free() }
}
@@ -317,7 +317,10 @@ fn calculate_hashsums(tid int, files []string, hash_fn HashFn) map[string]string
eprintln('thread ${tid} started with queue of ${files.len} files')
mut sums := map[string]string{}
for file in files {
sums[file] = hashsum(file, hash_fn)
sums[file] = hashsum(file, hash_fn) or {
eprintln('File ${file} is skipped due read error: ${err}')
continue
}
}
return sums
}

4
v.mod
View File

@@ -1,7 +1,7 @@
Module {
name: 'fdup'
description: 'File duplicates finder'
version: '0.2.0'
description: 'Find and remove duplicate files'
version: '0.2.1'
license: 'GPL-3.0-or-later'
dependencies: []
}