#!/bin/bash
# a command line utility intended to simplify bulk decompression
# by running a pool of parallel operations, sized by the processor
# count, each of which creates a directory, decompresses, and if
# successful removes the compressed file, finally checking each
# directory and pulling out single items to avoid redundant parent
# directories.

# execute extraction
# Decompress a single archive into a sibling directory named after it,
# delete the archive on success, and flatten a redundant single-entry
# wrapper directory. Directories and unsupported extensions are
# silently skipped.
#
# Arguments:
#   $1 - path to a candidate archive file (relative to the cwd)
# Returns:
#   0 on success or skip; the failing tool's status on extraction error
extract_locally() {
	local archive=$1
	local extension="${archive##*.}"

	[ -d "$archive" ] && return # skip/ignore directories
	# anchor the match: an unanchored regex would accept extensions that
	# merely *contain* a supported name (e.g. "tzip", "rarx")
	! [[ "$extension" =~ ^(7z|zip|rar)$ ]] && return

	# extract into a directory named after the archive (extension stripped)
	local name="${archive%.*}"
	mkdir -p "$name" || return

	# handle each archive type with its own tool; on failure, bail out
	# and keep the original archive intact
	case "$extension" in
		zip)
			unzip -o "$archive" -d "$name" || return
			;;
		rar)
			unrar x -o+ -idp "$archive" "$name" || return
			;;
		7z)
			7za x -aoa -bd "$archive" -o"$name" || return
			;;
	esac

	# extraction succeeded: the compressed file is now redundant
	rm -- "$archive"

	# if the archive held exactly one entry, hoist it out to avoid a
	# redundant parent directory. 'find' counts dotfiles, so the mv must
	# see them too — a plain '*' would miss a lone hidden entry, fail,
	# and strand the .tmp directory; dotglob fixes that.
	if [ "$(find "$name" -mindepth 1 -maxdepth 1 | wc -l)" -eq 1 ]; then
		mv "$name" "${name}.tmp"
		shopt -s dotglob
		mv "${name}.tmp"/* .
		shopt -u dotglob
		rmdir "${name}.tmp"
	fi
}

# capture all files in the current directory; nullglob makes an empty
# directory yield an empty array instead of a literal "*"
shopt -s nullglob
# NOTE: the previous 'export' was a silent no-op — bash cannot export
# arrays through the environment; background jobs inherit 'files' via
# the subshell fork regardless.
files=(*)

# run up to nproc extractions concurrently
max_jobs=$(nproc) # hoisted: loop-invariant, no need to re-spawn per poll
for file in "${files[@]}"; do
	# throttle: poll until the job pool has a free slot
	while [ "$(jobs -p | wc -l)" -ge "$max_jobs" ]; do sleep 1; done
	extract_locally "$file" &
done

# barrier: wait for every remaining extraction, then report completion
wait
echo "done"
