publish_utils.sh: faster checksum matching
There are a large number of checksum files when publishing with aptly. We need to search them in parallel for checksum matches. Doing so speeds publication by an order of magnitude.

Signed-off-by: Scott Little <scott.little@windriver.com>
Change-Id: Iedb392e1bd1d63c4755e1b5c8eab8eb8a379bb14
commit 7f604733e5 (parent 0d1a482f85)
@@ -24,12 +24,15 @@ publish_file() {
     local checksum
     checksum=$(sha256sum "$filename" | awk '{print $1}' ; check_pipe_status) || exit 1
 
+    local p
+    p=$(nproc)
+
     # find an existing file in $combined_checksums_file that we can
     # hardlink to
     local link_created
     link_created=$(
-        cat "$published_checksum_files_list_file" | {
+        cat "$published_checksum_files_list_file" | xargs -P $p \grep -l "$checksum " | {
             while read checksum_file ; do
                 local checksum_dir
                 checksum_dir="${checksum_file%/*}"
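For reference, a minimal standalone sketch of the pattern this diff relies on: fan the checksum-file list out to one grep per CPU with xargs -P and keep only the files that contain the hash. The paths below are hypothetical placeholders, and "-n 64" is an added assumption so xargs splits the list into enough invocations for -P to actually run in parallel; the real script uses its own variables and the check_pipe_status helper.

#!/bin/bash
# Sketch of the parallel checksum lookup shown in the diff above.
# "checksums.list" and "/path/to/package.deb" are hypothetical placeholders.

filename="/path/to/package.deb"        # file we would like to publish
checksum_files_list="checksums.list"   # one published checksum file path per line

# Checksum of the candidate file; sha256sum output is "<hash>  <name>".
checksum=$(sha256sum "$filename" | awk '{print $1}')
[[ -n "$checksum" ]] || { echo "failed to checksum $filename" >&2; exit 1; }

# Search every published checksum file in parallel: xargs spreads the file
# list over one grep per CPU (-P), and "grep -l" prints only the names of
# files containing the hash. The trailing space in the pattern keeps the hash
# from matching inside a longer hex string; "-n 64" caps the arguments per
# grep invocation so -P can actually run several greps concurrently.
matches=$(xargs -r -n 64 -P "$(nproc)" grep -l "$checksum " < "$checksum_files_list")

if [[ -n "$matches" ]]; then
    echo "already published, could hardlink against: $(head -n 1 <<<"$matches")"
else
    echo "no existing copy of $filename found"
fi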