 source "$GH_WORKFLOW_ROOT/_externals/tacklelib/bash/tacklelib/bash_tacklelib" || exit $?
 
 tkl_include_or_abort "$GH_WORKFLOW_ROOT/bash/github/init-basic-workflow.sh"
+tkl_include_or_abort "$GH_WORKFLOW_ROOT/bash/github/init-print-workflow.sh"
 tkl_include_or_abort "$GH_WORKFLOW_ROOT/bash/github/init-yq-workflow.sh"
 tkl_include_or_abort "$GH_WORKFLOW_ROOT/bash/github/init-curl-workflow.sh"
 tkl_include_or_abort "$GH_WORKFLOW_ROOT/bash/github/init-tacklelib-workflow.sh"
@@ -92,7 +93,7 @@ if [[ ! -f "$content_index_file" ]]; then
   # CAUTION:
   #   We must use '' as an empty value placeholder to retain the single quotes around it.
   #   If we try to leave a value empty without quotes, then for a value with a `:` character the `yq` parser will replace it with double quotes or
-  #   even change it in case of the UTC timestamp value.
+  #   even change it, for example, in case of the UTC timestamp value.
   #
   echo \
 "# This file is automatically updated by the GitHub action script: https://github.com/andry81-devops/gh-action--accum-content
@@ -178,6 +179,18 @@ if [[ -z "$TEMP_DIR" ]]; then # otherwise use external TEMP_DIR
   tkl_push_trap 'rm -rf "$TEMP_DIR"' EXIT
 fi
 
+gh_begin_print_annotation_group notice
+
+function end_print_annotation_group_handler()
+{
+  gh_end_print_annotation_group
+
+  # self update
+  function end_print_annotation_group_handler() { :; }
+}
+
+tkl_push_trap 'end_print_annotation_group_handler' EXIT
+
 stats_failed_inc=0
 stats_skipped_inc=0
 stats_expired_inc=0
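
Note on the block added above: the handler closes the annotation group exactly once, whether it runs via the EXIT trap or is called explicitly later, because its first invocation redefines it as a no-op. A minimal sketch of the same run-once pattern, with hypothetical stand-ins for the gh_* helpers (the real ones come from init-print-workflow.sh and presumably wrap GitHub Actions ::group::/::endgroup:: workflow commands):

#!/bin/bash
# sketch only: stand-ins for the real helpers from init-print-workflow.sh
function gh_begin_print_annotation_group() { echo "::group::$1"; }
function gh_end_print_annotation_group()   { echo "::endgroup::"; }

gh_begin_print_annotation_group notice

function end_print_annotation_group_handler()
{
  gh_end_print_annotation_group

  # redefine itself as a no-op so a second call (trap plus explicit call)
  # cannot close the group twice
  function end_print_annotation_group_handler() { :; }
}

trap 'end_print_annotation_group_handler' EXIT  # the script itself uses tkl_push_trap, which stacks handlers
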
@@ -211,7 +224,11 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
   fi
 
   if [[ "$config_index_dir" != "$index_dir" ]]; then
-    gh_print_warning_ln "$0: warning: invalid index file directory entry: dirs[$i]:"$'\n'"  config_dir=\`$config_dir\`"$'\n'"  index_dir=\`$index_dir\`"$'\n'"  content_index_dir=\`$content_index_dir\`"$'\n'"  config_index_dir=\`$config_index_dir\`"
+    gh_print_warning_ln "$0: warning: invalid index file directory entry: dirs[$i]:"$'\n'\
+"  config_dir=\`$config_dir\`"$'\n'\
+"  index_dir=\`$index_dir\`"$'\n'\
+"  content_index_dir=\`$content_index_dir\`"$'\n'\
+"  config_index_dir=\`$config_index_dir\`"
 
     (( stats_failed_inc++ ))
 
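
For readers unfamiliar with the quoting used above: the rewrapped warning is still a single argument, because adjacent quoted strings and $'\n' (ANSI-C quoting) concatenate, and each trailing backslash only continues the command onto the next source line. A tiny standalone illustration with hypothetical values:

config_dir=docs        # hypothetical values
index_dir=cache

# one argument, printed as three lines; the backslash-newlines are purely source layout
printf '%s\n' "invalid index file directory entry:"$'\n'\
"  config_dir=\`$config_dir\`"$'\n'\
"  index_dir=\`$index_dir\`"
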
@@ -227,8 +244,14 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
     IFS=$'\n' read -r -d '' config_file config_query_url <<< \
       $("${YQ_CMDLINE_READ[@]}" ".\"content-config\".entries[0].dirs[$i].files[$j].file,.\"content-config\".entries[0].dirs[$i].files[$j].\"query-url\"" $content_config_file) 2>/dev/null
 
-    IFS=$'\n' read -r -d '' index_file index_queried_url index_file_prev_md5_hash index_file_prev_timestamp <<< \
-      $("${YQ_CMDLINE_READ[@]}" ".\"content-index\".entries[0].dirs[$i].files[$j].file,.\"content-index\".entries[0].dirs[$i].files[$j].\"queried-url\",.\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\",.\"content-index\".entries[0].dirs[$i].files[$j].timestamp" "$content_index_file") 2>/dev/null
+    IFS=$'\n' read -r -d '' index_file index_queried_url index_file_prev_size index_file_prev_md5_hash index_file_prev_timestamp <<< \
+      $("${YQ_CMDLINE_READ[@]}" "\
+        .\"content-index\".entries[0].dirs[$i].files[$j].file,\
+        .\"content-index\".entries[0].dirs[$i].files[$j].\"queried-url\",\
+        .\"content-index\".entries[0].dirs[$i].files[$j].size,\
+        .\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\",\
+        .\"content-index\".entries[0].dirs[$i].files[$j].timestamp" \
+        "$content_index_file") 2>/dev/null
 
     # CAUTION:
     #   Prevent invalid values from spreading if the upstream user didn't properly commit a completely correct yaml file or didn't commit at all.
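
The reworked read above only re-wraps the long yq expression; the parsing idea is unchanged: yq emits one value per line, and read with IFS=$'\n' plus -d '' distributes those lines across the listed variables (a missing trailing line just leaves its variable empty). A minimal sketch of the same pattern, with inline data in place of the yq call:

# three newline-separated values -> three variables
IFS=$'\n' read -r -d '' file queried_url size <<< \
  $'page/index.html\nhttps://example.com/page\n1024'

echo "file=$file queried-url=$queried_url size=$size"
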
@@ -238,6 +261,7 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
       config_query_url \
       index_file \
       index_queried_url \
+      index_file_prev_size \
       index_file_prev_md5_hash \
       index_file_prev_timestamp
 
@@ -267,7 +291,7 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
 
     is_file_expired=0
 
-    config_sched_next_update_timestamp_utc=$current_date_time_utc
+    config_sched_next_update_timestamp_utc="$current_date_time_utc"
     index_file_expired_timestamp_delta=0
 
     if [[ -n "$index_file_prev_timestamp" ]]; then
@@ -284,43 +308,51 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
 
       (( index_file_expired_sec = current_date_time_utc_sec - index_file_next_update_timestamp_utc_sec ))
       if (( index_file_expired_sec > 0 )); then
-        index_file_expired_timestamp_delta=+$(date --utc -d "@$index_file_expired_sec" +%T)
+        index_file_expired_timestamp_delta="+$(date --utc -d "@$index_file_expired_sec" +%T)"
         is_file_expired=1
       else
         (( index_file_expired_sec = -index_file_expired_sec ))
-        index_file_expired_timestamp_delta=-$(date --utc -d "@$index_file_expired_sec" +%T)
+        index_file_expired_timestamp_delta="-$(date --utc -d "@$index_file_expired_sec" +%T)"
       fi
     else
       is_file_expired=1
     fi
 
-    index_file_next_md5_hash=''
+    index_file_prev_updated_size=-1 # negative if the file does not exist
+    index_file_prev_updated_md5_hash=''
+
     if (( is_index_file_prev_exist )); then
+      # to update the file size in the index file
+      index_file_prev_updated_size="$(stat -c%s "$index_dir/$index_file")"
+
       # to update the file hash in the index file
-      index_file_next_md5_hash=( $(md5sum -b "$index_dir/$index_file") )
+      index_file_prev_updated_md5_hash=( $(md5sum -b "$index_dir/$index_file") )
 
       # drop the leading backslash: https://unix.stackexchange.com/questions/424628/md5sum-prepends-to-the-checksum
       #
-      index_file_next_md5_hash="${index_file_next_md5_hash#\\}"
+      index_file_prev_updated_md5_hash="${index_file_prev_updated_md5_hash#\\}"
    fi
 
    if (( ! is_file_expired )); then
      if (( ! NO_SKIP_UNEXPIRED_ENTRIES )); then
-        echo "File is skipped: prev-timestamp=\`$index_file_prev_timestamp\` sched-timestamp=\`$config_sched_next_update_timestamp_utc\` expired-delta=\`$index_file_expired_timestamp_delta\` existed=\`$is_index_file_prev_exist\` file=\`$index_dir/$index_file\`"
+        echo "File is skipped:
+  expired-delta=\`$index_file_expired_timestamp_delta\` timestamp=\`$index_file_prev_timestamp -> $config_sched_next_update_timestamp_utc\`
+  size=\`$index_file_prev_updated_size\` file=\`$index_dir/$index_file\`"
 
        # update existing file hash on skip
-        if [[ -n "$index_file_next_md5_hash" && "$index_file_next_md5_hash" != "$index_file_prev_md5_hash" ]]; then
-          index_file_next_timestamp="$(date --utc +%FT%TZ)"
-
+        if (( index_file_prev_updated_size != index_file_prev_size )) || \
+           [[ -n "$index_file_prev_updated_md5_hash" && "$index_file_prev_updated_md5_hash" != "$index_file_prev_md5_hash" ]]; then
          {
            # update index file fields
            if (( DISABLE_YAML_EDIT_FORMAT_RESTORE_BY_DIFF_MERGE_WORKAROUND )); then
              yq_edit 'content-index' 'edit' "$content_index_file" "$TEMP_DIR/content-index-[$i][$j]-edited.yml" \
-                ".\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\"=\"$index_file_next_md5_hash\"" && \
+                ".\"content-index\".entries[0].dirs[$i].files[$j].size=$index_file_prev_updated_size" \
+                ".\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\"=\"$index_file_prev_updated_md5_hash\"" && \
                mv -Tf "$TEMP_DIR/content-index-[$i][$j]-edited.yml" "$content_index_file"
            else
              yq_edit 'content-index' 'edit' "$content_index_file" "$TEMP_DIR/content-index-[$i][$j]-edited.yml" \
-                ".\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\"=\"$index_file_next_md5_hash\"" && \
+                ".\"content-index\".entries[0].dirs[$i].files[$j].size=$index_file_prev_updated_size" \
+                ".\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\"=\"$index_file_prev_updated_md5_hash\"" && \
                yq_diff "$TEMP_DIR/content-index-[$i][$j]-edited.yml" "$content_index_file" "$TEMP_DIR/content-index-[$i][$j]-edited.diff" && \
                yq_restore_edited_uniform_diff "$TEMP_DIR/content-index-[$i][$j]-edited.diff" "$TEMP_DIR/content-index-[$i][$j]-edited-restored.diff" && \
                yq_patch "$TEMP_DIR/content-index-[$i][$j]-edited.yml" "$TEMP_DIR/content-index-[$i][$j]-edited-restored.diff" "$TEMP_DIR/content-index-[$i][$j].yml" "$content_index_file"
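
Two small points about the probes used in the skip branch above: stat -c%s (GNU coreutils) prints the file size in bytes, and md5sum -b prints "<hash> *<name>"; when the name needs escaping, md5sum also prefixes the whole output line with a backslash, which is why the script strips a leading \ from the first captured word (see the linked unix.stackexchange.com question). A standalone sketch of the same extraction:

f=index.html                       # hypothetical file

size="$(stat -c%s "$f")"           # size in bytes

sum=( $(md5sum -b "$f") )          # word 0 = checksum, word 1 = "*<name>"
sum="${sum#\\}"                    # drop the backslash md5sum prepends for escaped names

echo "size=$size md5-hash=$sum"
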
@@ -336,17 +368,22 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
 
        continue
      else
-        echo "File is forced to download: prev-timestamp=\`$index_file_prev_timestamp\` sched-timestamp=\`$config_sched_next_update_timestamp_utc\` expired-delta=\`$index_file_expired_timestamp_delta\` existed=\`$is_index_file_prev_exist\` file=\`$index_dir/$index_file\`"
+        echo "File is forced to download:
+  expired-delta=\`$index_file_expired_timestamp_delta\` timestamp=\`$index_file_prev_timestamp -> $config_sched_next_update_timestamp_utc\`
+  size=\`$index_file_prev_updated_size\` file=\`$index_dir/$index_file\`"
      fi
    else
      (( stats_expired_inc++ ))
 
-      echo "File is expired: prev-timestamp=\`$index_file_prev_timestamp\` sched-timestamp=\`$config_sched_next_update_timestamp_utc\` expired-delta=\`$index_file_expired_timestamp_delta\` existed=\`$is_index_file_prev_exist\` file=\`$index_dir/$index_file\`"
+      echo "File is expired:
+  expired-delta=\`$index_file_expired_timestamp_delta\` timestamp=\`$index_file_prev_timestamp -> $config_sched_next_update_timestamp_utc\`
+  size=\`$index_file_prev_updated_size\` file=\`$index_dir/$index_file\`"
    fi
 
    if (( is_index_file_prev_exist )); then
      # always reread from existing file
-      index_file_prev_md5_hash=$index_file_next_md5_hash
+      index_file_prev_size="$index_file_prev_updated_size"
+      index_file_prev_md5_hash="$index_file_prev_updated_md5_hash"
    fi
 
    [[ ! -d "$TEMP_DIR/content/$index_dir" ]] && mkdir -p "$TEMP_DIR/content/$index_dir"
@@ -360,7 +397,7 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
    #   The `sed` has to be used to ignore blank lines by replacing `CR` with `LF`.
    #   This is required to uniformly parse the curl output in both verbose and non verbose modes.
    #
-    if eval curl $curl_flags -o "\$TEMP_DIR/content/\$index_dir/\$index_file" "\$config_query_url" 2>&1 | tee "$TEMP_DIR/curl_stderr/$index_dir/$index_file" | sed -E 's/\r([^\n])/\n\1/g' | grep -P '^(?: [% ] |(?: | \d|\d\d)\d |[<>] )'; then
+    if eval curl $curl_flags -o '"$TEMP_DIR/content/$index_dir/$index_file"' '"$config_query_url"' 2>&1 | tee "$TEMP_DIR/curl_stderr/$index_dir/$index_file" | sed -E 's/\r([^\n])/\n\1/g' | grep -P '^(?: [% ] |(?: | \d|\d\d)\d |[<>] )'; then
      (( stats_downloaded_inc++ ))
 
      echo '---'
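
About the curl line change above: both the old \$VAR form and the new '"$VAR"' form postpone expansion to eval's second parsing pass, but the new form hands eval a literally double-quoted word, so an output path or URL containing spaces is kept as one argument instead of being re-split. A reduced illustration with a hypothetical helper and values:

show_args() { printf '<%s> ' "$@"; echo; }   # hypothetical helper to show word boundaries
out_dir='my downloads'                        # value with a space

# old form: eval expands an unquoted $out_dir, which word-splits
eval show_args -o "\$out_dir/page.html"
# -> <-o> <my> <downloads/page.html>

# new form: eval re-parses a double-quoted word, which stays intact
eval show_args -o '"$out_dir/page.html"'
# -> <-o> <my downloads/page.html>
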
@@ -382,8 +419,10 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
 
      continue
    fi
 
+    index_file_next_size="$(stat -c%s "$TEMP_DIR/content/$index_dir/$index_file")"
+
    # check on empty
-    if [[ ! -s "$TEMP_DIR/content/$index_dir/$index_file" ]]; then
+    if (( ! index_file_next_size )); then
      if [[ -s "$TEMP_DIR/curl_stderr/$index_dir/$index_file" ]]; then
        echo "$(< "$TEMP_DIR/curl_stderr/$index_dir/$index_file")"
        echo '---'
@@ -418,8 +457,12 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
    (( stats_changed_inc++ ))
 
    gh_print_notice_and_write_to_changelog_text_ln \
-      "changed: $index_dir/$index_file: md5-hash=\`$index_file_next_md5_hash\` existed=\`$is_index_file_prev_exist\` sched-timestamp=\`$config_sched_next_update_timestamp_utc\` prev-timestamp=\`$index_file_prev_timestamp\` expired-delta=\`$index_file_expired_timestamp_delta\` prev-md5-hash=\`$index_file_prev_md5_hash\`" \
-      "* changed: $index_dir/$index_file: md5-hash=\`$index_file_next_md5_hash\` existed=\`$is_index_file_prev_exist\` sched-timestamp=\`$config_sched_next_update_timestamp_utc\` prev-timestamp=\`$index_file_prev_timestamp\` expired-delta=\`$index_file_expired_timestamp_delta\` prev-md5-hash=\`$index_file_prev_md5_hash\`"
+      "changed: $index_dir/$index_file:
+  size=\`$index_file_prev_size -> $index_file_next_size\` md5-hash=\`$index_file_prev_md5_hash -> $index_file_next_md5_hash\`
+  expired-delta=\`$index_file_expired_timestamp_delta\` timestamp=\`$index_file_prev_timestamp -> $config_sched_next_update_timestamp_utc\`" \
+      "* changed: $index_dir/$index_file:
+  size=\`$index_file_prev_size -> $index_file_next_size\` md5-hash=\`$index_file_prev_md5_hash -> $index_file_next_md5_hash\`
+  expired-delta=\`$index_file_expired_timestamp_delta\` timestamp=\`$index_file_prev_timestamp -> $config_sched_next_update_timestamp_utc\`"
 
    [[ ! -d "$index_dir" ]] && mkdir -p "$index_dir"
 
@@ -433,12 +476,14 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
      if (( DISABLE_YAML_EDIT_FORMAT_RESTORE_BY_DIFF_MERGE_WORKAROUND )); then
        yq_edit 'content-index' 'edit' "$content_index_file" "$TEMP_DIR/content-index-[$i][$j]-edited.yml" \
          ".\"content-index\".entries[0].dirs[$i].files[$j].\"queried-url\"=\"$config_query_url\"" \
+          ".\"content-index\".entries[0].dirs[$i].files[$j].size=$index_file_next_size" \
          ".\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\"=\"$index_file_next_md5_hash\"" \
          ".\"content-index\".entries[0].dirs[$i].files[$j].timestamp=\"$index_file_next_timestamp\"" && \
          mv -Tf "$TEMP_DIR/content-index-[$i][$j]-edited.yml" "$content_index_file"
      else
        yq_edit 'content-index' 'edit' "$content_index_file" "$TEMP_DIR/content-index-[$i][$j]-edited.yml" \
          ".\"content-index\".entries[0].dirs[$i].files[$j].\"queried-url\"=\"$config_query_url\"" \
+          ".\"content-index\".entries[0].dirs[$i].files[$j].size=$index_file_next_size" \
          ".\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\"=\"$index_file_next_md5_hash\"" \
          ".\"content-index\".entries[0].dirs[$i].files[$j].timestamp=\"$index_file_next_timestamp\"" && \
          yq_diff "$TEMP_DIR/content-index-[$i][$j]-edited.yml" "$content_index_file" "$TEMP_DIR/content-index-[$i][$j]-edited.diff" && \
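
The yq_edit / yq_diff / yq_restore_edited_uniform_diff / yq_patch chain above is this action's own workaround for yq reformatting every file it rewrites; the hunk only threads the new size field through it. Stripped of that workaround, the underlying update is just an in-place assignment of a few scalar fields; a rough equivalent, assuming the mikefarah yq v4 CLI, might look like:

# sketch only: plain in-place edit without the format-restoring diff/patch step
yq -i "
  .\"content-index\".entries[0].dirs[$i].files[$j].\"queried-url\" = \"$config_query_url\" |
  .\"content-index\".entries[0].dirs[$i].files[$j].size = $index_file_next_size |
  .\"content-index\".entries[0].dirs[$i].files[$j].\"md5-hash\" = \"$index_file_next_md5_hash\" |
  .\"content-index\".entries[0].dirs[$i].files[$j].timestamp = \"$index_file_next_timestamp\"
" "$content_index_file"
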
@@ -453,6 +498,9 @@ for i in $("${YQ_CMDLINE_READ[@]}" '."content-config".entries[0].dirs|keys|.[]'
  done
 done
 
+# remove grouping
+end_print_annotation_group_handler
+
 content_index_file_next_md5_hash=( $(md5sum -b "$content_index_file") )
 
 # drop the leading backslash: https://unix.stackexchange.com/questions/424628/md5sum-prepends-to-the-checksum