#!/usr/bin/env bash
# (c) 2004-2025 George Georgalis unlimited use with this notice
#
# Sundry functions, and commensurate alias, env.
#
# For bash compatible shells
#
# https://github.com/georgalis/pub/blob/master/sub/fn.bash
_help_skel() {
cat 1>&2 <<'eof'
>>>---
eval "$(curl -fsSL --insecure \
https://raw.githubusercontent.com/georgalis/pub/master/skel/.profile)"
export -f devnul stderr chkstd chkwrn logwrn chkerr logerr chktrue \
chkexit logexit siffx validfn
---<<<
eof
}
# one confusion at a time, fn.bash from ~/.profile.local, eg
# siffx "$HOME/sub/fn.bash" || { return 1 ; exit 2 ;}
declare -f chktrue >/dev/null || { _help_skel ; return 2 ; exit 3 ;}
[ "${SHELL##*/}" = "bash" ] && { # alias, to restore login env, iff no active jobs.
alias _env='tput sgr0 ; chkerr "$(jobs -l)" \
&& exec env -i TERM="$TERM" COLORTERM="$COLORTERM" \
SHELL="$SHELL" HOME="$HOME" LOGNAME="$LOGNAME" USER="$USER" \
SSH_AGENT_PID="$SSH_AGENT_PID" SSH_AUTH_SOCK="$SSH_AUTH_SOCK" \
SSH_AGENT_ENV="$SSH_AGENT_ENV" \
verb="$verb" verb1="$verb1" verb2="$verb2" \
'"${SHELL} -l"
alias _env_verb='tput sgr0 ; chkerr "$(jobs -l)" \
&& exec env -i TERM="$TERM" COLORTERM="$COLORTERM" \
SHELL="$SHELL" HOME="$HOME" LOGNAME="$LOGNAME" USER="$USER" \
SSH_AGENT_PID="$SSH_AGENT_PID" SSH_AUTH_SOCK="$SSH_AUTH_SOCK" \
SSH_AGENT_ENV="$SSH_AGENT_ENV" \
verb="chkwrn" verb1="chkwrn" verb2="chkwrn" \
'"${SHELL} -l"
alias _env_noverb='tput sgr0 ; chkerr "$(jobs -l)" \
&& exec env -i TERM="$TERM" COLORTERM="$COLORTERM" \
SHELL="$SHELL" HOME="$HOME" LOGNAME="$LOGNAME" USER="$USER" \
SSH_AGENT_PID="$SSH_AGENT_PID" SSH_AUTH_SOCK="$SSH_AUTH_SOCK" \
SSH_AGENT_ENV="$SSH_AGENT_ENV" \
'"${SHELL} -l"
}
export bash_path="$(which bash)"
# if running bash, and bash_path is different, switch, iff no active jobs.
ps | grep "^[ ]*$$ " | grep bash >/dev/null 2>&1 \
&& { test -x $bash_path \
&& { expr "$("$bash_path" --version)" \
: "GNU bash, version ${BASH_VERSINFO[0]}\.${BASH_VERSINFO[1]}\.${BASH_VERSINFO[2]}(${BASH_VERSINFO[3]})-${BASH_VERSINFO[4]} (${BASH_VERSINFO[5]})" >/dev/null \
|| { tput sgr0 ; chkerr "$(jobs -l)" \
&& exec env -i TERM="$TERM" COLORTERM="$COLORTERM" \
SHELL="${bash_path}" HOME="$HOME" LOGNAME="$LOGNAME" USER="$USER" \
SSH_AGENT_PID="$SSH_AGENT_PID" SSH_AUTH_SOCK="$SSH_AUTH_SOCK" \
SSH_AGENT_ENV="$SSH_AGENT_ENV" \
verb="$verb" verb1="$verb1" verb2="$verb2" \
"$bash_path" -l ;} # replace, BASH_VERSINFO doesn't match
} && { echo "<>< $bash_path ${BASH_VERSINFO[0]}.${BASH_VERSINFO[1]}.${BASH_VERSINFO[2]}(${BASH_VERSINFO[3]})-${BASH_VERSINFO[4]}" ;} \
|| return 1 # exec failed...
} || true # not bash, OR bash_path unavailable OR same version
# if placed earlier we would normally see it twice (before and after the exec)...
uptime
validex () { #:> validate executable, compare unit hash vs operation env hash
[ "$1" ] || {
cat 1>&2 <<-EOF
#:: $FUNCNAME {executable} ; returns {executable} {hash}
#:: $FUNCNAME {executable} {hash} ; return no error, if hash match
#:: the former is intended to provide data for the latter
#:: env hashex= to set the hashing function, other than the cksum default
EOF
return 1 ;}
ps | grep "^[ ]*$$ " | grep bash >/dev/null 2>&1 || { echo ">>> $0 : Not bash shell (62af847c) <<<" >&2 ; return 1 ;}
local exin="$(sed -e 's/#.*//' -e 's/^[ ]*//' -e 's/[ ]*$//' -e '/^$/d' <<<"$1")"
[ "$exin" ] || return 0 # drop comments
which "$exin" >/dev/null || { chkwrn "$FUNCNAME : executable not in PATH, '$exin' (643d9a87)" ; return 1 ;}
local _hashex=''
[ "$hashex" ] || { _hashex () { cat $(which "$1") | printf "%s %08x %08x\n" "$1" $(cksum) ;} && _hashex="_hashex" ;}
[ "$_hashex" ] || _hashex="$hashex" # for env sanity... use crypto hash for security...
shift || true
local sum="$exin $*"
local check="$( "$_hashex" "$exin" )"
[ "$*" ] || { echo "$check" ; return 0 ;} # provide hash data if none given to check
[ "$sum" = "$check" ] || { # report hash data discrepancies on failed check
cat 1>&2 <<-EOF
>>>---
$FUNCNAME error :
unit:'$sum'
env:'$check'
<<<---
EOF
return 1 ;}
} # validex 64399fcb 20230414 1147 Fri 14 Apr PDT
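# eg (illustrative; names and hashes are hypothetical, per the help text above):
#   validex jq                      # emits "jq <cksum> <length>" for later checks
#   validex jq 1a2b3c4d 0001f4a0    # silent success only if the PATH copy still hashes the same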
validfn () { #:> validate function, compare unit hash vs operation env hash
[ "$1" ] || {
cat 1>&2 <<-EOF
#:: $FUNCNAME {function} ; returns {function-name} {hash}
#:: $FUNCNAME {function} {hash} ; return no error, if hash match
#:: the former is intended to provide data for the latter
#:: env hashfn= to set the hashing function, default cksum formatted "%s %08x %08x\n"
EOF
return 1 ;}
ps | grep "^[ ]*$$ " | grep bash >/dev/null 2>&1 || { echo ">>> $0 : Not bash shell (62af847c) <<<" >&2 ; return 1 ;}
local _hashfn=''
[ "$hashfn" ] || { _hashfn () { declare -f "$1" | printf "%s %08x %08x\n" "$1" $(cksum) ;} && _hashfn="_hashfn" ;}
[ "$_hashfn" ] || _hashfn="$hashfn" # for env sanity... use crypto hash for security...
local fn="$(sed -e 's/#.*//' -e 's/^[ ]*//' -e 's/[ ]*$//' -e '/^$/d' <<<"$1")"
[ "$fn" ] || return 0 # drop comments
shift || true
local sum="$fn $*"
local check="$( "$_hashfn" "$fn" )"
[ "$*" ] || { echo "$check" ; return 0 ;} # provide hash data if none given to check
[ "$sum" = "$check" ] || { # report hash data discrepancies on failed check
cat 1>&2 <<-EOF
>>>---
$FUNCNAME error :
unit:'$sum'
env:'$check'
<<<---
EOF
return 1 ;}
} # validfn
# Now that validfn is defined, run the framework on expected functions...
#
# eg first, generate hashes of known functions...
# for f in devnul stderr chkstd chkwrn logwrn chkerr logerr chktrue chkexit logexit siffx validfn ; do validfn $f ; done
#
# then run validfn on that data to report if the functions have ever changed
# print help if the unit hash does not match the hash from the env (insecure)
test "$(declare -f validfn 2>/dev/null)" || { echo "$0 : validfn not defined (6542c8ca)" 1>&2 ; _help_skel ; return 1 ;}
while IFS= read a ; do
validfn $a && true || { echo "validfn error : $a (6542c8d6)" 1>&2 ; _help_skel ; break 1 ;}
done <<EOF
devnul 216e1370 0000001d
stderr 7ccc5704 00000037
chkstd ee4aa465 00000032
chkwrn 2683d3d3 0000005c
logwrn f279f00e 0000005f
chkerr 4f18299d 0000005b
logerr 2db98372 0000005e
chktrue 1f11f91d 0000005c
chkexit e6d9b430 0000005a
logexit 235b98c9 0000005d
siffx c20a9040 000002f7
validfn 6fcde5cc 0000046d
EOF
vfn () { #:> validate function against shake256 xoflen hash, or generate hash
# rev 677ab05a-20250108_132329 ./sub/fn.bash
grep bash >/dev/null 2>&1 < <(grep "^[ ]*$$ " < <(ps)) || { echo ">>> $0 : Not bash shell (677d0f41) <<<" >&2 ; return 1 ;}
# if arg1 ${*:-} ; help
[ -z "$1" -o "$1" = '-h' -o "$1" = '--help' ] && {
cat 1>&2 <<-EOF
:: $FUNCNAME FUNCTION VALIDATOR
:; $FUNCNAME {function} ; returns {function-name} {hash}
:: $FUNCNAME {function} {hash} ; return no error, if hash match
:: the former is intended to provide data for the latter
:: env xoflen sets shake256 hash length, else xoflen determined from input
:: xoflen=5 sets a 40 bit hash (8x), vs a default 256 bit hash, xoflen=32
:: a short bit test may be used on a long hash, max hash 512 bit, xoflen=64
EOF
return 0 ;} || true
local fn= hash= _xoflen= fndef= check=
# substitute non-alphanumeric/underscore chars with underscore on read args
read -r fn hash <<<"${*//[^[:alnum:]_]/_}"
# if xoflen; check xoflen digit and range && _xoflen=xoflen || "xoflen not null, and out of range 3 <= digit <= 64"
[ -n "${xoflen:-}" ] && { { [[ "${xoflen:-}" =~ [[:digit:]]+ ]] && [ "$xoflen" -ge 3 -a "$xoflen" -le 64 ] && _xoflen="$xoflen" ;} \
|| { echo ">>> $0 : xoflen out of range; 3 <= '$xoflen' <= 64 (677d8f59) <<<" >&2 ; return 1 ;} ;}
# if xoflen and hash; check 2*xoflen <= hash-len || "xoflen exceeds available hash"
[ "$xoflen" -a "$hash" ] && { [ $((2*${xoflen})) -le ${#hash} ] \
|| { echo ">>> $0 : 'xoflen=$xoflen' exceeds available hash bits '$((${#hash}*8))' xoflen=$((${#hash}/2)) (677d93ab) <<<" >&2 ; return 1 ;} ;}
# if xoflen and no-hash; generate a hash and return, later, first prioritize check performance
# read fndef; define hashfn, with default if no _xoflen; read check
read -rd '' fndef < <(declare -f "$fn" || { echo ">>> $0 : function not found '$fn' (677df620) <<<" >&2 ; return 1 ;}) || true
read check < <(awk '{print $2}' < <(openssl shake256 -hex -xoflen "${_xoflen:-32}" <<<"$fndef"))
[ "${hash:0:${#check}}" = "$check" ] && return 0 # truncate hash to check length, exit success on match
[ "$hash" -a "$check" ] && { printf '>>>---\n%s check error: %s\n unit: %s\n env: %s\n<<<---\n' "$FUNCNAME" "$fn" "$hash" "$check" 1>&2 ; return 1 ;}
[ "$hash" ] || { echo "$fn $check" ; return 0 ;}
echo ">>> $0 : internal error (677da024) <<<" >&2
return 1
} # vfn 677da1f9 20250107_135153; fnhash Jul 28, 2023; validfn Feb 8, 2020 sub/func.bash
# now that vfn is defined, run the validator on dep functions from .profile, and
# raise help on err; first, generate reference xoflen=48 hashes of functions...
# for f in devnul stderr chkstd chkwrn logwrn chkerr logerr chktrue chkexit logexit siffx validfn vfn ; do xoflen=48 vfn $f ; done
# then run vfn with min 24-bit hash (xoflen=3) against the reference 384-bit
# hash data, to lightweight qualify the functions
declare -f vfn >/dev/null 2>&1 || { echo "$0 : vfn not defined (677e40bc)" 1>&2 ; _help_skel ; return 1 ;}
while IFS= read fn hash ; do xoflen=3 vfn $fn $hash || { echo "env qualify error (677e4110)" 1>&2 ; _help_skel ; break 1 ;} ; done <<EOF
devnul 0eb7cdd2bfb59cd4e2743c0c8d22db1b6b711dc5f70eafc51d841d67aeb850adb5f24f6561829a1c55d3fe9a62a89ceb
stderr e010b0e704f67ee4d5fc8227d32030bfa06e60be9a49daf0b4c91c6eee9671bc2a17a3d517b8da4e20b7946ea302c130
chkstd 66c5e827c43c8291eb471d1bb61067f6323b2cf3fc5fd9f56f9b4b8516a9e574f44d4c0d614853512894ca651af9caa8
chkwrn 3acc570671ddfd9b5c0eeca64a2c7541b4559d9c3e1edae841c2317bba84a9a55c90455b86deafaf32d592a8f48c5726
logwrn 1775beeee82daa381b314944b9a1963381c9291bbc9884ef27ce99cb762a80a7bb8f5c0d5e1c9586b6a12f15a919b150
chkerr 50195549cedb329654b8cd279d5e471a536488ebfd3045b4589d05e20c0072faecf4b89566df11144369f437821a27d5
logerr 8420e3638ccde6b2c86820d4460e059edcfcb00e2f3ba065e5270bf57e66abce058c88ea95168f3345539ea536abad79
chktrue 6053b8cfa998834844f9dd687af4753b96d2f2e14cf6c51c22dac7165daaf2405e4637bd82096c81165e54728d1d5d3b
chkexit 95f639509bf0fd473fb5188531b7138201ce2691b2ca8744c535eb764ab756abca5b5db2ef5d88c9924ad33804960e87
logexit 669f2138af0e8ec67d469796abd822cec889640de3d529fa8ff5af876d30fcdf56a4a6d77baba81c951fe4d33d86ed6d
siffx faf13db46a77326a019ca42325f2d1b4ab3c410c39bc36f3294baf2da85d53f11af2a1738d055eaaaee78333654a8c3e
validfn 4240a107e4f699a97fde04d53403dbb9e4a06452fe72c2b5781258ea3a242d288faac045458f35efc61cecde3cbefc34
vfn fbd426ba997831f20333fea08bd66ceb4a92cc264ce1e9162521d0df83b5eb645122902e6d4a800c15c7d56c8599ceb1
EOF
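# eg (illustrative, hash value hypothetical): generate a long reference hash once,
# then spot-check it later with a short xoflen, as the help text above allows:
#   xoflen=48 vfn vfn                        # -> "vfn <96 hex chars>"
#   xoflen=3  vfn vfn <that-96-char-hash>    # 24-bit check against the longer reference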
fnhash () { # gen validfn data from env and fn names in file (arg1), for repo commit comments
# search for expected hash from repo log and find matching function revision
local f="$1" a=''
test -e "$f" || { chkwrn "${FUNCNAME}: no file '$f' (6542c8b8)" && return 0 || return $? ;}
# no fn match no fail...
# helpful for 'git commit -m "bugfix $(fnhash file)" file'
printf "\n\n%s\n" "# $(git rev-parse --show-prefix "$f" | tr -d '\n' ; echo)"
grep '^[_[:alpha:]][_[:alnum:]]*[ ]*() ' "$f" | sed 's/() .*//' \
| sort -u | while read a ; do validfn "$a" ; done
} # fnhash
# fortify the shell with git alias
alias gstatus='git status --short'
alias gls='git ls-files'
alias gdf='git diff --name-only'
alias gdiff='git diff --minimal -U0'
alias gadd='git add'
alias gcom='git commit'
alias gamend='git commit --amend' # --no-edit may be preferred; --force-with-lease belongs on the push, not the commit
alias gpush='git push'
alias gpull='git pull'
alias gbr='git branch'
alias gco='git checkout'
#alias gmv='git mv' # it can get messy, do it manually
alias glog='git log'
alias gref='git reflog'
alias grst='git reset HEAD'
# RESTORE FILES # git checkout -- {opt-file-spec}
# UNDO ADD # git reset {opt-file-spec}
# SEARCH COMMITS # git log -G'{regex}' --full-history --all
#=======================================================
# https://git-scm.com/book/en/v2/Git-Tools-Reset-Demystified
# Commit hash, at least four characters and unambiguous beginning
#
# Three Trees
# HEAD : Last repo commit, next parent
# Index : Local proposed, next repo commit
# Working : Sandbox
# HEAD Index Workdir Safe?
#==== Repo Commit ===========================================
# reset --soft [commit] REF - - YES
# reset [commit] REF YES - YES
# reset --hard [commit] REF YES YES -
# checkout <commit> HEAD YES YES YES
#==== File =============================================
# reset [commit] <paths> - YES - YES
# checkout [commit] <paths> - YES YES -
#=======================================================
# https://git-scm.com/docs/git-reset#_discussion
# https://git-scm.com/docs/giteveryday
# https://git-scm.com/book/en/v2/Git-Basics-Undoing-Things
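# worked examples of the table above (standard git behavior, for quick reference):
#   git reset --soft HEAD~1        # undo last commit, keep index and working tree
#   git reset HEAD~1               # undo last commit and its staging, keep working tree
#   git checkout -- {file-spec}    # discard working tree edits (see RESTORE FILES above)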
#
# git status # generic command
# gstatus # alias for git short form status of $@
gst () { # short form status of $@ (or pwd repo), sorted
git status --short $@ | sed -e 's/^ /_/' -e 's/^\(.\) /\1_/' | sort ;}
#git status --short $@ | awk '{s=$1; $1=""; sub(/[ ]* /,//,$2); printf "%-2s%s\n",s,$0}' | sort ;}
gsta () { # git short form status of all repos below $@ (or pwd repo), sorted
local start=$@
[ "$start" ] || start='.'
find $start -name .git -type d | while IFS= read a ; do
( cd "${a%/*}" ; gst ) | awk -v a="${a%/*}/./" '{printf "%-2s",$1; $1=""; $2=a$2; printf "%s\n",$0}'
done | sort ;}
gstat () { # find uncommitted changes to all repos below $@ (or current repo), sorted by time
# reports files with <space> in name as irregular... ("read a" doesn't evaluate quotes inserted by git)
gsta $@ | sed 's/^...//' | while IFS= read a ; do ckstat "$a" ; done \
| sort -k5 | awk -F "\t" '{print $3}' ;}
gcfg () { # report all the git config and where it comes from, repo dir may be specified as arg1
local a= b= d="$1" e="$OLDPWD" p=
[ "$d" ] || d='.'
[ -d "$d" ] || { chkerr "$FUNCNAME : '$d' not a dir" ; return 1 ;}
read p < <(pwd -P)
cd "$d" || { chkerr "$FUNCNAME : cannot cd to '$d'" ; return 1 ;}
read b < <(git rev-parse --show-toplevel)
git config --list | sed -e 's/=.*//' | sort -u \
| while read a; do git config --show-origin --get $a \
| awk -va="$a" -vb="$b" '{sub(/file:.git/,b"/.git");sub(/^file:/,"");print $1" "a"="$2}'
done | sort
cd "$e" ; cd "$p" # restore the working dir and the old working dir
} # 665104f8-20240524_142150
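# eg (illustrative, path hypothetical): gcfg ~/src/pub
#   prints one "<origin-file> key=value" line per setting, sorted, so repo-local
#   overrides of global/system config are easy to spot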
which ci >/dev/null 2>&1 && {
_rcs () { # local rcs function
# unfortunately rcs predates modern documentation...
# this wrapper handles basic "capture a file revision"
# _rcs {filename} "{message}"
#
# Show changes since last checkin revision
# rcsdiff {filename}
#
# Review revision information for filename1:
# rlog filename1
#
# Restoring revision2 of filename1 to filename3
# co -p -r"revision2" "filename1" > "filename3"
#
# rlog Options
# -L : Ignore RCS files with no locks set
# -R : Print only the name of the RCS file
# -h : Print only the header of the RCS file
# -t : Print the descriptive text
# -b : Print information about all branches
# -d[date] : Select revisions within date range
# -l[login] : Select revisions checked in by specified user
# -r[rev] : Select revisions based on revision number
# -s[state] : Select revisions based on state
# -w[login] : Select revisions checked in by specified user
# co (Checkout) Options
# -l : Lock the checked-out revision
# -u : Unlock the checked-out revision
# -p : Print the revision to standard output
# -q : Quiet mode; suppress normal output
# -f[rev] : Force overwrite of working file
# -r[rev] : Retrieve specified revision
# -M : Set the modification time to the date of the retrieved revision
# -ksubst : Specify keyword substitution mode
# -d[date] : Retrieve the latest revision no later than date
# -s[state] : Retrieve revision with specified state
# -w[login] : Retrieve revision checked in by specified user
# ci (Checkin) Options
# -l : Locks the file after check-in. This allows you to continue editing.
# -u : Unlocks the file after check-in. This is the default behavior.
# -r[rev] : Specifies the revision number to assign to the new revision.
# -f[rev] : Forces a check-in even if there are no changes.
# -k : Suppresses expansion of keywords in the file.
# -m[msg] : Specifies a log message for the revision.
# -t[txtfile] : Uses the contents of txtfile as the descriptive text.
# -q : Runs in quiet mode, suppressing normal output.
# -d[date] : Uses the specified date for the check-in date.
# -w[login] : Uses the specified login name as the author.
# -nname : Assigns a symbolic name to the revision.
# Additional RCS (Revision Control System) Commands
# rcs : Creates RCS files or changes their attributes
# rcsdiff : Compares RCS revisions
# rcsmerge : Merges RCS revisions
# ident : Identifies RCS keyword strings in files
# rcsclean : Removes working files that are unchanged
# rcsbind : Associates RCS revisions with symbolic names
# rcsfreeze : Freezes a configuration of versions
# merge : Three-way file merge (not RCS-specific, but often used with RCS)
# rcsinfo : Extracts RCS information
# rcsparse : Parses RCS files
# rcsrev : Extracts RCS revision information
local invoke="$FUNCNAME $*"
local orig_mode= os= ; read os < <(uname)
test -f "$1" || { chkerr "$invoke : cannot use '$1' (66833375)"; return 1 ;}
case "$os" in
Linux) read orig_mode < <(stat -c %a "$1");;
Darwin) read orig_mode < <(stat -f "%Lp" "$1");;
esac # best to test the result, since the OS could be something else...
[[ ${#orig_mode} == 3 || ${#orig_mode} == 4 ]] && [[ $orig_mode =~ ^[0-7]+$ ]] \
|| { chkerr "$invoke : mode probe, '$orig_mode' not sane for file '$orig_mode' (66837150)"; return 1 ;}
ci -m"$invoke" -l -t-$FUNCNAME -q "$1" || { chkerr "$invoke : unable to check-in '$1' (6683351a)"; return 1 ;}
chmod "$orig_mode" "$1" # the ci -l will reset modes
} # 66834066-20240701_164844
} # only if rcs is installed
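# eg (illustrative): _rcs notes.txt "checkpoint before refactor"
#   checks the file in (the log message records the full invocation), keeps it
#   locked for further edits, and restores the mode that ci -l would otherwise reset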
## shell script fragments
# local infile inpath infilep
# infile="${f##*/}" # infile == basename f
# expr "$f" : ".*/" >/dev/null && inpath="${f%/*}" || inpath="." # inpath == dirname f
# infilep="$(cd "${inpath}" ; pwd -P)/${infile}" # infilep == realpath f
# name="$(sed 's/.[^.]*$//' <<<"$infile")" # name == infile w/o extension
# expr "$0" : ".*/" >/dev/null && cd "${0%/*}" # cd dirname $0
# find -E /mnt \( -regex '/mnt(/local|/%|/bak)' -prune \) -o -type d
# find -E /mnt \( -regex '/mnt(/local|/%|/bak)' -prune -type f \) -o -type f
tss () { # timestamp highres and pass through args
local a="$*"
[ "$(which tai64n)" -a "$(which tai64nlocal)" ] \
&& { set $(echo | tai64n | sed -e 's/^\(@4[0]*\)\([[:xdigit:]]\{8\}\)\([[:xdigit:]]\{8\}\)\(.*\)/\1\2\3\4 \2 \3/')
{ echo $2 $3 ; tai64nlocal <<<$1 | sed -e 's/-//g' -e 's/:\([^:]*\)$/ \1/' -e 's/.\{4\}$//' ;} | tr '\n' ' ' ;} \
|| { set $(date +%s | awk '{printf "@4%015x%08d %8x %08d\n",$1,0,$1,0}')
{ echo $2 $3 ; date -j -r $((0x$2)) "+%Y%m%d %H:%M %0S.00000" ;} | tr '\n' ' ' ;}
echo "$a" ;}
# 64c471f9 00000000 20230728 1857 13.00000
# 64c47234 17284a94 20230728 1858 02.38851
ts () { # timestamp lowres and pass through args
local a="$*"
[ "$(which tai64n)" -a "$(which tai64nlocal)" ] \
&& { set $(echo | tai64n | sed -e 's/^\(@4[0]*\)\([[:xdigit:]]\{8\}\)\([[:xdigit:]]\{8\}\)\(.*\)/\1\2\3\4 \2/')
{ echo $2 ; tai64nlocal <<<$1 | sed -e 's/-//g' -e 's/:\([^:]*\)$//'
date -j -r $((0x$2)) "+%a %e %b %Z" ;} | tr '\n' ' ' ;} \
|| { set $(date +%s | awk '{printf "@4%015x%08d %8x %08d\n",$1,0,$1,0}')
{ echo $2 ; date -j -r $((0x$2)) "+%Y%m%d %H:%M %a %e %b %Z" ;} | tr '\n' ' ' ;}
echo "$a" ;}
# 64c47437 20230728 1906 Fri 28 Jul PDT
# 64c47688 20230728 1916 Fri 28 Jul PDT
tj () { # journal timestamp, [tai sec]-yyyymmdd_hhmmss {args}
local a ; read a < <(tai64n <<<"$*")
sed -e 's/[-:]//g' -e 's/\.[^ ]*//' -e 's/ /_/' -e "s/^/${a:9:8}-/" < <(tai64nlocal <<<"$a")
} # 6625d27f-20240421_195901
tjj () { # journal timestamp [tai sec]-yyyymmdd_hhmmss (hh:mm PM Sun dd mth PDT) {args}"
local a ; read a < <(tai64n <<<"$*")
read b < <(sed -e 's/[-:]//g' -e 's/\..*//' < <(tai64nlocal <<<"$a") )
read b < <(date -j -f "%Y%m%d %H%M%S" "$b" "+(%I:%M %p %a %e %b %Z)")
sed -e 's/[-:]//g' -e 's/\.[^ ]*//' -e 's/ /_/' -e "s/^/${a:9:8}-/" -e "s/ / $b /" < <(tai64nlocal <<<"$a")
} # 6625d8a4-20240421_202514 tjj
which tmux >/dev/null 2>&1 && \
tmu () { # tmux intuitive wrapper
[ "$1" = "-h" -o "$1" = "--help" ] && {
echo 'List sessions, attach last, or create session 0,
exit with signal and list remaining sessions.
* Use "-s" or "--sessions" as arg for report of running sessions
* Use "-a" or "--attach" to attach to most recent active session
* Use "{name}" to create and/or attach to named session
* Default session (no args) is "0"'
return 0 ;}
_active_tmu() { local a=
tmux list-sessions -F '#{session_name} #{session_activity}' 2>/dev/null \
| column -t | sort -k2n \
| while IFS= read -r a ; do set $a ; echo -n "$1 " ; date -r $2 ; done \
| awk '{printf "%8s : %s %s %s %s %s %s\n",$1,$2,$3,$4,$5,$6,$7}' ;}
local sig= args=
[ "$@" ] && local args=$@
[ "$1" = "-a" -o "$1" = "--attach" -o -z "$args" ] \
&& args="$( _active_tmu | awk 'END {print $1}')"
[ "$1" = "-s" -o "$1" = "--sessions" ] \
&& { _active_tmu ; return $? ;}
[ "$args" ] || args=0
tmux new -A -s $args ; sig=$?
_active_tmu
return $sig
} || true # 6429e6a6 20230402 1333 Sun 2 Apr PDT
revargs () {
local a out
out="$1" ; shift || true
while test $# -gt 0 ; do out="$1 $out" ; shift || true ; done
echo "$out"
}
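# eg: revargs one two three   # -> "three two one"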
kwds () { # convert stdin to unique words (at least arg1 chars) sorted on length, to stdout
local c="$1"
[ "$c" ] || c=1
[ "$c" -ge 0 ] || c=0
tr -c '[:alpha:]' ' ' \
| tr '\ ' '\n' \
| tr '[:upper:]' '[:lower:]' \
| sort -ru \
| while read w; do [ "${#w}" -ge "$c" ] && echo "${#w} ${w}" ; done \
| sort -rn \
| awk '{print $2}' \
| tr '\n' '\ '
echo
} # kwds
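# eg (illustrative): kwds 4 <fn.bash   # unique lowercase words of 4+ letters, longest first, one line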
cf () { #:> on terminal output, fold long lines on words
[ -t 1 ] && {
local cols="$(tput cols)"
fold -s -w $cols
} || cat
} # cf, formerly catfold
ct () { #:> on terminal output, truncate lines to width
[ -t 1 ] && {
local cols
read cols < <(tput cols);
awk -v cols="$((cols))" 'length > cols{$0=substr($0,0,cols)""}1'
} || cat
} # ct, formerly cattrunc
_youtube_video_list () {
local id="$1" d="$2" xs=$(xs)
[ "$id" ] || read -p "youtube id: " id
[ "$id" ] || { chkerr "6542c9fc : no id? (6542ca44)" ; return 1 ;}
read id < <(sed "s/\([?&]\)si=................[&]*/\1/" <<<"$id") # squash trackers from url
[ "$d" ] || read -p "directory: " d
[ -d "$d" ] || d="$(pwd -P)"
[ -d "$d" ] || mkdir -p "$d" || { chkerr "$FUNCNAME : invalid dir '$d' (6542c62a)" ; return 1 ;}
[ "$ytdl" ] || ytdl="youtube-dl"
"$ytdl" --abort-on-error --yes-playlist \
--write-info-json --write-comments --write-sub --write-auto-sub --sub-langs "en,en-GB" --write-thumbnail \
--restrict-filenames --audio-quality 0 --format-sort "acodec:opus,acodec:m4a" \
--playlist-start 1 \
-o "$d/${xs}%(playlist_title)s/%(playlist_index)s,%(title)s-%(playlist_title)s-%(playlist_id)s-%(upload_date)s_^%(id)s.%(ext)s" $id
"$ytdl" --abort-on-error --yes-playlist \
--restrict-filenames --audio-quality 0 --format-sort "acodec:opus,acodec:m4a" --extract-audio --keep-video \
--playlist-start 1 \
-o "$d/${xs}%(playlist_title)s/%(playlist_index)s,%(title)s-%(playlist_title)s-%(playlist_id)s-%(upload_date)s_^%(id)s.%(ext)s" $id
} # _youtube_video_list 20220516
_yt_vid () { # ytdl wrapper functions for video
# Download video, audio, subtitles, metadata and generate yaml summary
# rev 677027f9 20241228_083153
local id="$1" d="${2:-}" ytdl=${ytdl:-yt-dlp}
local tmp_json= xs= existing= resp= json_path= count= base_name= acodec= media_file= audio_file=
[ "$id" ] || read -p "youtube id: " id
[ "$id" ] || { chkerr "$FUNCNAME : no id? (6542c9d2)" ; return 1 ;}
read id < <(sed -e 's/[?&]si=[[:alnum:]_-]\{16\}[&]*//' -e 's/\?$//' <<<"$id") # squash trackers from url
d="${d:-.}"
[ -d "$d" ] || mkdir -p "$d" || { chkerr "$FUNCNAME : invalid dir '$d' (6542c606)" ; return 1 ;}
mkdir -p "$d"/{@/{,meta},orig}
mkdir -p "$d/@/tmp/ytdl"
tmp_json="$(cd "$d/@/tmp/ytdl" && mktemp ytdl.json-XXXX)"
read xs < <(sed -e 's/^@4[0]*//' -e 's/[[:xdigit:]]\{8\} $//' < <(tai64n <<<'')) \
|| { chkerr "$FUNCNAME : failed to set xs (6674c2fd)" ; return 1 ;}
# Check for existing files using temp json metadata
{ $ytdl --dump-json --no-write-comments "$id" \
|| { chkerr "$FUNCNAME : failed to load json '$id' (676c4aad)" ; return 1 ;}
} >"$d/@/tmp/ytdl/$tmp_json"
read -d '' id acodec < <(jq -r '[.id, .acodec] | @tsv' "$d/@/tmp/ytdl/$tmp_json") || true
read -d '' existing < <(sort < <(find -E "$links" -name "*${id}*" \
\! -regex "$links/.*(tmp|0)/.*")) || true
[ "$existing" ] && { echo "$existing"
read -p "files found, continue (N/y) " resp
[ "$resp" = "y" ] || return 1 ;} || true
# Download video and audio with original format
# https://github.com/yt-dlp/yt-dlp?tab=readme-ov-file#format-selection
$ytdl --write-info-json --write-comments --write-sub --write-auto-sub \
--sub-langs "en,en-GB" --write-thumbnail --restrict-filenames \
-f bv*+ba/b --abort-on-error --no-playlist \
-o "$d/00${xs},%(title)s-%(upload_date)s_^%(id)s.%(ext)s" "$id"
# also breakout the audio
"$ytdl" --restrict-filenames \
--extract-audio --keep-video \
-f bv*+ba/b --abort-on-error --no-playlist \
-o "$d/00${xs},%(title)s-%(upload_date)s_^%(id)s.%(ext)s" $id
# Get downloaded json path and organize files
read -d '' json_path < <(find "$d/" -maxdepth 1 -name "*${id}*.json") || true
[ "$json_path" ] || { chkerr "$FUNCNAME : not found '$d/*${id}*json' (676c546a)" ; return 1 ;}
read count < <(wc -l <<<"$id")
[ "$count" = 1 ] || { chkerr "$FUNCNAME : unexpected json count '$count' for id '${id}' (676c5734)" ; return 1 ;}
# Move files to final locations
find "$d/" -maxdepth 1 -name "00${xs},*" \( -name "*.json" -o -name "*.webp" \
-o -name "*.jpg" -o -name "*.vtt" \) -exec mv {} "$d/@/meta/" \;
read -d '' media_file < <(find "$d/" -mindepth 1 -maxdepth 1 -name "00${xs},*${id}*") || true
[ "$media_file" ] || { chkerr "$FUNCNAME : media_file not found '$d/00${xs},*${id}*' (676c6dcb)" ; return 1 ;}
read count < <(wc -l <<<"$media_file")
[ "$count" = 1 ] || { chkerr "$FUNCNAME : unexpected media_file count '$count' for id '$d/00${xs},*${id}*' (676c6f29)" ; return 1 ;}
#audio_file="${media_file/%webm/$acodec}"
#[ "$media_file" = "$audio_file" ] || mv "${media_file}" "${audio_file}" # maybe webm will never show again... or not.
#read ext < <(sed -e '/^ Stream /!d' -e 's/.* Audio: //' -e 's/,.*//' < <(ffprobe "$audio_file" 2>&1 ))
ln -f "$audio_file" "$d/@/_^${id}.${acodec}"
mv -f "$audio_file" "$d/orig/"
# Generate yaml summary
_yt_json2txt "$d/@/meta/00${xs},"*".json" "$d/@/_^${id}.${acodec}" "$d"
} # _yt_vid _youtube 20220516
x_youtube_video () {
local id="$1" d="$2" xs=$(xs)
[ "$id" ] || read -p "youtube id: " id
[ "$id" ] || { chkerr "$FUNCNAME : no id? (6542c9fc)" ; return 1 ;}
[ "$d" ] || read -p "directory: " d
read id < <(sed "s/\([?&]\)si=................[&]*/\1/" <<<"$id") # squash trackers from url
#id=$(sed 's/si=.*//' <<<"$id") # squash trackers from url
[ -d "$d" ] || d="$(pwd -P)"
[ -d "$d" ] || mkdir -p "$d" || { chkerr "$FUNCNAME : invalid dir '$d' (6542c61e)" ; return 1 ;}
[ "$ytdl" ] || ytdl="youtube-dl"
local ytdl_vtt ytdl_json
chkwrn "capturing ytdl_vtt ytdl_json filenames from video/json download"
read ytdl_vtt ytdl_json < <(tr '\n' ' ' < <(sed -e '/^\[info\] Writing/!d' -e 's/.*: //' < <(# collect filenames from ytdl output
$ytdl --write-info-json --write-comments --write-sub --write-auto-sub --sub-langs "en,en-GB" \
--restrict-filenames --audio-quality 0 --format-sort "acodec:opus,acodec:m4a" \
--abort-on-error --no-playlist \
-o "$d/00${xs},%(title)s-%(upload_date)s_^%(id)s.%(ext)s" "$id" )))
chkwrn "processing ytdl_vtt to txt"
uniq < <(sed -e '/align:start position/d' -e 's/<[^>]*>//g' -e '/ --> /d' -e '/^ [ ]*$/d' -e '/^$/d' "$ytdl_vtt") >"${ytdl_vtt}.txt" \
|| { chkerr "$FUNCNAME : could not create '$d/${ytdl_vtt}.txt' (66fd682e)" ; return 1 ;} # write out vtt as txt
"$ytdl" \
--restrict-filenames --audio-quality 0 --format-sort "acodec:opus,acodec:m4a" --extract-audio --keep-video \
--abort-on-error --no-playlist \
-o "$d/00${xs},%(title)s-%(upload_date)s_^%(id)s.%(ext)s" $id
} # _youtube_video 20220516
_yt_list () { # ytdl wrapper function for playlist
# Download audio, subtitles, metadata and generate yaml summary
# rev 677027f9 20241228_083153
local id="$1" d="${2:-}" ytdl=${ytdl:-yt-dlp}
local tmp_json= xs= existing= resp= json_path= count= base_name= acodec= media_file= audio_file=
[ "$id" ] || read -p "youtube id: " id
[ "$id" ] || { chkerr "$FUNCNAME : no id? (6542c9d2)" ; return 1 ;}
read id < <(sed -e 's/[?&]si=[[:alnum:]_-]\{16\}[&]*//' -e 's/\?$//' <<<"$id") # squash trackers from url
d="${d:-.}"
[ -d "$d" ] || mkdir -p "$d" || { chkerr "$FUNCNAME : invalid dir '$d' (6542c606)" ; return 1 ;}
mkdir -p "$d/@/tmp/ytdl"
tmp_json="$(cd "$d/@/tmp/ytdl" && mktemp ytdl.json-XXXX)"
read xs < <(sed -e 's/^@4[0]*//' -e 's/[[:xdigit:]]\{8\} $//' < <(tai64n <<<'')) \
|| { chkerr "$FUNCNAME : failed to set xs (6674c2fd)" ; return 1 ;}
# Check for existing files using temp json metadata
{ $ytdl --dump-json --no-write-comments "$id" \
|| { chkerr "$FUNCNAME : failed to load json '$id' (676c4aad)" ; return 1 ;}
} >"$d/@/tmp/ytdl/$tmp_json"
read -d '' id acodec < <(jq -r '[.id, .acodec] | @tsv' "$d/@/tmp/ytdl/$tmp_json") || true
read -d '' existing < <(sort < <(find -E "$links" -name "*${id}*" \
\! -regex "$links/.*(tmp|0)/.*")) || true
[ "$existing" ] && { echo "$existing"
read -p "files found, continue (N/y) " resp
[ "$resp" = "y" ] || return 1 ;} || true
# Download content with original audio format
$ytdl --abort-on-error --yes-playlist \
--write-info-json --write-comments --write-sub --write-auto-sub \
--sub-langs "en,en-GB" --write-thumbnail --restrict-filenames \
-f bestaudio --extract-audio --abort-on-error --playlist-start 1 \
-o "$d/00${xs},%(playlist_title)s/%(playlist_index)s,%(title)s-%(playlist_title)s-%(upload_date)s_^%(id)s.%(ext)s" $id
# organize files manually...
find "$d/" -maxdepth 1 -name 00${xs},\*
} # _yt_list _youtube_list 20220516
_yt_txt () { # ytdl transcript wrapper
# Download audio, subtitles, metadata and generate yaml summary
# rev 677027f9 20241228_083153
local id="$1" d="${2:-}" ytdl=${ytdl:-yt-dlp}
local tmp_json= xs= existing= resp= json_path= count= base_name= acodec= media_file= audio_file=
[ "$id" ] || read -p "youtube id: " id
[ "$id" ] || { chkerr "$FUNCNAME : no id? (6542c9d2)" ; return 1 ;}
read id < <(sed -e 's/[?&]si=[[:alnum:]_-]\{16\}[&]*//' -e 's/\?$//' <<<"$id") # squash trackers from url
d="${d:-.}"
[ -d "$d" ] || mkdir -p "$d" || { chkerr "$FUNCNAME : invalid dir '$d' (6542c606)" ; return 1 ;}
mkdir -p "$d"/{@/{,meta},orig}
mkdir -p "$d/@/tmp/ytdl"
tmp_json="$(cd "$d/@/tmp/ytdl" && mktemp ytdl.json-XXXX)"
read xs < <(sed -e 's/^@4[0]*//' -e 's/[[:xdigit:]]\{8\} $//' < <(tai64n <<<'')) \
|| { chkerr "$FUNCNAME : failed to set xs (6674c2fd)" ; return 1 ;}
# Check for existing files using temp json metadata
{ $ytdl --dump-json --no-write-comments "$id" \
|| { chkerr "$FUNCNAME : failed to load json '$id' (676c4aad)" ; return 1 ;}
} >"$d/@/tmp/ytdl/$tmp_json"
read -d '' id acodec < <(jq -r '[.id, .acodec] | @tsv' "$d/@/tmp/ytdl/$tmp_json") || true
read -d '' existing < <(sort < <(find -E "$links" -name "*${id}*" \
\! -regex "$links/.*(tmp|0)/.*")) || true
[ "$existing" ] && { echo "$existing"
read -p "files found, continue (N/y) " resp
[ "$resp" = "y" ] || return 1 ;} || true
# Download content with original audio format
$ytdl --write-info-json --write-comments --write-sub --write-auto-sub \
--sub-langs "en,en-GB" --restrict-filenames --skip-download \
--abort-on-error --no-playlist \
-o "$d/00${xs},%(title)s-%(upload_date)s_^%(id)s.%(ext)s" "$id"
# Get downloaded json path and organize files
read -d '' json_path < <(find "$d/" -maxdepth 1 -name "*${id}*.json") || true
[ "$json_path" ] || { chkerr "$FUNCNAME : not found '$d/*${id}*json' (676c546a)" ; return 1 ;}
read count < <(wc -l <<<"$id")
[ "$count" = 1 ] || { chkerr "$FUNCNAME : unexpected json count '$count' for id '${id}' (676c5734)" ; return 1 ;}
return 1
# ~~~
$verb chkwrn "ytdl_vtt=$ytdl_vtt"
uniq < <(sed -e '/align:start position/d' -e 's/<[^>]*>//g' -e '/ --> /d' -e '/^ [ ]*$/d' -e '/^$/d' "$ytdl_vtt") >"${ytdl_vtt}.txt" \
&& mv "${ytdl_vtt}.txt" "$d" || { chkerr "$FUNCNAME : could not create '$d/${ytdl_vtt}.txt' (6674c795)" ; return 1 ;}
chktrue "$d/${ytdl_vtt##*/}.txt"
$verb chkwrn "ytdl_json=$ytdl_json"
yq -y 'del(.formats, .thumbnails, .thumbnail, .age_limit, ._format_sort_fields,
.automatic_captions, .playable_in_embed, .is_live, .was_live, .tbr,
.format, .format_id, .format_note, .protocol,
.width, .height, .resolution, .fps, .vcodec, .vbr, .aspect_ratio )
| del(.comments[]? | (._time_text, .author_thumbnail, .author_is_verified))' "$ytdl_json" >"${ytdl_json}.yml" \
&& mv "${ytdl_json}.yml" "$d" || { chkerr "$FUNCNAME : could not create '$d/${ytdl_json}.yml' (6674c7fe)" ; return 1 ;}
chktrue "$d/${ytdl_json##*/}.yml"
# Move files to final locations
find "$d/" -maxdepth 1 -name "00${xs},*" \( -name "*.json" -o -name "*.webp" \
-o -name "*.jpg" -o -name "*.vtt" \) -exec mv {} "$d/@/meta/" \;
# read -d '' media_file < <(find "$d" -mindepth 1 -maxdepth 1 -name "00${xs},*${id}*") || true
# [ -f "$media_file" ] || { chkerr "$FUNCNAME : media_file not found '$d/00${xs},*${id}*' (676c6dcb)" ; return 1 ;}
# read count < <(wc -l <<<"$media_file")
# [ "$count" = 1 ] || { chkerr "$FUNCNAME : unexpected media_file count '$count' for id '$d/00${xs},*${id}*' (676c6f29)" ; return 1 ;}
#audio_file="${media_file/%webm/$acodec}"
#[ "$media_file" = "$audio_file" ] || mv "${media_file}" "${audio_file}" # maybe webm will never show again... or not.
#read ext < <(sed -e '/^ Stream /!d' -e 's/.* Audio: //' -e 's/,.*//' < <(ffprobe "$audio_file" 2>&1 ))
chkerr was: ln -f "$audio_file" "$d/@/_^${id}.${acodec}"
chkerr was: mv -f "$audio_file" "$d/orig/"
# Generate yaml summary
_yt_json2txt "$d/@/meta/00${xs},"*".json" "$d/@/_^${id}.${acodec}" "$d"
} # _yt_txt _youtube_txt 66749f14-20240620_142842
_yt () { # ytdl wrapper functions
# Download audio, subtitles, metadata and generate yaml summary
# rev 677027f9 20241228_083153
local id="$1" d="${2:-}" ytdl=${ytdl:-yt-dlp}
local tmp_json= xs= existing= resp= json_path= count= base_name= acodec= media_file= audio_file=
[ "$id" ] || read -p "youtube id: " id
[ "$id" ] || { chkerr "$FUNCNAME : no id? (6542c9d2)" ; return 1 ;}
read id < <(sed -e 's/[?&]si=[[:alnum:]_-]\{16\}[&]*//' -e 's/\?$//' <<<"$id") # squash trackers from url
d="${d:-.}"
[ -d "$d" ] || mkdir -p "$d" || { chkerr "$FUNCNAME : invalid dir '$d' (6542c606)" ; return 1 ;}
mkdir -p "$d"/{@/{,meta},orig}
mkdir -p "$d/@/tmp/ytdl"
tmp_json="$(cd "$d/@/tmp/ytdl" && mktemp ytdl.json-XXXX)"
read xs < <(sed -e 's/^@4[0]*//' -e 's/[[:xdigit:]]\{8\} $//' < <(tai64n <<<'')) \
|| { chkerr "$FUNCNAME : failed to set xs (6674c2fd)" ; return 1 ;}
# Check for existing files using temp json metadata
{ $ytdl --dump-json --no-write-comments "$id" \
|| { chkerr "$FUNCNAME : failed to load json '$id' (676c4aad)" ; return 1 ;}
} >"$d/@/tmp/ytdl/$tmp_json"
read -d '' id acodec < <(jq -r '[.id, .acodec] | @tsv' "$d/@/tmp/ytdl/$tmp_json") || true
read -d '' existing < <(sort < <(find -E "$links" -name "*${id}*" \
\! -regex "$links/.*(tmp|0)/.*")) || true
[ "$existing" ] && { echo "$existing"
read -p "files found, continue (N/y) " resp
[ "$resp" = "y" ] || return 1 ;} || true
# Download content with original audio format
$ytdl --write-info-json --write-comments --write-sub --write-auto-sub \
--sub-langs "en,en-GB" --write-thumbnail --restrict-filenames \
-f bestaudio --extract-audio --abort-on-error --no-playlist \
-o "$d/00${xs},%(title)s-%(upload_date)s_^%(id)s.%(ext)s" "$id"
# Get downloaded json path and organize files
read -d '' json_path < <(find "$d/" -maxdepth 1 -name "*${id}*.json") || true
[ "$json_path" ] || { chkerr "$FUNCNAME : not found '$d/*${id}*json' (676c546a)" ; return 1 ;}
read count < <(wc -l <<<"$id")
[ "$count" = 1 ] || { chkerr "$FUNCNAME : unexpected json count '$count' for id '${id}' (676c5734)" ; return 1 ;}
# Move files to final locations
find "$d/" -maxdepth 1 -name "00${xs},*" \( -name "*.json" -o -name "*.webp" \
-o -name "*.jpg" -o -name "*.vtt" \) -exec mv {} "$d/@/meta/" \;
read -d '' media_file < <(find "$d/" -mindepth 1 -maxdepth 1 -name "00${xs},*${id}*") || true
[ -f "$media_file" ] || { chkerr "$FUNCNAME : media_file not found '$d/00${xs},*${id}*' (676c6dcb)" ; return 1 ;}
read count < <(wc -l <<<"$media_file")
[ "$count" = 1 ] || { chkerr "$FUNCNAME : unexpected media_file count '$count' for id '$d/00${xs},*${id}*' (676c6f29)" ; return 1 ;}
#audio_file="${media_file/%webm/$acodec}"
#[ "$media_file" = "$audio_file" ] || mv "${media_file}" "${audio_file}" # maybe webm will never show again... or not.
#read ext < <(sed -e '/^ Stream /!d' -e 's/.* Audio: //' -e 's/,.*//' < <(ffprobe "$audio_file" 2>&1 ))
#ln -f "$audio_file" "$d/@/_^${id}.${acodec}"
#mv -f "$audio_file" "$d/orig/"
chkwrn 67703fd0 media_file: ln -f "$media_file" "$d/@/_^${id}.${acodec}"
ln -f "$media_file" "$d/@/_^${id}.${acodec}"
chkwrn 67703fd0 media_file: mv -f "$media_file" "$d/orig/"
mv -f "$media_file" "$d/orig/"
# Generate yaml summary
_yt_json2txt "$d/@/meta/00${xs},"*".json" "$d/@/_^${id}.${acodec}" "$d"
} # _yt _youtube 20220516
#awk 'P=$0{printf "\n_a=%s\n_r=%s\n\n",P,P}' >>"${json_file}.txt~" <(
_yt_json2txt () { # fixup youtube .info.json to yaml txt and sort files
# rev 677ba227-20250106_012757
local json_file="$1" media_master="$2" txt_dir="${3:-.}" verb="${verb:-devnul}"
local json= id= file_ext= duration= title= fulltitle= chapters= description= comments= metadata=
[ -f "$json_file" ] || { chkerr "$FUNCNAME : json_file not found '$json_file' (676c648c)" ; return 1 ;}
[ -f "${json_file}.txt" ] && { chkerr "$FUNCNAME : exists '${json_file}.txt' (676c64d2)" ; return 1 ;}
[ -f "${txt_dir}/${json_file##*/}.txt" ] && { chkerr "$FUNCNAME : exists '${txt_dir}/${json_file##*/}.txt' (677b4682)" ; return 1 ;}
read -rd '' json <"$json_file" || true ; $verb json from "$json_file"
read -r id file_ext duration < <(jq -r '[.id, .ext, .duration_string] | @tsv' <<<"$json")
read -rd '' title < <(jq -r '.title' <<<"$json") || true ; $verb title
read -rd '' fulltitle < <(jq -r '.fulltitle' <<<"$json") || true ; $verb fulltitle
read -rd '' chapters < <(yq -ry -w10000 '.chapters[] | {ss: .start_time, to: .end_time, ooo: .title}' <<<"$json") || true ; $verb chapters
read -rd '' description < <(yq -r '.description' <<<"$json") || true ; $verb description
read -rd '' comments < <(yq -r '.comments | sort_by(.timestamp) | .[] | select(.author_is_uploader == true) | .text' <<<"$json") || true ; $verb comments
read -rd '' metadata < <(yq -ry 'del(.formats, .thumbnail, .thumbnails, .downloader_options,
.http_headers, .webpage_url_basename, .author_thumbnail,
.playable_in_embed, .live_status, .automatic_captions,
.extractor, .is_live, .was_live)' <<<"$json") || true ; $verb metadata
{ printf "ss= ; export verb=$verb ss= to= t= p= f= c=r3 F= CF= off= tp= lra= i= cmp=pard v=3db\n"
printf "ss= ; export _f=./@/%s\n\n" "${media_master##*/}"
printf "ss= _a=%s\n" "$title"
printf "ss= _r=%s\n\n" "$fulltitle"
printf ' ss= to= f2rb2mp3 $_f ooo,${_a}-Trak_Title-${_r}\n%s\n\n' "$duration"
printf -- "--- chapters \n%s\n\n" "$chapters" | iconv -f utf-8 -c -t ascii//TRANSLIT \
| sed -e 's/: /=/' -e 's/\.0$//' -e "s/'//g" -e 's/ /_/g' -e '/^---$/d' \
-e 's/^ooo=/f2rb2mp3 $_f ooo,${_a}-/' -e '/^f2rb/s/$/-${_r}/' -e 's/\&/and/g' \
| tr -d '()[].;:`"'
printf -- "--- title \n%s\n\n" "$title"
printf -- "--- description \n%s\n\n" "$description"
printf -- "--- comments \n%s\n\n" "$comments" | tr -s '\n\r' '\n' ; echo
printf -- "--- metadata \n%s\n\n" "$metadata"
# | sed -e 's,\\u0332,,g' -e 's,\\u2013,-,g' -e 's,\\u00d7,-,g' -e 's,\\u2022,-,g' \
#} | iconv -f utf-8 -c -t ascii//TRANSLIT >"${txt_dir}/${json_file##*/}.txt~" \
} >"${txt_dir}/${json_file##*/}.txt~" \
&& mv "${txt_dir}/${json_file##*/}.txt~" "${txt_dir}/${json_file##*/}.txt" \
|| { chkerr "$FUNCNAME : error parsing json or creating '${txt_dir}/${json_file##*/}.txt' (676c7462)" ; return 1 ;}
echo "${txt_dir}/${json_file##*/}.txt"
} # _yt_json2txt _youtube_json2txt 20220516
_youtube_comment_unflatten () { # convert comment text from _youtube_json2txt to ascii formatted
# echo -e "$( yq -r . )" ... subshell, no \" and no utf
awk '
BEGIN {
FS = OFS = ""
escape["\\"] = "\\"
escape["a"] = "\a"
escape["b"] = "\b"
escape["f"] = "\f"
escape["n"] = "\n"
escape["r"] = "\r"
escape["t"] = "\t"
escape["v"] = "\v"
escape["\""] = "\""
}
{
line = $0
result = ""
for (i = 1; i <= length(line); i++) {
if (substr(line, i, 1) == "\\") {
i++
char = substr(line, i, 1)
if (char in escape) {
result = result escape[char]
} else if (char == "x") {
hex = substr(line, i+1, 2)
result = result sprintf("%c", strtonum("0x" hex))
i += 2
} else if (char == "0") {
oct = substr(line, i, 3)
result = result sprintf("%c", strtonum("0" oct))
i += 2
} else {
result = result char
}
} else {
result = result substr(line, i, 1)
}
}
print result
}
'
} # _youtube_comment_unflatten 20230323
_youtube_comment_unflatten () { # convert comment text from _youtube_json2txt to ascii formatted
# echo -e "$( yq -r . )" ... subshell, no \" and no utf
yq -r 'to_entries[] | .value'
} # _youtube_comment_unflatten 20230323
span2ssto () { # start (arg1) span (arg2) and remaining args to f2rb2mp3
# used to calculate ss= to= f2rb2mp3 parameters, given track lengths
# eg
# export offset=1
# sp2ssto 0 7:14 01,\${a}-Song_For_My_Father-\${_r}
# sp2ssto $to 6:07 02,\${a}-The_Natives_Are_Restless_Tonight-\${_r}
# ss=0 to=435 f2rb2mp3 $_f 01,${a}-Song_For_My_Father-${_r}
# ss=435 to=803 f2rb2mp3 $_f 02,${a}-The_Natives_Are_Restless_Tonight-${_r}
local ss=$1 span= offc=;
[ "$offset" ] && offc="$offset +" || offc='';
span=$(dc -e "$2 $offc p")
export to=$(dc -e "$(hms2sec $ss) $(hms2sec $span) + $offc p");
shift 2 || shift || true
echo ss=$ss to=$to f2rb2mp3 '$_f' $*
echo "# $to is 0x$(kdb_xs2hu $(printf '%x' $to))"
} # span2ssto 20220519
hms2sec () { # passthrough seconds or convert hh:mm:ss to seconds
# must provide ss which may be ss.nn, hh: and hh:mm: are optional
# a number must precede every colon
{ # remove trailing 0 from seconds decimal
[[ $1 == *:*:*:* ]] && { chkerr "too many ':' in '$1' (6542c9ba)" ; return 1 ;}
[[ $1 == *:*:* ]] && { echo $1 | sed -e 's/:/ 0/g' \
| awk '{print "3 k "$1" 60 60 * * "$2" 60 * "$3" + + p"}' | dc && return 0 ;}
[[ $1 == *:* ]] && echo $1 | sed -e 's/:/ 0/g' \
| awk '{print "3 k "$1" 60 * "$2" + p"}' | dc
[[ $1 == *:* ]] || echo $1
} | sed -e '/\./s/[0]*$//' -e 's/\.$//' ;} # hms2sec
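# eg: hms2sec 1:02:03   # -> 3723
#     hms2sec 2:30.500  # -> 150.5 (trailing zeros of the decimal are trimmed)
#     hms2sec 90        # -> 90   (plain seconds pass through)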
prependf () {
local basefp="$1"
local title="$2"
[ -z "$basefp" ] && { chkerr "prependf: base filepath (arg1) not set $@ (6542c852)" ; return 1 ;}
[ -f "$basefp" ] || { chkerr "prependf: base filepath (arg1) not a file $@ (6542c84c)" ; return 1 ;}
[ "$title" ] || return 0 # no operation
local basefn="$(basename "$basefp")"
( cd $(dirname "$basefp") && mv -f "$basefn" "${title}${basefn}" )
} # prependf
# 20200204
f2rb2mp3 () ( # subshell function "file to rubberband to mp3", transcoding/tuning function
# subshell sets pipefail and ensures PWD on err
set -o errexit # Exit on command non-zero status
set -o errtrace # any trap on ERR is inherited by shell functions
set -o functrace # traps on DEBUG and RETURN are inherited by shell functions
set -o pipefail # exit pipeline on non-zero status (rightmost?)
# validate env per
# https://github.com/georgalis/pub/blob/master/skel/.profile
# https://github.com/georgalis/pub/blob/master/sub/func.bash
while IFS= read fndata ; do
validfn $fndata || { echo "validfn error : $fndata" 1>&2 ; return 1 ;}
done <<EOF
# pub/skel/.profile 20220105
devnul 216e1370 0000001d
stderr 7ccc5704 00000037
chkwrn 2683d3d3 0000005c
chkerr 4f18299d 0000005b
# pub/sub/fn.bash 20220105
hms2sec aea30e0e 000001e3
prependf a38214fb 000001c8
EOF
which rubberband-r3 >/dev/null 2>&1 && [ "$c" = "r3" ] && [ -z "$rb" ] && { rb=rubberband-r3 ;}
which rubberband-r3 >/dev/null 2>&1 && [ -z "$c" ] && [ -z "$rb" ] && { rb=rubberband-r3 c=r3 ;}
[ -x "$(which "$rb")" ] || { chkerr "$FUNCNAME : env rb not set to rubberband executable (6542c828)" ; return 1 ;}
[ -x "$(which ffmpeg)" ] || { chkerr "$FUNCNAME : ffmpeg not in path (6542c82e)" ; return 1 ;}
[ -x "$(which sox)" ] || { chkerr "$FUNCNAME : sox not in path (6542c83a)" ; return 1 ;}
# success valid env
[ "$1" = "help" -o "$1" = "-h" ] && { # a function to adjust audio file tempo and pitch independently
# depends on ffmpeg, rubberband and sox
# https://hg.sr.ht/~breakfastquay/rubberband
# https://github.com/breakfastquay/rubberband
# https://breakfastquay.com/rubberband/
# "Crispness" levels:
# -c 0 equivalent to --no-transients --no-lamination --window-long
# -c 1 equivalent to --detector-soft --no-lamination --window-long (for piano)
# -c 2 equivalent to --no-transients --no-lamination
# -c 3 equivalent to --no-transients
# -c 4 equivalent to --bl-transients
# -c 5 default processing options (none of below)
# -c 6 equivalent to --no-lamination --window-short (may be good for drums)
#
# -L, --loose Relax timing in hope of better transient preservation
# --no-transients Disable phase resynchronisation at transients
# --bl-transients Band-limit phase resync to extreme frequencies
# --no-lamination Disable phase lamination
# --window-long Use longer processing window (actual size may vary)
# --window-short Use shorter processing window
# --smoothing Apply window presum and time-domain smoothing
# --detector-perc Use percussive transient detector (as in pre-1.5)
# --detector-soft Use soft transient detector
# --centre-focus Preserve focus of centre material in stereo
# (at a cost in width and individual channel quality)
# echo "# crisp: 0=mushy 1=piano 2=smooth 3=MULTITIMBRAL 4=two-sources 5=standard 6=percussive "
echo "# Formant y/'' CenterFocus y/'' vol 0db/'' frequency (bhz|chz|N)/'' reverse y/''"
echo "# cmp= $(declare -f $FUNCNAME | sed -e '/compand/!d' -e '/sed/d' -e 's/=.*//' -e 's/local//' | tr -s ' \n' '|')"
# declare -f $FUNCNAME | sed -e '/compand/!d' -e '/sed/d' | while IFS= read a ; do ${verb2} "$a" ; done
echo "# ss= to= t= p= f= c= F= CF= off= tp= lra= i= cmp= v= f2rb2mp3 {file-in} {prepend-out}"
echo "# ss=$ss to=$to t=$t p=$p f=$f c=$c F=$F CF=$CF off=$off tp=$tp lra=$lra i=$i cmp=$cmp v=$v f2rb2mp3 {file-in} {prepend-out}"
return 0
} # help
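# eg (illustrative call, file and values hypothetical), using the env-driven form above:
#   ss=0:30 to=4:05 t=0.96 p=1 cmp=pard v=3db f2rb2mp3 ./@/_^abc123.opus 01,Some_Title-
# unset env vars fall back to the defaults probed below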
$verb "f2rb2mp3 $1 $2"
$verb ss=$ss to=$to t=$t p=$p f=$f c=$c F=$F CF=$CF off=$off tp=$tp lra=$lra i=$i cmp=$cmp v=$v
[ "$1" ] || { f2rb2mp3 help ; return 1 ;}
[ -f "$1" ] || { f2rb2mp3 help ; chkerr "no input flle '$1' (6542c99c)" ; return 1 ;}
local verb="${verb:=chkwrn}"
local verb2="${verb2:=devnul}"
local verb3="${verb3:=devnul}"
local infile="${1##*/}" # basename
expr "$1" : ".*/" >/dev/null && inpath="${1%/*}" || inpath="." # input dirname
local infilep="$(cd "${inpath}" ; pwd -P)/${infile}" # full filepath
local prependt="$2"
[ "${prependt}" ] || prependt=00,
[ "$t" -o "$p" ] && { [ "$c" ] || local c=5 ;} || true # "Crispness"
[ "$t" = 1 ] && local t= || true
[ "$p" = 0 ] && local p= || true
[ "$f" = 1 ] && local f= || true
local tc='' tn='' ; [ "$t" ] && tc="--time $t" tn="-t${t}" || true
local pc='' pn='' ; [ "$p" ] && pc="--pitch $p" pn="-p${p}" || true
local fhzc='' fhzn=''
[ "$f" ] && { fhzc="-f $f" ; fhzn="-f$f" ;}
[ "$f" = "bhz" ] && { fhzc="-f 0.98181818181818" ; fhzn="-bhz" ;} || true # baroque 432 hz tuning, from classical 440
[ "$f" = "chz" ] && { fhzc="-f 1.01851851851851" ; fhzn="-chz" ;} || true # classical 440 hz tuning, from baroque 432
local cmpn='' cmpc=''
local ckb0="compand 0.2,0.9 -70,-70,-60,-55,-50,-45,-35,-35,-20,-25,0,-12 6 -70 0.2" # piano analog master
local ckb2="compand 0.2,0.9 -70,-99,-50,-60,-50,-45,-30,-30,-20,-25,0,-13 6 -70 0.2" # piano digital master
local ckb3="compand 0.2,0.8 -60,-99,-50,-56,-38,-32,-23,-18,0,-4 -2 -60 0.2" # piano old analog master
local hrn3="compand 0.08,0.3 -74,-80,-50,-46,-18,-18,-0,-6 -1 -68 0" # peaky horn
local cps1="compand 0.07,0.25 -70,-84,-50,-45,-32,-33,-0,-21 3 -71 0.07" # high compress
local par2="compand 0.09,0.25 -100,-116,-88,-97,-80,-80,-63,-72,-54,-60,-23,-48,0,-36 19 -95 0.08" # parabolic extra
local par4="compand 0.13,0.16 -72,-97,-68,-84,-64,-73,-56,-65,-55,-61,-32,-57,-17,-53,0,-49 25 -55 0.12" # parabolic squared
local parc="compand 0.09,0.25 -97,-106,-85,-89,-73,-73,-57,-61,-40,-49,-21,-37,0,-25 11 -13 0.08" # parabolic standard
local pard="compand 0.09,0.25 -84.4,-110.7,-74.4,-89.1,-64.4,-71.0,-54.4,-56.3,-39.7,-46.3,-21.7,-36.3,0,-26.3 13.5 -13 0.091" # parabolic-d
local para="sinc 6-22k compand 0.087,1.78 -64.6,-63,-54.6,-48,-44.7,-30,-26.6,-20,-11.9,-10 -7.0 -20 0.0875" # for old analog, inverse pard
[ "$cmp" = "hrn" -o "$cmp" = "hrn1" ] && cmpn="hrn3" cmpc="$hrn3"
[ "$cmp" = "cps" ] && cmpn="pard" cmpc="$pard"
[ "$cmp" = "ckb" ] && cmpn="$cmp" cmpc="$ckb0"
[ "$cmp" = "ckb2" ] && cmpn="$cmp" cmpc="$ckb2"
[ "$cmp" = "ckb3" ] && cmpn="$cmp" cmpc="$ckb3"
[ "$cmp" = "hrn3" ] && cmpn="$cmp" cmpc="$hrn3"
[ "$cmp" = "para" ] && cmpn="para" cmpc="$para"
[ "$cmp" = "cps1" ] && cmpn="pard" cmpc="$pard"
[ "$cmp" = "parc" ] && cmpn="pard" cmpc="$pard"
[ "$cmp" = "pard" ] && cmpn="$cmp" cmpc="$pard"
[ "$cmp" = "par2" ] && cmpn="$cmp" cmpc="$par2"
[ "$cmp" = "par4" ] && cmpn="$cmp" cmpc="$par4"
$verb2 "cmpn='$cmpn'"
$verb2 "cmpc='$cmpc'"
$verb2 "input='$inpath/$infile'"
mkdir -p "${inpath}/tmp"
null="$(mktemp "${inpath}/tmp/nulltime-XXXXX")"
null="${null##*/}" # basename
local vn='' vc='' # init "volume name" and "volume command"
expr "$v" : "^[-]*[[:digit:]]*db$" >/dev/null || local v=4db # init sane default, if no env overide
[ "$cmpn" ] && vn="-$cmpn" vc="$cmpc" || true # sox compand is basically a volume adjustment...
[ "$v" ] && { vn="${vn}-v${v}" vc="${vc} vol ${v} dither" ;} || true # set vol name (vn) and vol command (vc) if needed
[ "$rev" = "y" ] && vn="${vn}-rev" vc="$vc reverse"
[ "$ss" ] || local ss="0" # if null ss=0 is default, and ss=0 is unspecified in filename, probe "to" if unspecified
[ "$to" ] || { local to= ; read to < <( # if empty, load var with Process Substitution and no subshel/Command Substitution
awk '{print($1==int($1))?$1:int($1)+1}' < <(# round up to an interger
ffprobe -v error -show_entries format=duration -of default=nw=1:nk=1 "$infilep")) ;} # probe file for duration
# https://trac.ffmpeg.org/wiki/FFprobeTips#Formatcontainerduration
local secc='' secn='' ssec='' tsec=''
ssec=$(hms2sec ${ss})
tsec=$(hms2sec ${to})
secc="-ss $ssec -to $tsec" secn="-ss${ssec}-to${tsec}" # typical
[ "$ss" = 0 -a "$to" ] && secc="-to $tsec" secn="-to$tsec" # unspecify ss
local gsec=$(hms2sec $(ffprobe -hide_banner -loglevel info "$infilep" 2>&1 | sed -e '/Duration/!d' -e 's/,.*//' -e 's/.* //'))
$verb "$(awk '{$1=$1 + 0;printf "%1.3f sec, %1.1f%% of %1.3f sec %s",$2-$1,(100*($2-$1))/$3,$3,$4 }' <<<"${ssec} ${tsec} ${gsec} ${infilep}")"
chkerr "$(awk '$1 >= $2 {print $3 ": invalid duration , ss="$1" to="$2}' <<<"${ssec} ${tsec} $FUNCNAME")" || exit 1
$verb "${inpath}/tmp/${infile}${secn}.{meas,flac}"
[ -f "${inpath}/tmp/${infile}${secn}.meas" -a -f "${inpath}/tmp/${infile}${secn}.flac" ] \
|| { # measure for EBU R128 loudness normalization
{ echo "# ${infile}${secn}.meas infile secn meas flac"
#$verb2 "loudnorm in: loudnorm=print_format=json... $secc -i $infilep > ${inpath}/tmp/${infile}${secn}.flac~"
$verb2 @ffmpeg -hide_banner -loglevel info -y $secc -i "$infilep"
ffmpeg -hide_banner -loglevel info -y $secc -i "$infilep" \
-af "highpass=f=6:p=2, lowpass=f=22000:p=2, aresample=48000,
loudnorm=print_format=json" \
-ar 48000 -f flac "${inpath}/tmp/${infile}${secn}.flac~" 2>&1 | awk '/^{/,0' \
| jq -r '. | "measured_I=\(.input_i) measured_TP=\(.input_tp) measured_LRA=\(.input_lra) measured_thresh=\(.input_thresh) linear=\(.linear)\nout_i_LUFS=\(.output_i) out_tp_dBTP=\(.output_tp) out_lra_LU=\(.output_lra) out_tr_LUFS=\(.output_thresh) offset_LU=\(.target_offset)"'
} >"${inpath}/tmp/${infile}${secn}.meas~" \
&& {
mv -f "${inpath}/tmp/${infile}${secn}.flac~" "${inpath}/tmp/${infile}${secn}.flac"