File: clam.sh 1 #!/bin/sh 2 3 # The MIT License (MIT) 4 # 5 # Copyright (c) 2026 pacman64 6 # 7 # Permission is hereby granted, free of charge, to any person obtaining a copy 8 # of this software and associated documentation files (the "Software"), to deal 9 # in the Software without restriction, including without limitation the rights 10 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 # copies of the Software, and to permit persons to whom the Software is 12 # furnished to do so, subject to the following conditions: 13 # 14 # The above copyright notice and this permission notice shall be included in 15 # all copies or substantial portions of the Software. 16 # 17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 # SOFTWARE. 24 25 26 # clam 27 # 28 # Command-Line Augmentation Module (clam): get the best out of your shell. 29 # 30 # 31 # This is a collection of arguably useful shell functions and shortcuts: 32 # some of these extra commands can be real time/effort savers, ideally 33 # letting you concentrate on getting things done. 34 # 35 # Some of these commands depend on my other scripts from the `pac-tools`, 36 # others either rely on widely-preinstalled command-line apps, or ones 37 # which are available on most of the major command-line `package` managers. 38 # 39 # To use this script, you're supposed to `source` it, so its definitions 40 # stay for your whole shell session: for that, you can run `source clam` or 41 # `. clam` (no quotes either way), either directly or at shell startup. 
#
# Almost all commands defined in this script work with `bash`, `zsh`, and
# even `dash`, which is debian linux's default non-interactive shell. Some
# of its commands even seem to work on busybox's shell.


case "$1" in
    -h|--h|-help|--help)
        # show help message, using the info-comment from this very script
        awk '
            /^case / { exit }
            /^# +clam$/, /^$/ { gsub(/^# ?/, ""); print }
        ' "$0"
        exit 0
        ;;
esac


# dash doesn't support regex-matching syntax, forcing to use case statements
case "$0" in
    -bash|-dash|-sh|bash|dash|sh|/bin/sh)
        # script is being sourced with bash, dash, or ash, which is good
        :
        ;;

    *)
        case "$ZSH_EVAL_CONTEXT" in
            *:file)
                # script is being sourced with zsh, which is good
                :
                ;;

            *)
                # script is being run normally, which is a waste of time
                printf "\e[7mDon't run this script directly: instead source it\e[0m\n"
                printf "\e[7mby running '. clam' (without the single quotes).\e[0m\n"
                printf "\n"
                printf "\e[7mBefore doing that, you may want to see the help,\e[0m\n"
                printf "\e[7mby running 'clam -h' (without the single quotes).\e[0m\n"
                # exiting during shell-startup may deny shell access, even if
                # the script is being run, instead of being sourced directly
                ;;
        esac
        ;;
esac


# single-digit shortcuts for my side-by-side layout tools `sbs` and `bsbs`
alias 0='sbs'

alias 1='bsbs 1'
alias 2='bsbs 2'
alias 3='bsbs 3'
alias 4='bsbs 4'
alias 5='bsbs 5'
alias 6='bsbs 6'
alias 7='bsbs 7'
alias 8='bsbs 8'
alias 9='bsbs 9'

# Less with Header n runs `less` with line numbers, ANSI styles, without
# line-wraps, and using the first n lines as a sticky-header, so they always
# show on top
alias lh1='less --header=1 -MKNiCRS'
alias lh2='less --header=2 -MKNiCRS'
alias lh3='less --header=3 -MKNiCRS'
alias lh4='less --header=4 -MKNiCRS'
alias lh5='less --header=5 -MKNiCRS'
alias lh6='less --header=6 -MKNiCRS'
alias lh7='less --header=7 -MKNiCRS'
alias lh8='less --header=8 -MKNiCRS'
alias lh9='less --header=9 -MKNiCRS'

# View with Header n runs `less` without line numbers, ANSI styles, without
# line-wraps, and using the first n lines as a sticky-header, so they always
# show on top
alias vh1='less --header=1 -MKiCRS'
alias vh2='less --header=2 -MKiCRS'
alias vh3='less --header=3 -MKiCRS'
alias vh4='less --header=4 -MKiCRS'
alias vh5='less --header=5 -MKiCRS'
alias vh6='less --header=6 -MKiCRS'
alias vh7='less --header=7 -MKiCRS'
alias vh8='less --header=8 -MKiCRS'
alias vh9='less --header=9 -MKiCRS'

alias c='cat'
alias e='echo'
alias r='reset'

# Awk Print
alias ap=abp

# Book-like MANual, lays out `man` docs as pairs of side-by-side pages; uses
# my tool `bsbs`
alias bman=bookman

# load/concatenate BYTES from named data sources
# alias bytes='cat'

# load/concatenate BYTES from named data sources; uses my tool `get`
alias bytes='get'

# Compile C Optimized
alias cco='cc -Wall -O2 -s -march=native -mtune=native -flto'

# Color DMESG
alias cdmesg='dmesg --color=always'

# Colored Json Query runs the `jq` app, allowing an optional filepath as the
# data source, and even an optional transformation formula
alias cjq='jq -C'

# CLear Screen
alias cls='tput -T xterm reset 2> /dev/null || reset'

# Compile C Plus Plus Optimized
alias cppo='c++ -Wall -O2 -s -march=native -mtune=native -flto'

# CURL Silent spares you the progress bar, but still tells you about errors
alias curls='curl --silent --show-error'

# dictionary-DEFine the word given, using an online service
alias def=define

# turn JSON Lines into a proper json array
# alias dejsonl='jq -s -M'

# turn json lines into a proper json array using the `jq` app
alias dejql='jq -s -M'

# turn UTF-16 data into UTF-8
alias deutf16='iconv -f utf16 -t utf8'

# edit plain-text files
# alias edit='micro'

# ENV with 0/null-terminated lines on stdout
alias env0='env -0'

# ENV Change folder, runs the command given in the folder given (first)
alias envc='env -C'

# Extended Plain Interactive Grep
alias epig='ugrep --color=never -Q -E'

# Editor Read-Only
alias ero='micro -readonly true'

# Expand 4 turns each tab into up to 4 spaces
alias expand4='expand -t 4'

# run the Fuzzy Finder (fzf) in multi-choice mode, with custom keybindings
alias ff='fzf -m --bind ctrl-a:select-all,ctrl-space:toggle'

# get FILE's MIME types
alias filemime='file --mime-type'

# run `gcc` with all optimizations on and with static analysis on
alias gccmax='gcc -Wall -O2 -s -march=native -mtune=native -flto -fanalyzer'

# hold stdout if used at the end of a pipe-chain
alias hold='less -MKiCRS'

# find all hyperlinks inside HREF attributes in the input text
alias hrefs=href

# make JSON Lines out of JSON data
alias jl=jsonl

# shrink/compact JSON using the `jq` app, allowing an optional filepath, and
# even an optional transformation formula after that
alias jq0='jq -c -M'

# show JSON data on multiple lines, using 2 spaces for each indentation level,
# allowing an optional filepath, and even an optional transformation formula
# after that
alias jq2='jq --indent 2 -M'

# find the LAN (local-area network) IP address for this device
alias lanip='hostname -I'

# run `less`, showing line numbers, among other settings
alias least='less -MKNiCRS'

# try to run the command given using line-buffering for its (standard) output
alias livelines='stdbuf -oL'

# LOAD data from the filename or URI given; uses my `get` tool
alias load=get

# LOcal SERver webserves files in a folder as localhost, using the port
# number given, or port 8080 by default
alias loser=serve
# Live RipGrep
alias lrg='rg --line-buffered'

# run `ls` showing how many 4k pages each file takes
alias lspages='ls -s --block-size=4096'

# Listen To Youtube
alias lty=yap

# MAKE IN folder
alias makein=mif

# Multi-Core MaKe runs `make` using all cores
alias mcmk=mcmake

# run `less`, showing line numbers, among other settings
alias most='less -MKNiCRS'

# emit nothing to output and/or discard everything from input
alias nil=null

# Nice Json Query colors JSON data using the `jq` app
alias njq=cjq

# Plain Interactive Grep
alias pig='ugrep --color=never -Q -E'

# Quick Compile C Optimized
alias qcco='cc -Wall -O2 -s -march=native -mtune=native -flto'

# Quick Compile C Plus Plus Optimized
alias qcppo='c++ -Wall -O2 -s -march=native -mtune=native -flto'

# Read-Only Editor
alias roe='micro -readonly true'

# Read-Only Micro (text editor)
alias rom='micro -readonly true'

# Read-Only Top
alias rot='htop --readonly'

# RUN IN folder
alias runin='env -C'

# place lines Side-By-Side
# alias sbs='column'

# Silent CURL spares you the progress bar, but still tells you about errors
alias scurl='curl --silent --show-error'

# Stdbuf Output Line-buffered
alias sol='stdbuf -oL'

# TRY running a command, showing its outcome/error-code on failure
alias try=verdict

# Time Verbosely the command given
alias tv='/usr/bin/time -v'

# VERTical REVert emits lines in reverse order of appearance
alias vertrev=tac

# emit lines in reverse order of appearance
alias upsidedown=tac

# run `cppcheck` with even stricter options
alias vetc='cppcheck --enable=portability,style --check-level=exhaustive'

# run `cppcheck` with even stricter options, also checking for c89 compliance
alias vetc89='cppcheck --enable=portability,style --check-level=exhaustive --std=c89'

# run `cppcheck` with even stricter options
alias vetcpp='cppcheck --enable=portability,style --check-level=exhaustive'

# VET SHell scripts
alias vetsh=vetshell

# check shell scripts for common gotchas, avoiding complaints about using
# the `local` keyword, which is widely supported in practice
alias vetshell='shellcheck -e 3043'

# run a command using an empty environment
alias void='env -i'

# turn plain-text from latin-1 into UTF-8; the name is from `vulgarization`,
# which is the mutation of languages away from latin during the middle ages
alias vulgarize='iconv -f latin-1 -t utf-8'

# recursively find all files with trailing spaces/CRs
alias wheretrails=whichtrails

# run `xargs`, using zero/null bytes as the extra-arguments terminator
alias x0='xargs -0'

# Xargs Lines, runs `xargs` using whole lines as extra arguments
alias xl=xargsl

# Awk Begin Print: evaluate each argument as an awk expression, one per line
abp() {
    local expr
    for expr in "$@"; do
        awk "BEGIN { print (${expr}); exit }"
    done
}

# find name from the local `apt` database of installable packages
aptfind() {
    local name
    local gap=0
    local options='-MKiCRS'

    # with a single query, its matches can double as a sticky header
    if [ $# -eq 1 ]; then
        options='--header=1 -MKiCRS'
    fi

    for name in "$@"; do
        # separate result-groups with an empty line after the first group
        [ "${gap}" -gt 0 ] && printf "\n"
        gap=1
        printf "\e[7m%-80s\e[0m\n\n" "${name}"

        # despite warnings, the `search` command has been around for years
        apt search "${name}" 2> /dev/null |
            grep -E -A 1 "^[a-z0-9-]*${name}" | sed 's/^--$//'
    done | less ${options}
}

# APT GET/install packages
aptget() { sudo apt install "$@"; sudo -k; }

# APT UPdate/grade
aptup() { sudo apt update && sudo apt upgrade "$@"; sudo -k; }

# emit each argument given as its own line of output
args() { [ $# -eq 0 ] || printf "%s\n" "$@"; }
# AWK in BLOCKS/paragraphs-input mode
awkblocks() {
    # line-buffer the output when it goes to a pipe or a terminal
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -F='' -v RS='' "$@"
    else
        awk -F='' -v RS='' "$@"
    fi
}

# AWK using TABS as input/output field-separators
awktabs() {
    # line-buffer the output when it goes to a pipe or a terminal
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -F "\t" -v OFS="\t" "$@"
    else
        awk -F "\t" -v OFS="\t" "$@"
    fi
}

# Breathe lines 3: separate groups of 3 lines with empty lines
b3() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk 'NR % 3 == 1 && NR != 1 { print "" } 1' "$@"
    else
        awk 'NR % 3 == 1 && NR != 1 { print "" } 1' "$@"
    fi
}

# Breathe lines 5: separate groups of 5 lines with empty lines
b5() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk 'NR % 5 == 1 && NR != 1 { print "" } 1' "$@"
    else
        awk 'NR % 5 == 1 && NR != 1 { print "" } 1' "$@"
    fi
}

# show an ansi-styled BANNER-like line
banner() { printf "\e[7m%-$(tput -T xterm cols)s\e[0m\n" "$*"; }

# emit a colored bar which can help visually separate different outputs
bar() {
    [ "${1:-80}" -gt 0 ] && printf "\e[48;2;218;218;218m%${1:-80}s\e[0m\n" ""
}

# Breathe Header 3: add an empty line after the first one (the header),
# then separate groups of 3 lines with empty lines between them
bh3() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk '(NR - 1) % 3 == 1 { print "" } 1' "$@"
    else
        awk '(NR - 1) % 3 == 1 { print "" } 1' "$@"
    fi
}

# Breathe Header 5: add an empty line after the first one (the header),
# then separate groups of 5 lines with empty lines between them
bh5() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk '(NR - 1) % 5 == 1 { print "" } 1' "$@"
    else
        awk '(NR - 1) % 5 == 1 { print "" } 1' "$@"
    fi
}

# emit a line with a repeating block-like symbol in it
blocks() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -█-g'; }

# BOOK-like MANual, lays out `man` docs as pairs of side-by-side pages; uses
# my tool `bsbs`
bookman() {
    local w
    # each of the 2 pages gets about half the screen, minus some margin
    w="$(tput -T xterm cols)"
    w="$((w / 2 - 4))"
    if [ "$w" -lt 65 ]; then
        w=65
    fi
    MANWIDTH="$w" man "$@" | bsbs 2
}

# split lines using the separator given, turning them into single-item lines
breakdown() {
    local sep="${1:- }"
    [ $# -gt 0 ] && shift
    local runner='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        runner='stdbuf -oL awk'
    fi

    ${runner} -F "${sep}" '{ for (i = 1; i <= NF; i++) print $i }' "$@"
}

# CAlculator with Nice numbers runs my tool `ca` and colors results with
# my tool `nn`, alternating styles to make long numbers easier to read
can() {
    local formula
    for formula in "$@"; do
        # with multiple formulas, label each result on stderr
        [ $# -ge 2 ] && printf "\e[7m%s\e[0m\n" "${formula}" > /dev/stderr
        ca "${formula}" | nn
    done
}

# uppercase the first letter on each line, and lowercase all later letters
capitalize() { sed -E 's-^(.*)-\L\1-; s-^(.)-\u\1-'; }

# center-align lines of text, using the current screen width
center() {
    awk -v width="$(tput -T xterm cols)" '
        {
            gsub(/\r$/, "")
            lines[NR] = $0
            s = $0
            gsub(/\x1b\[[0-9;]*[A-Za-z]/, "", s) # ANSI style-changers
            l = length(s)
            if (maxlen < l) maxlen = l
        }

        END {
            n = (width - maxlen) / 2
            if (n % 1) n = n - (n % 1)
            fmt = sprintf("%%%ds%%s\n", (n > 0) ? n : 0)
            for (i = 1; i <= NR; i++) printf fmt, "", lines[i]
        }
    ' "$@"
}
# Colored Go Test on the folder given; uses my command `gbm`
cgt() { go test "${@:-.}" 2>&1 | gbm '^ok' '^[-]* ?FAIL' '^\?'; }

# Colored RipGrep ensures app `rg` emits colors when piped
crg() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        rg --line-buffered --color=always "${@:-.}"
    else
        rg --color=always "${@:-.}"
    fi
}

# Compile Rust Optimized
cro() {
    rustc -C lto=true -C codegen-units=1 -C debuginfo=0 -C strip=symbols \
        -C opt-level=3 "$@"
}

# emit a line with a repeating cross-like symbol in it
crosses() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -×-g'; }

# listen to streaming DANCE music
dance() {
    printf "streaming \e[7mDance Wave Retro\e[0m\n"
    mpv --really-quiet https://retro.dancewave.online/retrodance.mp3
}

# emit a line with a repeating dash-like symbol in it
dashes() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -—-g'; }

# remove commas in numbers, as well as leading dollar signs in numbers
decomma() {
    sed -E 's-([0-9]{3}),-\1-g; s-([0-9]{1,2}),-\1-g; s-\$([0-9\.]+)-\1-g'
}

# remove indentations from lines
dedent() {
    awk '
        {
            lines[NR] = $0
            # remember the shortest leading-space run seen so far
            if (match($0, /^ +/) && (n == 0 || n > RLENGTH)) n = RLENGTH
        }

        END {
            if (n == 0) {
                for (i = 1; i <= NR; i++) print lines[i]
            } else {
                for (i = 1; i <= NR; i++) print substr(lines[i], n + 1)
            }
        }
    ' "$@"
}

# strip HTML tags, decode the common HTML entities, and tidy up spaces;
# fixed: the entity patterns had degraded into no-ops (`gsub(/&/, "&")`,
# etc.), since the `&amp;`/`&lt;`/`&gt;` escapes were lost at some point
dehtmlify() {
    local runner='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        runner='stdbuf -oL awk'
    fi

    ${runner} '
        {
            gsub(/<\/?[^>]+>/, "")
            gsub(/&lt;/, "<")
            gsub(/&gt;/, ">")
            # decode ampersands last, to avoid double-decoding; in awk,
            # replacement "\\&" means a literal ampersand
            gsub(/&amp;/, "\\&")
            gsub(/^ +| *\r?$/, "")
            gsub(/ +/, " ")
            print
        }
    ' "$@"
}

# expand tabs each into up to the number of spaces given, or 4 by default
detab() {
    local tabstop="${1:-4}"
    [ $# -gt 0 ] && shift
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL expand -t "${tabstop}" "$@"
    else
        expand -t "${tabstop}" "$@"
    fi
}

# DIVide 2 numbers 3 ways, including the complement
div() {
    awk -v a="${1:-1}" -v b="${2:-1}" '
        BEGIN {
            gsub(/_/, "", a)
            gsub(/_/, "", b)
            if (a > b) { c = a; a = b; b = c }
            c = 1 - a / b
            if (0 <= c && c <= 1) printf "%f\n%f\n%f\n", a / b, b / a, c
            else printf "%f\n%f\n", a / b, b / a
            exit
        }'
}

# emit a line with a repeating dot-like symbol in it
dots() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -·-g'; }

# show the current Date and Time
dt() {
    printf "\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n" \
        "$(date +'%a %b %d')" "$(date +%T)"
}

# show the current Date, Time, and a Calendar with the 3 `current` months
dtc() {
    {
        # show the current date/time center-aligned
        printf "%20s\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n\n" \
            "" "$(date +'%a %b %d')" "$(date +%T)"
        # debian linux has a different `cal` app which highlights the day
        if [ -e /usr/bin/ncal ]; then
            # fix debian/ncal's weird way to highlight the current day
            ncal -C -3 | sed -E 's/_\x08(.+)_\x08([^ ]+)/\x1b\[7m\1\2\x1b\[0m/'
        else
            cal -3
        fi
    } | less -MKiCRS
}

# EDit RUN shell commands, using an interactive editor; uses my tool `leak`
edrun() {
    # dash doesn't support the process-sub syntax
    # . <( micro -readonly true -filetype shell | leak --inv )
    micro -readonly true -filetype shell | leak --inv | . /dev/fd/0
}
# convert EURos into CAnadian Dollars, using the latest official exchange
# rates from the bank of canada; during weekends, the latest rate may be
# from a few days ago; the default amount of euros to convert is 1, when
# not given
eur2cad() {
    local url
    local site='https://www.bankofcanada.ca/valet/observations/group'
    local csv_rates="${site}/FX_RATES_DAILY/csv"
    # ask for a few days back, so weekends/holidays still return a rate
    url="${csv_rates}?start_date=$(date -d '3 days ago' +'%Y-%m-%d')"
    curl -s "${url}" | awk -F, -v amount="$(echo "${1:-1}" | sed 's-_--g')" '
        /EUR/ { for (i = 1; i <= NF; i++) if($i ~ /EUR/) j = i }
        END { gsub(/"/, "", $j); if (j != 0) printf "%.2f\n", amount * $j }
    '
}

# Fix Audio Duration on a separate copy of the file given
fad() { ffmpeg -i "${1:-input.m4a}" -acodec copy "${2:-output.dat}"; }

# get the first n lines, or 1 by default
first() { head -n "${1:-1}" "${2:--}"; }

# Field-Names AWK remembers field-positions by name, from the first input line
fnawk() {
    local code="${1:-1}"
    [ $# -gt 0 ] && shift

    local buffering=''
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        buffering='stdbuf -oL'
    fi

    ${buffering} awk -v OFS="\t" '
        NR == 1 {
            # a header with tabs switches splitting to tabs-only
            FS = /\t/ ? "\t" : " "
            $0 = $0
            for (i in names) delete names[i]
            for (i = 1; i <= NF; i++) names[$i] = i
            i = ""
        }
        { low = lower = tolower($0) }
        '"${code}"'
    ' "$@"
}

# start from the line number given, skipping all previous ones
fromline() { tail -n +"${1:-1}" "${2:--}"; }

# convert a mix of FeeT and INches into meters
ftin() {
    local ft="${1:-0}"
    ft="$(echo "${ft}" | sed 's-_--g')"
    local in="${2:-0}"
    in="$(echo "${in}" | sed 's-_--g')"
    awk "BEGIN { print 0.3048 * ${ft} + 0.0254 * ${in}; exit }"
}

# Gawk Bignum Print
gbp() { gawk --bignum "BEGIN { print $1; exit }"; }

# glue/stick together various lines, only emitting a line-feed at the end; an
# optional argument is the output-item-separator, which is empty by default
glue() {
    local sep="${1:-}"
    [ $# -gt 0 ] && shift
    awk -v sep="${sep}" '
        NR > 1 { printf "%s", sep }
        { gsub(/\r/, ""); printf "%s", $0 }
        END { if (NR > 0) print "" }
    ' "$@"
}

# GO Build Stripped: a common use-case for the go compiler
gobs() { go build -ldflags "-s -w" -trimpath "$@"; }

# GO DEPendencieS: show all dependencies in a go project
godeps() { go list -f '{{ join .Deps "\n" }}' "$@"; }

# GO IMPortS: show all imports in a go project
goimps() { go list -f '{{ join .Imports "\n" }}' "$@"; }

# go to the folder picked using an interactive TUI; uses my tool `bf`
goto() {
    local where
    where="$(bf "${1:-.}")"
    # a canceled picker is not an error for this command
    if [ $? -ne 0 ]; then
        return 0
    fi

    where="$(realpath "${where}")"
    # picking a file means going to the folder it's in
    if [ ! -d "${where}" ]; then
        where="$(dirname "${where}")"
    fi
    cd "${where}" || return
}
# show Help laid out on 2 side-by-side columns; uses my tool `bsbs`
h2() { naman "$@" | bsbs 2; }

# Highlighted-style ECHO
hecho() { printf "\e[7m%s\e[0m\n" "$*"; }

# show each byte as a pair of HEXadecimal (base-16) symbols
hexify() {
    cat "$@" | od -v -x -A n | awk '
        { gsub(/ +/, ""); printf "%s", $0 }
        END { print "" }
    '
}

# Help Me Remember my custom shell commands
hmr() {
    local pager="bat"
    # debian linux uses a different name for the `bat` app
    if [ -e /usr/bin/batcat ]; then
        pager="batcat"
    fi

    "$pager" \
        --style=plain,header,numbers --theme='Monokai Extended Light' \
        --wrap=never --color=always "$(which clam)" |
        sed -e 's-\x1b\[38;5;70m-\x1b[38;5;28m-g' \
            -e 's-\x1b\[38;5;214m-\x1b[38;5;208m-g' \
            -e 's-\x1b\[38;5;243m-\x1b[38;5;103m-g' \
            -e 's-\x1b\[38;5;238m-\x1b[38;5;245m-g' \
            -e 's-\x1b\[38;5;228m-\x1b[48;5;228m-g' |
        less -MKiCRS
}

# convert seconds into a colon-separated Hours-Minutes-Seconds triple
hms() {
    echo "${@:-0}" | sed -E 's-_--g; s- +-\n-g' | awk '
        /./ {
            x = $0
            h = (x - x % 3600) / 3600
            m = (x % 3600) / 60
            s = x % 60
            printf "%02d:%02d:%05.2f\n", h, m, s
        }
    '
}

# find all hyperlinks inside HREF attributes in the input text
href() {
    local src
    local awk_cmd='awk'
    local grep_cmd='grep'
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        grep_cmd='grep --line-buffered'
        if [ -e /usr/bin/stdbuf ]; then
            awk_cmd='stdbuf -oL awk'
        fi
    fi

    for src in "${@:--}"; do
        ${grep_cmd} -i -E -o 'href="[^"]+"' "${src}"
    done | ${awk_cmd} '{ gsub(/^href="|"\r?$/, ""); print }'
}

# avoid/ignore lines which case-insensitively match any of the regexes given
iavoid() {
    local runner='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        runner='stdbuf -oL awk'
    fi

    ${runner} '
        BEGIN {
            if (IGNORECASE == "") {
                m = "this variant of AWK lacks case-insensitive regex-matching"
                print(m) > "/dev/stderr"
                exit 125
            }
            IGNORECASE = 1

            for (i = 1; i < ARGC; i++) {
                e[i] = ARGV[i]
                delete ARGV[i]
            }
        }

        {
            for (i = 1; i < ARGC; i++) if ($0 ~ e[i]) next
            print
            got++
        }

        END { exit(got == 0) }
    ' "${@:-^\r?$}"
}

# ignore command in a pipe: this allows quick re-editing of pipes, while
# still leaving signs of previously-used steps, as a memo
idem() { cat; }

# ignore command in a pipe: this allows quick re-editing of pipes, while
# still leaving signs of previously-used steps, as a memo
ignore() { cat; }

# only keep lines which case-insensitively match any of the regexes given
imatch() {
    local runner='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        runner='stdbuf -oL awk'
    fi

    ${runner} '
        BEGIN {
            if (IGNORECASE == "") {
                m = "this variant of AWK lacks case-insensitive regex-matching"
                print(m) > "/dev/stderr"
                exit 125
            }
            IGNORECASE = 1

            for (i = 1; i < ARGC; i++) {
                e[i] = ARGV[i]
                delete ARGV[i]
            }
        }

        {
            for (i = 1; i < ARGC; i++) {
                if ($0 ~ e[i]) {
                    print
                    got++
                    next
                }
            }
        }

        END { exit(got == 0) }
    ' "${@:-[^\r]}"
}

# start each non-empty line with extra n spaces
indent() {
    local runner='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        runner='stdbuf -oL awk'
    fi

    ${runner} '
        BEGIN {
            n = ARGV[1] + 0
            delete ARGV[1]
            fmt = sprintf("%%%ds%%s\n", (n > 0) ? n : 0)
        }

        /^\r?$/ { print ""; next }
        { gsub(/\r$/, ""); printf(fmt, "", $0) }
    ' "$@"
}
# emit each word-like item from each input line on its own line; when a file
# has tabs on its first line, items are split using tabs alone, which allows
# items to have spaces in them
items() {
    local runner='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        runner='stdbuf -oL awk'
    fi

    ${runner} '
        FNR == 1 { FS = /\t/ ? "\t" : " "; $0 = $0 }
        { gsub(/\r$/, ""); for (i = 1; i <= NF; i++) print $i }
    ' "$@"
}

# listen to streaming JAZZ music
jazz() {
    printf "streaming \e[7mSmooth Jazz Instrumental\e[0m\n"
    mpv --quiet https://stream.zeno.fm/00rt0rdm7k8uv
}

# show a `dad` JOKE from the web, sometimes even a very funny one
joke() {
    curl --silent --show-error https://icanhazdadjoke.com | fold -s |
        awk '{ gsub(/ *\r?$/, ""); print }'
}

# JSON Query Lines turns JSON top-level arrays into multiple individually-JSON
# lines using the `jq` app, keeping all other top-level values as single line
# JSON outputs
jql() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -M "${code} | .[]" "$@"
}

# JSON Query Keys runs `jq` to find all unique key-combos from tabular JSON
jqk() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -M "${code} | .[] | keys" "$@" | awk '!c[$0]++'
}

# JSON Keys finds all unique key-combos from tabular JSON data; uses my tools
# `jsonl` and `tjp`
jsonk() {
    tjp '[e.keys() for e in v] if isinstance(v, (list, tuple)) else v.keys()' \
        "${1:--}" | jsonl | awk '!c[$0]++'
}

# JSON Table, turns TSV tables into tabular JSON, where valid-JSON values are
# auto-parsed into numbers, booleans, etc...; uses my tools `jsons` and `tjp`
jsont() {
    jsons "$@" | tjp \
        '[{k: rescue(lambda: loads(v), v) for k, v in e.items()} for e in v]'
}

# emit the given number of random/junk bytes, or 1024 junk bytes by default
junk() { head -c "$(echo "${1:-1024}" | sed 's-_--g')" /dev/urandom; }

# get the last n lines, or 1 by default
last() { tail -n "${1:-1}" "${2:--}"; }

# convert pounds (LB) into kilograms
lb() {
    echo "${@:-1}" | sed -E 's-_--g; s- +-\n-g' |
        awk '/./ { printf "%.2f\n", 0.45359237 * $0 }'
}

# convert a mix of pounds (LB) and weight-ounces (OZ) into kilograms
lboz() {
    local lb="${1:-0}"
    lb="$(echo "${lb}" | sed 's-_--g')"
    local oz="${2:-0}"
    oz="$(echo "${oz}" | sed 's-_--g')"
    awk "BEGIN { print 0.45359237 * ${lb} + 0.028349523 * ${oz}; exit }"
}

# limit stops at the first n bytes, or 1024 bytes by default
limit() { head -c "$(echo "${1:-1024}" | sed 's-_--g')" "${2:--}"; }

# ensure LINES are never accidentally joined across files, by always emitting
# a line-feed at the end of each line
lines() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk 1 "$@"
    else
        awk 1 "$@"
    fi
}

# regroup adjacent lines into n-item tab-separated lines
lineup() {
    local runner='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        runner='stdbuf -oL awk'
    fi

    local n="${1:-0}"
    [ $# -gt 0 ] && shift

    # without a positive group-size, join all lines into a single one
    if [ "$n" -le 0 ]; then
        ${runner} '
            NR > 1 { printf "\t" }
            { printf "%s", $0 }
            END { if (NR > 0) print "" }
        ' "$@"
        return $?
    fi

    ${runner} -v n="$n" '
        NR % n != 1 && n > 1 { printf "\t" }
        { printf "%s", $0 }
        NR % n == 0 { print "" }
        END { if (NR % n != 0) print "" }
    ' "$@"
}

# LiSt files, showing how many 4K-sized storage blocks they use
ls4k() { ls -s --block-size=4096 "$@"; }

# LiSt MAN pages
lsman() { man -k "${1:-.}"; }

# MARK the current tab with the message given, followed by the current folder;
# works only on the `bash` shell
mark() {
    if [ $# -eq 0 ]; then
        PS1="\[\e[0m\e]0;\w\a\]\$ "
    else
        PS1="\[\e[0m\e]0;${*} \w\a\]\$ "
    fi
}

# turn hyperlinks in the input into clickable terminal escape-sequences
marklinks() {
    local re='https?://[A-Za-z0-9+_.:%-]+(/[A-Za-z0-9+_.%/,#?&=-]*)*'
    sed -E 's-('"${re}"')-\x1b]8;;\1\x1b\\\1\x1b]8;;\x1b\\-g' "$@"
}

# Multi-Core MAKE runs `make` using all cores
mcmake() { make -j "$(nproc)" "$@"; }

# merge stderr into stdout, which is useful for piped commands
merrge() { "${@:-cat /dev/null}" 2>&1; }

# list all key-paths present in the JSON input
metajq() {
    # https://github.com/stedolan/jq/issues/243#issuecomment-48470943
    jq -r -M '
        [
            path(..) |
            map(if type == "number" then "[]" else tostring end) |
            join(".") | split(".[]") | join("[]")
        ] | unique | map("." + .) | .[]
    ' "$@"
}
# Make In Folder, also showing time and max memory used
mif() {
    local fmt='real %e user %U sys %S mem %M exit %x'
    local folder
    folder="${1:-.}"
    [ $# -gt 0 ] && shift
    env -C "${folder}" /usr/bin/time -f "$fmt" make "$@"
}

# MINimize DECimalS ignores all trailing decimal zeros in numbers, and even
# the decimal dots themselves, when decimals in a number are all zeros
# mindecs() {
#     local cmd='sed -E'
#     if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
#         cmd='sed -E -u'
#     fi
#     ${cmd} 's-([0-9]+)\.0+\W-\1-g; s-([0-9]+\.[0-9]*[1-9])0+\W-\1-g' "$@"
# }

# Number all lines counting from 0, using a tab right after each line number
n0() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL nl -b a -w 1 -v 0 "$@"
    else
        nl -b a -w 1 -v 0 "$@"
    fi
}

# Number all lines counting from 1, using a tab right after each line number
n1() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL nl -b a -w 1 -v 1 "$@"
    else
        nl -b a -w 1 -v 1 "$@"
    fi
}

# NArrow MANual, keeps `man` narrow, even if the window/tab is wide when run
naman() {
    local w
    w="$(tput -T xterm cols)"
    w="$((w / 2 - 4))"
    if [ "$w" -lt 80 ]; then
        w=80
    fi
    MANWIDTH="$w" man "$@"
}

# Not AND sorts its 2 inputs, then finds lines not in common
nand() {
    # comm -3 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -3 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# Nice DEFine dictionary-defines the words given, using an online service
ndef() {
    local word
    local gap=0
    local options='-MKiCRS'

    if [ $# -eq 0 ]; then
        printf "\e[38;2;204;0;0mndef: no words given\e[0m\n" >&2
        return 1
    fi

    # with a single word, its name-line can double as a sticky header
    if [ $# -eq 1 ]; then
        options='--header=1 -MKiCRS'
    fi

    for word in "$@"; do
        [ "${gap}" -gt 0 ] && printf "\n"
        gap=1
        printf "\e[7m%-80s\e[0m\n" "${word}"
        curl --silent "dict://dict.org/d:${word}" | awk '
            { gsub(/\r$/, "") }
            /^151 / {
                printf "\x1b[38;2;52;101;164m%s\x1b[0m\n", $0
                next
            }
            /^[1-9][0-9]{2} / {
                printf "\x1b[38;2;128;128;128m%s\x1b[0m\n", $0
                next
            }
            1
        '
    done | less ${options}
}
1 1099 fi 1100 1101 if [ $# -eq 1 ]; then 1102 options='--header=1 -MKiCRS' 1103 fi 1104 1105 for arg in "$@"; do 1106 [ "${gap}" -gt 0 ] && printf "\n" 1107 gap=1 1108 printf "\e[7m%-80s\e[0m\n" "${arg}" 1109 curl --silent "dict://dict.org/d:${arg}" | awk ' 1110 { gsub(/\r$/, "") } 1111 /^151 / { 1112 printf "\x1b[38;2;52;101;164m%s\x1b[0m\n", $0 1113 next 1114 } 1115 /^[1-9][0-9]{2} / { 1116 printf "\x1b[38;2;128;128;128m%s\x1b[0m\n", $0 1117 next 1118 } 1119 1 1120 ' 1121 done | less ${options} 1122 } 1123 1124 # listen to streaming NEW WAVE music 1125 newwave() { 1126 printf "streaming \e[7mNew Wave radio\e[0m\n" 1127 mpv --quiet https://puma.streemlion.com:2910/stream 1128 } 1129 1130 # Nice Json Query Lines colors JSONL data using the `jq` app 1131 njql() { 1132 local code="${1:-.}" 1133 [ $# -gt 0 ] && shift 1134 jq -c -C "${code} | .[]" "$@" 1135 } 1136 1137 # empty the clipboard 1138 noclip() { wl-copy --clear; } 1139 1140 # show the current date and time 1141 now() { date +'%Y-%m-%d %H:%M:%S'; } 1142 1143 # Nice Print Awk result; uses my tool `nn` 1144 npa() { 1145 local arg 1146 for arg in "$@"; do 1147 awk "BEGIN { print(${arg}); exit }" 1148 done | nn 1149 } 1150 1151 # Nice Print Python result; uses my tool `nn` 1152 npp() { 1153 local arg 1154 for arg in "$@"; do 1155 python -c "print(${arg})" 1156 done | nn 1157 } 1158 1159 # Nice Size, using my tool `nn` 1160 ns() { wc -c "$@" | nn; } 1161 1162 # emit nothing to output and/or discard everything from input 1163 null() { [ $# -gt 0 ] && "$@" > /dev/null; } 1164 1165 # Print Python result 1166 pp() { 1167 local arg 1168 for arg in "$@"; do 1169 python -c "print(${arg})" 1170 done 1171 } 1172 1173 # PRecede (input) ECHO, prepends a first line to stdin lines 1174 precho() { echo "$@" && cat /dev/stdin; } 1175 1176 # LABEL/precede data with an ANSI-styled line 1177 prelabel() { 1178 printf "\e[7m%-*s\e[0m\n" "$(($(tput -T xterm cols) - 2))" "$*" 1179 cat - 1180 } 1181 1182 # PREcede (input) MEMO, 
prepends a first highlighted line to stdin lines 1183 prememo() { printf "\e[7m%s\e[0m\n" "$*"; cat -; } 1184 1185 # start by joining all arguments given as a tab-separated-items line of output, 1186 # followed by all lines from stdin verbatim 1187 pretsv() { 1188 awk ' 1189 BEGIN { 1190 for (i = 1; i < ARGC; i++) { 1191 if (i > 1) printf "\t" 1192 printf "%s", ARGV[i] 1193 } 1194 if (ARGC > 1) print "" 1195 exit 1196 } 1197 ' "$@" 1198 cat - 1199 } 1200 1201 # Plain RipGrep 1202 prg() { 1203 if [ -p /dev/stdout ] || [ -t 1 ]; then 1204 rg --line-buffered --color=never "${@:-.}" 1205 else 1206 rg --color=never "${@:-.}" 1207 fi 1208 } 1209 1210 # Quiet MPV 1211 # qmpv() { mpv --quiet "${@:--}"; } 1212 1213 # Quiet MPV 1214 qmpv() { mpv --really-quiet "${@:--}"; } 1215 1216 # ignore stderr, without any ugly keyboard-dancing 1217 quiet() { "$@" 2> /dev/null; } 1218 1219 # keep only lines between the 2 line numbers given, inclusively 1220 rangelines() { 1221 { [ $# -eq 2 ] || [ $# -eq 3 ]; } && [ "${1}" -le "${2}" ] && { 1222 tail -n +"${1}" "${3:--}" | head -n $(("${2}" - "${1}" + 1)) 1223 } 1224 } 1225 1226 # RANdom MANual page 1227 ranman() { 1228 find "/usr/share/man/man${1:-1}" -type f | shuf -n 1 | xargs basename | 1229 sed 's-\.gz$--' | xargs man 1230 } 1231 1232 # REPeat STRing emits a line with a repeating string in it, given both a 1233 # string and a number in either order 1234 repstr() { 1235 awk ' 1236 BEGIN { 1237 if (ARGV[2] ~ /^[+-]?[0-9]+$/) { 1238 symbol = ARGV[1] 1239 times = ARGV[2] + 0 1240 } else { 1241 symbol = ARGV[2] 1242 times = ARGV[1] + 0 1243 } 1244 1245 if (times < 0) exit 1246 if (symbol == "") symbol = "-" 1247 s = sprintf("%*s", times, "") 1248 gsub(/ /, symbol, s) 1249 print s 1250 exit 1251 } 1252 ' "$@" 1253 } 1254 1255 # show a RULER-like width-measuring line 1256 ruler() { 1257 [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed -E \ 1258 's- {10}-····╵····│-g; s- -·-g; s-·····-····╵-' 1259 } 1260 1261 # SystemCTL; `sysctl` is 
already taken for a separate/unrelated app 1262 sctl() { systemctl "$@" 2>&1 | less -MKiCRS; } 1263 1264 # show a unique-looking SEParator line; useful to run between commands 1265 # which output walls of text 1266 sep() { 1267 [ "${1:-80}" -gt 0 ] && 1268 printf "\e[48;2;218;218;218m%${1:-80}s\e[0m\n" "" | sed 's- -·-g' 1269 } 1270 1271 # webSERVE files in a folder as localhost, using the port number given, or 1272 # port 8080 by default 1273 serve() { 1274 if [ -d "$1" ]; then 1275 printf "\e[7mserving files in %s\e[0m\n" "$1" >&2 1276 python3 -m http.server -d "$1" "${2:-8080}" 1277 else 1278 printf "\e[7mserving files in %s\e[0m\n" "${2:-$(pwd)}" >&2 1279 python3 -m http.server -d "${2:-$(pwd)}" "${1:-8080}" 1280 fi 1281 } 1282 1283 # SET DIFFerence sorts its 2 inputs, then finds lines not in the 2nd input 1284 setdiff() { 1285 # comm -23 <(sort "$1") <(sort "$2") 1286 # dash doesn't support the process-sub syntax 1287 (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0) 1288 } 1289 1290 # SET INtersection, sorts its 2 inputs, then finds common lines 1291 setin() { 1292 # comm -12 <(sort "$1") <(sort "$2") 1293 # dash doesn't support the process-sub syntax 1294 (sort "$1" | (sort "$2" | (comm -12 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0) 1295 } 1296 1297 # SET SUBtraction sorts its 2 inputs, then finds lines not in the 2nd input 1298 setsub() { 1299 # comm -23 <(sort "$1") <(sort "$2") 1300 # dash doesn't support the process-sub syntax 1301 (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0) 1302 } 1303 1304 # Show Files (and folders), coloring folders and links 1305 sf() { 1306 local arg 1307 local gap=0 1308 local options='-MKiCRS' 1309 1310 if [ $# -le 1 ]; then 1311 options='--header=1 -MKiCRS' 1312 fi 1313 1314 for arg in "${@:-.}"; do 1315 [ "${gap}" -gt 0 ] && printf "\n" 1316 printf "\e[7m%s\e[0m\n\n" "$(realpath "${arg}")" 1317 gap=1 1318 1319 ls -al --file-type --color=never --time-style iso "${arg}" | awk ' 1320 BEGIN { 1321 
drep = "\x1b[38;2;0;135;255m\x1b[48;2;228;228;228m&\x1b[0m" 1322 lrep = "\x1b[38;2;0;135;95m\x1b[48;2;228;228;228m&\x1b[0m" 1323 } 1324 1325 NR < 4 { next } 1326 (NR - 3) % 5 == 1 && (NR - 3) > 1 { print "" } 1327 1328 { 1329 gsub(/^(d[rwx-]+)/, drep) 1330 gsub(/^(l[rwx-]+)/, lrep) 1331 printf "%6d %s\n", NR - 3, $0 1332 } 1333 ' 1334 done | less ${options} 1335 } 1336 1337 # run apps in color-mode, using the popular option `--color=always` 1338 shine() { 1339 local cmd="$1" 1340 [ $# -gt 0 ] && shift 1341 "${cmd}" --color=always "$@" 1342 } 1343 1344 # skip the first n lines, or the 1st line by default 1345 skip() { tail -n +$(("${1:-1}" + 1)) "${2:--}"; } 1346 1347 # skip the last n lines, or the last line by default 1348 skiplast() { head -n -"${1:-1}" "${2:--}"; } 1349 1350 # SLOW/delay lines from the standard-input, waiting the number of seconds 1351 # given for each line, or waiting 1 second by default 1352 slow() { 1353 local seconds="${1:-1}" 1354 [ $# -gt 0 ] && shift 1355 ( 1356 IFS="$(printf "\n")" 1357 awk 1 "$@" | while read -r line; do 1358 sleep "${seconds}" 1359 printf "%s\n" "${line}" 1360 done 1361 ) 1362 } 1363 1364 # Show Latest Podcasts, using my tools `podfeed` and `si` 1365 slp() { 1366 local title 1367 title="Latest Podcast Episodes as of $(date +'%F %T')" 1368 podfeed -title "${title}" "$@" | si 1369 } 1370 1371 # emit the first line as is, sorting all lines after that, using the 1372 # `sort` command, passing all/any arguments/options to it 1373 sortrest() { 1374 awk -v sort="sort $*" ' 1375 FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1376 { gsub(/\r$/, "") } 1377 NR == 1 { print; fflush() } 1378 NR >= 2 { print | sort } 1379 ' 1380 } 1381 1382 # SORt Tab-Separated Values: emit the first line as is, sorting all lines after 1383 # that, using the `sort` command in TSV (tab-separated values) mode, passing 1384 # all/any arguments/options to it 1385 sortsv() { 1386 awk -v sort="sort -t \"$(printf '\t')\" $*" ' 1387 FNR == 1 { gsub(/^\xef\xbb\xbf/, 
"") } 1388 { gsub(/\r$/, "") } 1389 NR == 1 { print; fflush() } 1390 NR >= 2 { print | sort } 1391 ' 1392 } 1393 1394 # emit a line with the number of spaces given in it 1395 spaces() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" ""; } 1396 1397 # SQUeeze horizontal spaces and STOMP vertical gaps 1398 squomp() { 1399 local command='awk' 1400 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 1401 command='stdbuf -oL awk' 1402 fi 1403 1404 ${command} ' 1405 FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1406 /^\r?$/ { empty = 1; next } 1407 empty { if (n > 0) print ""; empty = 0 } 1408 1409 { 1410 gsub(/^ +| *\r?$/, "") 1411 gsub(/ *\t */, "\t") 1412 gsub(/ +/, " ") 1413 print; n++ 1414 } 1415 ' "$@" 1416 } 1417 1418 substr() { 1419 local command='awk' 1420 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 1421 command='stdbuf -oL awk' 1422 fi 1423 if [ $# -lt 2 ]; then 1424 printf "missing 1-based start index, and substring length\n" >&2 1425 exit 1 1426 fi 1427 1428 ${command} '{ print substr($0, '"$1"', '"$2"') }' 1429 } 1430 1431 # TAC Lines outputs input-lines in reverse order, last one first, and so on... 
tacl() {
    awk '
        { gsub(/\r$/, ""); lines[NR] = $0 }
        END { for (i = NR; i >= 1; i--) print lines[i] }
    ' "$@"
}

# Simulate the cadence of old-fashioned TELETYPE machines
teletype() {
    awk '
        {
            gsub(/\r$/, "")

            n = length($0)
            for (i = 1; i <= n; i++) {
                # assignment (not ==) is intentional: keep the exit status
                # of `sleep`, and stop early if it was interrupted
                if (code = system("sleep 0.015")) exit code
                printf "%s", substr($0, i, 1); fflush()
            }

            if (code = system("sleep 0.75")) exit code
            print ""; fflush()
        }

        # END { if (NR > 0 && code != 0) print "" }
    ' "$@"
}

# TINY GO Build Optimized: a common use-case for the tinygo compiler
tinygobo() { tinygo build -no-debug -opt=2 "$@"; }

# Timed Make, also showing max memory used
tm() {
    local f='real %e user %U sys %S mem %M exit %x'
    /usr/bin/time -f "$f" make "$@"
}

# show current date in a specific format
today() { date +'%Y-%m-%d %a %b %d'; }

# get the first n lines, or 1 by default
toline() { head -n "${1:-1}" "${2:--}"; }

# get the processes currently using the most cpu
topcpu() {
    local n="${1:-10}"
    # keep the `ps` header, sort the rest by the cpu column (3), descending
    [ "$n" -gt 0 ] && ps aux | awk '
        NR == 1 { print; fflush() }
        NR > 1 { print | "sort -rnk3,3" }
    ' | head -n "$(("$n" + 1))"
}

# get the processes currently using the most memory
topmemory() {
    local n="${1:-10}"
    # keep the `ps` header, sort the rest by the memory column (6), descending
    [ "$n" -gt 0 ] && ps aux | awk '
        NR == 1 { print; fflush() }
        NR > 1 { print | "sort -rnk6,6" }
    ' | head -n "$(("$n" + 1))"
}

# only keep UNIQUE lines, keeping them in their original order
unique() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    # the BEGIN block also drops repeated filename arguments, so the same
    # file given twice is only read once
    ${command} '
        BEGIN { for (i = 1; i < ARGC; i++) if (f[ARGV[i]]++) delete ARGV[i] }
        !c[$0]++
    ' "$@"
}

# fix lines, ignoring leading UTF-8_BOMs (byte-order-marks) on each input's
# first line, turning all end-of-line CRLF byte-pairs into single line-feeds,
# and ensuring each input's last line ends with a line-feed; trailing spaces
# are also ignored
unixify() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/ *\r?$/, ""); print }
    ' "$@"
}

# skip the first/leading n bytes, or none by default
unleaded() {
    # default n to 0: without it, a missing $1 made the arithmetic
    # expansion fail, instead of passing the input through unchanged
    tail -c +$(("${1:-0}" + 1)) "${2:--}"
}

# go UP n folders, or go up 1 folder by default
up() {
    if [ "${1:-1}" -le 0 ]; then
        cd .
    else
        cd "$(printf "%${1:-1}s" "" | sed 's- -../-g')" || return $?
    fi
}

# convert United States Dollars into CAnadian Dollars, using the latest
# official exchange rates from the bank of canada; during weekends, the
# latest rate may be from a few days ago; the default amount of usd to
# convert is 1, when not given
usd2cad() {
    local url
    local site='https://www.bankofcanada.ca/valet/observations/group'
    local csv_rates="${site}/FX_RATES_DAILY/csv"
    url="${csv_rates}?start_date=$(date -d '3 days ago' +'%Y-%m-%d')"
    # the sed call strips _ digit-separators from the amount given
    curl -s "${url}" | awk -F, -v amount="$(echo "${1:-1}" | sed 's-_--g')" '
        /USD/ { for (i = 1; i <= NF; i++) if($i ~ /USD/) j = i }
        END { gsub(/"/, "", $j); if (j != 0) printf "%.2f\n", amount * $j }
    '
}

# View Nice Table / Very Nice Table; uses my tool `ncol`
# vnt() {
#     ncol "$@" | awk '
#         (NR - 1) % 5 == 1 { print "" }
#         { printf "%6d %s\n", NR - 1, $0 }
#     ' | { less -MKiCRS --header=1 2> /dev/null || cat; }
# }

# View Nice Table / Very Nice Table; uses my tool `ncol`
# vnt() {
#     awk '{ printf "%d\t%s\n", NR - 1, $0 }' "$@" | ncol | awk '
#         NR == 1 || (NR - 1) % 5 == 0 {
#             gsub(/\x1b\[0m/, "\x1b[0m\x1b[4m")
#             printf("\x1b[4m%s\x1b[0m\n", $0)
#             next
#         }
#         1
#     ' | { less -MKiCRS --header=1 2> /dev/null || cat; }
# }

# View Nice Table / Very Nice Table; uses my tool `ncol`
vnt() {
    awk 1 "$@" | ncol | awk '
        NR == 1 || (NR - 1) % 5 == 0 {
            gsub(/\x1b\[0m/, "\x1b[0m\x1b[4m")
            printf("%6d \x1b[4m%s\x1b[0m\n", NR - 1, $0)
            next
        }
        { printf("%6d %s\n", NR - 1, $0) }
    ' | { less -MKiCRS --header=1 2> /dev/null || cat; }
}

# What Are These (?) shows what the names given to it are/do
wat() {
    local arg
    local gap=0
    local less_options='-MKiCRS'

    if [ $# -eq 0 ]; then
        echo "$0"
        return 0
    fi

    if [ $# -lt 2 ]; then
        less_options='-MKiCRS --header=1'
    fi

    for arg in "$@"; do
        [ "${gap}" -gt 0 ] && printf "\n"
        gap=1
        printf "\e[7m%-80s\e[0m\n" "${arg}"

        # resolve chains of aliases down to their final target
        while alias "${arg}" > /dev/null 2> /dev/null; do
            arg="$(alias "${arg}" | sed -E "s-^[^=]+=['\"](.+)['\"]\$-\\1-")"
        done

        if echo "${arg}" | grep -q ' '; then
            printf "%s\n" "${arg}"
            continue
        fi

        # NOTE(review): `declare -f` is a bash/zsh-ism; presumably this
        # command is not expected to work under plain dash — confirm
        if declare -f "${arg}"; then
            continue
        fi

        if which "${arg}" > /dev/null 2> /dev/null; then
            which "${arg}"
            continue
        fi

        printf "\e[38;2;204;0;0m%s not found\e[0m\n" "${arg}"
    done | { less -MKiCRS ${less_options} 2> /dev/null || cat; }
}

# find all WEB/hyperLINKS (https:// and http://) in the input text
weblinks() {
    local arg
    local re='https?://[A-Za-z0-9+_.:%-]+(/[A-Za-z0-9+_.%/,#?&=-]*)*'
    local grep_cmd='grep'
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        grep_cmd='grep --line-buffered'
    fi

    for arg in "${@:--}"; do
        ${grep_cmd} -i -E -o "${re}" "${arg}"
    done
}

# recursively find all files with trailing spaces/CRs
whichtrails() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        rg --line-buffered -c '[ \r]+$' "${@:-.}"
    else
        rg -c '[ \r]+$' "${@:-.}"
    fi
}

# turn all wsl/unix-style full-paths into WINdows-style full-PATHS;
# the \u uppercasing escape is a GNU-sed extension
winpaths() { sed -E 's-/mnt/(.)/-\u\1:/-' "$@"; }

# XARGS Lines, runs `xargs` using whole lines as extra arguments
xargsl() {
    # NUL-terminate each (BOM/CR-cleaned) line, so xargs -0 treats each
    # whole line as a single argument, spaces and all
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -v ORS='\000' '
            FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
            { gsub(/\r$/, ""); print }
        ' | stdbuf -oL xargs -0 "$@"
    else
        awk -v ORS='\000' '
            FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
            { gsub(/\r$/, ""); print }
        ' | xargs -0 "$@"
    fi
}

# Youtube Audio Player
yap() {
    local url
    # some youtube URIs end with extra playlist/tracker parameters
    url="$(echo "$1" | sed 's-&.*--')"
    mpv "$(yt-dlp -x --audio-format best --get-url "${url}" 2> /dev/null)"
}

# show a calendar for the current YEAR, or for the year given
year() {
    {
        # show the current date/time center-aligned
        printf \
            "%21s\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n\n" \
            "" "$(date +'%a %b %d %Y')" "$(date +'%H:%M')"
        # debian linux has a different `cal` app which highlights the day
        if [ -e /usr/bin/ncal ]; then
            # fix debian/ncal's weird way to highlight the current day
            ncal -C -y "$@" | sed -E \
                's/_\x08(.+)_\x08([^ ]+)/\x1b\[7m\1\2\x1b\[0m/'
        else
            cal -y "$@"
        fi
    } | { less -MKiCRS 2> /dev/null || cat; }
}

# show the current date in the YYYY-MM-DD format
ymd() { date +'%Y-%m-%d'; }