File: clamor.sh
   1 #!/bin/sh
   2 
   3 # The MIT License (MIT)
   4 #
   5 # Copyright © 2024 pacman64
   6 #
   7 # Permission is hereby granted, free of charge, to any person obtaining a copy
   8 # of this software and associated documentation files (the “Software”), to deal
   9 # in the Software without restriction, including without limitation the rights
  10 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  11 # copies of the Software, and to permit persons to whom the Software is
  12 # furnished to do so, subject to the following conditions:
  13 #
  14 # The above copyright notice and this permission notice shall be included in
  15 # all copies or substantial portions of the Software.
  16 #
  17 # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  18 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  19 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  20 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  21 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  23 # SOFTWARE.
  24 
  25 
  26 # clamor
  27 # Command-Line Augmentation Module, Optional Refinements
  28 #
  29 # Note: sourcing `shshsh` and `clam` is better than sourcing this script.
  30 #
  31 # Many extras beyond what `clam` offers to interactive shell commands.
  32 #
  33 # This is a collection of arguably useful shell functions and shortcuts:
  34 # some of these can be real time/effort savers, letting you concentrate
  35 # on getting things done.
  36 #
  37 # You're supposed to `source` this script, so its definitions stay for
  38 # your whole shell session: for that, you can run `source clamor` or
  39 # `. clamor` (no quotes either way), either directly or at shell startup.
  40 #
  41 # Most of these shell functions rely on tools which are almost always
  42 # available. The functions depending on tools which are least-likely
  43 # pre-installed are
  44 #
  45 #   cancur       curl
  46 #   cgt          go
  47 #   crg          rg/ripgrep
  48 #   cs           bat/batcat
  49 #   csf          bat/batcat
  50 #   csvql        textql
  51 #   deutf16      iconv
  52 #   dic          curl
  53 #   dict         curl
  54 #   fetch        curl
  55 #   fetchjson    curl
  56 #   fz           fzf
  57 #   get          curl
  58 #   getjson      curl
  59 #   gobs         go
  60 #   gobwingui    go
  61 #   godep        go
  62 #   goimp        go
  63 #   h            hat          my own `HAndy Tools` app; commented out
  64 #   hmr          bat/batcat
  65 #   j2           python3      commented out
  66 #   jc           python3, jc  uses non-built-in package; commented out
  67 #   joke         curl
  68 #   lab          book         my own script; commented out
  69 #   plin         mpv
  70 #   pp           pypy3
  71 #   qurl         curl
  72 #   rom          micro
  73 #   serve        python3
  74 #   tsvql        textql
  75 #   unutf16      iconv
  76 #   weather      finger
  77 #   wheretrails  rg/ripgrep
  78 #   whichtrails  rg/ripgrep
  79 #   yap          mpv, yt-dlp
  80 #   yd           yt-dlp
  81 #   ydaac        yt-dlp
  82 #   ydmp4        yt-dlp
  83 #   yturi        yt-dlp
  84 #
  85 #
  86 # Full list of funcs/commands added
  87 #
  88 # a            run Awk
  89 # allfiles     show all files in a folder, digging recursively
  90 # allfolders   show all folders in a folder, digging recursively
  91 # args         emit each argument given to it on its own line
  92 # avoid        ignore lines matching the regex given
  93 # banner       show words/arguments given as a styled line
  94 # bar          emit a styled bar to visually separate different outputs
  95 # barrier      emit styled bars to visually separate different outputs
  96 # begincsv     emit a line with comma-separated values, then emit stdin lines
  97 # begintsv     emit a line with tab-separated values, then emit stdin lines
  98 # bh           Breathe Header makes lines breathe starting from the 2nd one
  99 # bigfiles     dig into a folder recursively for files of at least n bytes
 100 # bl           Breathe Lines regularly adds extra empty lines every few
 101 # blawk        process BLocks of non-empty lines with AWK
 102 # bleak        Blue LEAK helps you debug pipes, via colored stderr lines
 103 # blow         expand tabs into spaces, using the tabstop-width given
 104 # blowtabs     expand tabs into spaces, using the tabstop-width given
 105 # breathe      Breathe regularly adds extra empty lines every few
 106 # c            run `cat`, which is useful, despite claims to the contrary
 107 # cancur       get the latest exchange rates from the bank of canada as TSV
 108 # cap          limit lines up to their first n bytes, line-feed excluded
 109 # cawk         Comma AWK runs AWK in CSV mode
 110 # cgt          Colored Go Test runs `go test`, making problems stand out
 111 # chopdecs     ignore trailing decimal zeros in numbers
 112 # choplf       ignore last byte if it's a line-feed
 113 # cls          CLear Screen
 114 # coco         COunt COndition, uses an AWK condition as its first arg
 115 # crep         Colored Regular Expression Printer runs `grep` in color-mode
 116 # crg          Colored RipGrep
 117 # cs           Color Syntax
 118 # csf          Color Syntax for a whole Folder, even its subfolders
 119 # csvql        query CSV (comma-separated values) data via sQLite
 120 # debase64     DEcode BASE64 bytes
 121 # debom        ignore leading utf-8 BOM markers for each input, when present
 122 # decap        DECAPitate emits the first line, then runs a cmd with the rest
 123 # decrlf       turn all CRLF byte-pairs into single line-feed bytes
 124 # dedent       ignore the first n leading spaces per line, or 4 by default
 125 # dedup        deduplicate lines, keeping them in their original order
 126 # degz         DEcompress GZip data
 127 # degzip       DEcompress GZIP data
 128 # delay        delay each input line, waiting the number of seconds given
 129 # detab        expand tabs into spaces, using the tabstop-width given
 130 # detargz      extract .tar.gz files/archives
 131 # deutf16      turn UTF-16-encoded (either kind) plain-text into UTF-8
 132 # dic          lookup words using an online dictionary
 133 # dict         lookup words using an online dictionary
 134 # div          divide 2 numbers both ways/orders
 135 # dt           show the current Date and Time, and the 3 `current` months
 136 # each         run command using each stdin line as a stdin-redirected file
 137 # eg           Extended Grep
 138 # emptyfiles   dig into a folder recursively to show all empty files in it
 139 # emptyfolders  dig into a folder recursively to show all empty folders in it
 140 # enum         enumerate all lines, starting from 1
 141 # enum1        enumerate all lines, starting from 1
 142 # enum0        enumerate all lines, starting from 0
 143 # except       ignore lines matching the regex given
 144 # f            show all Files in a folder, digging recursively
 145 # fail         show an error message and fail
 146 # fetch        get/fetch bytes from the URI given
 147 # fetchjson    asks webserver at URI given to respond with a JSON payload
 148 # findtargets  show/find all targets from makefiles
 149 # first        keep only the first n lines, or just the first 1 by default
 150 # firstbytes   keep only the first n bytes, or just the first 1 by default
 151 # firstlines   keep only the first n lines, or just the first 1 by default
 152 # flawk        First Line AWK, and lines satisfying the optional condition
 153 # forever      keep (re)running the command given, until forced to quit
 154 # fz           run the FuZzy finder (fzf) in multi-choice mode, and more...
 155 # g            run Grep (extended)
# get          GET/fetch data from the filename or URI given
# gethelp      show help for the command given, using common help options
# getjson      asks webserver at URI given to respond with a JSON payload
 160 # gi           run Grep (extended) in case-Insensitive mode
 161 # gleak        Green LEAK helps you debug pipes, via colored stderr lines
 162 # glue         join all input lines using the separator/joiner string given
 163 # gobs         GO Build Stripped
 164 # gobwingui    GO Build WINdows GUI
 165 # godep        GO DEPendencies from the folder given
 166 # goimp        GO IMPorts from the folder given
 167 # gsub         run the awk function of the same name
 168 # h            run my own `hat` (HAndy Tools) cmd-line app; disabled
 169 # hawk         Highlight lines matching the AWK condition given
 170 # helpless     view most apps' help messages using `less`
 171 # hl           Header Less runs `less` and always shows the 1st line on top
 172 # hmr          Help Me Remember these commands
 173 # hv           Header View runs `less` and always shows the 1st line on top
 174 # ig           run case-Insensitive Grep (extended)
 175 # indent       indent text the number of spaces given, or 4 by default
 176 # j2           reformat JSON into multiple lines with 2-space indent levels
 177 # jc           shortcut to run `jc`, a useful data-to-JSON python module
 178 # joke         show a `Dad Joke` from the web, sometimes even a funny one
 179 # l            run `less`, enabling line numbers, scrolling, and ANSI styles
 180 # lab          layout content like a book; disabled; uses my own `book` app
 181 # largs        run `xargs` taking the extra arguments from whole stdin lines
 182 # last         keep only the last n lines, or just the last 1 by default
 183 # lastbytes    keep only the last n bytes, or just the last 1 by default
 184 # lastlines    keep only the last n lines, or just the last 1 by default
 185 # leak         emit input lines both to stdout and stderr, to debug pipes
 186 # leako        LEAK Orange helps you debug pipes, via colored stderr lines
 187 # least        run `less`, enabling line numbers, scrolling, and ANSI styles
 188 # lf           List Files, coloring folders and links
 189 # lh           Less with Header runs `less` and always shows the 1st line
 190 # lineup       regroup adjacent lines into n-item tab-separated lines
 191 # listfiles    list files, coloring folders and links
 192 # loco         LOwercase line, check (awk) COndition
 193 # loser        LOcal SERver webserves files in a folder as localhost
 194 # lower        lowercase all lines
 195 # m            case-sensitively match the extended regex given
 196 # match        case-sensitively match the extended regex given
 197 # merrge       redirect stderr into stdout, without any ugly finger-dancing
 198 # n            Number all lines, starting from 1
 199 # n0           Number all lines, starting from 0, instead of 1
 200 # n1           Number all lines, starting from 1
 201 # narrow       try to limit lines up to n symbols per line, or 80 by default
 202 # nfs          Nice File Sizes
 203 # nil          emit nothing to stdout and/or discard everything from stdin
 204 # noerr        ignore stderr, without any ugly finger-dancing
 205 # now          show the current date and time
 206 # nth          keep only the n-th line from the input, if it has enough lines
 207 # o            unify Output, by merging stderr into stdout
 208 # oleak        Orange LEAK helps you debug pipes, via colored stderr lines
 209 # p            Plain ignores ANSI terminal styling
 210 # pawk         Print AWK expressions
 211 # plain        ignore all ANSI styles
 212 # plainend     reset ANSI styles at the end of each line
 213 # plin         PLay INput handles playable/multimedia streams from stdin
 214 # pp           find/run the script given with PyPy, to speed it up
 215 # qurl         Quiet cURL
 216 # reprose      reflow/trim lines of prose (text) for improved legibility
 217 # repstr       REPeat a STRing n times, or 80 times by default
 218 # rom          Read-Only Micro (text editor)
 219 # runin        run a command in the folder given
 220 # s            run Sed (extended)
 221 # sep          emit a unique-looking separator line
 222 # serve        start a local webserver from the current folder
 223 # setdiff      find the SET DIFFerence, after sorting its 2 inputs
 224 # setin        SET INtersection, sorts its 2 inputs, then finds common lines
 225 # setsub       find the SET SUBtraction, after sorting its 2 inputs
 226 # showrun      show a command, then run it
 227 # skip         skip the first n lines, or the first 1 by default
 228 # skipfirst    skip the first n lines, or the first 1 by default
 229 # skiplast     ignore the last n lines, or the very last 1 by default
 230 # skiplines    skip the first n lines, or the first 1 by default
 231 # smallfiles   dig into a folder recursively for files under n bytes
 232 # sosi         reverse-SOrted SIzes of the files given
 233 # squeeze      aggressively get rid of extra spaces on every line
 234 # ssv2tsv      turn each run of 1+ spaces into a single tab
 235 # stripe       underline every 5th line
 236 # t            Trim trailing spaces and carriage-returns
 237 # tawk         Tab AWK, runs AWK using tab as its IO item-separator
 238 # today        show current date in a way friendly both to people and tools
 239 # topfiles     show all files directly in a folder, without recursion
 240 # topfolders   show all folders directly in a folder, without recursion
 241 # trim         get rid of leading and trailing spaces on lines
 242 # trimprefix   ignore the prefix given from input lines which start with it
 243 # trimsuffix   ignore the suffix given from input lines which end with it
 244 # trimtrail    get rid of trailing spaces on lines
 245 # trimtrails   get rid of trailing spaces on lines
 246 # try          try running the command given
 247 # tsawk        TimeStamp lines satisfying AWK condition, ignoring the rest
 248 # tsvql        query TSV (tab-separated values) data via sQLite
 249 # u            Unixify ensures plain-text lines are unix-like
 250 # unbase64     decode base64 bytes
 251 # unique       deduplicate lines, keeping them in their original order
 252 # unixify      ensure plain-text lines are unix-like
 253 # uncrlf       turn all CRLF byte-pairs into single line-feed bytes
 254 # ungz         UNcompress GZip data
 255 # ungzip       UNcompress GZIP data
 256 # untab        expand tabs into spaces, using the tabstop-width given
 257 # untargz      extract .tar.gz files/archives
 258 # unutf16      turn UTF-16-encoded (either kind) plain-text into UTF-8
 259 # up           go UP n folders, or go up 1 folder by default
 260 # upsidedown   emit input lines in reverse order, or last to first
 261 # ut           Underline Table underlines the 1st line, then 1 line every 5
 262 # v            View runs `less`, enabling scrolling and ANSI styles
 263 # verdict      run a command, showing its success/failure right after
 264 # vh           View with Header runs `less` and always shows the 1st line
 265 # wat          What Are These (?) shows what the names given to it are/do
 266 # weather      show a short-term WEATHER forecast for the place/city given
 267 # wheretrails  find all files which have trailing spaces/CRs on their lines
 268 # whichtrails  find all files which have trailing spaces/CRs on their lines
 269 # wit          What Is This (?) shows what the names given to it are/do
 270 # words        emit each word-like item from each input line on its own line
 271 # yap          Youtube Audio Player
 272 # yd           Youtube Download
 273 # ydaac        Youtube Download AAC audio
 274 # ydmp4        Youtube Download MP4 video
 275 # year         show a full calendar for the current year, or the year given
 276 # ymd          show the current date in the YYYY-MM-DD format
 277 # yturi        YouTube URI turns youtube-page URIs into direct media URIs
 278 
 279 
# handle help options
# NOTE(review): when this file is sourced (its intended use), "$1" is the
# caller's first positional parameter and "$0" is the shell's own name,
# so the help text only self-extracts when run directly — confirm intended
case "$1" in
    -h|--h|-help|--help)
        # show help message, extracting the info-comment at the start
        # of this file, and quit
        awk '/^# +clam/, /^$/ { gsub(/^# ?/, ""); print }' "$0"
        exit 0
    ;;
esac
 289 
 290 
 291 # use a simple shell prompt
 292 # PS1="\$ "
 293 # PS2="> "
 294 
 295 # use a simple shell prompt, showing the current folder in the title
 296 # PS1="\[\e]0;\w\a\]\$ "
 297 # PS2="> "
 298 
 299 # prevent `less` from saving searches/commands
 300 # LESSHISTFILE="-"
 301 # LESSSECURE=1
 302 
 303 # prevent the shell from saving commands
 304 # unset HISTFILE
 305 
 306 
 307 # dashed aliases of multi-word commands defined later
 308 alias all-files='allfiles'
 309 alias all-folders='allfolders'
 310 alias begin-csv='begincsv'
 311 alias begin-tsv='begintsv'
 312 alias blow-tabs='blowtabs'
 313 alias chop-decs='chopdecs'
 314 alias chop-lf='choplf'
 315 alias empty-files='emptyfiles'
 316 alias empty-folders='emptyfolders'
 317 alias fetch-json='fetchjson'
 318 alias first-bytes='firstbytes'
 319 alias first-lines='firstlines'
 320 alias get-help='gethelp'
 321 alias get-json='getjson'
 322 alias help-for='gethelp'
 323 alias help-less='helpless'
 324 alias last-bytes='lastbytes'
 325 alias last-lines='lastlines'
 326 alias line-up='lineup'
 327 alias list-files='listfiles'
 328 alias plain-end='plainend'
 329 alias rep-str='repstr'
 330 alias run-in='runin'
 331 alias set-diff='setdiff'
 332 alias set-in='setin'
 333 alias set-sub='setsub'
 334 alias show-run='showrun'
 335 alias skip-first='skipfirst'
 336 alias skip-last='skiplast'
 337 alias skip-last-lines='skiplastlines'
 338 alias skip-lines='skiplines'
 339 alias small-files='smallfiles'
 340 alias ssv-to-tsv='ssv2tsv'
 341 alias top-files='topfiles'
 342 alias top-folders='topfolders'
 343 alias trim-prefix='trimprefix'
 344 alias trim-suffix='trimsuffix'
 345 alias trim-trail='trimtrail'
 346 alias trim-trails='trimtrails'
 347 alias ts-awk='tsawk'
 348 alias upside-down='upsidedown'
 349 alias where-trails='wheretrails'
 350 alias which-trails='whichtrails'
 351 alias yd-aac='ydaac'
 352 alias yd-mp4='ydmp4'
 353 alias yt-uri='yturi'
 354 
 355 # undashed aliases for commands defined later
 356 alias bocler='cancur'
 357 
 358 
 359 # run Awk
 360 a() {
 361     awk "$@"
 362 }
 363 
 364 # show all files in a folder, digging recursively
 365 allfiles() {
 366     local arg
 367     for arg in "${@:-.}"; do
 368         find "${arg}" -type f
 369     done
 370 }
 371 
 372 # show all folders in a folder, digging recursively
 373 allfolders() {
 374     local arg
 375     for arg in "${@:-.}"; do
 376         find "${arg}" -type d | awk 'NR > 1'
 377     done
 378 }
 379 
 380 # emit each argument given as its own line of output
 381 args() {
 382     awk 'BEGIN { for (i = 1; i < ARGC; i++) print ARGV[i]; exit }' "$@"
 383 }
 384 
 385 # avoid lines matching the regex given, or avoid empty(ish) lines by default
 386 avoid() {
 387     local regex="${1:-[^ *]\r?$}"
 388     shift
 389     grep -E -v "${regex}" "$@"
 390 }
 391 
 392 # show a line which clearly labels part of a shell session
 393 banner() {
 394     # printf "\x1b[7m%-80s\x1b[0m\n" "$*"
 395     printf "\x1b[48;5;253m%-80s\x1b[0m\n" "$*"
 396 }
 397 
 398 # emit a colored bar which can help visually separate different outputs
 399 bar() {
 400     printf "\x1b[48;5;253m%${1:-80}s\x1b[0m\n" " "
 401 }
 402 
# emit styled bars to visually separate different outputs: printf reuses
# its format once per spare argument, so the 4 space arguments yield 4
# terminal-wide bars
# NOTE(review): `tput cols` needs a valid TERM/terminal — confirm this
# is only used interactively
barrier() {
    printf "\x1b[48;5;253m%$(tput cols)s\x1b[0m\n" " " " " " " " "
}
 407 
 408 # emit a line with comma-separated values, then emit all stdin lines
 409 begincsv() {
 410     awk 'BEGIN {
 411         for (i = 1; i < ARGC; i++) {
 412             if (i > 1) printf ","
 413             printf "%s", ARGV[i]
 414             delete ARGV[i]
 415         }
 416         if (ARGC > 0) printf "\n"
 417     }
 418     1' "$@"
 419 }
 420 
 421 # emit a line with tab-separated values, then emit all stdin lines
 422 begintsv() {
 423     awk 'BEGIN {
 424         for (i = 1; i < ARGC; i++) {
 425             if (i > 1) printf "\t"
 426             printf "%s", ARGV[i]
 427             delete ARGV[i]
 428         }
 429         if (ARGC > 0) printf "\n"
 430     }
 431     1' "$@"
 432 }
 433 
 434 # Breathe Header: add an empty line after the first one (the header),
 435 # then separate groups of 5 lines with empty lines between them
 436 bh() {
 437     awk '(NR - 1) % 5 == 1 && NR > 1 { print "" } 1' "$@"
 438 }
 439 
 440 # dig into a folder recursively for files of at least n bytes
 441 bigfiles() {
 442     find "${2:-.}" -type f -size "${1:-1000000}"c -o -size +"${1:-1000000}"c
 443 }
 444 
 445 # Breathe Lines: separate groups of 5 lines with empty lines
 446 bl() {
 447     awk 'NR % 5 == 1 && NR != 1 { print "" } 1' "$@"
 448 }
 449 
# process BLocks of non-empty lines with AWK: RS='' switches awk into
# paragraph mode, where each record is a run of non-empty lines
# NOTE(review): `-F=''` reaches awk as `-F=`, which sets the field
# separator to a literal `=` sign, not to an empty string — confirm
# that's intended, since `-F ''` would be a different choice
blawk() {
    awk -F='' -v RS='' "$@"
}
 454 
# Blue LEAK emits/tees input both to stdout and stderr, coloring blue what
# it emits to stderr using an ANSI-style; this cmd is useful to `debug`
# pipes involving several steps
#
# the gsub re-arms the blue foreground color right after any ANSI-reset
# already inside the line, so the whole stderr copy stays blue; both
# streams are flushed after every line, to keep the copies in sync
bleak() {
    awk '
    {
        print
        fflush()
        gsub(/\x1b\[0m/, "\x1b[0m\x1b[38;5;26m")
        printf "\x1b[38;5;26m%s\x1b[0m\n", $0 > "/dev/stderr"
        fflush("/dev/stderr")
    }' "$@"
}
 468 
 469 # expand tabs into spaces using the tabstop given
 470 blow() {
 471     local tabstop="${1:-4}"
 472     shift
 473     expand -t "${tabstop}" "$@"
 474 }
 475 
 476 # expand tabs into spaces using the tabstop given
 477 blowtabs() {
 478     local tabstop="${1:-4}"
 479     shift
 480     expand -t "${tabstop}" "$@"
 481 }
 482 
 483 # separate groups of 5 lines with empty lines, making text-rows much
 484 # easier to follow/eye-scan along, especially with tall walls of text
 485 breathe() {
 486     awk 'NR % 5 == 1 && NR != 1 { print "" } 1' "$@"
 487 }
 488 
 489 # `cat` can be useful, despite claims to the contrary
 490 c() {
 491     cat "$@"
 492 }
 493 
# CANadian CURrencies emits the Bank Of Canada's Latest Exchange Rates as a
# 2-line table of tab-separated values, where the first line is the header
# NOTE(review): `date -d '3 days ago'` is GNU-style — confirm BSD/macOS
# systems are out of scope
cancur() {
    local b
    b='https://www.bankofcanada.ca/valet/observations/group/FX_RATES_DAILY/'
    # starting `3 days ago` ensures this works even on weekends, while
    # minimizing the data transmitted
    curl -s "${b}csv?start_date=$(date -d '3 days ago' +'%Y-%m-%d')" |
    # pick the header line, along with the last one, turning CSV into TSV
    awk '/^"date"/; END { print }' | tr -d '"' | tr ',' '\t' |
    # simplify/change most column names
    sed 's-FX--g; s-CAD--g'
}
 507 
 508 # limit lines up to their first n bytes (80 by default), line-feed excluded
 509 cap() {
 510     local n="${1:-80}"
 511     shift
 512     awk -v n="${n}" '{ print substr($0, 1, n) }' "$@"
 513 }
 514 
# Comma AWK: run awk in CSV mode
# NOTE(review): the --csv option needs a recent GNU awk (5.3+) — confirm
# the target systems ship one
cawk() {
    awk --csv "$@"
}
 519 
# Colored Go Test on the folder given, or on the current folder by
# default: `ok` lines turn green, `FAIL` lines red, and `?` (no tests)
# lines gray; any other output line is dropped, since no awk rule
# matches it, which keeps only the per-package verdicts
cgt() {
    go test "${1:-.}" 2>&1 | awk '
        /^ok/ { printf "\x1b[38;5;29m%s\x1b[0m\n", $0; fflush() }
        /^[-]* ?FAIL/ { printf "\x1b[38;5;1m%s\x1b[0m\n", $0; fflush() }
        /^\?/ { printf "\x1b[38;5;249m%s\x1b[0m\n", $0; fflush() }'
}
 527 
 528 # ignore trailing decimal zeros in numbers
 529 chopdecs() {
 530     awk '{
 531         for (i = 1; i <= NF; i++) {
 532             gsub(/(\.[0-9]+[1-9]+)0+$/, "&1")
 533             gsub(/([0-9]+)\.0*$/, "&1")
 534         }
 535         print
 536     }' "$@"
 537 }
 538 
 539 # ignore final life-feed from text, if it's the very last byte
 540 choplf() {
 541     awk 'NR > 1 { print "" } { printf "%s", $0 }' "$@"
 542 }
 543 
 544 # CLear Screen
 545 cls() {
 546     clear
 547 }
 548 
 549 # COunt COndition: count how many times the AWK expression given is true
 550 coco() {
 551     local cond="${1:-1}"
 552     shift
 553     awk "${cond} { c++ } END { print c }" "$@"
 554 }
 555 
 556 # Colored Regular Expression Printer runs `grep` to show colored matches
 557 crep() {
 558     grep --color=always "$@"
 559 }
 560 
 561 # Colored RipGrep: ensures app `rg` emits colors when piped
 562 crg() {
 563     rg --color=always "$@"
 564 }
 565 
# Color Syntax: run syntax-coloring app `bat` without line-wrapping, then
# page the output with `less`, keeping all the ANSI colors
cs() {
    local cmd="bat"
    # debian linux uses a different name for the `bat` app
    if [ -e "/usr/bin/batcat" ]; then
        cmd="batcat"
    fi

    # the sed step remaps color 70 to color 28 in the output
    # NOTE(review): presumably swapping one of the theme's greens for a
    # darker shade — confirm the intent
    "$cmd" --style=plain,header,numbers --theme='Monokai Extended Light' \
        --wrap=never --color=always "$@" |
    sed 's-\x1b\[38;5;70m-\x1b\[38;5;28m-g' | less -JMKiCRS
}
 578 
# Color Syntax for a whole Folder: syntax-color every file found in the
# folder given (the current one by default), digging recursively, then
# page the combined output with `less`
csf() {
    local cmd="bat"
    # debian linux uses a different name for the `bat` app
    if [ -e "/usr/bin/batcat" ]; then
        cmd="batcat"
    fi

    # -print0 with xargs --null keeps filenames with spaces intact
    find "${1:-.}" -type f -print0 | xargs --null "$cmd" \
        --style=plain,header,numbers --theme='Monokai Extended Light' \
        --wrap=never --color=always |
    sed 's-\x1b\[38;5;70m-\x1b\[38;5;28m-g' | less -JMKiCRS
}
 592 
 593 # query CSV (comma-separated values) data via sQLite
 594 csvql() {
 595     local q="${1:-select * from sqlite_master;}"
 596     shift
 597     textql -header -output-header -sql "${q}" "$@"
 598 }
 599 
 600 # DEcode BASE64 bytes
 601 debase64() {
 602     base64 -d "$@"
 603 }
 604 
 605 # ignore leading utf-8 BOM markers for each input, when present
 606 debom() {
 607     awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@"
 608 }
 609 
 610 # DECAPitate emits the first line, then runs a cmd with the rest
 611 decap() {
 612     local first
 613     # change IFS to keep all spaces from the first line
 614     IFS='' read -r first
 615     printf "%s\n" "${first}"
 616     "$@"
 617 }
 618 
 619 # DECAPitate emits the first n lines to stderr, and the rest to stdout;
 620 # the name is a reference to the standard cmd-line app `head`, which
 621 # emits only the first n lines to stdout
 622 # decap() {
 623 #     local n="${1:-1}"
 624 #     shift
 625 
 626 #     awk -v n="${n}" '
 627 #         BEGIN {
 628 #             if (n !~ /^-?[0-9]+$/) {
 629 #                 fmt = "leading arg %s isn'\''t a valid line-count\n"
 630 #                 printf fmt, n > "/dev/stderr"
 631 #                 exit 1
 632 #             }
 633 #         }
 634 
 635 #         NR <= n { print > "/dev/stderr"; next }
 636 #         1' "$@"
 637 # }
 638 
 639 # turn all CRLF byte-pairs into single line-feed bytes
 640 decrlf() {
 641     cat "$@" | sed 's-\r$--'
 642 }
 643 
 644 # ignore up to n leading spaces for each line, or up to 4 by default
 645 dedent() {
 646     local upto="${1:-4}"
 647     shift
 648     awk "{ gsub(/^ {0,${upto}}/, \"\"); print }" "$@"
 649 }
 650 
 651 # DEDUPlicate prevents lines from appearing more than once
 652 dedup() {
 653     awk '!c[$0]++' "$@"
 654 }
 655 
 656 # DEDUPlicatE prevents lines from appearing more than once
 657 dedupe() {
 658     awk '!c[$0]++' "$@"
 659 }
 660 
 661 # DEcompress GZip data
 662 degz() {
 663     gzip -d "$@"
 664 }
 665 
 666 # DEcompress GZIP data
 667 degzip() {
 668     gzip -d "$@"
 669 }
 670 
 671 # delay each input line, waiting the number of seconds given, or wait
 672 # 1 second before each line by default
 673 # delay() {
 674 #     IFS='' # keep all spaces from lines
 675 #     local delay="${1:-1}"
 676 #     shift
 677 
 678 #     local line
 679 #     awk 1 "$@" | while read -r line; do
 680 #         sleep "${delay}"
 681 #         printf "%s\n" "${line}"
 682 #     done
 683 # }
 684 
 685 # expand tabs into spaces using the tabstop given
 686 detab() {
 687     local tabstop="${1:-4}"
 688     shift
 689     expand -t "${tabstop}" "$@"
 690 }
 691 
 692 # extract .tar.gz files/archives
 693 detargz() {
 694     tar -xvzf "$@"
 695 }
 696 
 697 # turn UTF-16-encoded (either kind) plain-text into UTF-8
 698 deutf16() {
 699     cat "$@" | iconv -f UTF-16 -t UTF-8
 700 }
 701 
# DICtionary definitions for the words given, using the online service
# at dict.org, via curl's dict:// protocol support
#
# the awk filter styles the protocol's status lines: `151` lines (which
# introduce each definition) turn blue, other 3-digit status lines turn
# gray, and everything else passes through unchanged
dic() {
    curl -s "dict://dict.org/d:$*" | awk '
        /^151 / { printf "\x1b[38;5;4m%s\x1b[0m\n", $0; next }
        /^[1-9][0-9]{2} / { printf "\x1b[38;5;244m%s\x1b[0m\n", $0; next }
        1'
}
 709 
# DICTionary definitions for the words given, using the online service
# at dict.org, via curl's dict:// protocol support
#
# the awk filter styles the protocol's status lines: `151` lines (which
# introduce each definition) turn blue, other 3-digit status lines turn
# gray, and everything else passes through unchanged
dict() {
    curl -s "dict://dict.org/d:$*" | awk '
        /^151 / { printf "\x1b[38;5;4m%s\x1b[0m\n", $0; next }
        /^[1-9][0-9]{2} / { printf "\x1b[38;5;244m%s\x1b[0m\n", $0; next }
        1'
}
 717 
 718 # divide 2 numbers both ways, also showing their proper complement
 719 div() {
 720     awk -v x="${1:-1}" -v y="${2:-1}" '
 721         BEGIN {
 722             gsub(/_/, "", x)
 723             gsub(/_/, "", y)
 724             x = x + 0
 725             y = y + 0
 726             min = x < y ? x : y
 727             max = x > y ? x : y
 728             print x/y
 729             print y/x
 730             print 1 - min / max
 731             exit
 732         }' < /dev/null
 733 }
 734 
# show the current Date and Time, and the 3 `current` months: the
# previous, the current, and the next one
dt() {
    # debian linux has a different `cal` app which highlights the day
    if [ -e "/usr/bin/ncal" ]; then
        ncal -C -3
    else
        cal -3
    fi

    # show the current date (green) and time (blue), roughly centered
    # below the calendar
    # printf "%28s\x1b[34m%s\x1b[0m\n" " " "$(date +'%T')"
    printf "%22s\x1b[32m%s\x1b[0m  \x1b[34m%s\x1b[0m\n" " " \
        "$(date +'%a %b %d')" "$(date +'%T')"
}
 749 
 750 # run command using each stdin line as a stdin-redirected file
 751 each() {
 752     local arg
 753     while read -r arg; do
 754         "$@" < "${arg}"
 755     done
 756 }
 757 
 758 # Extended Grep
 759 eg() {
 760     grep -E "$@"
 761 }
 762 
 763 # dig into a folder recursively to show all empty files in it
 764 emptyfiles() {
 765     find "${1:-.}" -type f -empty
 766 }
 767 
 768 # dig into a folder recursively to show all empty folders in it
 769 emptyfolders() {
 770     find "${1:-.}" -type d -empty
 771 }
 772 
 773 # enumerate lines, starting from 1, and using a tab as the separator:
 774 # even empty lines are counted, unlike with `nl`
 775 enum() {
 776     awk '{ printf "%d\t", NR; print }' "$@"
 777 }
 778 
 779 # enumerate lines, starting from 1, and using a tab as the separator:
 780 # even empty lines are counted, unlike with `nl`
 781 enum1() {
 782     awk '{ printf "%d\t", NR; print }' "$@"
 783 }
 784 
 785 # enumerate lines, starting from 0, and using a tab as the separator:
 786 # even empty lines are counted, unlike with `nl`
 787 enum0() {
 788     awk '{ printf "%d\t", NR - 1; print }' "$@"
 789 }
 790 
 791 # avoid lines matching the regex given, or avoid empty(ish) lines by default
 792 except() {
 793     local regex="${1:-[^ *]\r?$}"
 794     shift
 795     grep -E -v "${regex}" "$@"
 796 }
 797 
 798 # show all Files in a folder, digging recursively
 799 f() {
 800     local arg
 801     for arg in "${@:-.}"; do
 802         find "${arg}" -type f
 803     done
 804 }
 805 
 806 # show an error message and fail
 807 fail() {
 808     printf "\x1b[41m\x1b[38;5;15m %s \x1b[0m\n" "$*" >&2 && return 255
 809 }
 810 
# get/fetch bytes from the URI given, quietly; on failure, a red error
# message goes to stderr and the function returns non-zero (the subshell
# makes `return 1` set the status without leaving the caller)
# NOTE(review): with several arguments, printf reuses its format and
# repeats the message once per argument — confirm that's intended
fetch() {
    curl -s "$@" || (
        printf "\x1b[31mcan't get %s\x1b[0m\n" "$@" >&2
        return 1
    )
}
 818 
 819 # asks webserver at URI given to respond with a JSON payload
 820 fetchjson() {
 821     curl -s "$@" -H 'Accept: application/json' || (
 822         printf "\x1b[31mcan't get JSON from %s\x1b[0m\n" "$@" >&2
 823         return 1
 824     )
 825 }
 826 
 827 # show/find all targets from makefiles
 828 findtargets() {
 829     grep -E -i '^[a-z0-9_\.$-]+.*:' "${@:-Makefile}"
 830 }
 831 
 832 # get the first n lines, or 1 by default
 833 first() {
 834     head -n "${1:-1}" "${2:--}"
 835 }
 836 
 837 # get the first n bytes, or 1 by default
 838 firstbytes() {
 839     head -c "${1:-1}" "${2:--}"
 840 }
 841 
 842 # get the first n lines, or 1 by default
 843 firstlines() {
 844     head -n "${1:-1}" "${2:--}"
 845 }
 846 
 847 # First Line AWK, and lines satisfying the optional condition
 848 flawk() {
 849     local cond="${1:-0}"
 850     shift
 851     awk "NR == 1 || ${cond}" "$@"
 852 }
 853 
 854 # keep (re)running the command given, until forced to quit, whether
 855 # directly or indirectly
 856 forever() {
 857     while true; do
 858         "$@" || return "$?"
 859     done
 860 }
 861 
# run the FuZzy finder (fzf) in multi-choice mode, with custom keybindings:
# ctrl-a selects all items, ctrl-space toggles the current one
fz() {
    fzf -m --bind ctrl-a:select-all,ctrl-space:toggle "$@"
}

# run Grep (extended): pass-through to `grep -E`
g() {
    grep -E "$@"
}
 871 
 872 # get/fetch data from the URI given
 873 # get() {
 874 #     curl -s "$@" || (
 875 #         printf "\x1b[31mcan't get %s\x1b[0m\n" "$@" >&2
 876 #         return 1
 877 #     )
 878 # }
 879 
# GET/fetch data from the filename or URI given
# $1 - a URI (fetched with curl) or a plain local filename (read with cat)
get() {
    case "$1" in
        http://*|https://*|file://*|ftp://*|ftps://*|sftp://*|dict://*)
            # curl -s "$1"
            # on failure, the subshell's `return 1` makes the call fail
            curl -s "$1" || (
                printf "\x1b[31mcan't get %s\x1b[0m\n" "$1" >&2
                return 1
            )
        ;;
        *)
            cat "$1"
        ;;
    esac
}

# view most apps' help messages using `less`; this command used to be
# called `helpless`
# $1 - the app to run; $2 - its help flag (default: --help)
gethelp() {
    "${1}" "${2:---help}" 2>&1 | less -JMKiCRS
}

# asks webserver at URI given to respond with a JSON payload
# $@ - curl args (typically just the URI)
getjson() {
    curl -s "$@" -H 'Accept: application/json' || (
        printf "\x1b[31mcan't get JSON from %s\x1b[0m\n" "$@" >&2
        return 1
    )
}

# run Grep (extended) in case-Insensitive mode
gi() {
    grep -i -E "$@"
}
 914 
 915 # Green LEAK emits/tees input both to stdout and stderr, coloring green what
 916 # it emits to stderr using an ANSI-style; this cmd is useful to `debug` pipes
 917 # involving several steps
 918 gleak() {
 919     awk '
 920     {
 921         print
 922         fflush()
 923         gsub(/\x1b\[0m/, "\x1b[0m\x1b[38;5;29m")
 924         printf "\x1b[38;5;29m%s\x1b[0m\n", $0 > "/dev/stderr"
 925         fflush("/dev/stderr")
 926     }'
 927 }
 928 
 929 # join all input lines using the separator/joiner string given; the name
 930 # `join` is already taken by a standard(ish) cmd-line app
 931 glue() {
 932     local sep="${1:-}"
 933     shift
 934 
 935     awk -v sep="${sep}" '
 936         NR > 1 { printf "%s", sep }
 937         1 { printf "%s", $0 }
 938         END { printf "\n" }' "$@"
 939 }
 940 
# GO Build Stripped: a common use-case for the go compiler
# -s -w drops symbol/DWARF tables; -trimpath hides local build paths
gobs() {
    go build -ldflags "-s -w" -trimpath "$@"
}

# GO Build WINdows GUI: a common use-case for the go compiler
# -H=windowsgui avoids opening a console window on start
gobwingui() {
    go build -ldflags "-s -w -H=windowsgui" -trimpath "$@"
}

# GO DEPendencies: shows all dependencies in a go project
godep() {
    go list -f '{{ join .Deps "\n" }}' "$@"
}

# GO IMPorts: show all imports in a go project
goimp() {
    go list -f '{{ join .Imports "\n" }}' "$@"
}

# transform lines using AWK's gsub func (global substitute)
# $1 - regex or awk expression to replace; $2 - replacement; rest - files
# NOTE(review): both args are spliced into the awk program unescaped, so
# they can inject awk code — only pass trusted arguments
gsub() {
    local what="${1}"
    local with="${2}"
    shift
    shift
    # awk "{ gsub(/${what}/, \"${with}\"); print }" "$@"
    awk "{ gsub(${what}, \"${with}\"); print }" "$@"
}
 970 
 971 # run my own `hat` (HAndy Tools) cmd-line app
 972 # h() {
 973 #     hat "$@"
 974 # }
 975 
# Highlight (lines) with AWK: lines matching the awk condition given are
# shown in reverse-video (ANSI code 7); other lines pass through unchanged
# $1 - awk condition (default: 1, highlight all); remaining args - files
hawk() {
    local cond="${1:-1}"
    shift

    # the condition is spliced into the program, so most chars which awk
    # treats specially are escaped below to survive double-quoting
    awk "
        ${cond} {
            gsub(/\\x1b\\[0m/, \"\x1b[0m\\x1b[7m\")
            printf \"\\x1b[7m%s\\x1b[0m\\n\", \$0
            fflush()
            next
        }

        { print; fflush() }" "$@"
}

# view most apps' help messages using `less`
# $1 - the app to run; $2 - its help flag (default: --help)
helpless() {
    "${1}" "${2:---help}" 2>&1 | less -JMKiCRS
}

# Header Less runs `less` with line numbers, ANSI styles, no line-wrapping,
# and using the first line as a sticky-header, so it always shows on top
hl() {
    less --header=1 -JMKNiCRS "$@"
}

# Help Me Remember my custom shell commands
# NOTE(review): pages the sourced `clam` script (not this `clamor` file)
# with syntax-highlighting — presumably intentional; verify
hmr() {
    local cmd="bat"
    # debian linux uses a different name for the `bat` app
    if [ -e "/usr/bin/batcat" ]; then
        cmd="batcat"
    fi

    "$cmd" --style=plain,header,numbers --theme='Monokai Extended Light' \
        --wrap=never --color=always "$(which clam)" |
    sed 's-\x1b\[38;5;70m-\x1b\[38;5;28m-g' | less -JMKiCRS
}

# Header View runs `less` without line numbers, with ANSI styles, with no
# line-wrapping, and using the first line as a sticky-header, so it always
# shows on top
hv() {
    less --header=1 -JMKiCRS "$@"
}

# run case-Insensitive Grep (extended)
ig() {
    grep -i -E "$@"
}
1027 
# indent each line the number of spaces given, or 4 spaces by default
# $1 - how many leading spaces to add (default: 4); rest - input files
indent() {
    local n="${1:-4}"
    shift

    awk -v n="${n}" '
    BEGIN {
        # build the padding one space at a time; n <= 0 leaves it empty
        pre = ""
        for (i = 1; i <= n; i++) pre = pre " "
    }

    { printf "%s", pre; print }' "$@"
}
1043 
1044 # reformat JSON into multiple lines with 2-space indent levels
1045 # j2() {
1046 #     cat "${1:--}" | python3 -c "#!/usr/bin/python3
1047 # from json import load, dump
1048 # from sys import exit, stderr, stdin, stdout
1049 # try:
1050 #     seps = (', ', ': ')
1051 #     stdout.reconfigure(newline='\n', encoding='utf-8')
1052 #     dump(load(stdin), stdout, indent=2, allow_nan=False, separators=seps)
1053 #     stdout.write('\n')
1054 # except Exception as e:
1055 #     print('\x1b[31m' + str(e) + '\x1b[0m', file=stderr)
1056 #     exit(1)
1057 # "
1058 # }
1059 
1060 # Json Converter; uses python package `jc`, which isn't built-in
1061 # jc() {
1062 #     python3 -m jc "$@"
1063 # }
1064 
# show a `dad` JOKE from the web, some of which are even funny
# wraps long lines and drops trailing spaces/carriage-returns
joke() {
    curl -s https://icanhazdadjoke.com | fold -s | sed -E 's- *\r?$--'
    # plain-text output from previous cmd doesn't end with a line-feed
    printf "\n"
}

# run `less` with line numbers, ANSI styles, and no line-wrapping
l() {
    less -JMKNiCRS "$@"
}
1076 
1077 # Like A Book groups lines as 2 side-by-side pages, the same way books
1078 # do it; uses my own script named `book`
1079 # lab() {
1080 #     book "$(($(tput lines) - 1))" "$@" | less -JMKiCRS
1081 # }
1082 
# Line xARGS: `xargs` using line separators, which handles filepaths
# with spaces, as long as the standard input has 1 path per line
# NOTE(review): `-d` needs GNU xargs; BSD/macOS xargs lacks it — confirm
largs() {
    xargs -d "\n" "$@"
}
1088 
# get the last n lines, or 1 by default
# $1 - line count (default: 1); $2 - input file (default: stdin)
last() {
    local count="${1:-1}"
    local src="${2:--}"
    tail -n "${count}" "${src}"
}
1093 
# get the last n bytes, or 1 by default
# $1 - byte count (default: 1); $2 - input file (default: stdin)
lastbytes() {
    local count="${1:-1}"
    local src="${2:--}"
    tail -c "${count}" "${src}"
}
1098 
# get the last n lines, or 1 by default
# $1 - line count (default: 1); $2 - input file (default: stdin)
lastlines() {
    local count="${1:-1}"
    local src="${2:--}"
    tail -n "${count}" "${src}"
}
1103 
# leak emits/tees input both to stdout and stderr; useful in pipes
leak() {
    tee /dev/stderr
}

# LEAK Orange emits/tees input both to stdout and stderr, coloring orange
# what it emits to stderr using an ANSI-style; this cmd is useful to `debug`
# pipes involving several steps
# $@ - optional input files (default: stdin)
leako() {
    awk '
    {
        print
        fflush()
        gsub(/\x1b\[0m/, "\x1b[0m\x1b[38;5;166m")
        printf "\x1b[38;5;166m%s\x1b[0m\n", $0 > "/dev/stderr"
        fflush("/dev/stderr")
    }' "$@"
}

# run `less` with line numbers, ANSI styles, and no line-wrapping
least() {
    less -JMKNiCRS "$@"
}

# List Files, coloring folders and links
# folders (ls lines starting with d) turn blue, symlinks (l) turn green
lf() {
    ls -al --color=never --time-style iso "$@" | awk '
        /^d/ { printf "\x1b[38;5;33m%s\x1b[0m\n", $0; next }
        /^l/ { printf "\x1b[38;5;29m%s\x1b[0m\n", $0; next }
        1
    '
}

# Less with Header runs `less` with line numbers, ANSI styles, no line-wraps,
# and using the first line as a sticky-header, so it always shows on top
lh() {
    less --header=1 -JMKNiCRS "$@"
}

# regroup adjacent lines into n-item tab-separated lines
# $1 - items per output line; 0 or less joins ALL lines into one;
#      remaining args - input files
lineup() {
    local n="${1:-0}"
    shift

    if [ "${n}" -le 0 ]; then
        awk '
            NR > 1 { printf "\t" }
            { printf "%s", $0 }
            END { if (NR > 0) print "" }' "$@"
        return "$?"
    fi

    awk -v n="${n}" '
        NR % n != 1 { printf "\t" }
        { printf "%s", $0 }
        NR % n == 0 { print "" }
        END { if (NR % n != 0) print "" }' "$@"
}

# list files, coloring folders and links
listfiles() {
    ls -al --color=never --time-style iso "$@" | awk '
        /^d/ { printf "\x1b[38;5;33m%s\x1b[0m\n", $0; next }
        /^l/ { printf "\x1b[38;5;29m%s\x1b[0m\n", $0; next }
        1
    '
}

# LOwercase line, check (awk) COndition: the condition sees the lowercased
# line, but matching lines are printed in their ORIGINAL case
# $1 - awk condition (default: 1, all lines); remaining args - input files
loco() {
    local cond="${1:-1}"
    shift
    # awk "{ \$0 = tolower(\$0) } ${cond}" "$@"
    awk "
        { original = \$0; \$0 = tolower(\$0) }
        ${cond} { print original }" "$@"
}
1181 
# LOcal SERver webserves files in a folder as localhost, using the port
# number given, or port 8080 by default
# $1 - port (default: 8080); $2 - folder to serve (default: current folder)
loser() {
    # the folder path is data, not a format string: the old code expanded it
    # inside printf's FORMAT, so a `%` in the path broke the message (SC2059)
    printf "\x1b[38;5;26mserving files in %s\x1b[0m\n" "${2:-$(pwd)}" >&2
    python3 -m http.server "${1:-8080}" -d "${2:-.}"
}
1188 
# make all text lowercase
# $@ - optional input files (default: stdin)
lower() {
    awk '{ $0 = tolower($0); print }' "$@"
}
1193 
# match the regex given, or match non-empty(ish) lines by default
# $1 - extended regex (default: lines ending in a non-space, non-star
#      char, before an optional CR); remaining args go to grep
m() {
    local pattern="${1:-[^ *]\r?$}"
    shift
    grep -E "${pattern}" "$@"
}
1200 
# match the regex given, or match non-empty(ish) lines by default
# $1 - extended regex (default: lines ending in a non-space, non-star
#      char, before an optional CR); remaining args go to grep
match() {
    local pattern="${1:-[^ *]\r?$}"
    shift
    grep -E "${pattern}" "$@"
}
1207 
# merge stderr into stdout without any keyboard-dancing
# $@ - command (and its args) to run
merrge() {
    "$@" 2>&1
}
1212 
# Number all lines starting from 1, ensuring lines aren't accidentally joined
# when changing input sources
# $@ - optional input files (default: stdin)
n() {
    awk '{ print NR "\t" $0; fflush() }' "$@"
}
1219 
# Number all lines starting from 0, ensuring lines aren't accidentally joined
# when changing input sources
# $@ - optional input files (default: stdin)
n0() {
    awk '{ print (NR - 1) "\t" $0; fflush() }' "$@"
}
1226 
# Number all lines starting from 1, ensuring lines aren't accidentally joined
# when changing input sources
# $@ - optional input files (default: stdin)
n1() {
    awk '{ print NR "\t" $0; fflush() }' "$@"
}
1233 
# try to limit lines up to n symbols per line, or 80 by default
# $1 - max width (default: 80); reads stdin only
narrow() {
    local width="${1:-80}"
    fold -s -w "${width}" | sed -E 's- +$--'
}
1238 
# Nice File Sizes
# $@ - files to measure; shows a reverse-sorted size table with byte and
#      MiB columns
# NOTE(review): the final awk columns use $2 for the name, so filepaths
# with spaces show truncated — known limitation
nfs() {
    # turn arg-list into single-item lines
    awk 'BEGIN { for (i = 1; i < ARGC; i++) print ARGV[i]; exit }' "$@" |
    # calculate file-sizes, and reverse-sort results
    xargs -d '\n' wc -c | sort -rn |
    # start output with a header-like line, and add a MiB field
    awk 'BEGIN { printf "%5s  %9s  %8s  name\n", "n", "bytes", "MiB" }
    { printf "%5d  %9d  %8.2f  %s\n", NR - 1, $1, $1 / 1048576, $2 }' |
    # make zeros in the MiB field stand out with a special color
    awk '{ gsub(/ 0.00 /, "\x1b[38;5;103m 0.00 \x1b[0m"); print }' |
    # make table breathe with empty lines, so tall outputs are readable
    awk '(NR - 2) % 5 == 1 && NR > 1 { print "" } 1'
}

# emit nothing to output and/or discard everything from input
# drains stdin when piped-to, so upstream cmds don't get SIGPIPE
nil() {
    if [ -p /dev/stdin ]; then
        cat > /dev/null
    else
        head -c 0
    fi
}

# ignore stderr without any keyboard-dancing
# $@ - command (and its args) to run
noerr() {
    "$@" 2> /dev/null
}

# show the current date and time
now() {
    date +'%Y-%m-%d %H:%M:%S'
}
1272 
# keep only the nth line from the input, if it has at least that many lines
# $1 - 1-based line number; remaining args - input files
nth() {
    local wanted="${1}"
    shift
    awk -v n="${wanted}" '
        BEGIN { if (n < 1) exit }
        NR == n { print; exit }' "$@"
}
1279 
# unify Output, by merging stderr into stdout
# $@ - command (and its args) to run
o() {
    "$@" 2>&1
}

# Orange LEAK emits/tees input both to stdout and stderr, coloring orange
# what it emits to stderr using an ANSI-style; this cmd is useful to `debug`
# pipes involving several steps
# $@ - optional input files (default: stdin)
oleak() {
    awk '
    {
        print
        fflush()
        gsub(/\x1b\[0m/, "\x1b[0m\x1b[38;5;166m")
        printf "\x1b[38;5;166m%s\x1b[0m\n", $0 > "/dev/stderr"
        fflush("/dev/stderr")
    }' "$@"
}

# Plain ignores ANSI terminal styling
# $@ - optional input files (default: stdin)
p() {
    awk '
    {
        # ignore notifications (code 9) and hyperlinks (code 8)
        gsub(/\x1b\](8|9);[^\x07]*\x07/, "")
        # ignore cursor-movers and style-changers
        gsub(/\x1b\[([0-9]*[A-HJKST]|[0-9;]*m)/, "")

        print
        fflush()
    }' "$@"
}

# Print AWK expressions: shows each expression (highlighted, on stderr)
# before printing its evaluated result on stdout
# $@ - awk expressions, each evaluated in its own END block
pawk() {
    local arg
    local shown
    for arg in "$@"; do
        shown="END { print $(echo "${arg}" | sed 's-"-\\"-g') }"
        printf "\x1b[48;5;253m\x1b[38;5;26m%-80s\x1b[0m\n" \
            "awk \"${shown}\" < /dev/null" >&2
        awk "END { print ${arg} }" < /dev/null
    done
}

# ignore ANSI terminal styling
# $@ - optional input files (default: stdin)
plain() {
    awk '
    {
        # ignore notifications (code 9) and hyperlinks (code 8)
        gsub(/\x1b\](8|9);[^\x07]*\x07/, "")
        # ignore cursor-movers and style-changers
        gsub(/\x1b\[([0-9]*[A-HJKST]|[0-9;]*m)/, "")

        print
        fflush()
    }' "$@"
}

# reset ANSI styles at the end of each line
plainend() {
    awk '{ printf "%s\x1b[0m\n", $0; fflush() }' "$@"
}

# PLay INput handles playable/multimedia streams from stdin
plin() {
    mpv -
}
1348 
# find/run the script given with PyPy, to speed it up
# $1 - script name, resolved via `which`; remaining args go to the script
pp() {
    local script
    # split declaration from assignment so a failing `which` isn't masked
    # (SC2155), and quote "$1" so names with spaces/globs resolve (SC2086)
    script="$(which "$1")" || return 1
    shift
    pypy3 "${script}" "$@"
}
1355 
# Quiet cURL: pass-through to `curl -s` (no progress meter)
qurl() {
    curl -s "$@"
}
1360 
# reflow/trim lines of prose (text) to improve its legibility: it
# seems especially useful when the text is pasted from web-pages
# being viewed in reader mode
# $1 - max line width (default: 80); remaining args - input files
reprose() {
    local w="${1:-80}"
    shift
    # `fold -w` takes the width as a separate value: the old `-w="$w"` form
    # made fold reject the option, since "=80" isn't a valid column count
    awk 'FNR == 1 && NR > 1 { print "" } 1' "$@" | sed -E 's- *\r?$--' |
        fold -s -w "$w" | sed -E 's- +$--'
}
1370 
# REPeat a STRing n times, or 80 times by default
# $1 - string to repeat; $2 - repeat count (default: 80)
# an empty string emits nothing at all, not even a line-feed
repstr() {
    awk -v what="${1}" -v times="${2:-80}" '
        BEGIN {
            if (length(what) == 0) exit 0
            out = ""
            for (i = 1; i <= times; i++) out = out what
            print out
        }' < /dev/null
}
1380 
# Read-Only Micro (text editor)
rom() {
    micro --readonly true "$@"
}

# RUN a command IN the folder given as the first argument
# $1 - folder to run in; remaining args - the command and its args
# preserves OLDPWD, and returns the command's own exit code
# NOTE(review): `cd -` prints the restored folder to stdout — confirm
# that's acceptable for callers capturing output
runin() {
    local prev
    local res

    prev="${OLDPWD}"
    cd "${1}" || return 1

    shift
    "$@"
    res="$?"

    cd - || return 1
    OLDPWD="${prev}"
    return "${res}"
}

# run Sed (extended)
s() {
    sed -E "$@"
}

# show a unique-looking separator line; useful to run between commands
# which output walls of text
sep() {
    printf "\x1b[48;5;253m"
    printf "·························································"
    printf "·······················"
    printf "\x1b[0m\n"
}
1416 
# start a local webserver from the current folder, using the port number
# given, or port 8080 by default
# $1 - port (default: 8080); $2 - folder to serve (default: current folder)
serve() {
    # the folder path is data, not a format string: the old code expanded it
    # inside printf's FORMAT, so a `%` in the path broke the message (SC2059)
    printf "\x1b[38;5;26mserving files in %s\x1b[0m\n" "${2:-$(pwd)}" >&2
    python3 -m http.server "${1:-8080}" -d "${2:-.}"
}
1423 
# SET DIFFerence sorts its 2 inputs, then finds lines not in the 2nd input
# $1, $2 - input files; output lines come from $1 only
setdiff() {
    # comm -23 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# SET INtersection, sorts its 2 inputs, then finds common lines
# $1, $2 - input files
setin() {
    # comm -12 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -12 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# SET SUBtraction sorts its 2 inputs, then finds lines not in the 2nd input
# $1, $2 - input files; output lines come from $1 only
setsub() {
    # comm -23 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# show a command, then run it
# $@ - the command and its args; the echoed line is reverse-video styled
showrun() {
    printf "\x1b[7m%s\x1b[0m\n" "$*" && "$@"
}
1449 
# SKIP the first n lines, or the 1st line by default
# $1 - how many lines to skip (default: 1); $2 - input file (default: stdin)
skip() {
    local count="${1:-1}"
    tail -n +$((count + 1)) "${2:--}"
}
1454 
# SKIP the FIRST n lines, or the 1st line by default
# $1 - how many lines to skip (default: 1); $2 - input file (default: stdin)
skipfirst() {
    local count="${1:-1}"
    tail -n +$((count + 1)) "${2:--}"
}
1459 
# skip/ignore the last n lines, or only the very last line by default
# $1 - how many lines to drop (default: 1); $2 - input file (default: stdin)
# NOTE: negative counts for `head -n` need GNU coreutils
skiplast() {
    local count="${1:-1}"
    head -n -"${count}" "${2:--}"
}
1464 
# skip/ignore the last n lines, or only the very last line by default
# $1 - how many lines to drop (default: 1); $2 - input file (default: stdin)
# NOTE: negative counts for `head -n` need GNU coreutils
skiplastlines() {
    local count="${1:-1}"
    head -n -"${count}" "${2:--}"
}
1469 
# SKIP the first n LINES, or the 1st line by default
# $1 - how many lines to skip (default: 1); $2 - input file (default: stdin)
skiplines() {
    local count="${1:-1}"
    tail -n +$((count + 1)) "${2:--}"
}
1474 
# dig into a folder recursively for files under n bytes
# $1 - max size in bytes, exclusive (default: 1000000); $2 - folder
#      to search (default: current folder)
smallfiles() {
    find "${2:-.}" -type f -size -"${1:-1000000}"c
}

# show the reverse-SOrted SIzes of various files
# $@ - files to measure with `wc -c`
sosi() {
    wc -c "$@" | sort -rn
}

# ignore leading spaces, trailing spaces, even runs of multiple spaces
# in the middle of lines, as well as trailing carriage returns
# $@ - optional input files (default: stdin)
squeeze() {
    awk '
    {
        gsub(/  +/, " ")
        gsub(/ *\t */, "\t")
        gsub(/(^ +)|( *\r?$)/, "")
        print
    }' "$@"
}
1496 
# ssv2tsv turns each run of 1+ spaces into a single tab, while ignoring
# leading spaces in each line
# $@ - optional input files (default: stdin); `awk 1` keeps separate
#      inputs from joining when one lacks a final line-feed
ssv2tsv() {
    awk 1 "$@" | sed -E -e 's-^ +--' -e 's- +-\t-g'
}
1502 
# underline every 5th line
# $@ - optional input files (default: stdin)
stripe() {
    awk '
        NR % 5 == 0 && NR != 1 { printf "\x1b[4m%s\x1b[0m\n", $0; next }
        1' "$@"
}

# Trim leading/trailing spaces and trailing carriage-returns
# also drops a leading UTF-8 BOM from each input's first line
# $@ - optional input files (default: stdin)
t() {
    awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@" |
        sed -E 's-^ +--; s- *\r?$--'
}

# Tab AWK: TSV-specific I/O settings for `awk`
tawk() {
    awk -F "\t" -v OFS="\t" "$@"
}

# Trim End ignores trailing spaces and possibly a carriage return in all
# lines; also, this command ensures separate lines from different inputs
# will never join by accident
# $@ - optional input files (default: stdin)
te() {
    awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@" | sed -E 's- *\r?$--'
}
1527 
# show current date in a specific format, which is both people-friendly
# and machine/tool/search/automation-friendly
today() {
    date +'%Y-%m-%d %a %b %d'
}
1533 
# show all files directly in the folder given, without looking any deeper
# $@ - folders to list (default: current folder)
topfiles() {
    local folder
    for folder in "${@:-.}"; do
        find "${folder}" -maxdepth 1 -type f
    done
}
1541 
# show all folders directly in the folder given, without looking any deeper
# $@ - folders to list (default: current folder); the `NR > 1` filter
#      drops the starting folder itself, which find emits first
topfolders() {
    local arg
    for arg in "${@:-.}"; do
        find "${arg}" -maxdepth 1 -type d | awk 'NR > 1'
    done
}
1549 
# ignore leading spaces, trailing spaces, and carriage returns on lines
# also drops a leading UTF-8 BOM from each input's first line
# $@ - optional input files (default: stdin)
trim() {
    awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@" |
        sed -E 's-^ +--; s- *\r?$--'
}

# ignore trailing spaces and possibly a carriage return in all lines;
# also, this command ensures separate lines from different inputs will
# never join by accident
# $@ - optional input files (default: stdin)
trimend() {
    awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@" | sed -E 's- *\r?$--'
}
1562 
# ignore the prefix given from input lines which start with it; input
# lines which don't start with the prefix given will stay unchanged
# $1 - prefix to drop (default: empty); remaining args - input files
trimprefix() {
    local prefix="${1:-}"
    shift

    awk -v pre="${prefix}" '
        substr($0, 1, length(pre)) == pre { $0 = substr($0, length(pre) + 1) }
        1' "$@"
}
1573 
# ignore the suffix given from input lines which end with it; input
# lines which don't end with the suffix given will stay unchanged
# $1 - suffix to drop (default: empty); remaining args - input files
trimsuffix() {
    local suffix="${1:-}"
    shift

    awk -v suf="${suffix}" '
    {
        # a match only counts when it sits exactly at the end of the line
        i = index($0, suf)
        if (i != 0 && i == length - length(suf) + 1) {
            $0 = substr($0, 1, length - length(suf))
        }
    }

    1' "$@"
}

# ignore trailing spaces and possibly a carriage return in all lines;
# also, this command ensures separate lines from different inputs will
# never join by accident
# $@ - optional input files (default: stdin)
trimtrail() {
    awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@" | sed -E 's- *\r?$--'
}

# ignore trailing spaces and possibly a carriage return in all lines;
# also, this command ensures separate lines from different inputs will
# never join by accident
# $@ - optional input files (default: stdin)
trimtrails() {
    awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@" | sed -E 's- *\r?$--'
}

# try running a command, emitting an explicit message to standard-error
# if the command given fails
# $@ - the command and its args; the subshell's `return 255` makes the
#      whole call end with code 255 on failure
try() {
    "$@" || (
        printf "%s: failure running %s\n" "$0" "$*" >&2
        return 255
    )
}

# TimeStamp lines satisfying an AWK condition, ignoring all other lines
# $1 - awk condition (default: 1, all lines); reads stdin
tsawk() {
    # -v line="\x1b[38;5;27m%s\x1b[0m %s\n"
    awk \
        -v line="\x1b[48;5;255m\x1b[38;5;24m%s\x1b[0m %s\n" \
        -v time="%Y-%m-%d %H:%M:%S" \
        "${1:-1} { printf line, strftime(time), \$0; fflush() }"
}

# query TSV (tab-separated values) data via sQLite
# $1 - SQL query (default: show the schema); remaining args - input files
tsvql() {
    local q="${1:-select * from sqlite_master;}"
    shift
    textql -header -output-header -dlm=tab -output-dlm=tab -sql "${q}" "$@"
}
1629 
# Unixify concatenates all named input sources, turning trailing CRLFs
# into LFs, and guaranteeing lines from different sources aren't
# accidentally joined, by adding a line-feed when an input's last line
# doesn't end with one; also, ignore leading UTF-8 BOMs on the first line
# of each input, as those are useless at best
u() {
    awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@" | sed -E 's-\r$--'
}

# decode base64 bytes
# $@ - optional input files (default: stdin)
unbase64() {
    base64 -d "$@"
}
1643 
# deduplicate lines, keeping them in their original order
# $@ - optional input files (default: stdin)
unique() {
    awk '{ if (!seen[$0]++) print }' "$@"
}
1648 
# concatenate all named input sources, turning trailing CRLFs into LFs,
# and guaranteeing lines from different sources aren't accidentally
# joined, by adding a line-feed when an input's last line doesn't end with
# one; also, ignore leading UTF-8 BOMs on the first line of each input, as
# those are useless at best
unixify() {
    awk 'FNR == 1 { gsub(/^\xef\xbb\xbf/, "") } 1' "$@" | sed -E 's-\r$--'
}
1657 
# turn all CRLF byte-pairs into single line-feed bytes
# $@ - optional input files (default: stdin)
uncrlf() {
    # let sed read the files directly, dropping the useless `cat |` pipe;
    # with no args sed reads stdin, just like cat did
    sed -E 's-\r$--' "$@"
}
1662 
# UNcompress GZip data
# $@ - gzip args/files (default: stdin to stdout)
ungz() {
    gzip -d "$@"
}

# UNcompress GZIP data
# $@ - gzip args/files (default: stdin to stdout)
ungzip() {
    gzip -d "$@"
}
1672 
# expand tabs into spaces using the tabstop given
# $1 - tabstop width (default: 4); remaining args - input files
untab() {
    local width="${1:-4}"
    shift
    expand -t "${width}" "$@"
}
1679 
# extract .tar.gz files/archives
# $@ - extra tar args, typically the archive after -f... wait: -f is
#      already given, so $1 is the archive filename
untargz() {
    tar -xvzf "$@"
}

# turn UTF-16-encoded (either kind) plain-text into UTF-8
# $@ - optional input files (default: stdin); iconv auto-detects the
#      byte-order from the BOM when told UTF-16
unutf16() {
    cat "$@" | iconv -f UTF-16 -t UTF-8
}
1689 
# go UP n folders, or go up 1 folder by default
# $1 - how many levels to go up (default: 1); 0 or less stays put
up() {
    if [ "${1:-1}" -le 0 ]; then
        cd .
        return "$?"
    fi

    # build a "../../…" path with one "../" per level, then cd into it
    cd "$(printf "%${1:-1}s" "" | sed 's- -../-g')" || return $?
}
1699 
# emit input lines in reverse order, or last to first
# $@ - optional input files (default: stdin); buffers everything in memory
upsidedown() {
    awk '
        { stack[NR] = $0 }
        END { for (n = NR; n > 0; n--) print stack[n] }' "$@"
}
1706 
# Underline Table: underline the first line (the header), then
# underline every 5th line after that
# $@ - optional input files (default: stdin)
ut() {
    awk '
        (NR - 1) % 5 == 0 {
            gsub(/\x1b\[0m/, "\x1b[0m\x1b[4m")
            printf "\x1b[4m%s\x1b[0m\n", $0
            next
        }

        1' "$@"
}

# View text: run `less` with ANSI styles and no line-wrapping
v() {
    less -JMKiCRS "$@"
}
1724 
# run a command, showing its success/failure right after
# $@ - the command and its args; the banner goes to stderr, and this
#      function's exit code mirrors the command's own
verdict() {
    local code
    local fs
    local msg

    "$@"
    code="$?"

    if [ "${code}" -eq 0 ]; then
        fs="\n\x1b[38;5;29m%s \x1b[48;5;29m\x1b[97m succeeded \x1b[0m\n"
        printf "${fs}" "$*" >&2
        return 0
    fi

    msg="failed with error code ${code}"
    printf "\n\x1b[31m%s \x1b[41m\x1b[97m ${msg} \x1b[0m\n" "$*" >&2
    return "${code}"
}

# View with Header runs `less` without line numbers, with ANSI styles, with
# no line-wrapping, and using the first line as a sticky-header, so it always
# shows on top
vh() {
    less --header=1 -JMKiCRS "$@"
}
1751 
# What Are These (?) shows what the names given to it are/do
# $@ - names to look up as aliases, functions, or commands; returns the
#      last lookup-failure's code, or 0 when all names resolve
# NOTE(review): `alias`/`declare -f` lookups assume bash-like shells —
# under plain dash `declare` won't exist; confirm intended shells
wat() {
    local code
    local a
    local res

    code=0
    for a in "$@"; do
        printf "\x1b[48;5;253m\x1b[38;5;26m%-80s\x1b[0m\n" "${a}"
        (
            alias "${a}" || declare -f "${a}" || which "${a}" || type "${a}"
        ) 2> /dev/null
        res="$?"

        if [ "${res}" -ne 0 ]; then
            code="${res}"
            printf "\x1b[31m%s not found\x1b[0m\n" "${a}"
        fi
    done

    return "${code}"
}

# show a short-term WEATHER forecast for the place/city given
# $* - the place name, sent to the graph.no finger-based weather service
weather() {
    if [ $# -eq 0 ]; then
        printf "Show a short-term weather forecast for the place given.\n"
        return 0
    fi

    # the ~N suffix asks for output sized to the current terminal width
    finger "${*}~$(expr $(tput cols) - 2)@graph.no" |
        tr - @ | tr '#' F | tr '|' R |
        sed -E 's/^ +@=/ -=/; s/=@ *$/=-/' |
        grep -v '^\['
}

# find all files which have at least 1 line with trailing spaces/CRs, with
# the option to limit the (fully-recursive) search to the files/folders given
wheretrails() {
    rg -c '[ \r]+$' "${@:-.}"
}

# find all files which have at least 1 line with trailing spaces/CRs, with
# the option to limit the (fully-recursive) search to the files/folders given
whichtrails() {
    rg -c '[ \r]+$' "${@:-.}"
}
1799 
1800 # What Is This (?) shows what the name given to it is/does
1801 # wit() {
1802 #     (
1803 #         alias "${1}" || declare -f "${1}" || which "${1}" || type "${1}"
1804 #     ) 2> /dev/null || (
1805 #         printf "\x1b[31m%s not found\x1b[0m\n" "${1}" >&2 && return 1
1806 #     )
1807 # }
1808 
# What Is This (?) shows what the names given to it are/do
# $@ - names to look up as aliases, functions, or commands; returns the
#      last lookup-failure's code, or 0 when all names resolve
wit() {
    local code
    local a
    local res

    code=0
    for a in "$@"; do
        printf "\x1b[48;5;253m\x1b[38;5;26m%-80s\x1b[0m\n" "${a}"
        (
            alias "${a}" || declare -f "${a}" || which "${a}" || type "${a}"
        ) 2> /dev/null
        res="$?"

        if [ "${res}" -ne 0 ]; then
            code="${res}"
            printf "\x1b[31m%s not found\x1b[0m\n" "${a}"
        fi
    done

    return "${code}"
}
1831 
# emit each word-like item from each input line on its own line
# $@ - optional input files (default: stdin)
words() {
    awk '{ n = split($0, items); for (i = 1; i <= n; i++) print items[i] }' "$@"
}
1836 
# Youtube Audio Player
# $1 - youtube page URI; any `&...` query suffix is dropped first
yap() {
    local page
    # quote the URI everywhere: unquoted expansions word-split and glob,
    # so `&`-less URIs with special chars used to break (SC2086)
    page="$(printf '%s' "${1}" | sed 's-&.*--')"
    mpv "$(yt-dlp -f 140 --get-url "${page}" 2> /dev/null)"
}
1841 
# Youtube Download
yd() {
    yt-dlp "$@"
}

# Youtube Download AAC audio
# format 140 is youtube's m4a/AAC audio-only stream
ydaac() {
    yt-dlp -f 140 "$@"
}

# Youtube Download MP4 video
# format 22 is youtube's 720p mp4 audio+video stream
ydmp4() {
    yt-dlp -f 22 "$@"
}

# year shows a full calendar for the current year, or for the year given
# $@ - optional year (and other cal/ncal args)
year() {
    # debian linux has a different `cal` app which highlights the day
    if [ -e "/usr/bin/ncal" ]; then
        ncal -C -y "$@"
    else
        cal -y "$@"
    fi
}

# show the current date in the YYYY-MM-DD format
ymd() {
    date +'%Y-%m-%d'
}
1871 
# YouTube URI turns youtube-page URIs into direct media-URIs
# $1 - youtube page URI (any `&...` suffix is dropped); remaining args are
#      yt-dlp options (default: -f 140, the AAC-audio format)
yturi() {
    local page
    # quote "$1": unquoted it word-splits and globs (SC2086)
    page="$(printf '%s' "$1" | sed 's-&.*--')"
    shift
    # default options must stay separate words: the old "${@:--f 140}" form
    # collapsed them into the single argument "-f 140", which yt-dlp can't
    # parse as a flag plus its value
    if [ "$#" -eq 0 ]; then
        set -- -f 140
    fi
    yt-dlp "$@" --get-url "${page}" 2> /dev/null
}