File: clam.sh 1 #!/bin/sh 2 3 # The MIT License (MIT) 4 # 5 # Copyright (c) 2026 pacman64 6 # 7 # Permission is hereby granted, free of charge, to any person obtaining a copy 8 # of this software and associated documentation files (the "Software"), to deal 9 # in the Software without restriction, including without limitation the rights 10 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 # copies of the Software, and to permit persons to whom the Software is 12 # furnished to do so, subject to the following conditions: 13 # 14 # The above copyright notice and this permission notice shall be included in 15 # all copies or substantial portions of the Software. 16 # 17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 # SOFTWARE. 24 25 26 # clam 27 # 28 # Command-Line Augmentation Module (clam): get the best out of your shell. 29 # 30 # 31 # This is a collection of arguably useful shell functions and shortcuts: 32 # some of these extra commands can be real time/effort savers, ideally 33 # letting you concentrate on getting things done. 34 # 35 # Some of these commands depend on my other scripts from the `pac-tools`, 36 # others either rely on widely-preinstalled command-line apps, or ones 37 # which are available on most of the major command-line `package` managers. 38 # 39 # To use this script, you're supposed to `source` it, so its definitions 40 # stay for your whole shell session: for that, you can run `source clam` or 41 # `. clam` (no quotes either way), either directly or at shell startup. 
#
# Almost all commands defined in this script work with `bash`, `zsh`, and
# even `dash`, which is debian linux's default non-interactive shell. Some
# of its commands even seem to work on busybox's shell.


# handle help options right away, then quit
case "$1" in
-h|--h|-help|--help)
    # show the help message, reusing the info-comment from this very script
    awk '
        /^case / { exit }
        /^# +clam$/, /^$/ { gsub(/^# ?/, ""); print }
    ' "$0"
    exit 0
    ;;
esac


# dash doesn't support regex-matching syntax, forcing to use case statements
case "$0" in
-bash|-dash|-sh|bash|dash|sh|/bin/sh)
    # script is being sourced with bash, dash, or ash, which is good
    :
    ;;

*)
    case "$ZSH_EVAL_CONTEXT" in
    *:file)
        # script is being sourced with zsh, which is good
        :
        ;;

    *)
        # script is being run normally, which is a waste of time
        printf "\e[7mDon't run this script directly: instead source it\e[0m\n"
        printf "\e[7mby running '. clam' (without the single quotes).\e[0m\n"
        printf "\n"
        printf "\e[7mBefore doing that, you may want to see the help,\e[0m\n"
        printf "\e[7mby running 'clam -h' (without the single quotes).\e[0m\n"
        # exiting during shell-startup may deny shell access, even if
        # the script is being run, instead of being sourced directly
        ;;
    esac
    ;;
esac


# single-digit shortcuts for my side-by-side layout tools `sbs`/`bsbs`
alias 0='sbs'

alias 1='bsbs 1'
alias 2='bsbs 2'
alias 3='bsbs 3'
alias 4='bsbs 4'
alias 5='bsbs 5'
alias 6='bsbs 6'
alias 7='bsbs 7'
alias 8='bsbs 8'
alias 9='bsbs 9'

# Less with Header n runs `less` with line numbers, ANSI styles, without
# line-wraps, and using the first n lines as a sticky-header, so they always
# show on top
alias lh1='less --header=1 -MKNiCRS'
alias lh2='less --header=2 -MKNiCRS'
alias lh3='less --header=3 -MKNiCRS'
alias lh4='less --header=4 -MKNiCRS'
alias lh5='less --header=5 -MKNiCRS'
alias lh6='less --header=6 -MKNiCRS'
alias lh7='less --header=7 -MKNiCRS'
alias lh8='less --header=8 -MKNiCRS'
alias lh9='less --header=9 -MKNiCRS'

# View with Header n runs `less` without line numbers, with ANSI styles,
# without line-wraps, and using the first n lines as a sticky-header, so
# they always show on top
alias vh1='less --header=1 -MKiCRS'
alias vh2='less --header=2 -MKiCRS'
alias vh3='less --header=3 -MKiCRS'
alias vh4='less --header=4 -MKiCRS'
alias vh5='less --header=5 -MKiCRS'
alias vh6='less --header=6 -MKiCRS'
alias vh7='less --header=7 -MKiCRS'
alias vh8='less --header=8 -MKiCRS'
alias vh9='less --header=9 -MKiCRS'

alias c='cat'
alias e='echo'
alias r='reset'

# Breathe periodically adds extra empty lines; uses my own `breathe` tool
alias b='breathe'

# Plain ignores ANSI-styles; uses my own `plain` tool
alias p='plain'

# Awk Print
alias ap=abp

# Book-like MANual, lays out `man` docs as pairs of side-by-side pages; uses
# my tool `bsbs`
alias bman=bookman

# load/concatenate BYTES from named data sources
# alias bytes='cat'

# load/concatenate BYTES from named data sources; uses my tool `get`
alias bytes='get'

# Compile C Optimized
alias cco='cc -Wall -O2 -s -march=native -mtune=native -flto'

# Color DMESG
alias cdmesg='dmesg --color=always'

# Colored Json Query runs the `jq` app, allowing an optional filepath as the
# data source, and even an optional transformation formula
alias cjq='jq -C'

# CLear Screen
alias cls='tput -T xterm reset 2> /dev/null || reset'

# Compile C Plus Plus Optimized
alias cppo='c++ -Wall -O2 -s -march=native -mtune=native -flto'

# CURL Info only shows the response headers from the request given
alias curli='curl -I'

# CURL Silent spares you the progress bar, but still tells you about errors
alias curls='curl --silent --show-error'

# dictionary-DEFine the word given, using an online service
alias def=define

# turn JSON Lines into a proper json array
# alias dejsonl='jq -s -M'

# turn json lines into a proper json array using the `jq` app
alias dejql='jq -s -M'

# turn UTF-16 data into UTF-8
alias deutf16='iconv -f utf16 -t utf8'

# edit plain-text files
# alias edit='micro'

# ENV with 0/null-terminated lines on stdout
alias env0='env -0'

# ENV Change folder, runs the command given in the folder given (first)
alias envc='env -C'

# Extended Plain Interactive Grep
alias epig='ugrep --color=never -Q -E'

# Editor Read-Only
alias ero='micro -readonly true'

# Expand 4 turns each tab into up to 4 spaces
alias expand4='expand -t 4'

# run the Fuzzy Finder (fzf) in multi-choice mode, with custom keybindings
alias ff='fzf -m --bind ctrl-a:select-all,ctrl-space:toggle'

# get FILE's MIME types
alias filemime='file --mime-type'

# run `gcc` with all optimizations on and with static analysis on
alias gccmax='gcc -Wall -O2 -s -march=native -mtune=native -flto -fanalyzer'

# hold stdout if used at the end of a pipe-chain
alias hold='less -MKiCRS'

# find all hyperlinks inside HREF attributes in the input text
alias hrefs=href

# make JSON Lines out of JSON data
alias jl=jsonl

# shrink/compact JSON using the `jq` app, allowing an optional filepath, and
# even an optional transformation formula after that
alias jq0='jq -c -M'

# show JSON data on multiple lines, using 2 spaces for each indentation level,
# allowing an optional filepath, and even an optional transformation formula
# after that
alias jq2='jq --indent 2 -M'

# find the LAN (local-area network) IP address for this device
alias lanip='hostname -I'

# run `less`, showing line numbers, among other settings
alias least='less -MKNiCRS'

# Live GREP
alias lgrep='grep --line-buffered'

# try to run the command given using line-buffering for its (standard) output
alias livelines='stdbuf -oL'

# LOAD data from the filename or URI given; uses my `get` tool
alias load=get

# LOcal SERver webserves files in a folder as localhost, using the port
# number given, or port 8080 by default
alias loser=serve

# Live RipGrep
alias lrg='rg --line-buffered'

# run `ls` showing how many 4k pages each file takes
alias lspages='ls -s --block-size=4096'

# Listen To Youtube
alias lty=yap

# LXC-LS Fancy
alias lxc-lsf='lxc-ls --fancy'

# MAKE IN folder
alias makein=mif

# Multi-Core MaKe runs `make` using all cores
alias mcmk=mcmake

# METAdata CURL only shows the response headers from the request given
alias metacurl='curl -I'

# run `less`, showing line numbers, among other settings
alias most='less -MKNiCRS'

# emit nothing to output and/or discard everything from input
alias nil=null

# Nice Json Query colors JSON data using the `jq` app
alias njq=cjq

# Plain Interactive Grep
alias pig='ugrep --color=never -Q -E'

# Quick Compile C Optimized
alias qcco='cc -Wall -O2 -s -march=native -mtune=native -flto'

# Quick Compile C Plus Plus Optimized
alias qcppo='c++ -Wall -O2 -s -march=native -mtune=native -flto'

# Read-Only Editor
alias roe='micro -readonly true'

# Read-Only Micro (text editor)
alias rom='micro -readonly true'

# Read-Only Top
alias rot='htop --readonly'

# RUN IN folder
alias runin='env -C'

# place lines Side-By-Side
# alias sbs='column'

# Silent CURL spares you the progress bar, but still tells you about errors
alias scurl='curl --silent --show-error'

# Stdbuf Output Line-buffered
alias sol='stdbuf -oL'

# TRY running a command, showing its outcome/error-code on failure; uses my
# `verdict` tool
alias try='verdict'

# Time Verbosely the command given
alias tv='/usr/bin/time -v'

# VERTical REVert emits lines in reverse order of appearance
alias vertrev='tac'

# UGREP in Query/interactive mode
alias ugrepq='ugrep -Q'

# emit lines in reverse order of appearance
alias upsidedown='tac'

# run `cppcheck` with even stricter options
alias vetc='cppcheck --enable=portability,style --check-level=exhaustive'

# run `cppcheck` with even stricter options, also checking for c89 compliance
alias vetc89='cppcheck --enable=portability,style --check-level=exhaustive --std=c89'

# run `cppcheck` with even stricter options
alias vetcpp='cppcheck --enable=portability,style --check-level=exhaustive'

# VET SHell scripts
alias vetsh=vetshell

# check shell scripts for common gotchas, avoiding complaints about using
# the `local` keyword, which is widely supported in practice
alias vetshell='shellcheck -e 3043'

# run a command using an empty environment
alias void='env -i'

# turn plain-text from latin-1 into UTF-8; the name is from `vulgarization`,
# which is the mutation of languages away from latin during the middle ages
alias vulgarize='iconv -f latin-1 -t utf-8'

# recursively find all files with trailing spaces/CRs
alias wheretrails=whichtrails

# run `xargs`, using zero/null bytes as the extra-arguments terminator
alias x0='xargs -0'

# Xargs Lines, runs `xargs` using whole lines as extra arguments
alias xl=xargsl

# Awk Begin Print: evaluate/print each argument as an awk expression
abp() {
    local expr
    for expr in "$@"; do
        awk "BEGIN { print (${expr}); exit }"
    done
}

# APT UPdate/grade, dropping the cached sudo credentials right after
aptup() {
    sudo apt update && sudo apt upgrade "$@"
    sudo -k
}

# emit each argument given as its own line of output
args() {
    if [ $# -gt 0 ]; then
        printf "%s\n" "$@"
    fi
}
# AWK in BLOCKS/paragraphs-input mode
awkblocks() {
    # NOTE(review): `-F=''` collapses to the single argument `-F=`, which
    # sets the field separator to "=" — odd for paragraph mode; confirm
    # whether `-F ' '` (or no -F at all) was intended
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -F='' -v RS='' "$@"
    else
        awk -F='' -v RS='' "$@"
    fi
}

# AWK using TABS as input/output field-separators
awktabs() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -F "\t" -v OFS="\t" "$@"
    else
        awk -F "\t" -v OFS="\t" "$@"
    fi
}

# Breathe lines 3: separate groups of 3 lines with empty lines
b3() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk 'NR % 3 == 1 && NR != 1 { print "" } 1' "$@"
    else
        awk 'NR % 3 == 1 && NR != 1 { print "" } 1' "$@"
    fi
}

# Breathe lines 5: separate groups of 5 lines with empty lines
b5() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk 'NR % 5 == 1 && NR != 1 { print "" } 1' "$@"
    else
        awk 'NR % 5 == 1 && NR != 1 { print "" } 1' "$@"
    fi
}

# show an ansi-styled BANNER-like line, padded to the screen width
banner() { printf "\e[7m%-$(tput -T xterm cols)s\e[0m\n" "$*"; }

# emit a colored bar which can help visually separate different outputs;
# the optional argument is the width, 80 columns by default
bar() {
    [ "${1:-80}" -gt 0 ] && printf "\e[48;2;218;218;218m%${1:-80}s\e[0m\n" ""
}

# Breathe Header 3: add an empty line after the first one (the header),
# then separate groups of 3 lines with empty lines between them
bh3() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk '(NR - 1) % 3 == 1 { print "" } 1' "$@"
    else
        awk '(NR - 1) % 3 == 1 { print "" } 1' "$@"
    fi
}

# Breathe Header 5: add an empty line after the first one (the header),
# then separate groups of 5 lines with empty lines between them
bh5() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk '(NR - 1) % 5 == 1 { print "" } 1' "$@"
    else
        awk '(NR - 1) % 5 == 1 { print "" } 1' "$@"
    fi
}

# emit a line with a repeating block-like symbol in it
blocks() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -█-g'; }

# BOOK-like MANual, lays out `man` docs as pairs of side-by-side pages; uses
# my tool `bsbs`
bookman() {
    local w
    w="$(tput -T xterm cols)"
    w="$((w / 2 - 4))"
    if [ "$w" -lt 65 ]; then
        w=65
    fi
    MANWIDTH="$w" man "$@" | bsbs 2
}

# split lines using the separator given, turning them into single-item lines
breakdown() {
    local sep="${1:- }"
    [ $# -gt 0 ] && shift
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} -F "${sep}" '{ for (i = 1; i <= NF; i++) print $i }' "$@"
}

# CAlculator with Nice numbers runs my tool `ca` and colors results with
# my tool `nn`, alternating styles to make long numbers easier to read
can() {
    local arg
    for arg in "$@"; do
        # echo each formula on stderr when there are 2 or more of them
        [ $# -ge 2 ] && printf "\e[7m%s\e[0m\n" "${arg}" >&2
        ca "${arg}" | nn
    done
}

# uppercase the first letter on each line, and lowercase all later letters
capitalize() { sed -E 's-^(.*)-\L\1-; s-^(.)-\u\1-'; }

# center-align lines of text, using the current screen width
center() {
    awk -v width="$(tput -T xterm cols)" '
        {
            gsub(/\r$/, "")
            lines[NR] = $0
            s = $0
            gsub(/\x1b\[[0-9;]*[A-Za-z]/, "", s) # ANSI style-changers
            l = length(s)
            if (maxlen < l) maxlen = l
        }

        END {
            n = (width - maxlen) / 2
            if (n % 1) n = n - (n % 1)
            fmt = sprintf("%%%ds%%s\n", (n > 0) ? n : 0)
            for (i = 1; i <= NR; i++) printf fmt, "", lines[i]
        }
    ' "$@"
}

# Colored GREP ensures matches are colored when piped
cgrep() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        grep --line-buffered --color=always "${@:-.}"
    else
        grep --color=always "${@:-.}"
    fi
}

# Colored Go Test on the folder given; uses my command `gbm`
cgt() {
    local f='real %e user %U sys %S mem %M exit %x'
    /usr/bin/time -f "$f" go test "${@:-.}" 2>&1 \
        | gbm '^ok' '^[-]* ?FAIL' '^\?'
}

# Colored RipGrep ensures app `rg` emits colors when piped
crg() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        rg --line-buffered --color=always "${@:-.}"
    else
        rg --color=always "${@:-.}"
    fi
}

# Compile Rust Optimized
cro() {
    rustc -C lto=true -C codegen-units=1 -C debuginfo=0 -C strip=symbols \
        -C opt-level=3 "$@"
}

# emit a line with a repeating cross-like symbol in it
crosses() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -×-g'; }

# listen to streaming DANCE music
dance() {
    printf "streaming \e[7mDance Wave Retro\e[0m\n"
    mpv --really-quiet https://retro.dancewave.online/retrodance.mp3
}

# emit a line with a repeating dash-like symbol in it
dashes() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -—-g'; }

# remove commas in numbers, as well as leading dollar signs in numbers
decomma() {
    sed -E 's-([0-9]{3}),-\1-g; s-([0-9]{1,2}),-\1-g; s-\$([0-9\.]+)-\1-g'
}

# strip HTML tags, decode common HTML entities, and tidy up whitespace
dehtmlify() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        {
            gsub(/<\/?[^>]+>/, "")
            # decode the common entities; decoding &amp; last avoids
            # double-decoding strings like "&amp;lt;" into "<"; the
            # previous gsub calls here were no-ops (e.g. replacing "&"
            # with itself), most likely from an HTML-mangled copy of
            # this file
            gsub(/&lt;/, "<")
            gsub(/&gt;/, ">")
            gsub(/&amp;/, "\\&")
            gsub(/^ +| *\r?$/, "")
            gsub(/ +/, " ")
            print
        }
    ' "$@"
}

# expand tabs each into up to the number of spaces given, or 4 by default
detab() { 564 local tabstop="${1:-4}" 565 [ $# -gt 0 ] && shift 566 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 567 stdbuf -oL expand -t "${tabstop}" "$@" 568 else 569 expand -t "${tabstop}" "$@" 570 fi 571 } 572 573 # DIVide 2 numbers 3 ways, including the complement 574 div() { 575 awk -v a="${1:-1}" -v b="${2:-1}" ' 576 BEGIN { 577 gsub(/_/, "", a) 578 gsub(/_/, "", b) 579 if (a > b) { c = a; a = b; b = c } 580 c = 1 - a / b 581 if (0 <= c && c <= 1) printf "%f\n%f\n%f\n", a / b, b / a, c 582 else printf "%f\n%f\n", a / b, b / a 583 exit 584 }' 585 } 586 587 # emit a line with a repeating dot-like symbol in it 588 dots() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -·-g'; } 589 590 # show the current Date and Time 591 dt() { 592 printf "\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n" \ 593 "$(date +'%a %b %d')" "$(date +%T)" 594 } 595 596 # show the current Date, Time, and a Calendar with the 3 `current` months 597 dtc() { 598 { 599 # show the current date/time center-aligned 600 printf "%20s\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n\n" \ 601 "" "$(date +'%a %b %d')" "$(date +%T)" 602 # debian linux has a different `cal` app which highlights the day 603 if [ -e /usr/bin/ncal ]; then 604 # fix debian/ncal's weird way to highlight the current day 605 ncal -C -3 | sed -E 's/_\x08(.+)_\x08([^ ]+)/\x1b\[7m\1\2\x1b\[0m/' 606 else 607 cal -3 608 fi 609 } | less -MKiCRS 610 } 611 612 # EDit RUN shell commands, using an interactive editor; uses my tool `leak` 613 edrun() { 614 # dash doesn't support the process-sub syntax 615 # . <( micro -readonly true -filetype shell | leak --inv ) 616 micro -readonly true -filetype shell | leak --inv | . 
/dev/fd/0 617 } 618 619 # convert EURos into CAnadian Dollars, using the latest official exchange 620 # rates from the bank of canada; during weekends, the latest rate may be 621 # from a few days ago; the default amount of euros to convert is 1, when 622 # not given 623 eur2cad() { 624 local url 625 local site='https://www.bankofcanada.ca/valet/observations/group' 626 local csv_rates="${site}/FX_RATES_DAILY/csv" 627 url="${csv_rates}?start_date=$(date -d '3 days ago' +'%Y-%m-%d')" 628 curl -s "${url}" | awk -F, -v amount="$(echo "${1:-1}" | sed 's-_--g')" ' 629 /EUR/ { for (i = 1; i <= NF; i++) if($i ~ /EUR/) j = i } 630 END { gsub(/"/, "", $j); if (j != 0) printf "%.2f\n", amount * $j } 631 ' 632 } 633 634 # Fix Audio Duration on a separate copy of the file given 635 fad() { ffmpeg -i "${1:-input.m4a}" -acodec copy "${2:-output.dat}"; } 636 637 # get the first n lines, or 1 by default 638 first() { head -n "${1:-1}" "${2:--}"; } 639 640 # Field-Names AWK remembers field-positions by name, from the first input line 641 fnawk() { 642 local code="${1:-1}" 643 [ $# -gt 0 ] && shift 644 645 local buffering='' 646 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 647 buffering='stdbuf -oL' 648 fi 649 650 ${buffering} awk -v OFS="\t" ' 651 NR == 1 { 652 FS = /\t/ ? 
"\t" : " " 653 $0 = $0 654 for (i in names) delete names[i] 655 for (i = 1; i <= NF; i++) names[$i] = i 656 i = "" 657 } 658 { low = lower = tolower($0) } 659 '"${code}"' 660 ' "$@" 661 } 662 663 # start from the line number given, skipping all previous ones 664 fromline() { tail -n +"${1:-1}" "${2:--}"; } 665 666 # convert a mix of FeeT and INches into meters 667 ftin() { 668 local ft="${1:-0}" 669 ft="$(echo "${ft}" | sed 's-_--g')" 670 local in="${2:-0}" 671 in="$(echo "${in}" | sed 's-_--g')" 672 awk "BEGIN { print 0.3048 * ${ft} + 0.0254 * ${in}; exit }" 673 } 674 675 # Gawk Bignum Print 676 gbp() { gawk --bignum "BEGIN { print $1; exit }"; } 677 678 # glue/stick together various lines, only emitting a line-feed at the end; an 679 # optional argument is the output-item-separator, which is empty by default 680 glue() { 681 local sep="${1:-}" 682 [ $# -gt 0 ] && shift 683 awk -v sep="${sep}" ' 684 NR > 1 { printf "%s", sep } 685 { gsub(/\r/, ""); printf "%s", $0 } 686 END { if (NR > 0) print "" } 687 ' "$@" 688 } 689 690 # GO Build Stripped: a common use-case for the go compiler 691 gobs() { go build -ldflags "-s -w" -trimpath "$@"; } 692 693 # GO DEPendencieS: show all dependencies in a go project 694 godeps() { go list -f '{{ join .Deps "\n" }}' "$@"; } 695 696 # GO IMPortS: show all imports in a go project 697 goimps() { go list -f '{{ join .Imports "\n" }}' "$@"; } 698 699 # go to the folder picked using an interactive TUI; uses my tool `bf` 700 goto() { 701 local where 702 where="$(bf "${1:-.}")" 703 if [ $? -ne 0 ]; then 704 return 0 705 fi 706 707 where="$(realpath "${where}")" 708 if [ ! 
-d "${where}" ]; then 709 where="$(dirname "${where}")" 710 fi 711 cd "${where}" || return 712 } 713 714 # show Help laid out on 2 side-by-side columns; uses my tool `bsbs` 715 h2() { naman "$@" | bsbs 2; } 716 717 # show Help laid out on 3 side-by-side columns; uses my tool `bsbs` 718 h3() { 719 local w 720 w="$(tput -T xterm cols)" 721 w="$((w / 3 - 6))" 722 if [ "$w" -lt 55 ]; then 723 w=55 724 fi 725 MANWIDTH="$w" man "$@" | bsbs 3 726 } 727 728 # Highlighted-style ECHO 729 hecho() { printf "\e[7m%s\e[0m\n" "$*"; } 730 731 # show each byte as a pair of HEXadecimal (base-16) symbols 732 hexify() { 733 cat "$@" | od -v -x -A n | awk ' 734 { gsub(/ +/, ""); printf "%s", $0 } 735 END { print "" } 736 ' 737 } 738 739 # History Fuzzy-finder 740 hf() { 741 eval "$( 742 history \ 743 | fzf --reverse --height 80% --tmux center,80% \ 744 | awk ' 745 { 746 $1 = "" 747 $2 = "" 748 gsub(/^ */, "") 749 printf("\x1b[7m%s\x1b[0m\n", $0) > "/dev/stderr" 750 print 751 } 752 ' 753 )" 754 } 755 756 # Help Me Remember my custom shell commands 757 hmr() { 758 local cmd="bat" 759 # debian linux uses a different name for the `bat` app 760 if [ -e /usr/bin/batcat ]; then 761 cmd="batcat" 762 fi 763 764 "$cmd" \ 765 --style=plain,header,numbers --theme='Monokai Extended Light' \ 766 --wrap=never --color=always "$(which clam)" | 767 sed -e 's-\x1b\[38;5;70m-\x1b[38;5;28m-g' \ 768 -e 's-\x1b\[38;5;214m-\x1b[38;5;208m-g' \ 769 -e 's-\x1b\[38;5;243m-\x1b[38;5;103m-g' \ 770 -e 's-\x1b\[38;5;238m-\x1b[38;5;245m-g' \ 771 -e 's-\x1b\[38;5;228m-\x1b[48;5;228m-g' | 772 less -MKiCRS 773 } 774 775 # convert seconds into a colon-separated Hours-Minutes-Seconds triple 776 hms() { 777 echo "${@:-0}" | sed -E 's-_--g; s- +-\n-g' | awk ' 778 /./ { 779 x = $0 780 h = (x - x % 3600) / 3600 781 m = (x % 3600) / 60 782 s = x % 60 783 printf "%02d:%02d:%05.2f\n", h, m, s 784 } 785 ' 786 } 787 788 # find all hyperlinks inside HREF attributes in the input text 789 href() { 790 local arg 791 local awk_cmd='awk' 
792 local grep_cmd='grep' 793 if [ -p /dev/stdout ] || [ -t 1 ]; then 794 grep_cmd='grep --line-buffered' 795 if [ -e /usr/bin/stdbuf ]; then 796 awk_cmd='stdbuf -oL awk' 797 fi 798 fi 799 800 for arg in "${@:--}"; do 801 ${grep_cmd} -i -E -o 'href="[^"]+"' "${arg}" 802 done | ${awk_cmd} '{ gsub(/^href="|"\r?$/, ""); print }' 803 } 804 805 # avoid/ignore lines which case-insensitively match any of the regexes given 806 iavoid() { 807 local command='awk' 808 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 809 command='stdbuf -oL awk' 810 fi 811 812 ${command} ' 813 BEGIN { 814 if (IGNORECASE == "") { 815 m = "this variant of AWK lacks case-insensitive regex-matching" 816 print(m) > "/dev/stderr" 817 exit 125 818 } 819 IGNORECASE = 1 820 821 for (i = 1; i < ARGC; i++) { 822 e[i] = ARGV[i] 823 delete ARGV[i] 824 } 825 } 826 827 { 828 for (i = 1; i < ARGC; i++) if ($0 ~ e[i]) next 829 print 830 got++ 831 } 832 833 END { exit(got == 0) } 834 ' "${@:-^\r?$}" 835 } 836 837 # ignore command in a pipe: this allows quick re-editing of pipes, while 838 # still leaving signs of previously-used steps, as a memo 839 idem() { cat; } 840 841 # ignore command in a pipe: this allows quick re-editing of pipes, while 842 # still leaving signs of previously-used steps, as a memo 843 ignore() { cat; } 844 845 # only keep lines which case-insensitively match any of the regexes given 846 imatch() { 847 local command='awk' 848 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 849 command='stdbuf -oL awk' 850 fi 851 852 ${command} ' 853 BEGIN { 854 if (IGNORECASE == "") { 855 m = "this variant of AWK lacks case-insensitive regex-matching" 856 print(m) > "/dev/stderr" 857 exit 125 858 } 859 IGNORECASE = 1 860 861 for (i = 1; i < ARGC; i++) { 862 e[i] = ARGV[i] 863 delete ARGV[i] 864 } 865 } 866 867 { 868 for (i = 1; i < ARGC; i++) { 869 if ($0 ~ e[i]) { 870 print 871 got++ 872 next 873 } 874 } 875 } 876 877 END { exit(got == 0) } 878 ' 
"${@:-[^\r]}" 879 } 880 881 # start each non-empty line with extra n spaces 882 indent() { 883 local command='awk' 884 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 885 command='stdbuf -oL awk' 886 fi 887 888 ${command} ' 889 BEGIN { 890 n = ARGV[1] + 0 891 delete ARGV[1] 892 fmt = sprintf("%%%ds%%s\n", (n > 0) ? n : 0) 893 } 894 895 /^\r?$/ { print ""; next } 896 { gsub(/\r$/, ""); printf(fmt, "", $0) } 897 ' "$@" 898 } 899 900 # INSTall APT packages 901 instapt() { sudo apt install "$@"; sudo -k; } 902 903 # emit each word-like item from each input line on its own line; when a file 904 # has tabs on its first line, items are split using tabs alone, which allows 905 # items to have spaces in them 906 items() { 907 local command='awk' 908 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 909 command='stdbuf -oL awk' 910 fi 911 912 ${command} ' 913 FNR == 1 { FS = /\t/ ? "\t" : " "; $0 = $0 } 914 { gsub(/\r$/, ""); for (i = 1; i <= NF; i++) print $i } 915 ' "$@" 916 } 917 918 # listen to streaming JAZZ music 919 jazz() { 920 printf "streaming \e[7mSmooth Jazz Instrumental\e[0m\n" 921 mpv --quiet https://stream.zeno.fm/00rt0rdm7k8uv 922 } 923 924 # show a `dad` JOKE from the web, sometimes even a very funny one 925 joke() { 926 curl --silent --show-error https://icanhazdadjoke.com | fold -s | 927 awk '{ gsub(/ *\r?$/, ""); print }' 928 } 929 930 # JSON Query Lines turns JSON top-level arrays into multiple individually-JSON 931 # lines using the `jq` app, keeping all other top-level values as single line 932 # JSON outputs 933 jql() { 934 local code="${1:-.}" 935 [ $# -gt 0 ] && shift 936 jq -c -M "${code} | .[]" "$@" 937 } 938 939 # JSON Query Keys runs `jq` to find all unique key-combos from tabular JSON 940 jqk() { 941 local code="${1:-.}" 942 [ $# -gt 0 ] && shift 943 jq -c -M "${code} | .[] | keys" "$@" | awk '!c[$0]++' 944 } 945 946 # JSON Keys finds all unique key-combos from tabular JSON data; uses my tools 947 # 
`jsonl` and `tjp` 948 jsonk() { 949 tjp '[e.keys() for e in v] if isinstance(v, (list, tuple)) else v.keys()' \ 950 "${1:--}" | jsonl | awk '!c[$0]++' 951 } 952 953 # JSON Table, turns TSV tables into tabular JSON, where valid-JSON values are 954 # auto-parsed into numbers, booleans, etc...; uses my tools `jsons` and `tjp` 955 jsont() { 956 jsons "$@" | tjp \ 957 '[{k: rescue(lambda: loads(v), v) for k, v in e.items()} for e in v]' 958 } 959 960 # emit the given number of random/junk bytes, or 1024 junk bytes by default 961 junk() { head -c "$(echo "${1:-1024}" | sed 's-_--g')" /dev/urandom; } 962 963 # get the last n lines, or 1 by default 964 last() { tail -n "${1:-1}" "${2:--}"; } 965 966 # convert pounds (LB) into kilograms 967 lb() { 968 echo "${@:-1}" | sed -E 's-_--g; s- +-\n-g' | 969 awk '/./ { printf "%.2f\n", 0.45359237 * $0 }' 970 } 971 972 # convert a mix of pounds (LB) and weight-ounces (OZ) into kilograms 973 lboz() { 974 local lb="${1:-0}" 975 lb="$(echo "${lb}" | sed 's-_--g')" 976 local oz="${2:-0}" 977 oz="$(echo "${oz}" | sed 's-_--g')" 978 awk "BEGIN { print 0.45359237 * ${lb} + 0.028349523 * ${oz}; exit }" 979 } 980 981 # limit stops at the first n bytes, or 1024 bytes by default 982 limit() { head -c "$(echo "${1:-1024}" | sed 's-_--g')" "${2:--}"; } 983 984 # ensure LINES are never accidentally joined across files, by always emitting 985 # a line-feed at the end of each line 986 lines() { 987 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 988 stdbuf -oL awk 1 "$@" 989 else 990 awk 1 "$@" 991 fi 992 } 993 994 # regroup adjacent lines into n-item tab-separated lines 995 lineup() { 996 local command='awk' 997 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 998 command='stdbuf -oL awk' 999 fi 1000 1001 local n="${1:-0}" 1002 [ $# -gt 0 ] && shift 1003 1004 if [ "$n" -le 0 ]; then 1005 ${command} ' 1006 NR > 1 { printf "\t" } 1007 { printf "%s", $0 } 1008 END { if (NR > 0) print "" } 1009 ' "$@" 1010 
return $? 1011 fi 1012 1013 ${command} -v n="$n" ' 1014 NR % n != 1 && n > 1 { printf "\t" } 1015 { printf "%s", $0 } 1016 NR % n == 0 { print "" } 1017 END { if (NR % n != 0) print "" } 1018 ' "$@" 1019 } 1020 1021 # emit LINEs ending with a Zero/null bytes 1022 linez() { 1023 if [ -p /dev/stdout ] || [ -t 1 ]; then 1024 stdbuf -oL awk -v ORS='\000' 1 "$@" 1025 else 1026 awk -v ORS='\000' 1 "$@" 1027 fi 1028 } 1029 1030 # LiSt files, showing how many 4K-sized storage blocks they use 1031 ls4k() { ls -s --block-size=4096 "$@"; } 1032 1033 # LiSt MAN pages 1034 lsman() { man -k "${1:-.}"; } 1035 1036 # MARK the current tab with the message given, followed by the current folder; 1037 # works only on the `bash` shell 1038 mark() { 1039 if [ $# -eq 0 ]; then 1040 PS1="\[\e[0m\e]0;\w\a\$ " 1041 else 1042 PS1="\[\e[0m\e]0;${*} \w\a\$ " 1043 fi 1044 } 1045 1046 marklinks() { 1047 local re='https?://[A-Za-z0-9+_.:%-]+(/[A-Za-z0-9+_.%/,#?&=-]*)*' 1048 sed -E 's-('"${re}"')-\x1b]8;;\1\x1b\\\1\x1b]8;;\x1b\\-g' "$@" 1049 } 1050 1051 # Multi-Core MAKE runs `make` using all cores 1052 mcmake() { make -j "$(nproc)" "$@"; } 1053 1054 # merge stderr into stdout, which is useful for piped commands 1055 merrge() { "${@:-cat /dev/null}" 2>&1; } 1056 1057 metajq() { 1058 # https://github.com/stedolan/jq/issues/243#issuecomment-48470943 1059 jq -r -M ' 1060 [ 1061 path(..) | 1062 map(if type == "number" then "[]" else tostring end) | 1063 join(".") | split(".[]") | join("[]") 1064 ] | unique | map("." + .) 
| .[] 1065 ' "$@" 1066 } 1067 1068 # Make In Folder, also showing time and max memory used 1069 mif() { 1070 local f='real %e user %U sys %S mem %M exit %x' 1071 local folder 1072 folder="${1:-.}" 1073 [ $# -gt 0 ] && shift 1074 env -C "${folder}" /usr/bin/time -f "$f" make "$@" 1075 } 1076 1077 # MINimize DECimalS ignores all trailing decimal zeros in numbers, and even 1078 # the decimal dots themselves, when decimals in a number are all zeros 1079 # mindecs() { 1080 # local cmd='sed -E' 1081 # if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 1082 # cmd='sed -E -u' 1083 # fi 1084 # ${cmd} 's-([0-9]+)\.0+\W-\1-g; s-([0-9]+\.[0-9]*[1-9])0+\W-\1-g' "$@" 1085 # } 1086 1087 # MaKe, also showing the time taken and the max memory used 1088 mk() { 1089 local f='real %e user %U sys %S mem %M exit %x' 1090 /usr/bin/time -f "$f" make "$@" 1091 } 1092 1093 # Number all lines counting from 0, using a tab right after each line number 1094 n0() { 1095 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 1096 stdbuf -oL nl -b a -w 1 -v 0 "$@" 1097 else 1098 nl -b a -w 1 -v 0 "$@" 1099 fi 1100 } 1101 1102 # Number all lines counting from 1, using a tab right after each line number 1103 n1() { 1104 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 1105 stdbuf -oL nl -b a -w 1 -v 1 "$@" 1106 else 1107 nl -b a -w 1 -v 1 "$@" 1108 fi 1109 } 1110 1111 # NArrow MANual, keeps `man` narrow, even if the window/tab is wide when run 1112 naman() { 1113 local w 1114 w="$(tput -T xterm cols)" 1115 w="$((w / 2 - 4))" 1116 if [ "$w" -lt 80 ]; then 1117 w=80 1118 fi 1119 MANWIDTH="$w" man "$@" 1120 } 1121 1122 # Not AND sorts its 2 inputs, then finds lines not in common 1123 nand() { 1124 # comm -3 <(sort "$1") <(sort "$2") 1125 # dash doesn't support the process-sub syntax 1126 (sort "$1" | (sort "$2" | (comm -3 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0) 1127 } 1128 1129 # Nice DEFine dictionary-defines the words given, using an online service 
ndef() {
    local arg
    local gap=0
    local options='-MKiCRS'

    if [ $# -eq 0 ]; then
        printf "\e[38;2;204;0;0mndef: no words given\e[0m\n" >&2
        return 1
    fi

    # with a single word, keep its banner as a sticky header in less
    if [ $# -eq 1 ]; then
        options='--header=1 -MKiCRS'
    fi

    for arg in "$@"; do
        [ "${gap}" -gt 0 ] && printf "\n"
        gap=1
        printf "\e[7m%-80s\e[0m\n" "${arg}"
        # color dict-protocol status lines: 151 (definition header) blue,
        # any other 3-digit status gray; emit other lines as they are
        curl --silent "dict://dict.org/d:${arg}" | awk '
            { gsub(/\r$/, "") }
            /^151 / {
                printf "\x1b[38;2;52;101;164m%s\x1b[0m\n", $0
                next
            }
            /^[1-9][0-9]{2} / {
                printf "\x1b[38;2;128;128;128m%s\x1b[0m\n", $0
                next
            }
            1
        '
    done | less ${options}
}

# listen to streaming NEW WAVE music
newwave() {
    printf "streaming \e[7mNew Wave radio\e[0m\n"
    mpv --quiet https://puma.streemlion.com:2910/stream
}

# Nice Json Query Lines colors JSONL data using the `jq` app
njql() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -C "${code} | .[]" "$@"
}

# empty the clipboard
noclip() { wl-copy --clear; }

# show the current date and time
# now() { date +'%Y-%m-%d %H:%M:%S'; }

# Nice Print Awk result; uses my tool `nn`
npa() {
    local arg
    for arg in "$@"; do
        awk "BEGIN { print(${arg}); exit }"
    done | nn
}

# Nice Print Python result; uses my tool `nn`
npp() {
    local arg
    for arg in "$@"; do
        python -c "print(${arg})"
    done | nn
}

# Nice Size, using my tool `nn`
ns() { wc -c "$@" | nn; }

# emit nothing to output and/or discard everything from input
null() { [ $# -gt 0 ] && "$@" > /dev/null; }

# Operations using 1 or 2 numbers: with 1 number show its reciprocal, square
# root, log, exp, and factorial; with 2 show their sums, differences, products,
# quotients, powers, and complement-ratio; underscores in numbers are ignored
o() {
    awk -v a="${1:-1}" -v b="${2:-1}" -v n="$#" '
        function factorial(n, f, i) {
            if (n < 1) return 0
            f = 1
            for (i = 2; i <= n; i++) f *= i
            return f
        }

        BEGIN {
            gsub(/_/, "", a)
            gsub(/_/, "", b)

            if (n == 1) {
                printf "1 / %f = %f\n", a, 1 / a
                printf "sqrt(%f) = %f\n", a, sqrt(a)
                printf "log(%f) = %f\n", a, log(a)
                printf "exp(%f) = %f\n", a, exp(a)
                a -= a % 1
                if (a >= 1) printf "%f! = %f\n", a, factorial(a)
                exit
            }

            printf "%f + %f = %f\n", a, b, a + b
            printf "%f - %f = %f\n", a, b, a - b
            printf "%f * %f = %f\n", a, b, a * b
            if (a > b) { c = a; a = b; b = c }
            c = 1 - a / b
            printf "%f / %f = %f\n", a, b, a / b
            printf "%f / %f = %f\n", b, a, b / a
            printf "%f ^ %f = %f\n", a, b, a ^ b
            printf "%f ^ %f = %f\n", b, a, b ^ a
            if (0 <= c && c <= 1) printf "1 - (%f / %f) = %f\n", a, b, c
            exit
        }
    '
}

# Print Python result
pp() {
    local arg
    for arg in "$@"; do
        python -c "print(${arg})"
    done
}

# PRecede (input) ECHO, prepends a first line to stdin lines
precho() {
    # FIX: was `echo "$@"`, which mis-parses a leading -n/-e argument and,
    # under dash, interprets backslash escapes; printf is predictable
    printf '%s\n' "$*" && cat /dev/stdin
}

# LABEL/precede data with an ANSI-styled line padded to the window width
prelabel() {
    printf "\e[7m%-*s\e[0m\n" "$(($(tput -T xterm cols) - 2))" "$*"
    cat -
}

# PREcede (input) MEMO, prepends a first highlighted line to stdin lines
prememo() { printf "\e[7m%-80s\e[0m\n" "$*"; cat -; }

# start by joining all arguments given as a tab-separated-items line of output,
# followed by all lines from stdin verbatim
pretsv() {
    awk '
        BEGIN {
            for (i = 1; i < ARGC; i++) {
                if (i > 1) printf "\t"
                printf "%s", ARGV[i]
            }
            if (ARGC > 1) print ""
            exit
        }
    ' "$@"
    cat -
}

# Plain RipGrep: no colors, line-buffered when streaming to a pipe/terminal
prg() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        rg --line-buffered --color=never "${@:-.}"
    else
        rg --color=never "${@:-.}"
    fi
}

# Quiet MPV
# qmpv() { mpv --quiet "${@:--}"; }

# Quiet MPV
qmpv() { mpv --really-quiet "${@:--}"; }

# ignore stderr, without any ugly keyboard-dancing
quiet() { "$@" 2> /dev/null; }

# keep only lines between the 2 line numbers given, inclusively; the optional
# 3rd argument is the input file, defaulting to stdin
rangelines() {
    { [ $# -eq 2 ] || [ $# -eq 3 ]; } && [ "${1}" -le "${2}" ] && {
        tail -n +"${1}" "${3:--}" | head -n $(("${2}" - "${1}" + 1))
    }
}

# RANdom MANual page, from the man-section given (1 by default)
ranman() {
    find "/usr/share/man/man${1:-1}" -type f | shuf -n 1 | xargs basename |
        sed 's-\.gz$--' | xargs man
}

# REPeat STRing emits a line with a repeating string in it, given both a
# string and a number in either order
repstr() {
    awk '
        BEGIN {
            if (ARGV[2] ~ /^[+-]?[0-9]+$/) {
                symbol = ARGV[1]
                times = ARGV[2] + 0
            } else {
                symbol = ARGV[2]
                times = ARGV[1] + 0
            }

            if (times < 0) exit
            if (symbol == "") symbol = "-"
            s = sprintf("%*s", times, "")
            gsub(/ /, symbol, s)
            print s
            exit
        }
    ' "$@"
}

# show a RULER-like width-measuring line, 80 columns wide by default
ruler() {
    [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed -E \
        's- {10}-····╵····│-g; s- -·-g; s-·····-····╵-'
}

# SystemCTL; `sysctl` is already taken for a separate/unrelated app
sctl() { systemctl "$@" 2>&1 | less -MKiCRS; }

# show a unique-looking SEParator line; useful to run between commands
# which output walls of text
sep() {
    [ "${1:-80}" -gt 0 ] &&
        printf "\e[48;2;218;218;218m%${1:-80}s\e[0m\n" "" | sed 's- -·-g'
}

# webSERVE files in a folder as localhost, using the port number given, or
# port 8080 by default; folder and port may come in either order
serve() {
    if [ -d "$1" ]; then
        printf "\e[7mserving files in %s\e[0m\n" "$1" >&2
        python3 -m http.server -d "$1" "${2:-8080}"
    else
        printf "\e[7mserving files in %s\e[0m\n" "${2:-$(pwd)}" >&2
        python3 -m http.server -d "${2:-$(pwd)}" "${1:-8080}"
    fi
}

# SET DIFFerence sorts its 2 inputs, then finds lines not in the 2nd input
setdiff() {
    # comm -23 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# SET INtersection, sorts its 2 inputs, then finds common lines
setin() {
    # comm -12 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -12 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# SET SUBtraction sorts its 2 inputs, then finds lines not in the 2nd input
setsub() {
    # comm -23 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# run apps in color-mode, using the popular option `--color=always`
shine() {
    local cmd="$1"
    [ $# -gt 0 ] && shift
    "${cmd}" --color=always "$@"
}

# skip the first n lines, or the 1st line by default
skip() { tail -n +$(("${1:-1}" + 1)) "${2:--}"; }

# skip the last n lines, or the last line by default
skiplast() { head -n -"${1:-1}" "${2:--}"; }

# SLOW/delay lines from the standard-input, waiting the number of seconds
# given for each line, or waiting 1 second by default
slow() {
    local seconds="${1:-1}"
    [ $# -gt 0 ] && shift
    (
        # FIX: IFS="$(printf "\n")" yields an empty string, since command
        # substitution strips trailing newlines; build a real newline with
        # a sentinel character instead, then strip the sentinel
        IFS="$(printf '\nx')"
        IFS="${IFS%x}"
        awk 1 "$@" | while read -r line; do
            sleep "${seconds}"
            printf "%s\n" "${line}"
        done
    )
}

# Show Latest Podcasts, using my tools `podfeed` and `si`
slp() {
    local title
    title="Latest Podcast Episodes as of $(date +'%F %T')"
    podfeed -title "${title}" "$@" | si
}

# emit the first line as is, sorting all lines after that, using the
# `sort` command, passing all/any arguments/options to it
sortrest() {
    awk -v sort="sort $*" '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/\r$/, "") }
        NR == 1 { print; fflush() }
        NR >= 2 { print | sort }
    '
}

# SORt Tab-Separated Values: emit the first line as is, sorting all lines after
# that, using the `sort` command in TSV (tab-separated values) mode, passing
# all/any arguments/options to it
sortsv() {
    awk -v sort="sort -t \"$(printf '\t')\" $*" '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/\r$/, "") }
        NR == 1 { print; fflush() }
        NR >= 2 { print | sort }
    '
}

# emit a line with the number of spaces given in it, 80 by default
spaces() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" ""; }

# SQUeeze horizontal spaces and STOMP vertical gaps
squomp() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        /^\r?$/ { empty = 1; next }
        empty { if (n > 0) print ""; empty = 0 }

        {
            gsub(/^ +| *\r?$/, "")
            gsub(/ *\t */, "\t")
            gsub(/ +/, " ")
            print; n++
        }
    ' "$@"
}

# TAC Lines outputs input-lines in reverse order, last one first, and so on...
tacl() {
    awk '
        { gsub(/\r$/, ""); lines[NR] = $0 }
        END { for (i = NR; i >= 1; i--) print lines[i] }
    ' "$@"
}

# TINY GO Build Optimized: a common use-case for the tinygo compiler
tinygobo() {
    tinygo build -no-debug -opt=2 "$@"
}

# show current date in a specifc format
today() {
    date +'%Y-%m-%d %a %b %d'
}

# get the first n lines, or 1 by default
toline() {
    head -n "${1:-1}" "${2:--}"
}

# get the processes currently using the most cpu
topcpu() {
    local count="${1:-10}"
    [ "${count}" -gt 0 ] && ps aux | awk '
        NR == 1 { print; fflush() }
        NR > 1 { print | "sort -rnk3,3" }
    ' | head -n "$((count + 1))"
}

# get the processes currently using the most memory
topmemory() {
    local count="${1:-10}"
    [ "${count}" -gt 0 ] && ps aux | awk '
        NR == 1 { print; fflush() }
        NR > 1 { print | "sort -rnk6,6" }
    ' | head -n "$((count + 1))"
}

# only keep UNIQUE lines, keeping them in their original order
unique() {
    local cmd='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        cmd='stdbuf -oL awk'
    fi

    ${cmd} '
        BEGIN { for (i = 1; i < ARGC; i++) if (f[ARGV[i]]++) delete ARGV[i] }
        !c[$0]++
    ' "$@"
}

# fix lines, ignoring leading UTF-8_BOMs (byte-order-marks) on each input's
# first line, turning all end-of-line CRLF byte-pairs into single line-feeds,
# and ensuring each input's last line ends with a line-feed; trailing spaces
# are also ignored
unixify() {
    local cmd='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        cmd='stdbuf -oL awk'
    fi

    ${cmd} '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/ *\r?$/, ""); print }
    ' "$@"
}

# skip the first/leading n bytes
unleaded() {
    tail -c +$(("$1" + 1)) "${2:--}"
}

# go UP n folders, or go up 1 folder by default
up() {
    if [ "${1:-1}" -gt 0 ]; then
        cd "$(printf "%${1:-1}s" "" | sed 's- -../-g')" || return $?
    else
        cd .
    fi
}

# convert United States Dollars into CAnadian Dollars, using the latest
# official exchange rates from the bank of canada; during weekends, the
# latest rate may be from a few days ago; the default amount of usd to
# convert is 1, when not given
usd2cad() {
    local url
    local base='https://www.bankofcanada.ca/valet/observations/group'
    local rates="${base}/FX_RATES_DAILY/csv"
    url="${rates}?start_date=$(date -d '3 days ago' +'%Y-%m-%d')"
    curl -s "${url}" | awk -F, -v amount="$(echo "${1:-1}" | sed 's-_--g')" '
        /USD/ { for (i = 1; i <= NF; i++) if($i ~ /USD/) j = i }
        END { gsub(/"/, "", $j); if (j != 0) printf "%.2f\n", amount * $j }
    '
}

# What Are These (?) shows what the names given to it are/do
wat() {
    local arg
    local printed=0
    local opts='-MKiCRS'

    if [ $# -eq 0 ]; then
        echo "$0"
        return 0
    fi

    if [ $# -lt 2 ]; then
        opts='-MKiCRS --header=1'
    fi

    for arg in "$@"; do
        [ "${printed}" -gt 0 ] && printf "\n"
        printed=1
        printf "\e[7m%-80s\e[0m\n" "${arg}"

        # follow alias chains down to whatever they ultimately expand to
        while alias "${arg}" > /dev/null 2> /dev/null; do
            arg="$(alias "${arg}" | sed -E "s-^[^=]+=['\"](.+)['\"]\$-\\1-")"
        done

        # multi-word expansions are shown as they are
        if echo "${arg}" | grep -q ' '; then
            printf "%s\n" "${arg}"
            continue
        fi

        # shell functions show their whole definition
        if declare -f "${arg}"; then
            continue
        fi

        # executables show their full path
        if which "${arg}" > /dev/null 2> /dev/null; then
            which "${arg}"
            continue
        fi

        printf "\e[38;2;204;0;0m%s not found\e[0m\n" "${arg}"
    done | { less -MKiCRS ${opts} 2> /dev/null || cat; }
}

# find all WEB/hyperLINKS (https:// and http://) in the input text
weblinks() {
    local arg
    local pattern='https?://[A-Za-z0-9+_.:%-]+(/[A-Za-z0-9+_.%/,#?&=-]*)*'
    local runner='grep'
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        runner='grep --line-buffered'
    fi

    for arg in "${@:--}"; do
        ${runner} -i -E -o "${pattern}" "${arg}"
    done
}

# recursively find all files with trailing spaces/CRs
whichtrails() {
    if ! { [ -p /dev/stdout ] || [ -t 1 ]; }; then
        rg -c '[ \r]+$' "${@:-.}"
    else
        rg --line-buffered -c '[ \r]+$' "${@:-.}"
    fi
}

# turn all wsl/unix-style full-paths into WINdows-style full-PATHS
winpaths() {
    sed -E 's-/mnt/(.)/-\u\1:/-' "$@"
}

# XARGS Lines, runs `xargs` using whole lines as extra arguments
xargsl() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -v ORS='\000' '
            FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
            { gsub(/\r$/, ""); print }
        ' | stdbuf -oL xargs -0 "$@"
    else
        awk -v ORS='\000' '
            FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
            { gsub(/\r$/, ""); print }
        ' | xargs -0 "$@"
    fi
}

# Youtube Audio Player
yap() {
    local link
    # some youtube URIs end with extra playlist/tracker parameters
    link="$(echo "$1" | sed 's-&.*--')"
    mpv "$(yt-dlp -x --audio-format best --get-url "${link}" 2> /dev/null)"
}

# show a calendar for the current YEAR, or for the year given
year() {
    {
        # show the current date/time center-aligned
        printf \
            "%21s\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n\n" \
            "" "$(date +'%a %b %d %Y')" "$(date +'%H:%M')"
        # debian linux has a different `cal` app which highlights the day
        if [ -e /usr/bin/ncal ]; then
            # fix debian/ncal's weird way to highlight the current day
            ncal -C -y "$@" | sed -E \
                's/_\x08(.+)_\x08([^ ]+)/\x1b\[7m\1\2\x1b\[0m/'
        else
            cal -y "$@"
        fi
    } | { less -MKiCRS 2> /dev/null || cat; }
}

# show the current date in the YYYY-MM-DD format
ymd() {
    date +'%Y-%m-%d'
}