File: clam.sh 1 #!/bin/sh 2 3 # The MIT License (MIT) 4 # 5 # Copyright (c) 2026 pacman64 6 # 7 # Permission is hereby granted, free of charge, to any person obtaining a copy 8 # of this software and associated documentation files (the "Software"), to deal 9 # in the Software without restriction, including without limitation the rights 10 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 # copies of the Software, and to permit persons to whom the Software is 12 # furnished to do so, subject to the following conditions: 13 # 14 # The above copyright notice and this permission notice shall be included in 15 # all copies or substantial portions of the Software. 16 # 17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 # SOFTWARE. 24 25 26 # clam 27 # 28 # Command-Line Augmentation Module (clam): get the best out of your shell. 29 # 30 # 31 # This is a collection of arguably useful shell functions and shortcuts: 32 # some of these extra commands can be real time/effort savers, ideally 33 # letting you concentrate on getting things done. 34 # 35 # Some of these commands depend on my other scripts from the `pac-tools`, 36 # others either rely on widely-preinstalled command-line apps, or ones 37 # which are available on most of the major command-line `package` managers. 38 # 39 # To use this script, you're supposed to `source` it, so its definitions 40 # stay for your whole shell session: for that, you can run `source clam` or 41 # `. clam` (no quotes either way), either directly or at shell startup. 
#
# Almost all commands defined in this script work with `bash`, `zsh`, and
# even `dash`, which is debian linux's default non-interactive shell. Some
# of its commands even seem to work on busybox's shell.


# when asked for help, emit this script's own top doc-comment and quit
case "$1" in
-h|--h|-help|--help)
    awk '
        /^case / { exit }
        /^# +clam$/, /^$/ { gsub(/^# ?/, ""); print }
    ' "$0"
    exit 0
    ;;
esac


# dash doesn't support regex-matching syntax, forcing to use case statements
case "$0" in
-bash|-dash|-sh|bash|dash|sh|/bin/sh)
    # script is being sourced with bash, dash, or ash, which is good
    :
    ;;

*)
    case "$ZSH_EVAL_CONTEXT" in
    *:file)
        # script is being sourced with zsh, which is good
        :
        ;;

    *)
        # script is being run normally, which is a waste of time
        printf "\e[7mDon't run this script directly: instead source it\e[0m\n"
        printf "\e[7mby running '. clam' (without the single quotes).\e[0m\n"
        printf "\n"
        printf "\e[7mBefore doing that, you may want to see the help,\e[0m\n"
        printf "\e[7mby running 'clam -h' (without the single quotes).\e[0m\n"
        # exiting during shell-startup may deny shell access, even if
        # the script is being run, instead of being sourced directly
        ;;
    esac
    ;;
esac


# single-digit shortcuts for side-by-side layouts; these use the `sbs` and
# `bsbs` tools
alias 0='sbs'

alias 1='bsbs 1'
alias 2='bsbs 2'
alias 3='bsbs 3'
alias 4='bsbs 4'
alias 5='bsbs 5'
alias 6='bsbs 6'
alias 7='bsbs 7'
alias 8='bsbs 8'
alias 9='bsbs 9'

# Less with Header n runs `less` with line numbers, ANSI styles, without
# line-wraps, and using the first n lines as a sticky-header, so they always
# show on top
alias lh1='less --header=1 -MKNiCRS'
alias lh2='less --header=2 -MKNiCRS'
alias lh3='less --header=3 -MKNiCRS'
alias lh4='less --header=4 -MKNiCRS'
alias lh5='less --header=5 -MKNiCRS'
alias lh6='less --header=6 -MKNiCRS'
alias lh7='less --header=7 -MKNiCRS'
alias lh8='less --header=8 -MKNiCRS'
alias lh9='less --header=9 -MKNiCRS'

# View with Header n runs `less` without line numbers, ANSI styles, without
# line-wraps, and using the first n lines as a sticky-header, so they always
# show on top
alias vh1='less --header=1 -MKiCRS'
alias vh2='less --header=2 -MKiCRS'
alias vh3='less --header=3 -MKiCRS'
alias vh4='less --header=4 -MKiCRS'
alias vh5='less --header=5 -MKiCRS'
alias vh6='less --header=6 -MKiCRS'
alias vh7='less --header=7 -MKiCRS'
alias vh8='less --header=8 -MKiCRS'
alias vh9='less --header=9 -MKiCRS'

alias c='cat'
alias e='echo'
alias r='reset'

# Breathe periodically adds extra empty lines; uses my own `breathe` tool
alias b='breathe'

# Plain ignores ANSI-styles; uses my own `plain` tool
alias p='plain'

# Awk Print
alias ap=abp

# Book-like MANual, lays out `man` docs as pairs of side-by-side pages; uses
# my tool `bsbs`
alias bman=bookman

# load/concatenate BYTES from named data sources
# alias bytes='cat'

# load/concatenate BYTES from named data sources; uses my tool `get`
alias bytes='get'

# Compile C Optimized
alias cco='cc -Wall -O2 -s -march=native -mtune=native -flto'

# Color DMESG
alias cdmesg='dmesg --color=always'

# Colored Json Query runs the `jq` app, allowing an optional filepath as the
# data source, and even an optional transformation formula
alias cjq='jq -C'

# CLear Screen
alias cls='tput -T xterm reset 2> /dev/null || reset'

# Compile C Plus Plus Optimized
alias cppo='c++ -Wall -O2 -s -march=native -mtune=native -flto'

# CURL Info only shows the response headers from the request given
alias curli='curl -I'

# CURL Silent spares you the progress bar, but still tells you about errors
alias curls='curl --silent --show-error'

# dictionary-DEFine the word given, using an online service
alias def=define

# turn JSON Lines into a proper json array
# alias dejsonl='jq -s -M'

# turn json lines into a proper json array using the `jq` app
alias dejql='jq -s -M'

# turn UTF-16 data into UTF-8
alias deutf16='iconv -f utf16 -t utf8'

# edit plain-text files
# alias edit='micro'

# ENV with 0/null-terminated lines on stdout
alias env0='env -0'

# ENV Change folder, runs the command given in the folder given (first)
alias envc='env -C'

# Extended Plain Interactive Grep
alias epig='ugrep --color=never -Q -E'

# Editor Read-Only
alias ero='micro -readonly true'

# Expand 4 turns each tab into up to 4 spaces
alias expand4='expand -t 4'

# run the Fuzzy Finder (fzf) in multi-choice mode, with custom keybindings
alias ff='fzf -m --bind ctrl-a:select-all,ctrl-space:toggle'

# get FILE's MIME types
alias filemime='file --mime-type'

# run `gcc` with all optimizations on and with static analysis on
alias gccmax='gcc -Wall -O2 -s -march=native -mtune=native -flto -fanalyzer'

# hold stdout if used at the end of a pipe-chain
alias hold='less -MKiCRS'

# find all hyperlinks inside HREF attributes in the input text
alias hrefs=href

# make JSON Lines out of JSON data
alias jl=jsonl

# shrink/compact JSON using the `jq` app, allowing an optional filepath, and
# even an optional transformation formula after that
alias jq0='jq -c -M'

# show JSON data on multiple lines, using 2 spaces for each indentation level,
# allowing an optional filepath, and even an optional transformation formula
# after that
alias jq2='jq --indent 2 -M'

# find the LAN (local-area network) IP address for this device
alias lanip='hostname -I'

# run `less`, showing line numbers, among other settings
alias least='less -MKNiCRS'

# Live GREP
alias lgrep='grep --line-buffered'

# try to run the command given using line-buffering for its (standard) output
alias livelines='stdbuf -oL'

# LOAD data from the filename or URI given; uses my `get` tool
alias load=get

# LOcal SERver webserves files in a folder as localhost, using the port
# number given, or port 8080 by default
alias loser=serve

# Live RipGrep
alias lrg='rg --line-buffered'

# run `ls` showing how many 4k pages each file takes
alias lspages='ls -s --block-size=4096'

# Listen To Youtube
alias lty=yap

# LXC-LS Fancy
alias lxc-lsf='lxc-ls --fancy'

# MAKE IN folder
alias makein=mif

# Multi-Core MaKe runs `make` using all cores
alias mcmk=mcmake

# METAdata CURL only shows the response headers from the request given
alias metacurl='curl -I'

# run `less`, showing line numbers, among other settings
alias most='less -MKNiCRS'

# emit nothing to output and/or discard everything from input
alias nil=null

# Nice Json Query colors JSON data using the `jq` app
alias njq=cjq

# Plain Interactive Grep
alias pig='ugrep --color=never -Q -E'

# Quick Compile C Optimized
alias qcco='cc -Wall -O2 -s -march=native -mtune=native -flto'

# Quick Compile C Plus Plus Optimized
alias qcppo='c++ -Wall -O2 -s -march=native -mtune=native -flto'

# Read-Only Editor
alias roe='micro -readonly true'

# Read-Only Micro (text editor)
alias rom='micro -readonly true'

# Read-Only Top
alias rot='htop --readonly'

# RUN IN folder
alias runin='env -C'

# place lines Side-By-Side
# alias sbs='column'

# Silent CURL spares you the progress bar, but still tells you about errors
alias scurl='curl --silent --show-error'

# Stdbuf Output Line-buffered
alias sol='stdbuf -oL'

# TRY running a command, showing its outcome/error-code on failure; uses my
# `verdict` tool
alias try='verdict'

# Time Verbosely the command given
alias tv='/usr/bin/time -v'

# VERTical REVert emits lines in reverse order of appearance
alias vertrev='tac'

# UGREP in Query/interactive mode
alias ugrepq='ugrep -Q'

# emit lines in reverse order of appearance
alias upsidedown='tac'

# run `cppcheck` with even stricter options
alias vetc='cppcheck --enable=portability,style --check-level=exhaustive'

# run `cppcheck` with even stricter options, also checking for c89 compliance
alias vetc89='cppcheck --enable=portability,style --check-level=exhaustive --std=c89'

# run `cppcheck` with even stricter options
alias vetcpp='cppcheck --enable=portability,style --check-level=exhaustive'

# VET SHell scripts
alias vetsh=vetshell

# check shell scripts for common gotchas, avoiding complaints about using
# the `local` keyword, which is widely supported in practice
alias vetshell='shellcheck -e 3043'

# run a command using an empty environment
alias void='env -i'

# turn plain-text from latin-1 into UTF-8; the name is from `vulgarization`,
# which is the mutation of languages away from latin during the middle ages
alias vulgarize='iconv -f latin-1 -t utf-8'

# recursively find all files with trailing spaces/CRs
alias wheretrails=whichtrails

# run `xargs`, using zero/null bytes as the extra-arguments terminator
alias x0='xargs -0'

# Xargs Lines, runs `xargs` using whole lines as extra arguments
alias xl=xargsl

# Awk Begin Print: evaluate each argument as an awk expression and print it
abp() {
    local arg
    for arg in "$@"; do
        awk "BEGIN { print (${arg}); exit }"
    done
}

# APT UPdate/grade; drops the cached sudo credentials when done
aptup() { sudo apt update && sudo apt upgrade "$@"; sudo -k; }

# emit each argument given as its own line of output
args() { [ $# -eq 0 ] || printf "%s\n" "$@"; }

# AWK in BLOCKS/paragraphs-input mode
# NOTE(review): `-F=''` reaches awk as `-F=`, which sets FS to `=`; this may
# have been meant to leave the default field separator — confirm
awkblocks() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -F='' -v RS='' "$@"
    else
        awk -F='' -v RS='' "$@"
    fi
}

# AWK using TABS as input/output field-separators
awktabs() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -F "\t" -v OFS="\t" "$@"
    else
        awk -F "\t" -v OFS="\t" "$@"
    fi
}

# Breathe lines 3: separate groups of 3 lines with empty lines
b3() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk 'NR % 3 == 1 && NR != 1 { print "" } 1' "$@"
    else
        awk 'NR % 3 == 1 && NR != 1 { print "" } 1' "$@"
    fi
}

# Breathe lines 5: separate groups of 5 lines with empty lines
b5() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk 'NR % 5 == 1 && NR != 1 { print "" } 1' "$@"
    else
        awk 'NR % 5 == 1 && NR != 1 { print "" } 1' "$@"
    fi
}

# show an ansi-styled BANNER-like line, padded to the full screen width
banner() { printf "\e[7m%-$(tput -T xterm cols)s\e[0m\n" "$*"; }

# emit a colored bar which can help visually separate different outputs;
# the optional argument is the bar width, which is 80 columns by default
bar() {
    [ "${1:-80}" -gt 0 ] && printf "\e[48;2;218;218;218m%${1:-80}s\e[0m\n" ""
}

# Breathe Header 3: add an empty line after the first one (the header),
# then separate groups of 3 lines with empty lines between them
bh3() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk '(NR - 1) % 3 == 1 { print "" } 1' "$@"
    else
        awk '(NR - 1) % 3 == 1 { print "" } 1' "$@"
    fi
}

# Breathe Header 5: add an empty line after the first one (the header),
# then separate groups of 5 lines with empty lines between them
bh5() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk '(NR - 1) % 5 == 1 { print "" } 1' "$@"
    else
        awk '(NR - 1) % 5 == 1 { print "" } 1' "$@"
    fi
}

# emit a line with a repeating block-like symbol in it
blocks() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -█-g'; }

# BOOK-like MANual, lays out `man` docs as pairs of side-by-side pages; uses
# my tool `bsbs`
bookman() {
    local w
    w="$(tput -T xterm cols)"
    w="$((w / 2 - 4))"
    if [ "$w" -lt 65 ]; then
        w=65
    fi
    MANWIDTH="$w" man "$@" | bsbs 2
}

# split lines using the separator given, turning them into single-item lines
breakdown() {
    local sep="${1:- }"
    [ $# -gt 0 ] && shift
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} -F "${sep}" '{ for (i = 1; i <= NF; i++) print $i }' "$@"
}

# CAlculator with Nice numbers runs my tool `ca` and colors results with
# my tool `nn`, alternating styles to make long numbers easier to read
can() {
    local arg
    for arg in "$@"; do
        [ $# -ge 2 ] && printf "\e[7m%s\e[0m\n" "${arg}" >&2
        ca "${arg}" | nn
    done
}

# uppercase the first letter on each line, and lowercase all later letters
capitalize() { sed -E 's-^(.*)-\L\1-; s-^(.)-\u\1-'; }

# center-align lines of text, using the current screen width
center() {
    awk -v width="$(tput -T xterm cols)" '
        {
            gsub(/\r$/, "")
            lines[NR] = $0
            s = $0
            gsub(/\x1b\[[0-9;]*[A-Za-z]/, "", s) # ANSI style-changers
            l = length(s)
            if (maxlen < l) maxlen = l
        }

        END {
            n = (width - maxlen) / 2
            if (n % 1) n = n - (n % 1)
            fmt = sprintf("%%%ds%%s\n", (n > 0) ? n : 0)
            for (i = 1; i <= NR; i++) printf fmt, "", lines[i]
        }
    ' "$@"
}

# Colored GREP ensures matches are colored when piped
cgrep() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        grep --line-buffered --color=always "${@:-.}"
    else
        grep --color=always "${@:-.}"
    fi
}

# Colored Go Test on the folder given; uses my command `gbm`
cgt() {
    local f='real %e user %U sys %S mem %M exit %x'
    /usr/bin/time -f "$f" go test "${@:-.}" 2>&1 \
        | gbm '^ok' '^[-]* ?FAIL' '^\?'
}

# Colored RipGrep ensures app `rg` emits colors when piped
crg() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        rg --line-buffered --color=always "${@:-.}"
    else
        rg --color=always "${@:-.}"
    fi
}

# Compile Rust Optimized
cro() {
    rustc -C lto=true -C codegen-units=1 -C debuginfo=0 -C strip=symbols \
        -C opt-level=3 "$@"
}

# emit a line with a repeating cross-like symbol in it
crosses() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -×-g'; }

# listen to streaming DANCE music
dance() {
    printf "streaming \e[7mDance Wave Retro\e[0m\n"
    mpv --really-quiet https://retro.dancewave.online/retrodance.mp3
}

# emit a line with a repeating dash-like symbol in it
dashes() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -—-g'; }

# remove commas in numbers, as well as leading dollar signs in numbers
decomma() {
    sed -E 's-([0-9]{3}),-\1-g; s-([0-9]{1,2}),-\1-g; s-\$([0-9\.]+)-\1-g'
}

# strip HTML tags, decode the common HTML entities, and tidy up whitespace;
# fixed: the entity patterns had degraded into no-ops (`gsub(/&/, "&")`), and
# a literal `&` in an awk replacement needs escaping, since bare `&` stands
# for the matched text; `&amp;` is decoded last to avoid double-decoding
dehtmlify() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        {
            gsub(/<\/?[^>]+>/, "")
            gsub(/&lt;/, "<")
            gsub(/&gt;/, ">")
            gsub(/&amp;/, "\\&")
            gsub(/^ +| *\r?$/, "")
            gsub(/ +/, " ")
            print
        }
    ' "$@"
}

# expand tabs each into up to the number of spaces given, or 4 by default
detab() {
    # expand tabs into spaces, using the tab-stop width given as the first
    # argument (4 by default); remaining arguments are passed to `expand`
    local tabstop="${1:-4}"
    [ $# -gt 0 ] && shift
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL expand -t "${tabstop}" "$@"
    else
        expand -t "${tabstop}" "$@"
    fi
}

# DIVide 2 numbers 3 ways, including the complement
div() {
    awk -v a="${1:-1}" -v b="${2:-1}" '
        BEGIN {
            gsub(/_/, "", a)
            gsub(/_/, "", b)
            if (a > b) { c = a; a = b; b = c }
            c = 1 - a / b
            if (0 <= c && c <= 1) printf "%f\n%f\n%f\n", a / b, b / a, c
            else printf "%f\n%f\n", a / b, b / a
            exit
        }'
}

# emit a line with a repeating dot-like symbol in it
dots() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -·-g'; }

# show the current Date and Time
dt() {
    printf "\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n" \
        "$(date +'%a %b %d')" "$(date +%T)"
}

# show the current Date, Time, and a Calendar with the 3 `current` months
dtc() {
    {
        # show the current date/time center-aligned
        printf "%20s\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n\n" \
            "" "$(date +'%a %b %d')" "$(date +%T)"
        # debian linux has a different `cal` app which highlights the day
        if [ -e /usr/bin/ncal ]; then
            # fix debian/ncal's weird way to highlight the current day
            ncal -C -3 | sed -E 's/_\x08(.+)_\x08([^ ]+)/\x1b\[7m\1\2\x1b\[0m/'
        else
            cal -3
        fi
    } | less -MKiCRS
}

# EDit RUN shell commands, using an interactive editor; uses my tool `leak`
edrun() {
    # dash doesn't support the process-sub syntax
    # . <( micro -readonly true -filetype shell | leak --inv )
    micro -readonly true -filetype shell | leak --inv | . /dev/fd/0
}

# convert EURos into CAnadian Dollars, using the latest official exchange
# rates from the bank of canada; during weekends, the latest rate may be
# from a few days ago; the default amount of euros to convert is 1, when
# not given
eur2cad() {
    local url
    local site='https://www.bankofcanada.ca/valet/observations/group'
    local csv_rates="${site}/FX_RATES_DAILY/csv"
    url="${csv_rates}?start_date=$(date -d '3 days ago' +'%Y-%m-%d')"
    curl -s "${url}" | awk -F, -v amount="$(echo "${1:-1}" | sed 's-_--g')" '
        /EUR/ { for (i = 1; i <= NF; i++) if ($i ~ /EUR/) j = i }
        END { gsub(/"/, "", $j); if (j != 0) printf "%.2f\n", amount * $j }
    '
}

# Fix Audio Duration on a separate copy of the file given
fad() { ffmpeg -i "${1:-input.m4a}" -acodec copy "${2:-output.dat}"; }

# get the first n lines, or 1 by default
first() { head -n "${1:-1}" "${2:--}"; }

# Field-Names AWK remembers field-positions by name, from the first input line
fnawk() {
    local code="${1:-1}"
    [ $# -gt 0 ] && shift

    local buffering=''
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        buffering='stdbuf -oL'
    fi

    ${buffering} awk -v OFS="\t" '
        NR == 1 {
            FS = /\t/ ? "\t" : " "
            $0 = $0
            for (i in names) delete names[i]
            for (i = 1; i <= NF; i++) names[$i] = i
            i = ""
        }
        { low = lower = tolower($0) }
        '"${code}"'
    ' "$@"
}

# start from the line number given, skipping all previous ones
fromline() { tail -n +"${1:-1}" "${2:--}"; }

# convert a mix of FeeT and INches into meters
ftin() {
    local ft="${1:-0}"
    ft="$(echo "${ft}" | sed 's-_--g')"
    local in="${2:-0}"
    in="$(echo "${in}" | sed 's-_--g')"
    awk "BEGIN { print 0.3048 * ${ft} + 0.0254 * ${in}; exit }"
}

# Gawk Bignum Print
gbp() { gawk --bignum "BEGIN { print $1; exit }"; }

# glue/stick together various lines, only emitting a line-feed at the end; an
# optional argument is the output-item-separator, which is empty by default
glue() {
    local sep="${1:-}"
    [ $# -gt 0 ] && shift
    awk -v sep="${sep}" '
        NR > 1 { printf "%s", sep }
        { gsub(/\r/, ""); printf "%s", $0 }
        END { if (NR > 0) print "" }
    ' "$@"
}

# GO Build Stripped: a common use-case for the go compiler
gobs() { go build -ldflags "-s -w" -trimpath "$@"; }

# GO DEPendencieS: show all dependencies in a go project
godeps() { go list -f '{{ join .Deps "\n" }}' "$@"; }

# GO IMPortS: show all imports in a go project
goimps() { go list -f '{{ join .Imports "\n" }}' "$@"; }

# go to the folder picked using an interactive TUI; uses my tool `bf`
goto() {
    local where
    where="$(bf "${1:-.}")"
    if [ $? -ne 0 ]; then
        return 0
    fi

    where="$(realpath "${where}")"
    if [ ! -d "${where}" ]; then
        where="$(dirname "${where}")"
    fi
    cd "${where}" || return
}

# show Help laid out on 2 side-by-side columns; uses my tool `bsbs`
h2() { naman "$@" | bsbs 2; }

# show Help laid out on 3 side-by-side columns; uses my tool `bsbs`
h3() {
    local w
    w="$(tput -T xterm cols)"
    w="$((w / 3 - 6))"
    if [ "$w" -lt 55 ]; then
        w=55
    fi
    MANWIDTH="$w" man "$@" | bsbs 3
}

# Highlighted-style ECHO
hecho() { printf "\e[7m%s\e[0m\n" "$*"; }

# show each byte as a pair of HEXadecimal (base-16) symbols
hexify() {
    cat "$@" | od -v -x -A n | awk '
        { gsub(/ +/, ""); printf "%s", $0 }
        END { print "" }
    '
}

# History Fuzzy-finder
hf() {
    eval "$(
        history \
            | fzf --reverse --height 80% --tmux center,80% \
            | awk '
                {
                    $1 = ""
                    $2 = ""
                    gsub(/^ */, "")
                    printf("\x1b[7m%s\x1b[0m\n", $0) > "/dev/stderr"
                    print
                }
            '
    )"
}

# Help Me Remember my custom shell commands
hmr() {
    local cmd="bat"
    # debian linux uses a different name for the `bat` app
    if [ -e /usr/bin/batcat ]; then
        cmd="batcat"
    fi

    "$cmd" \
        --style=plain,header,numbers --theme='Monokai Extended Light' \
        --wrap=never --color=always "$(which clam)" |
        sed -e 's-\x1b\[38;5;70m-\x1b[38;5;28m-g' \
            -e 's-\x1b\[38;5;214m-\x1b[38;5;208m-g' \
            -e 's-\x1b\[38;5;243m-\x1b[38;5;103m-g' \
            -e 's-\x1b\[38;5;238m-\x1b[38;5;245m-g' \
            -e 's-\x1b\[38;5;228m-\x1b[48;5;228m-g' |
        less -MKiCRS
}

# convert seconds into a colon-separated Hours-Minutes-Seconds triple
hms() {
    echo "${@:-0}" | sed -E 's-_--g; s- +-\n-g' | awk '
        /./ {
            x = $0
            h = (x - x % 3600) / 3600
            m = (x % 3600) / 60
            s = x % 60
            printf "%02d:%02d:%05.2f\n", h, m, s
        }
    '
}

# find all hyperlinks inside HREF attributes in the input text
href() {
    local arg
    local awk_cmd='awk'
    local grep_cmd='grep'
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        grep_cmd='grep --line-buffered'
        if [ -e /usr/bin/stdbuf ]; then
            awk_cmd='stdbuf -oL awk'
        fi
    fi

    for arg in "${@:--}"; do
        ${grep_cmd} -i -E -o 'href="[^"]+"' "${arg}"
    done | ${awk_cmd} '{ gsub(/^href="|"\r?$/, ""); print }'
}

# avoid/ignore lines which case-insensitively match any of the regexes given
iavoid() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        BEGIN {
            if (IGNORECASE == "") {
                m = "this variant of AWK lacks case-insensitive regex-matching"
                print(m) > "/dev/stderr"
                exit 125
            }
            IGNORECASE = 1

            for (i = 1; i < ARGC; i++) {
                e[i] = ARGV[i]
                delete ARGV[i]
            }
        }

        {
            for (i = 1; i < ARGC; i++) if ($0 ~ e[i]) next
            print
            got++
        }

        END { exit(got == 0) }
    ' "${@:-^\r?$}"
}

# ignore command in a pipe: this allows quick re-editing of pipes, while
# still leaving signs of previously-used steps, as a memo
idem() { cat; }

# ignore command in a pipe: this allows quick re-editing of pipes, while
# still leaving signs of previously-used steps, as a memo
ignore() { cat; }

# only keep lines which case-insensitively match any of the regexes given
imatch() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        BEGIN {
            if (IGNORECASE == "") {
                m = "this variant of AWK lacks case-insensitive regex-matching"
                print(m) > "/dev/stderr"
                exit 125
            }
            IGNORECASE = 1

            for (i = 1; i < ARGC; i++) {
                e[i] = ARGV[i]
                delete ARGV[i]
            }
        }

        {
            for (i = 1; i < ARGC; i++) {
                if ($0 ~ e[i]) {
                    print
                    got++
                    next
                }
            }
        }

        END { exit(got == 0) }
    ' "${@:-[^\r]}"
}

# start each non-empty line with extra n spaces
indent() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        BEGIN {
            n = ARGV[1] + 0
            delete ARGV[1]
            fmt = sprintf("%%%ds%%s\n", (n > 0) ? n : 0)
        }

        /^\r?$/ { print ""; next }
        { gsub(/\r$/, ""); printf(fmt, "", $0) }
    ' "$@"
}

# INSTall APT packages; drops the cached sudo credentials when done
instapt() { sudo apt install "$@"; sudo -k; }

# emit each word-like item from each input line on its own line; when a file
# has tabs on its first line, items are split using tabs alone, which allows
# items to have spaces in them
items() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        FNR == 1 { FS = /\t/ ? "\t" : " "; $0 = $0 }
        { gsub(/\r$/, ""); for (i = 1; i <= NF; i++) print $i }
    ' "$@"
}

# listen to streaming JAZZ music
jazz() {
    printf "streaming \e[7mSmooth Jazz Instrumental\e[0m\n"
    mpv --quiet https://stream.zeno.fm/00rt0rdm7k8uv
}

# show a `dad` JOKE from the web, sometimes even a very funny one
joke() {
    curl --silent --show-error https://icanhazdadjoke.com | fold -s |
        awk '{ gsub(/ *\r?$/, ""); print }'
}

# JSON Query Lines turns JSON top-level arrays into multiple individually-JSON
# lines using the `jq` app, keeping all other top-level values as single line
# JSON outputs
jql() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -M "${code} | .[]" "$@"
}

# JSON Query Keys runs `jq` to find all unique key-combos from tabular JSON
jqk() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -M "${code} | .[] | keys" "$@" | awk '!c[$0]++'
}

# JSON Keys finds all unique key-combos from tabular JSON data; uses my tools
# `jsonl` and `tjp`
jsonk() {
    tjp '[e.keys() for e in v] if isinstance(v, (list, tuple)) else v.keys()' \
        "${1:--}" | jsonl | awk '!c[$0]++'
}

# JSON Table, turns TSV tables into tabular JSON, where valid-JSON values are
# auto-parsed into numbers, booleans, etc...; uses my tools `jsons` and `tjp`
jsont() {
    jsons "$@" | tjp \
        '[{k: rescue(lambda: loads(v), v) for k, v in e.items()} for e in v]'
}

# emit the given number of random/junk bytes, or 1024 junk bytes by default
junk() { head -c "$(echo "${1:-1024}" | sed 's-_--g')" /dev/urandom; }

# get the last n lines, or 1 by default
last() { tail -n "${1:-1}" "${2:--}"; }

# convert pounds (LB) into kilograms
lb() {
    echo "${@:-1}" | sed -E 's-_--g; s- +-\n-g' |
        awk '/./ { printf "%.2f\n", 0.45359237 * $0 }'
}

# convert a mix of pounds (LB) and weight-ounces (OZ) into kilograms
lboz() {
    local lb="${1:-0}"
    lb="$(echo "${lb}" | sed 's-_--g')"
    local oz="${2:-0}"
    oz="$(echo "${oz}" | sed 's-_--g')"
    awk "BEGIN { print 0.45359237 * ${lb} + 0.028349523 * ${oz}; exit }"
}

# limit stops at the first n bytes, or 1024 bytes by default
limit() { head -c "$(echo "${1:-1024}" | sed 's-_--g')" "${2:--}"; }

# ensure LINES are never accidentally joined across files, by always emitting
# a line-feed at the end of each line
lines() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk 1 "$@"
    else
        awk 1 "$@"
    fi
}

# regroup adjacent lines into n-item tab-separated lines
lineup() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    local n="${1:-0}"
    [ $# -gt 0 ] && shift

    if [ "$n" -le 0 ]; then
        ${command} '
            NR > 1 { printf "\t" }
            { printf "%s", $0 }
            END { if (NR > 0) print "" }
        ' "$@"
        return $?
    fi

    ${command} -v n="$n" '
        NR % n != 1 && n > 1 { printf "\t" }
        { printf "%s", $0 }
        NR % n == 0 { print "" }
        END { if (NR % n != 0) print "" }
    ' "$@"
}

# emit LINEs ending with Zero/null bytes
# fixed: now checks that `stdbuf` is actually installed before using it,
# like every other function in this script, instead of failing outright on
# systems without it
linez() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -v ORS='\000' 1 "$@"
    else
        awk -v ORS='\000' 1 "$@"
    fi
}

# LiSt files, showing how many 4K-sized storage blocks they use
ls4k() { ls -s --block-size=4096 "$@"; }

# LiSt MAN pages
lsman() { man -k "${1:-.}"; }

# MARK the current tab with the message given, followed by the current folder;
# works only on the `bash` shell
mark() {
    if [ $# -eq 0 ]; then
        PS1="\[\e[0m\e]0;\w\a\$ "
    else
        PS1="\[\e[0m\e]0;${*} \w\a\$ "
    fi
}

# wrap http(s) URLs in the input with OSC-8 terminal-hyperlink escapes, so
# terminals which support them make the links clickable
marklinks() {
    local re='https?://[A-Za-z0-9+_.:%-]+(/[A-Za-z0-9+_.%/,#?&=-]*)*'
    sed -E 's-('"${re}"')-\x1b]8;;\1\x1b\\\1\x1b]8;;\x1b\\-g' "$@"
}

# Multi-Core MAKE runs `make` using all cores
mcmake() { make -j "$(nproc)" "$@"; }

# merge stderr into stdout, which is useful for piped commands
merrge() { "${@:-cat /dev/null}" 2>&1; }

# list every distinct key-path in the JSON input, one per line
metajq() {
    # https://github.com/stedolan/jq/issues/243#issuecomment-48470943
    jq -r -M '
        [
            path(..) |
            map(if type == "number" then "[]" else tostring end) |
            join(".") | split(".[]") | join("[]")
        ] | unique | map("." + .) | .[]
    ' "$@"
}

# Make In Folder, also showing time and max memory used
mif() {
    local f='real %e user %U sys %S mem %M exit %x'
    local folder
    folder="${1:-.}"
    [ $# -gt 0 ] && shift
    env -C "${folder}" /usr/bin/time -f "$f" make "$@"
}

# MINimize DECimalS ignores all trailing decimal zeros in numbers, and even
# the decimal dots themselves, when decimals in a number are all zeros
# mindecs() {
#     local cmd='sed -E'
#     if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
#         cmd='sed -E -u'
#     fi
#     ${cmd} 's-([0-9]+)\.0+\W-\1-g; s-([0-9]+\.[0-9]*[1-9])0+\W-\1-g' "$@"
# }

# MaKe, also showing the time taken and the max memory used
mk() {
    local f='real %e user %U sys %S mem %M exit %x'
    /usr/bin/time -f "$f" make "$@"
}

# Number all lines counting from 0, using a tab right after each line number
n0() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL nl -b a -w 1 -v 0 "$@"
    else
        nl -b a -w 1 -v 0 "$@"
    fi
}

# Number all lines counting from 1, using a tab right after each line number
n1() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL nl -b a -w 1 -v 1 "$@"
    else
        nl -b a -w 1 -v 1 "$@"
    fi
}

# NArrow MANual, keeps `man` narrow, even if the window/tab is wide when run
naman() {
    local w
    w="$(tput -T xterm cols)"
    w="$((w / 2 - 4))"
    if [ "$w" -lt 80 ]; then
        w=80
    fi
    MANWIDTH="$w" man "$@"
}

# Not AND sorts its 2 inputs, then finds lines not in common
nand() {
    # comm -3 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -3 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# Nice DEFine dictionary-defines the words given, using an online service
ndef() {
    local arg
    local gap=0
    local options='-MKiCRS'

    if [ $# -eq 0 ]; then
        printf "\e[38;2;204;0;0mndef: no words given\e[0m\n" >&2
        return 1
    fi

    # with a single word, keep its banner sticky at the top of the pager
    if [ $# -eq 1 ]; then
        options='--header=1 -MKiCRS'
    fi

    for arg in "$@"; do
        # emit a blank separator line between words, but not before the first
        [ "${gap}" -gt 0 ] && printf "\n"
        gap=1
        printf "\e[7m%-80s\e[0m\n" "${arg}"
        # color the DICT-protocol status lines, passing the rest through
        curl --silent "dict://dict.org/d:${arg}" | awk '
            { gsub(/\r$/, "") }
            /^151 / {
                printf "\x1b[38;2;52;101;164m%s\x1b[0m\n", $0
                next
            }
            /^[1-9][0-9]{2} / {
                printf "\x1b[38;2;128;128;128m%s\x1b[0m\n", $0
                next
            }
            1
        '
    done | less ${options}
    # ${options} above is deliberately unquoted, to pass multiple options
}

# listen to streaming NEW WAVE music
newwave() {
    printf "streaming \e[7mNew Wave radio\e[0m\n"
    mpv --quiet https://puma.streemlion.com:2910/stream
}

# Nice Json Query Lines colors JSONL data using the `jq` app
njql() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -C "${code} | .[]" "$@"
}

# empty the clipboard; needs the wayland clipboard tools
noclip() { wl-copy --clear; }

# show the current date and time
# now() { date +'%Y-%m-%d %H:%M:%S'; }

# Nice Print Awk result; uses my tool `nn`
npa() {
    local arg
    for arg in "$@"; do
        awk "BEGIN { print(${arg}); exit }"
    done | nn
}

# Nice Print Python result; uses my tool `nn`
npp() {
    local arg
    for arg in "$@"; do
        python -c "print(${arg})"
    done | nn
}

# Nice Size, using my tool `nn`
ns() { wc -c "$@" | nn; }

# emit nothing to output and/or discard everything from input
# NOTE(review): with no arguments this runs nothing, returns a failure status,
# and does not drain stdin — confirm that matches the intent described above
null() { [ $# -gt 0 ] && "$@" > /dev/null; }

# Operations using 1 or 2 numbers
o() {
    # underscores in the numbers given are ignored, as digit separators
    awk -v a="${1:-1}" -v b="${2:-1}" -v n="$#" '
        # NOTE(review): returns 0 for n < 1, so 0! would show as 0; callers
        # below only use it when a >= 1, which keeps that result unreachable
        function factorial(n, f, i) {
            if (n < 1) return 0
            f = 1
            for (i = 2; i <= n; i++) f *= i
            return f
        }

        BEGIN {
            gsub(/_/, "", a)
            gsub(/_/, "", b)

            if (n == 1) {
                printf "1 / %f = %f\n", a, 1 / a
                printf "sqrt(%f) = %f\n", a, sqrt(a)
                printf "log(%f) = %f\n", a, log(a)
                printf "exp(%f) = %f\n", a, exp(a)
                a -= a % 1
                if (a >= 1) printf "%f! = %f\n", a, factorial(a)
                exit
            }

            printf "%f + %f = %f\n", a, b, a + b
            printf "%f - %f = %f\n", a, b, a - b
            printf "%f * %f = %f\n", a, b, a * b
            # ensure a <= b for the ratio-style results which follow
            if (a > b) { c = a; a = b; b = c }
            c = 1 - a / b
            printf "%f / %f = %f\n", a, b, a / b
            printf "%f / %f = %f\n", b, a, b / a
            printf "%f ^ %f = %f\n", a, b, a ^ b
            printf "%f ^ %f = %f\n", b, a, b ^ a
            if (0 <= c && c <= 1) printf "1 - (%f / %f) = %f\n", a, b, c
            exit
        }
    '
}

# Print Python result
pp() {
    local arg
    for arg in "$@"; do
        python -c "print(${arg})"
    done
}

# PRecede (input) ECHO, prepends a first line to stdin lines
precho() { echo "$@" && cat /dev/stdin; }

# LABEL/precede data with an ANSI-styled line spanning the terminal's width
prelabel() {
    printf "\e[7m%-*s\e[0m\n" "$(($(tput -T xterm cols) - 2))" "$*"
    cat -
}

# PREcede (input) MEMO, prepends a first highlighted line to stdin lines
prememo() { printf "\e[7m%-80s\e[0m\n" "$*"; cat -; }

# start by joining all arguments given as a tab-separated-items line of output,
# followed by all lines from stdin verbatim
pretsv() {
    awk '
        BEGIN {
            for (i = 1; i < ARGC; i++) {
                if (i > 1) printf "\t"
                printf "%s", ARGV[i]
            }
            if (ARGC > 1) print ""
            exit
        }
    ' "$@"
    cat -
}

# Plain RipGrep
prg() {
    # line-buffer output when stdout is a pipe or a terminal, so it streams
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        rg --line-buffered --color=never "${@:-.}"
    else
        rg --color=never "${@:-.}"
    fi
}

# Quiet MPV
# qmpv() { mpv --quiet "${@:--}"; }

# Quiet MPV
qmpv() { mpv --really-quiet "${@:--}"; }

# ignore stderr, without any ugly keyboard-dancing
quiet() { "$@" 2> /dev/null; }

# keep only lines between the 2 line numbers given, inclusively
rangelines() {
    { [ $# -eq 2 ] || [ $# -eq 3 ]; } && [ "${1}" -le "${2}" ] && {
        tail -n +"${1}" "${3:--}" | head -n $(("${2}" - "${1}" + 1))
    }
}

# RANdom MANual page, picked from the man-page section given, or section 1
ranman() {
    find "/usr/share/man/man${1:-1}" -type f | shuf -n 1 | xargs basename |
        sed 's-\.gz$--' | xargs man
}

# REPeat STRing emits a line with a repeating string in it, given both a
# string and a number in either order
repstr() {
    awk '
        BEGIN {
            # accept the count and the string in either argument order
            if (ARGV[2] ~ /^[+-]?[0-9]+$/) {
                symbol = ARGV[1]
                times = ARGV[2] + 0
            } else {
                symbol = ARGV[2]
                times = ARGV[1] + 0
            }

            if (times < 0) exit
            if (symbol == "") symbol = "-"
            # make a run of `times` spaces, then replace each with the string
            s = sprintf("%*s", times, "")
            gsub(/ /, symbol, s)
            print s
            exit
        }
    ' "$@"
}

# show a RULER-like width-measuring line
ruler() {
    [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed -E \
        's- {10}-····╵····│-g; s- -·-g; s-·····-····╵-'
}

# SystemCTL; `sysctl` is already taken for a separate/unrelated app
sctl() { systemctl "$@" 2>&1 | less -MKiCRS; }

# show a unique-looking SEParator line; useful to run between commands
# which output walls of text
sep() {
    [ "${1:-80}" -gt 0 ] &&
        printf "\e[48;2;218;218;218m%${1:-80}s\e[0m\n" "" | sed 's- -·-g'
}

# webSERVE files in a folder as localhost, using the port number given, or
# port 8080 by default
serve() {
    # the folder and the port may come in either order: a first argument
    # naming an existing folder picks the first form
    if [ -d "$1" ]; then
        printf "\e[7mserving files in %s\e[0m\n" "$1" >&2
        python3 -m http.server -d "$1" "${2:-8080}"
    else
        printf "\e[7mserving files in %s\e[0m\n" "${2:-$(pwd)}" >&2
        python3 -m http.server -d "${2:-$(pwd)}" "${1:-8080}"
    fi
}

# SET DIFFerence sorts its 2 inputs, then finds lines not in the 2nd input
setdiff() {
    # comm -23 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# SET INtersection, sorts its 2 inputs, then finds common lines
setin() {
    # comm -12 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -12 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# SET SUBtraction sorts its 2 inputs, then finds lines not in the 2nd input
setsub() {
    # comm -23 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# run apps in color-mode, using the popular option `--color=always`
shine() {
    local cmd="$1"
    [ $# -gt 0 ] && shift
    "${cmd}" --color=always "$@"
}

# skip the first n lines, or the 1st line by default
skip() { tail -n +$(("${1:-1}" + 1)) "${2:--}"; }

# skip the last n lines, or the last line by default
skiplast() { head -n -"${1:-1}" "${2:--}"; }

# SLOW/delay lines from the standard-input, waiting the number of seconds
# given for each line, or waiting 1 second by default
slow() {
    local seconds="${1:-1}"
    [ $# -gt 0 ] && shift
    # subshell keeps the IFS change from leaking into the caller's shell
    (
        # NOTE(review): command substitution strips trailing newlines, so
        # this sets IFS to an empty string, not to a newline; harmless for
        # read-into-one-variable, but likely not the original intent
        IFS="$(printf "\n")"
        awk 1 "$@" | while read -r line; do
            sleep "${seconds}"
            printf "%s\n" "${line}"
        done
    )
}

# Show Latest Podcasts, using my tools `podfeed` and `si`
slp() {
    local title
    title="Latest Podcast Episodes as of $(date +'%F %T')"
    podfeed -title "${title}" "$@" | si
}

# emit the first line as is, sorting all lines after that, using the
# `sort` command, passing all/any arguments/options to it
sortrest() {
    awk -v sort="sort $*" '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/\r$/, "") }
        NR == 1 { print; fflush() }
        NR >= 2 { print | sort }
    '
}

# SORt Tab-Separated Values: emit the first line as is, sorting all lines after
# that, using the `sort` command in TSV (tab-separated values) mode, passing
# all/any arguments/options to it
sortsv() {
    awk -v sort="sort -t \"$(printf '\t')\" $*" '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/\r$/, "") }
        NR == 1 { print; fflush() }
        NR >= 2 { print | sort }
    '
}

# emit a line with the number of spaces given in it
spaces() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" ""; }

# SQUeeze horizontal spaces and STOMP vertical gaps
squomp() {
    # line-buffer output when stdout is a pipe or a terminal, so it streams
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        # collapse runs of empty lines, emitting at most 1 between text lines
        /^\r?$/ { empty = 1; next }
        empty { if (n > 0) print ""; empty = 0 }

        {
            gsub(/^ +| *\r?$/, "")
            gsub(/ *\t */, "\t")
            gsub(/ +/, " ")
            print; n++
        }
    ' "$@"
}

# TAC Lines outputs input-lines in reverse order, last one first, and so on...
tacl() {
    # buffer all lines, trimming trailing carriage-returns, then emit them
    # in reverse order
    awk '
        { gsub(/\r$/, ""); lines[NR] = $0 }
        END { for (i = NR; i >= 1; i--) print lines[i] }
    ' "$@"
}

# TINY GO Build Optimized: a common use-case for the tinygo compiler
tinygobo() { tinygo build -no-debug -opt=2 "$@"; }

# show current date in a specific format
today() { date +'%Y-%m-%d %a %b %d'; }

# get the first n lines, or 1 by default
toline() { head -n "${1:-1}" "${2:--}"; }

# get the processes currently using the most cpu
topcpu() {
    local n="${1:-10}"
    # quoting expansions inside $(( )) isn't portable across shells, so the
    # variable name is used directly in the arithmetic below
    [ "$n" -gt 0 ] && ps aux | awk '
        NR == 1 { print; fflush() }
        NR > 1 { print | "sort -rnk3,3" }
    ' | head -n "$((n + 1))"
}

# get the processes currently using the most memory
topmemory() {
    local n="${1:-10}"
    # see the arithmetic-quoting note in topcpu
    [ "$n" -gt 0 ] && ps aux | awk '
        NR == 1 { print; fflush() }
        NR > 1 { print | "sort -rnk6,6" }
    ' | head -n "$((n + 1))"
}

# only keep UNIQUE lines, keeping them in their original order
unique() {
    # line-buffer output when stdout is a pipe or a terminal, so it streams
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        # drop repeated filename arguments, then keep first-seen lines only
        BEGIN { for (i = 1; i < ARGC; i++) if (f[ARGV[i]]++) delete ARGV[i] }
        !c[$0]++
    ' "$@"
}

# fix lines, ignoring leading UTF-8_BOMs (byte-order-marks) on each input's
# first line, turning all end-of-line CRLF byte-pairs into single line-feeds,
# and ensuring each input's last line ends with a line-feed; trailing spaces
# are also ignored
unixify() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/ *\r?$/, ""); print }
    ' "$@"
}

# skip the first/leading n bytes; with no arguments, emits its input as is,
# instead of failing on an empty arithmetic operand
unleaded() { tail -c +$((${1:-0} + 1)) "${2:--}"; }

# go UP n folders, or go up 1 folder by default
up() {
    if [ "${1:-1}" -le 0 ]; then
        cd .
    else
        # build a ../../… path with as many steps as the number given
        cd "$(printf "%${1:-1}s" "" | sed 's- -../-g')" || return $?
    fi
}

# convert United States Dollars into CAnadian Dollars, using the latest
# official exchange rates from the bank of canada; during weekends, the
# latest rate may be from a few days ago; the default amount of usd to
# convert is 1, when not given
usd2cad() {
    local url
    local site='https://www.bankofcanada.ca/valet/observations/group'
    local csv_rates="${site}/FX_RATES_DAILY/csv"
    # ask for a few days of data, so weekends/holidays still yield a rate
    url="${csv_rates}?start_date=$(date -d '3 days ago' +'%Y-%m-%d')"
    # underscores in the amount are ignored, acting as digit separators
    curl -s "${url}" | awk -F, -v amount="$(echo "${1:-1}" | sed 's-_--g')" '
        /USD/ { for (i = 1; i <= NF; i++) if($i ~ /USD/) j = i }
        END { gsub(/"/, "", $j); if (j != 0) printf "%.2f\n", amount * $j }
    '
}

# What Are These (?) shows what the names given to it are/do
wat() {
    local arg
    local gap=0
    local less_options='-MKiCRS'

    if [ $# -eq 0 ]; then
        echo "$0"
        return 0
    fi

    # with a single name, keep its banner sticky at the top of the pager
    if [ $# -lt 2 ]; then
        less_options='-MKiCRS --header=1'
    fi

    for arg in "$@"; do
        # emit a blank separator line between names, but not before the first
        [ "${gap}" -gt 0 ] && printf "\n"
        gap=1
        printf "\e[7m%-80s\e[0m\n" "${arg}"

        # keep resolving aliases into what they stand for
        while alias "${arg}" > /dev/null 2> /dev/null; do
            arg="$(alias "${arg}" | sed -E "s-^[^=]+=['\"](.+)['\"]\$-\\1-")"
        done

        # multi-word results come from resolved aliases: show them as is
        if echo "${arg}" | grep -q ' '; then
            printf "%s\n" "${arg}"
            continue
        fi

        # show shell-function definitions; `declare -f` needs bash/zsh
        if declare -f "${arg}"; then
            continue
        fi

        # `command -v` is the portable way to locate commands; the legacy
        # `which` app isn't guaranteed to be available everywhere
        if command -v "${arg}" > /dev/null 2> /dev/null; then
            command -v "${arg}"
            continue
        fi

        printf "\e[38;2;204;0;0m%s not found\e[0m\n" "${arg}"
    done | { less -MKiCRS ${less_options} 2> /dev/null || cat; }
}

# find all WEB/hyperLINKS (https:// and http://) in the input text
weblinks() {
    local arg
    local re='https?://[A-Za-z0-9+_.:%-]+(/[A-Za-z0-9+_.%/,#?&=-]*)*'
    # line-buffer output when stdout is a pipe or a terminal, so it streams
    local grep_cmd='grep'
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        grep_cmd='grep --line-buffered'
    fi

    for arg in "${@:--}"; do
        ${grep_cmd} -i -E -o "${re}" "${arg}"
    done
}

# recursively find all files with trailing spaces/CRs
whichtrails() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        rg --line-buffered -c '[ \r]+$' "${@:-.}"
    else
        rg -c '[ \r]+$' "${@:-.}"
    fi
}

# turn all wsl/unix-style full-paths into WINdows-style full-PATHS;
# the \u in the replacement (a GNU-sed feature) upcases the drive letter
winpaths() { sed -E 's-/mnt/(.)/-\u\1:/-' "$@"; }

# XARGS Lines, runs `xargs` using whole lines as extra arguments
xargsl() {
    # null-terminate lines (minus BOMs/CRs), so xargs -0 takes them whole
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -v ORS='\000' '
            FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
            { gsub(/\r$/, ""); print }
        ' | stdbuf -oL xargs -0 "$@"
    else
        awk -v ORS='\000' '
            FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
            { gsub(/\r$/, ""); print }
        ' | xargs -0 "$@"
    fi
}

# Youtube Audio Player
yap() {
    local url
    # some youtube URIs end with extra playlist/tracker parameters
    url="$(echo "$1" | sed 's-&.*--')"
    mpv "$(yt-dlp -x --audio-format best --get-url "${url}" 2> /dev/null)"
}

# show a calendar for the current YEAR, or for the year given
year() {
    {
        # show the current date/time center-aligned
        printf \
            "%21s\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n\n" \
            "" "$(date +'%a %b %d %Y')" "$(date +'%H:%M')"
        # debian linux has a different `cal` app which highlights the day
        if [ -e /usr/bin/ncal ]; then
            # fix debian/ncal's weird way to highlight the current day
            ncal -C -y "$@" | sed -E \
                's/_\x08(.+)_\x08([^ ]+)/\x1b\[7m\1\2\x1b\[0m/'
        else
            cal -y "$@"
        fi
    } | { less -MKiCRS 2> /dev/null || cat; }
}

# show the current date in the YYYY-MM-DD format
ymd() { date +'%Y-%m-%d'; }