File: clam.sh 1 #!/bin/sh 2 3 # The MIT License (MIT) 4 # 5 # Copyright (c) 2026 pacman64 6 # 7 # Permission is hereby granted, free of charge, to any person obtaining a copy 8 # of this software and associated documentation files (the "Software"), to deal 9 # in the Software without restriction, including without limitation the rights 10 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 # copies of the Software, and to permit persons to whom the Software is 12 # furnished to do so, subject to the following conditions: 13 # 14 # The above copyright notice and this permission notice shall be included in 15 # all copies or substantial portions of the Software. 16 # 17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 # SOFTWARE. 24 25 26 # clam 27 # 28 # Command-Line Augmentation Module (clam): get the best out of your shell. 29 # 30 # 31 # This is a collection of arguably useful shell functions and shortcuts: 32 # some of these extra commands can be real time/effort savers, ideally 33 # letting you concentrate on getting things done. 34 # 35 # Some of these commands depend on my other scripts from the `pac-tools`, 36 # others either rely on widely-preinstalled command-line apps, or ones 37 # which are available on most of the major command-line `package` managers. 38 # 39 # To use this script, you're supposed to `source` it, so its definitions 40 # stay for your whole shell session: for that, you can run `source clam` or 41 # `. clam` (no quotes either way), either directly or at shell startup. 
42 # 43 # Almost all commands defined in this script work with `bash`, `zsh`, and 44 # even `dash`, which is debian linux's default non-interactive shell. Some 45 # of its commands even seem to work on busybox's shell. 46 47 48 # This is a smaller version of the full `clam`. 49 50 case "$1" in 51 -h|--h|-help|--help) 52 awk ' 53 /^case / { exit } 54 /^# +clam$/, /^$/ { gsub(/^# ?/, ""); print } 55 ' "$0" 56 exit 0 57 ;; 58 esac 59 60 61 # dash doesn't support regex-matching syntax, forcing to use case statements 62 case "$0" in 63 -bash|-dash|-sh|bash|dash|sh|/bin/sh) 64 # script is being sourced with bash, dash, or ash, which is good 65 : 66 ;; 67 68 *) 69 case "$ZSH_EVAL_CONTEXT" in 70 *:file) 71 # script is being sourced with zsh, which is good 72 : 73 ;; 74 75 *) 76 # script is being run normally, which is a waste of time 77 printf "\e[7mDon't run this script directly: instead source it\e[0m\n" 78 printf "\e[7mby running '. clam' (without the single quotes).\e[0m\n" 79 printf "\n" 80 printf "\e[7mBefore doing that, you may want to see the help,\e[0m\n" 81 printf "\e[7mby running 'clam -h' (without the single quotes).\e[0m\n" 82 # exiting during shell-startup may deny shell access, even if 83 # the script is being run, instead of being sourced directly 84 ;; 85 esac 86 ;; 87 esac 88 89 90 alias 0='sbs' 91 92 alias 1='bsbs 1' 93 alias 2='bsbs 2' 94 alias 3='bsbs 3' 95 alias 4='bsbs 4' 96 alias 5='bsbs 5' 97 alias 6='bsbs 6' 98 alias 7='bsbs 7' 99 alias 8='bsbs 8' 100 alias 9='bsbs 9' 101 102 # Less with Header n runs `less` with line numbers, ANSI styles, without 103 # line-wraps, and using the first n lines as a sticky-header, so they always 104 # show on top 105 alias lh1='less --header=1 -MKNiCRS' 106 alias lh2='less --header=2 -MKNiCRS' 107 alias lh3='less --header=3 -MKNiCRS' 108 alias lh4='less --header=4 -MKNiCRS' 109 alias lh5='less --header=5 -MKNiCRS' 110 alias lh6='less --header=6 -MKNiCRS' 111 alias lh7='less --header=7 -MKNiCRS' 112 alias lh8='less 
--header=8 -MKNiCRS' 113 alias lh9='less --header=9 -MKNiCRS' 114 115 # View with Header n runs `less` without line numbers, ANSI styles, without 116 # line-wraps, and using the first n lines as a sticky-header, so they always 117 # show on top 118 alias vh1='less --header=1 -MKiCRS' 119 alias vh2='less --header=2 -MKiCRS' 120 alias vh3='less --header=3 -MKiCRS' 121 alias vh4='less --header=4 -MKiCRS' 122 alias vh5='less --header=5 -MKiCRS' 123 alias vh6='less --header=6 -MKiCRS' 124 alias vh7='less --header=7 -MKiCRS' 125 alias vh8='less --header=8 -MKiCRS' 126 alias vh9='less --header=9 -MKiCRS' 127 128 alias c='cat' 129 alias e='echo' 130 alias r='reset' 131 132 # Breathe periodically adds extra empty lines; uses my own `breathe` tool 133 alias b='breathe' 134 135 # Plain ignores ANSI-styles; uses my own `plain` tool 136 alias p='plain' 137 138 # Compile C Optimized 139 alias cco='cc -Wall -O2 -s -march=native -mtune=native -flto' 140 141 # Colored Json Query runs the `jq` app, allowing an optional filepath as the 142 # data source, and even an optional transformation formula 143 alias cjq='jq -C' 144 145 # Compile C Plus Plus Optimized 146 alias cppo='c++ -Wall -O2 -s -march=native -mtune=native -flto' 147 148 # CURL Info only shows the response headers from the request given 149 alias curli='curl -I' 150 151 # CURL Silent spares you the progress bar, but still tells you about errors 152 alias curls='curl --silent --show-error' 153 154 # turn json lines into a proper json array using the `jq` app 155 alias dejql='jq -s -M' 156 157 # turn UTF-16 data into UTF-8 158 alias deutf16='iconv -f utf16 -t utf8' 159 160 # edit plain-text files 161 # alias ed='TERM=xterm-truecolor micro' 162 163 # edit plain-text files 164 # alias edit='TERM=xterm-truecolor micro' 165 166 # ENV with 0/null-terminated lines on stdout 167 alias env0='env -0' 168 169 # ENV Change folder, runs the command given in the folder given (first) 170 alias envc='env -C' 171 172 # Extended Plain 
Interactive Grep 173 alias epig='ugrep --color=never -Q -E' 174 175 # Editor Read-Only 176 alias ero='micro -readonly true' 177 178 # run the Fuzzy Finder (fzf) in multi-choice mode, with custom keybindings 179 alias ff='fzf -m --bind ctrl-a:select-all,ctrl-space:toggle' 180 181 # get FILE's MIME types 182 alias filemime='file --mime-type' 183 184 # run `gcc` with all optimizations on and with static analysis on 185 alias gccmax='gcc -Wall -O2 -s -march=native -mtune=native -flto -fanalyzer' 186 187 # hold stdout if used at the end of a pipe-chain 188 alias hold='less -MKiCRS' 189 190 # shrink/compact JSON using the `jq` app, allowing an optional filepath, and 191 # even an optional transformation formula after that 192 alias jq0='jq -c -M' 193 194 # show JSON data on multiple lines, using 2 spaces for each indentation level, 195 # allowing an optional filepath, and even an optional transformation formula 196 # after that 197 alias jq2='jq --indent 2 -M' 198 199 # find the LAN (local-area network) IP address for this device 200 alias lanip='hostname -I' 201 202 # Listen To Youtube 203 alias lty=yap 204 205 # LXC-LS Fancy 206 alias lxc-lsf='lxc-ls --fancy' 207 208 # MAKE IN folder 209 alias makein=mif 210 211 # METAdata CURL only shows the response headers from the request given 212 alias metacurl='curl -I' 213 214 # run `less`, showing line numbers, among other settings 215 alias most='less -MKNiCRS' 216 217 # Plain Interactive Grep 218 alias pig='ugrep --color=never -Q -E' 219 220 # Quick Compile C Optimized 221 alias qcco='cc -Wall -O2 -s -march=native -mtune=native -flto' 222 223 # Quick Compile C Plus Plus Optimized 224 alias qcppo='c++ -Wall -O2 -s -march=native -mtune=native -flto' 225 226 # Read-Only Editor 227 alias roe='micro -readonly true' 228 229 # Read-Only Micro (text editor) 230 alias rom='micro -readonly true' 231 232 # Read-Only Top 233 alias rot='htop --readonly' 234 235 # RUN IN folder 236 alias runin='env -C' 237 238 # Silent CURL spares you the 
progress bar, but still tells you about errors 239 alias scurl='curl --silent --show-error' 240 241 # Stdbuf Output Line-buffered 242 alias sol='stdbuf -oL' 243 244 # TRY running a command, showing its outcome/error-code on failure; uses my 245 # `verdict` tool 246 alias try='verdict' 247 248 # VERTical REVert emits lines in reverse order of appearance 249 alias vertrev='tac' 250 251 # UGREP in Query/interactive mode 252 alias ugrepq='ugrep -Q' 253 254 # emit lines in reverse order of appearance 255 alias upsidedown='tac' 256 257 # run `cppcheck` with even stricter options 258 alias vetc='cppcheck --enable=portability,style --check-level=exhaustive' 259 260 # run `cppcheck` with even stricter options, also checking for c89 compliance 261 alias vetc89='cppcheck --enable=portability,style --check-level=exhaustive --std=c89' 262 263 # run `cppcheck` with even stricter options 264 alias vetcpp='cppcheck --enable=portability,style --check-level=exhaustive' 265 266 # check shell scripts for common gotchas, avoiding complaints about using 267 # the `local` keyword, which is widely supported in practice 268 alias vetshell='shellcheck -e 3043' 269 270 # run a command using an empty environment 271 alias void='env -i' 272 273 # turn plain-text from latin-1 into UTF-8; the name is from `vulgarization`, 274 # which is the mutation of languages away from latin during the middle ages 275 alias vulgarize='iconv -f latin-1 -t utf-8' 276 277 # recursively find all files with trailing spaces/CRs 278 alias wheretrails=whichtrails 279 280 # run `xargs`, using zero/null bytes as the extra-arguments terminator 281 alias x0='xargs -0' 282 283 # Xargs Lines, runs `xargs` using whole lines as extra arguments 284 alias xl=xargsl 285 286 # Awk Begin Print 287 abp() { 288 local arg 289 for arg in "$@"; do 290 awk "BEGIN { print (${arg}); exit }" 291 done 292 } 293 294 # APT UPdate/grade 295 aptup() { sudo apt update && sudo apt upgrade "$@"; sudo -k; } 296 297 # emit a colored bar which can 
help visually separate different outputs 298 bar() { 299 [ "${1:-80}" -gt 0 ] && printf "\e[48;2;218;218;218m%${1:-80}s\e[0m\n" "" 300 } 301 302 # emit a line with a repeating block-like symbol in it 303 blocks() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -█-g'; } 304 305 # BOOK-like MANual, lays out `man` docs as pairs of side-by-side pages; uses 306 # my tool `bsbs` 307 bookman() { 308 local w 309 w="$(tput -T xterm cols)" 310 w="$((w / 2 - 4))" 311 if [ "$w" -lt 65 ]; then 312 w=65 313 fi 314 MANWIDTH="$w" man "$@" | bsbs 2 315 } 316 317 # CAlculator with Nice numbers runs my tool `ca` and colors results with 318 # my tool `nn`, alternating styles to make long numbers easier to read 319 can() { 320 local arg 321 for arg in "$@"; do 322 [ $# -ge 2 ] && printf "\e[7m%s\e[0m\n" "${arg}" >&2 323 ca "${arg}" | nn 324 done 325 } 326 327 # uppercase the first letter on each line, and lowercase all later letters 328 capitalize() { sed -E 's-^(.*)-\L\1-; s-^(.)-\u\1-'; } 329 330 # Colored Go Test on the folder given; uses my command `gbm` 331 cgt() { 332 local f='real %e user %U sys %S mem %M exit %x' 333 /usr/bin/time -f "$f" go test "${@:-.}" 2>&1 \ 334 | gbm '^ok' '^[-]* ?FAIL' '^\?' 
335 } 336 337 # Compile Rust Optimized 338 cro() { 339 rustc -C lto=true -C codegen-units=1 -C debuginfo=0 -C strip=symbols \ 340 -C opt-level=3 "$@" 341 } 342 343 # emit a line with a repeating cross-like symbol in it 344 crosses() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -×-g'; } 345 346 # emit a line with a repeating dash-like symbol in it 347 dashes() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -—-g'; } 348 349 # remove commas in numbers, as well as leading dollar signs in numbers 350 decomma() { 351 sed -E 's-([0-9]{3}),-\1-g; s-([0-9]{1,2}),-\1-g; s-\$([0-9\.]+)-\1-g' 352 } 353 354 dehtmlify() { 355 local command='awk' 356 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 357 command='stdbuf -oL awk' 358 fi 359 360 ${command} ' 361 { 362 gsub(/<\/?[^>]+>/, "") 363 gsub(/&/, "&") 364 gsub(/</, "<") 365 gsub(/>/, ">") 366 gsub(/^ +| *\r?$/, "") 367 gsub(/ +/, " ") 368 print 369 } 370 ' "$@" 371 } 372 373 # expand tabs each into up to the number of space given, or 4 by default 374 detab() { 375 local tabstop="${1:-4}" 376 [ $# -gt 0 ] && shift 377 if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then 378 stdbuf -oL expand -t "${tabstop}" "$@" 379 else 380 expand -t "${tabstop}" "$@" 381 fi 382 } 383 384 # DIVide 2 numbers 3 ways, including the complement 385 div() { 386 awk -v a="${1:-1}" -v b="${2:-1}" ' 387 BEGIN { 388 gsub(/_/, "", a) 389 gsub(/_/, "", b) 390 if (a > b) { c = a; a = b; b = c } 391 c = 1 - a / b 392 if (0 <= c && c <= 1) printf "%f\n%f\n%f\n", a / b, b / a, c 393 else printf "%f\n%f\n", a / b, b / a 394 exit 395 }' 396 } 397 398 # emit a line with a repeating dot-like symbol in it 399 dots() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" "" | sed 's- -·-g'; } 400 401 # show the current Date and Time 402 dt() { 403 printf "\e[38;2;78;154;6m%s\e[0m \e[38;2;52;101;164m%s\e[0m\n" \ 404 "$(date +'%a %b %d')" "$(date +%T)" 405 } 406 407 # EDit RUN shell commands, using an 
interactive editor; uses my tool `leak` 408 edrun() { 409 # dash doesn't support the process-sub syntax 410 # . <( micro -readonly true -filetype shell | leak --inv ) 411 micro -readonly true -filetype shell | leak --inv | . /dev/fd/0 412 } 413 414 # convert EURos into CAnadian Dollars, using the latest official exchange 415 # rates from the bank of canada; during weekends, the latest rate may be 416 # from a few days ago; the default amount of euros to convert is 1, when 417 # not given 418 eur2cad() { 419 local url 420 local site='https://www.bankofcanada.ca/valet/observations/group' 421 local csv_rates="${site}/FX_RATES_DAILY/csv" 422 url="${csv_rates}?start_date=$(date -d '3 days ago' +'%Y-%m-%d')" 423 curl -s "${url}" | awk -F, -v amount="$(echo "${1:-1}" | sed 's-_--g')" ' 424 /EUR/ { for (i = 1; i <= NF; i++) if($i ~ /EUR/) j = i } 425 END { gsub(/"/, "", $j); if (j != 0) printf "%.2f\n", amount * $j } 426 ' 427 } 428 429 # Fix Audio Duration on a separate copy of the file given 430 fad() { ffmpeg -i "${1:-input.m4a}" -acodec copy "${2:-output.dat}"; } 431 432 # start from the line number given, skipping all previous ones 433 fromline() { tail -n +"${1:-1}" "${2:--}"; } 434 435 # convert a mix of FeeT and INches into meters 436 ftin() { 437 local ft="${1:-0}" 438 ft="$(echo "${ft}" | sed 's-_--g')" 439 local in="${2:-0}" 440 in="$(echo "${in}" | sed 's-_--g')" 441 awk "BEGIN { print 0.3048 * ${ft} + 0.0254 * ${in}; exit }" 442 } 443 444 # glue/stick together various lines, only emitting a line-feed at the end; an 445 # optional argument is the output-item-separator, which is empty by default 446 glue() { 447 local sep="${1:-}" 448 [ $# -gt 0 ] && shift 449 awk -v sep="${sep}" ' 450 NR > 1 { printf "%s", sep } 451 { gsub(/\r/, ""); printf "%s", $0 } 452 END { if (NR > 0) print "" } 453 ' "$@" 454 } 455 456 # GO Build Stripped: a common use-case for the go compiler 457 gobs() { go build -ldflags "-s -w" -trimpath "$@"; } 458 459 # GO DEPendencieS: show all 
dependencies in a go project 460 godeps() { go list -f '{{ join .Deps "\n" }}' "$@"; } 461 462 # GO IMPortS: show all imports in a go project 463 goimps() { go list -f '{{ join .Imports "\n" }}' "$@"; } 464 465 # go to the folder picked using an interactive TUI; uses my tool `bf` 466 goto() { 467 local where 468 where="$(bf "${1:-.}")" 469 if [ $? -ne 0 ]; then 470 return 0 471 fi 472 473 where="$(realpath "${where}")" 474 if [ ! -d "${where}" ]; then 475 where="$(dirname "${where}")" 476 fi 477 cd "${where}" || return 478 } 479 480 # show Help laid out on 2 side-by-side columns; uses my tool `bsbs` 481 h2() { naman "$@" | bsbs 2; } 482 483 # show each byte as a pair of HEXadecimal (base-16) symbols 484 hexify() { 485 cat "$@" | od -v -x -A n | awk ' 486 { gsub(/ +/, ""); printf "%s", $0 } 487 END { print "" } 488 ' 489 } 490 491 # History Fuzzy-finder 492 hf() { 493 eval "$( 494 history \ 495 | fzf --reverse --height 80% --tmux center,80% \ 496 | awk ' 497 { 498 $1 = "" 499 $2 = "" 500 gsub(/^ */, "") 501 printf("\x1b[7m%s\x1b[0m\n", $0) > "/dev/stderr" 502 print 503 } 504 ' 505 )" 506 } 507 508 # Help Me Remember my custom shell commands 509 hmr() { 510 local cmd="bat" 511 # debian linux uses a different name for the `bat` app 512 if [ -e /usr/bin/batcat ]; then 513 cmd="batcat" 514 fi 515 516 "$cmd" \ 517 --style=plain,header,numbers --theme='Monokai Extended Light' \ 518 --wrap=never --color=always "$(which clamette)" | 519 sed -e 's-\x1b\[38;5;70m-\x1b[38;5;28m-g' \ 520 -e 's-\x1b\[38;5;214m-\x1b[38;5;208m-g' \ 521 -e 's-\x1b\[38;5;243m-\x1b[38;5;103m-g' \ 522 -e 's-\x1b\[38;5;238m-\x1b[38;5;245m-g' \ 523 -e 's-\x1b\[38;5;228m-\x1b[48;5;228m-g' | 524 less -MKiCRS 525 } 526 527 # convert seconds into a colon-separated Hours-Minutes-Seconds triple 528 hms() { 529 echo "${@:-0}" | sed -E 's-_--g; s- +-\n-g' | awk ' 530 /./ { 531 x = $0 532 h = (x - x % 3600) / 3600 533 m = (x % 3600) / 60 534 s = x % 60 535 printf "%02d:%02d:%05.2f\n", h, m, s 536 } 537 ' 538 } 
# find all hyperlinks inside HREF attributes in the input text
href() {
    local arg
    local awk_cmd='awk'
    local grep_cmd='grep'
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        grep_cmd='grep --line-buffered'
        if [ -e /usr/bin/stdbuf ]; then
            awk_cmd='stdbuf -oL awk'
        fi
    fi

    for arg in "${@:--}"; do
        ${grep_cmd} -i -E -o 'href="[^"]+"' "${arg}"
    done | ${awk_cmd} '{ gsub(/^href="|"\r?$/, ""); print }'
}

# ignore command in a pipe: this allows quick re-editing of pipes, while
# still leaving signs of previously-used steps, as a memo
idem() { cat; }

# INSTall APT packages; drops the cached sudo credentials when done
instapt() { sudo apt install "$@"; sudo -k; }

# JSON Query Lines turns JSON top-level arrays into multiple individually-JSON
# lines using the `jq` app, keeping all other top-level values as single line
# JSON outputs
jql() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -M "${code} | .[]" "$@"
}

# JSON Query Keys runs `jq` to find all unique key-combos from tabular JSON
jqk() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -M "${code} | .[] | keys" "$@" | awk '!c[$0]++'
}

# JSON Keys finds all unique key-combos from tabular JSON data; uses my tools
# `jsonl` and `tjp`
jsonk() {
    tjp '[e.keys() for e in v] if isinstance(v, (list, tuple)) else v.keys()' \
        "${1:--}" | jsonl | awk '!c[$0]++'
}

# JSON Table, turns TSV tables into tabular JSON, where valid-JSON values are
# auto-parsed into numbers, booleans, etc...; uses my tools `jsons` and `tjp`
jsont() {
    jsons "$@" | tjp \
        '[{k: rescue(lambda: loads(v), v) for k, v in e.items()} for e in v]'
}

# emit the given number of random/junk bytes, or 1024 junk bytes by default
junk() { head -c "$(echo "${1:-1024}" | sed 's-_--g')" /dev/urandom; }

# convert pounds (LB) into kilograms
lb() {
    echo "${@:-1}" | sed -E 's-_--g; s- +-\n-g' |
        awk '/./ { printf "%.2f\n", 0.45359237 * $0 }'
}

# convert a mix of pounds (LB) and weight-ounces (OZ) into kilograms
lboz() {
    local lb="${1:-0}"
    lb="$(echo "${lb}" | sed 's-_--g')"
    local oz="${2:-0}"
    oz="$(echo "${oz}" | sed 's-_--g')"
    awk "BEGIN { print 0.45359237 * ${lb} + 0.028349523 * ${oz}; exit }"
}

# regroup adjacent lines into n-item tab-separated lines
lineup() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    local n="${1:-0}"
    [ $# -gt 0 ] && shift

    if [ "$n" -le 0 ]; then
        # n of 0 or less joins all lines into a single tab-separated one
        ${command} '
            NR > 1 { printf "\t" }
            { printf "%s", $0 }
            END { if (NR > 0) print "" }
        ' "$@"
        return $?
    fi

    ${command} -v n="$n" '
        NR % n != 1 && n > 1 { printf "\t" }
        { printf "%s", $0 }
        NR % n == 0 { print "" }
        END { if (NR % n != 0) print "" }
    ' "$@"
}

# LiSt files, showing how many 4K-sized storage blocks they use
ls4k() { ls -s --block-size=4096 "$@"; }

# LiSt MAN pages
lsman() { man -k "${1:-.}"; }

# MARK the current tab with the message given, followed by the current folder;
# works only on the `bash` shell
mark() {
    if [ $# -eq 0 ]; then
        PS1="\[\e[0m\e]0;\w\a\$ "
    else
        PS1="\[\e[0m\e]0;${*} \w\a\$ "
    fi
}

# wrap web links (https:// and http://) in OSC-8 terminal hyperlinks
marklinks() {
    local re='https?://[A-Za-z0-9+_.:%-]+(/[A-Za-z0-9+_.%/,#?&=-]*)*'
    sed -E 's-('"${re}"')-\x1b]8;;\1\x1b\\\1\x1b]8;;\x1b\\-g' "$@"
}

# Multi-Core MAKE runs `make` using all cores
mcmake() { make -j "$(nproc)" "$@"; }

# merge stderr into stdout, which is useful for piped commands
merrge() { "${@:-cat /dev/null}" 2>&1; }

# list all unique key-paths present in the JSON input, using the `jq` app
metajq() {
    # https://github.com/stedolan/jq/issues/243#issuecomment-48470943
    jq -r -M '
        [
            path(..) |
            map(if type == "number" then "[]" else tostring end) |
            join(".") | split(".[]") | join("[]")
        ] | unique | map("." + .) | .[]
    ' "$@"
}

# Make In Folder, also showing time and max memory used
mif() {
    local f='real %e user %U sys %S mem %M exit %x'
    local folder
    folder="${1:-.}"
    [ $# -gt 0 ] && shift
    env -C "${folder}" /usr/bin/time -f "$f" make "$@"
}

# MINimize DECimalS ignores all trailing decimal zeros in numbers, and even
# the decimal dots themselves, when decimals in a number are all zeros
# mindecs() {
#     local cmd='sed -E'
#     if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
#         cmd='sed -E -u'
#     fi
#     ${cmd} 's-([0-9]+)\.0+\W-\1-g; s-([0-9]+\.[0-9]*[1-9])0+\W-\1-g' "$@"
# }

# NArrow MANual, keeps `man` narrow, even if the window/tab is wide when run
naman() {
    local w
    w="$(tput -T xterm cols)"
    w="$((w / 2 - 4))"
    if [ "$w" -lt 80 ]; then
        w=80
    fi
    MANWIDTH="$w" man "$@"
}

# Not AND sorts its 2 inputs, then finds lines not in common
nand() {
    # comm -3 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -3 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# listen to streaming NEW WAVE music
newwave() {
    printf "streaming \e[7mNew Wave radio\e[0m\n"
    mpv --quiet https://puma.streemlion.com:2910/stream
}

# Nice Json Query Lines colors JSONL data using the `jq` app
njql() {
    local code="${1:-.}"
    [ $# -gt 0 ] && shift
    jq -c -C "${code} | .[]" "$@"
}

# empty the clipboard
noclip() { wl-copy --clear; }

# Nice Print Awk result; uses my tool `nn`
npa() {
    local arg
    for arg in "$@"; do
        awk "BEGIN { print(${arg}); exit }"
    done | nn
}

# Nice Print Python result; uses my tool `nn`
npp() {
    local arg
    for arg in "$@"; do
        python -c "print(${arg})"
    done | nn
}

# Nice Size, using my tool `nn`
ns() { wc -c "$@" | nn; }

# emit nothing to output and/or discard everything from input
null() { [ $# -gt 0 ] && "$@" > /dev/null; }

# Print Python result
pp() {
    local arg
    for arg in "$@"; do
        python -c "print(${arg})"
    done
}

# PRecede (input) ECHO, prepends a first line to stdin lines
precho() { echo "$@" && cat /dev/stdin; }

# LABEL/precede data with an ANSI-styled line
prelabel() {
    printf "\e[7m%-*s\e[0m\n" "$(($(tput -T xterm cols) - 2))" "$*"
    cat -
}

# Quiet MPV
# qmpv() { mpv --quiet "${@:--}"; }

# Quiet MPV
qmpv() { mpv --really-quiet "${@:--}"; }

# keep only lines between the 2 line numbers given, inclusively
rangelines() {
    { [ $# -eq 2 ] || [ $# -eq 3 ]; } && [ "${1}" -le "${2}" ] && {
        tail -n +"${1}" "${3:--}" | head -n $(("${2}" - "${1}" + 1))
    }
}

# RANdom MANual page
ranman() {
    find "/usr/share/man/man${1:-1}" -type f | shuf -n 1 | xargs basename |
        sed 's-\.gz$--' | xargs man
}

# REPeat STRing emits a line with a repeating string in it, given both a
# string and a number in either order
repstr() {
    awk '
        BEGIN {
            if (ARGV[2] ~ /^[+-]?[0-9]+$/) {
                symbol = ARGV[1]
                times = ARGV[2] + 0
            } else {
                symbol = ARGV[2]
                times = ARGV[1] + 0
            }

            if (times < 0) exit
            if (symbol == "") symbol = "-"
            s = sprintf("%*s", times, "")
            gsub(/ /, symbol, s)
            print s
            exit
        }
    ' "$@"
}

# SystemCTL; `sysctl` is already taken for a separate/unrelated app
sctl() { systemctl "$@" 2>&1 | less -MKiCRS; }

# show a unique-looking SEParator line; useful to run between commands
# which output walls of text
sep() {
    [ "${1:-80}" -gt 0 ] &&
        printf "\e[48;2;218;218;218m%${1:-80}s\e[0m\n" "" | sed 's- -·-g'
}

# webSERVE files in a folder as localhost, using the port number given, or
# port 8080 by default
serve() {
    if [ -d "$1" ]; then
        printf "\e[7mserving files in %s\e[0m\n" "$1" >&2
        python3 -m http.server -d "$1" "${2:-8080}"
    else
        printf "\e[7mserving files in %s\e[0m\n" "${2:-$(pwd)}" >&2
        python3 -m http.server -d "${2:-$(pwd)}" "${1:-8080}"
    fi
}

# SET DIFFerence sorts its 2 inputs, then finds lines not in the 2nd input
setdiff() {
    # comm -23 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# SET INtersection, sorts its 2 inputs, then finds common lines
setin() {
    # comm -12 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -12 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# SET SUBtraction sorts its 2 inputs, then finds lines not in the 2nd input
setsub() {
    # comm -23 <(sort "$1") <(sort "$2")
    # dash doesn't support the process-sub syntax
    (sort "$1" | (sort "$2" | (comm -23 /dev/fd/3 /dev/fd/4) 4<&0) 3<&0)
}

# skip the last n lines, or the last line by default
skiplast() { head -n -"${1:-1}" "${2:--}"; }

# SLOW/delay lines from the standard-input, waiting the number of seconds
# given for each line, or waiting 1 second by default
slow() {
    local seconds="${1:-1}"
    [ $# -gt 0 ] && shift
    (
        IFS="$(printf "\n")"
        awk 1 "$@" | while read -r line; do
            sleep "${seconds}"
            printf "%s\n" "${line}"
        done
    )
}

# Show Latest Podcasts, using my tools `podfeed` and `si`
slp() {
    local title
    title="Latest Podcast Episodes as of $(date +'%F %T')"
    podfeed -title "${title}" "$@" | si
}

# emit the first line as is, sorting all lines after that, using the
# `sort` command, passing all/any arguments/options to it
sortrest() {
    awk -v sort="sort $*" '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/\r$/, "") }
        NR == 1 { print; fflush() }
        NR >= 2 { print | sort }
    '
}

# SORt Tab-Separated Values: emit the first line as is, sorting all lines after
# that, using the `sort` command in TSV (tab-separated values) mode, passing
# all/any arguments/options to it
sortsv() {
    awk -v sort="sort -t \"$(printf '\t')\" $*" '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        { gsub(/\r$/, "") }
        NR == 1 { print; fflush() }
        NR >= 2 { print | sort }
    '
}

# emit a line with the number of spaces given in it
spaces() { [ "${1:-80}" -gt 0 ] && printf "%${1:-80}s\n" ""; }

# SQUeeze horizontal spaces and STOMP vertical gaps
squomp() {
    local command='awk'
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        command='stdbuf -oL awk'
    fi

    ${command} '
        FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
        /^\r?$/ { empty = 1; next }
        empty { if (n > 0) print ""; empty = 0 }

        {
            gsub(/^ +| *\r?$/, "")
            gsub(/ *\t */, "\t")
            gsub(/ +/, " ")
            print; n++
        }
    ' "$@"
}

# TAC Lines outputs input-lines in reverse order, last one first, and so on...
tacl() {
    awk '
        { gsub(/\r$/, ""); lines[NR] = $0 }
        END { for (i = NR; i >= 1; i--) print lines[i] }
    ' "$@"
}

# TINY GO Build Optimized: a common use-case for the tinygo compiler
tinygobo() { tinygo build -no-debug -opt=2 "$@"; }

# show current date in a specifc format
today() { date +'%Y-%m-%d %a %b %d'; }

# get the first n lines, or 1 by default
toline() { head -n "${1:-1}" "${2:--}"; }

# get the processes currently using the most cpu
topcpu() {
    local n="${1:-10}"
    [ "$n" -gt 0 ] && ps aux | awk '
        NR == 1 { print; fflush() }
        NR > 1 { print | "sort -rnk3,3" }
    ' | head -n "$(("$n" + 1))"
}

# get the processes currently using the most memory
topmemory() {
    local n="${1:-10}"
    [ "$n" -gt 0 ] && ps aux | awk '
        NR == 1 { print; fflush() }
        NR > 1 { print | "sort -rnk6,6" }
    ' | head -n "$(("$n" + 1))"
}

# skip the first/leading n bytes
unleaded() { tail -c +$(("$1" + 1)) "${2:--}"; }

# go UP n folders, or go up 1 folder by default
up() {
    if [ "${1:-1}" -le 0 ]; then
        cd .
    else
        cd "$(printf "%${1:-1}s" "" | sed 's- -../-g')" || return $?
    fi
}

# convert United States Dollars into CAnadian Dollars, using the latest
# official exchange rates from the bank of canada; during weekends, the
# latest rate may be from a few days ago; the default amount of usd to
# convert is 1, when not given
usd2cad() {
    local url
    local site='https://www.bankofcanada.ca/valet/observations/group'
    local csv_rates="${site}/FX_RATES_DAILY/csv"
    url="${csv_rates}?start_date=$(date -d '3 days ago' +'%Y-%m-%d')"
    curl -s "${url}" | awk -F, -v amount="$(echo "${1:-1}" | sed 's-_--g')" '
        /USD/ { for (i = 1; i <= NF; i++) if($i ~ /USD/) j = i }
        END { gsub(/"/, "", $j); if (j != 0) printf "%.2f\n", amount * $j }
    '
}

# View Nice Table / Very Nice Table; uses my own tools `catl`, `u`, and `ncol`
vnt() {
    catl "$@" \
        | ncol \
        | u --top \
        | awk '{ printf "%6d %s\n", NR - 1, $0 }' \
        | {
            less -MKiCRS --header=1 2> /dev/null \
                || less -RIMS 2> /dev/null \
                || cat
        }
}

# What Are These (?) shows what the names given to it are/do
wat() {
    local arg
    local gap=0
    local less_options='-MKiCRS'

    if [ $# -eq 0 ]; then
        echo "$0"
        return 0
    fi

    if [ $# -lt 2 ]; then
        less_options='-MKiCRS --header=1'
    fi

    for arg in "$@"; do
        [ "${gap}" -gt 0 ] && printf "\n"
        gap=1
        printf "\e[7m%-80s\e[0m\n" "${arg}"

        # resolve alias chains down to the underlying command
        while alias "${arg}" > /dev/null 2> /dev/null; do
            arg="$(alias "${arg}" | sed -E "s-^[^=]+=['\"](.+)['\"]\$-\\1-")"
        done

        if echo "${arg}" | grep -q ' '; then
            printf "%s\n" "${arg}"
            continue
        fi

        if declare -f "${arg}"; then
            continue
        fi

        if which "${arg}" > /dev/null 2> /dev/null; then
            which "${arg}"
            continue
        fi

        printf "\e[38;2;204;0;0m%s not found\e[0m\n" "${arg}"
    done | {
        less -MKiCRS ${less_options} 2> /dev/null \
            || less -MKiCRS 2> /dev/null \
            || less -RIMS 2> /dev/null \
            || cat
    }
}

# find all WEB/hyperLINKS (https:// and http://) in the input text
weblinks() {
    local arg
    local re='https?://[A-Za-z0-9+_.:%-]+(/[A-Za-z0-9+_.%/,#?&=-]*)*'
    local grep_cmd='grep'
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        grep_cmd='grep --line-buffered'
    fi

    for arg in "${@:--}"; do
        ${grep_cmd} -i -E -o "${re}" "${arg}"
    done
}

# recursively find all files with trailing spaces/CRs
whichtrails() {
    if [ -p /dev/stdout ] || [ -t 1 ]; then
        rg --line-buffered -c '[ \r]+$' "${@:-.}"
    else
        rg -c '[ \r]+$' "${@:-.}"
    fi
}

# turn all wsl/unix-style full-paths into WINdows-style full-PATHS
winpaths() { sed -E 's-/mnt/(.)/-\u\1:/-' "$@"; }

# XARGS Lines, runs `xargs` using whole lines as extra arguments
xargsl() {
    if { [ -p /dev/stdout ] || [ -t 1 ]; } && [ -e /usr/bin/stdbuf ]; then
        stdbuf -oL awk -v ORS='\000' '
            FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
            { gsub(/\r$/, ""); print }
        ' | stdbuf -oL xargs -0 "$@"
    else
        awk -v ORS='\000' '
            FNR == 1 { gsub(/^\xef\xbb\xbf/, "") }
            { gsub(/\r$/, ""); print }
        ' | xargs -0 "$@"
    fi
}

# Youtube Audio Player
yap() {
    local url
    # some youtube URIs end with extra playlist/tracker parameters
    url="$(echo "$1" | sed 's-&.*--')"
    mpv "$(yt-dlp -x --audio-format best --get-url "${url}" 2> /dev/null)"
}

# show the current date in the YYYY-MM-DD format
ymd() { date +'%Y-%m-%d'; }