Last updated: 13 Apr 25 19:24:44 (UTC)


holms Shell Cheat Sheet




Internet resources

Combined lists

Example 1

for finger in {left,right}-{thumb,{index,middle,ring,little}-finger}; 
    do fprintd-enroll -f "$finger" "$USER"; 
done

Example 2

for finger in {left,right}-{index,middle}-finger; 
    do fprintd-enroll -f "$finger" "$USER"; 
done

Example 3

for richtung in {oben,unten,}{links,rechts,};
    do echo "$richtung";
done

obenlinks
obenrechts
oben
untenlinks
untenrechts
unten
links
rechts
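
Brace expansion happens before the command runs, so the word list can be previewed with a plain echo before feeding it to a real command. For the fingerprint example above:

echo {left,right}-{thumb,{index,middle,ring,little}-finger}
# left-thumb left-index-finger left-middle-finger left-ring-finger left-little-finger right-thumb right-index-finger right-middle-finger right-ring-finger right-little-finger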

File Management / Navigation

cd back to last directory

cd -

find / rename files in subfolders

find . -maxdepth 4 -name '*.jpg' -execdir mv {} cover.jpg \;

find with multiple patterns

Everything that is not named .deb or .vmdk:

find ./ ! -regex  '.*\(deb\|vmdk\)$'
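
A roughly equivalent form without -regex, matching on the file name instead of the whole path (note that ! negates the whole group):

find ./ ! \( -name '*.deb' -o -name '*.vmdk' \)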

find files by extension and replace strings in them with sed

find . -maxdepth 4 -name '*.json' -exec sed -i s/"Development Version"/"Live On Stage"/ {} \;

All .xls, .doc, .xlsx files

find ./ -regex  '.*\(doc\|xls\|xlsx\)$' -execdir echo {} \;

cp / mv multi extensions

mv *.{png,jpg} ~/Dev

cp foo.conf{,.orig}

touch

create file-1.txt, file-2.txt … file-9.txt

$ touch file-{1..9}.txt
$ ls
file-1.txt  file-2.txt  file-3.txt  file-4.txt  file-5.txt  file-6.txt  file-7.txt  file-8.txt  file-9.txt

compress / zip / tar / rar

unzip all .zip files, overwriting existing files

for i in *.zip; do echo unzip -o $i; done | sh

download file1, file2 … file2342

$ for i in {1..2342}; do curl -O https://server/path/file-$i.jpg; done
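
If the server zero-pads the numbers in the file names (an assumption; the URL is just a placeholder), a padded brace range generates them directly, and -f stops curl from saving HTML error pages as images:

$ for i in {0001..2342}; do curl -fO https://server/path/file-$i.jpg; done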

rsync

The do-it-all rsync invocation, tuned for speed

rsync -aHAXxv --info=progress2 -e "ssh -T -c aes128-ctr -o Compression=no -x" ./ holm@10.1.2.3:~/ --exclude .cache --exclude .local/share/gnome-boxes --exclude ISO --exclude 'vm*.raw'

archive

copy all files recursively, preserving owner, permissions and timestamps

rsync -a SRC DST

folder vs. content of folder

Demo files

$ find .
.
./SRC
./SRC/SUBSRC
./SRC/SUBSRC/subsrc-1.dat
./SRC/SUBSRC/subsrc-3.dat
./SRC/SUBSRC/subsrc-2.dat
./SRC/src-1.dat
./SRC/src-3.dat
./SRC/src-2.dat

folder

$ rsync -a SRC DST

$ find .
.
./DST
./DST/SRC
./DST/SRC/src-1.dat
./DST/SRC/SUBSRC
./DST/SRC/SUBSRC/subsrc-1.dat
./DST/SRC/SUBSRC/subsrc-3.dat
./DST/SRC/SUBSRC/subsrc-2.dat
./DST/SRC/src-3.dat
./DST/SRC/src-2.dat

content

$ rsync -a SRC/ DST

$ find DST
DST
DST/src-3.dat
DST/src-2.dat
DST/src-1.dat
DST/SUBSRC
DST/SUBSRC/subsrc-2.dat
DST/SUBSRC/subsrc-3.dat
DST/SUBSRC/subsrc-1.dat

show Progress

per file

$ rsync -a -P SRC DST
sending incremental file list
created directory DST
SRC/
SRC/src-1.dat
              0 100%    0.00kB/s    0:00:00 (xfr#1, to-chk=6/8)
SRC/src-2.dat
              0 100%    0.00kB/s    0:00:00 (xfr#2, to-chk=5/8)
SRC/src-3.dat
              0 100%    0.00kB/s    0:00:00 (xfr#3, to-chk=4/8)
SRC/SUBSRC/
SRC/SUBSRC/subsrc-1.dat
              0 100%    0.00kB/s    0:00:00 (xfr#4, to-chk=2/8)
SRC/SUBSRC/subsrc-2.dat
              0 100%    0.00kB/s    0:00:00 (xfr#5, to-chk=1/8)
SRC/SUBSRC/subsrc-3.dat
              0 100%    0.00kB/s    0:00:00 (xfr#6, to-chk=0/8)

overall

slows down the start of the copy until the complete file list has been built

$ rsync -a --info=progress2 SRC DST
     86,621,168  99%  139.78MB/s    0:00:00 (xfr#28, to-chk=0/75) 

Bash zine export

Notes from the Bite Size Bash zine

  • General notes

shellcheck

  • wiki
  • shellcheck.net
"$var"  "${var}.png"   $?=exit code
$0=scriptname
"$@"=all arguments

shift

inkscape "$1" -b white --export-png="$2"

for i in "$@"
do
  …
done
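
A small sketch tying the pieces together: "$@" holds all arguments, $# counts them, and shift drops the first one (script name and loop body are just illustrative):

#!/usr/bin/env bash
# demo.sh: positional parameters and shift
echo "script: $0, number of arguments: $#"
first="$1"
shift                  # $2 becomes $1, $3 becomes $2, …
echo "first argument: $first"
for i in "$@"          # loop over the remaining arguments
do
    echo "remaining: $i"
done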

sudo pipe

wrong: sudo echo x > xyz, correct: echo x | sudo tee xyz
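
The redirection is done by the unprivileged shell before sudo even starts, so only tee (running as root) can write the file. A short illustration, using /etc/motd as an example target:

sudo echo x > /etc/motd         # fails: the shell, not sudo, opens /etc/motd for writing
echo x | sudo tee /etc/motd     # works: tee runs as root and writes the file
echo x | sudo tee -a /etc/motd  # -a appends instead of overwriting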

x=$((2+2))     # = 4

a{.png,.svg}   # = a.png a.svg

VAR=$(cat x.txt)

x=(1 2 3 4 5)

x=({1..5})     # a plain x={1..5} would store the literal string; brace expansion needs the array form

y=({001..087})

<(command)     # process substitution, roughly: command |

${var//search/replace}

[[ $DIR == /home/* ]]   # pattern match; the spaces around == are required

diff <(./command1) <(./command2)

for

for LINE

for i in *.png
do
    convert "$i" "${i/png/jpg}"
done

for WORD

while bashcommand
do
    ...
done

# ---

for i in $(seq 1 5)
# or
for i in {1..5}

# ---

IFS='' read -r text1 text2
# or
echo "$text" | IFS='' read -r text1 text2
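
For iterating over the lines of a file rather than words, the usual pattern is while read with IFS cleared and -r to keep backslashes literal (input.txt is a placeholder):

while IFS='' read -r line
do
    echo "line: $line"
done < input.txt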

pipes

mkfifo mypipe
ls > mypipe &   # the writer blocks until a reader opens the pipe, so background it (or use a second shell)
wc < mypipe

vars

${}  

${var}      		# var
${#var}     		# length of var

${var:-$othervar}  	# if var is unset or null, use $othervar instead

${var:?error message}  	# abort with the error message if var is unset or null

${var#prefix} 		# removes PREFIX
${var%suffix} 		# removes SUFFIX

${var/search/replace}  	#  first match
${var//search/replace} 	#  all matches

${var:offset:length}  	#  substring
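
A worked example of the expansions above, on a made-up path:

var="/home/holm/photo.png"
echo "${#var}"          # 20
echo "${var#/home/}"    # holm/photo.png
echo "${var%.png}"      # /home/holm/photo
echo "${var/o/0}"       # /h0me/holm/photo.png
echo "${var//o/0}"      # /h0me/h0lm/ph0t0.png
echo "${var:6:4}"       # holm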

trap - cleanup

trap 'kill $(jobs -p)' INT
# kill all background processes on Ctrl-C

function cleanup() {
  rm -rf "$TEMPDIR"
  rm -f "$TEMPFILE"
}

trap cleanup EXIT
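
A fuller sketch of the same cleanup pattern with the variables actually set (via mktemp) and quoted; the names are illustrative:

#!/usr/bin/env bash
TEMPDIR=$(mktemp -d)
TEMPFILE=$(mktemp)

cleanup() {
  rm -rf "$TEMPDIR"
  rm -f "$TEMPFILE"
}
trap cleanup EXIT       # runs when the script exits

echo "working in $TEMPDIR"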

errors

stops the script when a command fails

set -e        # stop the script as soon as a command exits with a nonzero status

stops the script on unset variables

set -u

makes the pipe fail if any command fails

set -o pipefail
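
All three are often combined at the top of a script, sometimes called unofficial strict mode:

#!/usr/bin/env bash
set -euo pipefail   # exit on errors, on unset variables, and on failures inside pipes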

example

set -e
unzip fle.zip # typo in filename
              # -> error
              # -> stop

Debugging

“@ECHO On”

set -x 
# or
bash -x script.sh

commandlinefu extract

Snippets from commandlinefu, a collection of shell snippets

kill process by TCP port

In an emergency, rather than shutting down the whole PC, kill whatever is using the port with the following instead.

fuser -k 445/tcp

wuseman1 · 2024-08-26 19:11:03
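
Before killing blindly, it can help to check which process owns the port first; either ss (from iproute2) or lsof shows it:

ss -ltnp 'sport = :445'
# or
lsof -i :445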

Record the output of a command

script -c "ls -la" logfile.log

wuseman1 · 2024-08-26 18:34:05

ffmpeg: Generate GIF from video

-ss 30 means start extracting frames 30 seconds into the video, and -t 3 means extract the next 3 seconds from that point. The fps can be adjusted to your preference. 320 is the width of the GIF; the height is calculated automatically. input.mp4 is the source video, which can be any format ffmpeg supports, and output.gif is the GIF that gets created.

ffmpeg -ss 30 -t 3 -i input.mp4 -vf "fps=10,scale=320:-1:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse" -loop 0 output.gif

keyboardsage · 2024-03-19 00:34:23

find and delete all hidden dot files

seek

find ./path_to_dir -type f -name '.*'

and destroy

find ./path_to_dir -type f -name '.*' -exec rm '{}' \;

keyboardsage · 2024-03-16 23:47:01

7zip, highly compressed and encrypted

Create a 7zip archive named “some_directory.7z” and add the directory “some_directory” to it. The -mhe=on turns on header encryption: it mangles the file names so that no one can tell what's inside the 7z. Without -mhe=on, someone without the password could still list the file names inside the archive; with it, confidentiality is ensured. To keep the result small, lzma2 at compression level 9 is used. Lzma2 fast bytes range from 5 to 272; the higher the number, the more aggressively it hunts for repetitive bytes to add to the dictionary. Here the fast bytes are set to 64 and the dictionary to 32 MB; depending on your purposes (directory size and desired file size), you can be more aggressive with these values. Lastly, -ms=on (solid mode) concatenates all the individual files and treats them as a single stream when compressing, which generally gives a higher compression ratio.

$ du -sh Dev*
114M    Dev
 36M    Dev.7z
 49M    Dev.tgz
7z a -t7z -mhe=on -m0=lzma2 -mx=9 -mfb=64 -md=32m -ms=on Dev.7z Dev/

keyboardsage · 2024-03-16 23:36:38

Disk usage skipping mount points (even top-level ones)

Other solutions that involve doing du -sx /* are incomplete because they will still descend into other top-level filesystems that are mounted directly under “/”: the * expands to explicitly include all files and directories in “/”, and du will still traverse them even with -x because you asked it to by supplying the directory name as a parameter (indirectly via “*”).

for a in /*; do mountpoint -q -- "$a" || du -shx "$a"; done | sort -h

dmmst19 · 2024-02-28 01:43:19

Shell variables from JSON

A recursive version might be useful too. /dev/tty is used to show which shell variables just got defined.

json='{"a":42, "b":"s t r i n g", "c": []}' ; eval $(echo $json | jq -r 'to_entries | .[] | select(.value | scalars) | .key + "=\"" + (.value | tostring) + "\";"' | tee /dev/tty)

penthief · 2023-09-26 03:11:38

Log changing files

Monitor changed files into a log file, with daily rotation, using fswatch (macOS). This command monitors changes to the current folder structure (subfolders included) and its files, and logs them into a hidden file in the same folder called .file_changes_YYYYMMDD.log. Modify the --exclude parameters to define what should be skipped.

fswatch --exclude=.git/* --exclude=.settings --event-flags --event-flag-separator=\; -t -f '%Y-%m-%d %H:%M:%S' . >> ./.file_changes_$(date +"%Y-%m-%d" | sed s/-//g).log

paulera · 2023-08-17 23:06:30

Color bars

while :; do for ((i=0;i<$(tput cols);i++));do clear;for ((j=0;j<$(tput lines);j++));do printf "\e[48;5;$((RANDOM%256))m%*s\e[0m\n" $(((j+i)%2?$(tput cols)-i:i)) "";done;sleep 0.05;done;done

or alternatively:

while :; do printf "\e[48;2;$((RANDOM % 256));$((RANDOM % 256));$((RANDOM % 256))m%*s\e[0m" $(tput cols) ""; sleep 0.1; done

wuseman1 · 2023-07-04 00:47:37

ls sorted by git commit date

This lists all the files in a folder, looks up the last commit date for each one, and then sorts them from newest to oldest.

git ls-tree --name-only HEAD foldername/ | while read filename; do echo "$(git log -1 --format="%ci " -- $filename) $filename"; done | sort -r

fivestones · 2023-03-01 17:02:51

Copy a file with progress and save hash to a different file

pv file.txt | tee >(sha1sum > file.sha1) > file-copy.txt

bugmenot · 2022-11-24 20:23:02