X-Git-Url: https://iankelling.org/git/?a=blobdiff_plain;f=distro-end;h=655ae7f4c19d68bf6f4aa1cb6c9ea25997f03b3b;hb=0b6d44c7f3d567e0a26138509c8a24cb57c69b50;hp=973dc85bf5514368501969524bccea1409f92bba;hpb=86ebcd0416223ded297f6cbbcb0906b85793e359;p=distro-setup diff --git a/distro-end b/distro-end index 973dc85..655ae7f 100755 --- a/distro-end +++ b/distro-end @@ -13,252 +13,85 @@ # See the License for the specific language governing permissions and # limitations under the License. -errcatch +### setup +source /a/bin/errhandle/err +src="${BASH_SOURCE%/*}" +source $src/pkgs set -x - exec &> >(sudo tee -a /var/log/distro-end) echo "$0: $(date): starting now)" - -src="${BASH_SOURCE%/*}" - # see example of usage to understand. end_msg() { - local y - IFS= read -r -d '' y ||: - end_msg_var+="$y" + local y + IFS= read -r -d '' y ||: + end_msg_var+="$y" } - spa() { # simple package add - simple_packages+=($@) + simple_packages+=($@) } - distro=$(distro-name) - pending_reboot=false sed="sed --follow-symlinks" - # template case $distro in esac +#### initial packages pup -pi aptitude - -simple_packages=( - htop - iptables - mailutils - nmon - rdiff-backup - ruby - ruby-rest-client - tree - vim - wcd - wget -) - -case $HOSTNAME in - lj|li) : ;; - *) - # universal packages - # swh-plugins is for karaoke pulsaudio filter. - # mutagen for pithos - # guvcview set webcam brightness to highest - # pidgin-otr, i went into pidgin pluggin settings and generated a key for some accounts - simple_packages+=( - apache2 - apache2-doc - apt-doc - apt-listchanges - aptitude-doc-en - bash-doc - beets - beets-doc - binutils-doc - bind9-doc - bind9utils - bwm-ng - cloc - cpulimit - cron - debconf-doc - dirmngr - dnsutils - dnsmasq - dtrx - duplicity - eclipse - evince - fdupes - feh - filelight - flashrom - gawk-doc - gcc-doc - gdb - gdb-doc - geoip-bin - git-doc - git-email - gitk - glibc-doc - goaccess - gnome-screenshot - guvcview - i3lock - inetutils-traceroute - iperf3 - iproute2-doc - jq - kid3-qt - kid3-cli - linux-doc - locate - lshw - make-doc - manpages - manpages-dev - mb2md - meld - mps-youtube - mumble - nagstamon - ncdu - nginx-doc - nmap - offlineimap - oathtool - opendkim-tools - p7zip - paprefs - parted-doc - pavucontrol - pdfgrep - perl-doc - pianobar - pidgin - pidgin-otr - pry - python-autopep8 - python3-doc - python3-mutagen - qrencode - reportbug - $(aptitude show ruby | sed -rn 's/Depends: (.*)/\1/p')-doc - sqlite3-doc - squashfs-tools - swh-plugins - tar-doc - tcpdump - telnet - transmission-remote-gtk - vlc - whois - wondershaper - ) - spa $(apt-cache search ruby[.0-9]+-doc| awk '{print $1}') - ;; -esac - +if isdeb; then + pi aptitude +fi ########### begin section including li ################ +pi ${p3[@]} $($src/distro-pkgs) +conflink case $distro in - arch) - # ubuntu 14.04 uses b-cron, - # but its not maintained in arch. - # of the ones in the main repos, cronie is only one maintained. - # fcron appears abandoned software. - pi cronie - sgo cronie - ;; - *) : ;; # other distros come with cron. 
+ arch) sgo cronie ;; esac - - case $distro in - *) pi at ;;& - arch) sgo atd ;; + arch) sgo atd ;; esac case $distro in - debian|trisquel|ubuntu) pi curl;; - arch) : ;; - # fedora: unknown -esac - -case $distro in - # tk for gitk - arch) spa git tk ;; - *) spa git ;; -esac - -case $distro in - arch) spa the_silver_searcher ;; - debian|trisquel|ubuntu) spa silversearcher-ag ;; - # fedora unknown -esac - -case $distro in - debian|trisquel|ubuntu) spa ntp;; - arch) - pi ntp - sgo ntpd - ;; - # others unknown + arch) sgo ntpd ;; esac # no equivalent in other distros: case $distro in - debian|trisquel|ubuntu) - pi aptitude - if ! dpkg -s apt-file &>/dev/null; then - # this condition is just a speed optimization - pi apt-file - s apt-file update - fi - # for debconf-get-selections - spa debconf-utils - ;; + debian|trisquel|ubuntu) + if ! dpkg -s apt-file &>/dev/null; then + # this condition is just a speed optimization + pi apt-file + s apt-file update + fi + ;; esac - -case $distro in - arch|debian|trisquel|ubuntu) - spa bash-completion - ;; - # others unknown -esac - - - - - # disable motd junk. case $distro in - debian) - # allows me to pipe with ssh -t, and gets rid of spam - # http://forums.debian.net/viewtopic.php?f=5&t=85822 - # i'd rather disable the service than comment the init file - # this says disabling the service, it will still get restarted - # but this script doesn't do anything on restart, so it should be fine - s dd of=/var/run/motd.dynamic if=/dev/null - # stretch doesn't have initscripts pkg installed by default - if [[ $(debian-codename) == jessie ]]; then - s update-rc.d motd disable - fi - ;; - trisquel|ubuntu) - # this isn't a complete solution. It still shows me when updates are available, - # but it's no big deal. - s t /etc/update-motd.d/10-help-text /etc/update-motd.d/00-header - ;; + debian) + # allows me to pipe with ssh -t, and gets rid of spam + # http://forums.debian.net/viewtopic.php?f=5&t=85822 + # i'd rather disable the service than comment the init file + # this says disabling the service, it will still get restarted + # but this script doesn't do anything on restart, so it should be fine + s dd of=/var/run/motd.dynamic if=/dev/null + # stretch doesn't have initscripts pkg installed by default + if [[ $(debian-codename) == jessie ]]; then + s update-rc.d motd disable + fi + ;; + trisquel|ubuntu) + # this isn't a complete solution. It still shows me when updates are available, + # but it's no big deal. + s t /etc/update-motd.d/10-help-text /etc/update-motd.d/00-header + ;; esac # automatic updates @@ -268,100 +101,131 @@ esac # /usr/share/doc/unattended-upgrades# cat README.md # /etc/apt/apt.conf.d/50unattended-upgrades if isdebian; then - setup-debian-auto-update + setup-debian-auto-update fi -# we've got a few dependencies later on, so install them now. 
-pi eatmydata -s eatmydata apt-get -y install --purge --auto-remove "${simple_packages[@]}" -simple_packages=() - ### begin docker install #### - if isdeb; then - # https://store.docker.com/editions/community/docker-ce-server-debian?tab=description - pi software-properties-common apt-transport-https - curl -fsSL https://download.docker.com/linux/$(distro-name-compat)/gpg | sudo apt-key add - - sudo add-apt-repository \ - "deb [arch=amd64] https://download.docker.com/linux/$(distro-name-compat) \ - $(debian-codename-compat) \ - stable" + # https://store.docker.com/editions/community/docker-ce-server-debian?tab=description + pi software-properties-common apt-transport-https + curl -fsSL https://download.docker.com/linux/$(distro-name-compat)/gpg | sudo apt-key add - + url=https://download.docker.com/linux/$(distro-name-compat) + l="deb [arch=amd64] $url $(debian-codename-compat) stable" + + if ! grep -xFq "$l" /etc/apt/sources.list{,.d/*.list}; then + sudo add-apt-repository $l p update - pi docker-ce - sgo docker - # other distros unknown + fi + # docker eats up a fair amount of cpu when doing nothing, so don't enable it unless + # we really need it. + pi-nostart docker-ce + # and docker is even more crap, it ignores that it shouldnt start + ser stop docker + ser disable docker + case $HOSTNAME in + li|lj) sgo docker ;; + esac + # other distros unknown fi ### end docker install #### +### begin certbot install ### case $distro in - debian) - # note, need python-certbot-nginx for nginx, but it depends on nginx, - # and I'm not installing nginx by default right now - if isdebian-testing; then - pi --install-suggests certbot - else - pi --install-suggests -t jessie-backports certbot - fi - # make a version of the certbot timer that emails me. - x=/systemd/system/certbot - $sed -r -f - /lib$x.timer <<'EOF' |s dd of=/etc${x}mail.timer + debian) + # note, need python-certbot-nginx for nginx, but it depends on nginx, + # and I'm not installing nginx by default right now. + # note python-certbot-apache is in suggests, but so is a doc package that brought in xorg + if [[ $(debian-codename) == jessie ]]; then + pi -t jessie-backports certbot python-certbot-apache + else + pi certbot python-certbot-apache + fi + ;; + trisquel|ubuntu) + # not packaged in xenial or flidas + pi software-properties-common + l="deb http://ppa.launchpad.net/certbot/certbot/ubuntu xenial main" + if ! grep -xFq "$l" /etc/apt/sources.list{,.d/*.list}; then + s add-apt-repository -y ppa:certbot/certbot ||: + p update + fi + pi python-certbot-apache + ;; + # todo: other distros unknown +esac +# make a version of the certbot timer that emails me. +x=/systemd/system/certbot +$sed -r -f - /lib$x.timer <<'EOF' |s dd of=/etc${x}mail.timer s,^Description.*,\0 mail version, EOF - $sed -r -f - /lib$x.service <<'EOF' |s dd of=/etc${x}mail.service +$sed -r -f - /lib$x.service <<'EOF' |s dd of=/etc${x}mail.service s,(ExecStart=)(/usr/bin/certbot),\1/a/bin/log-quiet/sysd-mail-once certbotmail \2 --renew-hook /a/bin/distro-setup/certbot-renew-hook, EOF - ser daemon-reload - sgo certbotmail.timer - - ;; - trisquel|ubuntu) - # not packaged in xenial or flidas - pi software-properties-common - s add-apt-repository -y ppa:certbot/certbot - p update - pi python-certbot-apache - ;; - # todo: other distros unknown -esac +ser daemon-reload +sgo certbotmail.timer +### end certbot install ### + + +# dogcam setup. 
not using atm +# case $HOSTNAME in +# lj|li) +# /a/bin/webcam/install-server +# ;; +# kw) +# /a/bin/webcam/install-client +# ;; +# esac + +pi ${p1[@]} # website setup case $HOSTNAME in - lj|li) - - case $HOSTNAME in - lj) domain=iank.bid; exit 0 ;; - li) domain=iankelling.org ;; - esac - /a/h/setup.sh $domain - /a/h/build.rb - - sudo -E /a/bin/mediawiki-setup/mw-setup-script - #$src/phab-setup - - pi-nostart mumble-server - s $sed -ri "s/^ *(serverpassword=).*/\1$(< /a/bin/bash_unpublished/mumble_pass)/" /etc/mumble-server.ini - - # do certificate to avoid warning about unsigned cert, - # which is overkill for my use, but hey, I'm cool, I know - # how to do this. - web-conf apache2 mumble.iankelling.org - s rm -f /etc/apache2/sites-enabled/mumble.iankelling.org - sudo -i <<'EOF' + lj|li) + case $HOSTNAME in + lj) domain=iank.bid; exit 0 ;; + li) domain=iankelling.org ;; + esac + /a/h/setup.sh $domain + /a/h/build.rb + + sudo -E /a/bin/mediawiki-setup/mw-setup-script + + pi-nostart mumble-server + s $sed -ri "s/^ *(serverpassword=).*/\1$(< /a/bin/bash_unpublished/mumble_pass)/" /etc/mumble-server.ini + + # do certificate to avoid warning about unsigned cert, + # which is overkill for my use, but hey, I'm cool, I know + # how to do this. + web-conf apache2 mumble.iankelling.org + s rm -f /etc/apache2/sites-enabled/mumble.iankelling.org + sudo -i <<'EOF' export RENEWED_LINEAGE=/etc/letsencrypt/live/mumble.iankelling.org /a/bin/distro-setup/certbot-renew-hook EOF - sgo mumble-server + sgo mumble-server - vpn-server-setup -d - tee /etc/openvpn/client-config/mail <<'EOF' + vpn-server-setup -rd + s tee /etc/openvpn/client-config/mail <<'EOF' ifconfig-push 10.8.0.4 255.255.255.0 EOF + # it\'s strange. docker seems to make the default for forward + # be drop, but then I set it to accept and it\'s stuck that way, + # I dun know why. But, let\'s make sure we can forward anyways. + s DEBIAN_FRONTEND=noninteractive pi iptables-persistent + rm /etc/iptables/rules.v6 + s tee /etc/iptables/rules.v4 <<'EOF' +*filter +-A FORWARD -i tun+ -o eth0 -j ACCEPT +-A FORWARD -i eth0 -o tun+ -j ACCEPT +COMMIT +EOF + - sudo dd of=/etc/systemd/system/vpnmail.service < @@ -396,20 +265,20 @@ EOF # setup one time, with root:www-data, 640 AuthUserFile "/etc/caldav-htpasswd" Require valid-user - + EOF - # nginx version of above would be: - # auth_basic "Not currently available"; - # auth_basic_user_file /etc/nginx/caldav/htpasswd; + # nginx version of above would be: + # auth_basic "Not currently available"; + # auth_basic_user_file /etc/nginx/caldav/htpasswd; - ########## begin pump.io setup ########## + ########## begin pump.io setup ########## - # once pump adds a logrotation script, turn off nologger, - # and add - # "logfile": "/var/log/pumpio/pumpio.log", - # - s dd of=/etc/pump.io.json <<'EOF' + # once pump adds a logrotation script, turn off nologger, + # and add + # "logfile": "/var/log/pumpio/pumpio.log", + # + s dd of=/etc/pump.io.json <<'EOF' { "secret": "SECRET_REPLACE_ME", "driver": "mongodb", @@ -433,33 +302,41 @@ EOF "sockjs": false } EOF - s sed -i "s#SECRET_REPLACE_ME#$(cat /p/c/machine_specific/li/pump-secret)#" /etc/pump.io.json - - # jessie\'s node is too old - # https://nodejs.org/en/download/package-manager/ - curl -sL https://deb.nodesource.com/setup_6.x | sudo -E bash - - pi nodejs - cd /home/iank - rm -rf pump.io.git - git clone https://github.com/pump-io/pump.io.git - cd pump.io - # note: doing this or the npm install pump.io as root had problems. 
- npm install - npm run build - # normally, next command would be - # s npm install -g databank-mongodb - # but it\'s this until a bug in pump gets fixed - s npm install -g databank-mongodb@0.19.2 - s useradd -m -s /bin/false pumpio - sudo -u pumpio mkdir -p /home/pumpio/pumpdata - # for testing browser when only listening to localhost, - # in the pump.io.json, set hostname localhost, urlPort 5233 - #ssh -L 5233:localhost:5233 li - - s mkdir -p /var/log/pumpio/ - s chown pumpio:pumpio /var/log/pumpio/ - - web-conf - apache2 pump.iankelling.org <<'EOF' + s sed -i "s#SECRET_REPLACE_ME#$(cat /p/c/machine_specific/li/pump-secret)#" /etc/pump.io.json + + # stretch node is too old + # https://nodejs.org/en/download/package-manager/ + curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash - + pi nodejs graphicsmagick mongodb + cd /home/iank + if [[ -e pump.io ]]; then + cd pump.io + git pull + else + git clone https://github.com/pump-io/pump.io.git + cd pump.io + fi + # note: these 2 commands seem + # note: doing this or the npm install pump.io as root had problems. + npm install + npm run build + # normally, next command would be + # s npm install -g odb + # but it\'s this until a bug in pump gets fixed + # https://github.com/pump-io/pump.io/issues/1287 + s npm install -g databank-mongodb@0.19.2 + if ! getent passwd pumpio &>/dev/null; then + s useradd -Um -s /bin/false pumpio + fi + sudo -u pumpio mkdir -p /home/pumpio/pumpdata + # for testing browser when only listening to localhost, + # in the pump.io.json, set hostname localhost, urlPort 5233 + #ssh -L 5233:localhost:5233 li + + s mkdir -p /var/log/pumpio/ + s chown pumpio:pumpio /var/log/pumpio/ + + web-conf - apache2 pump.iankelling.org <<'EOF' # currently a bug in pump that we cant terminate ssl SSLProxyEngine On ProxyPreserveHost On @@ -475,15 +352,16 @@ EOF EOF - sudo -i <<'EOF' + sudo -i <<'EOF' export RENEWED_LINEAGE=/etc/letsencrypt/live/pump.iankelling.org /a/bin/distro-setup/certbot-renew-hook EOF - s dd of=/etc/systemd/system/pump.service <<'EOF' + s dd of=/etc/systemd/system/pump.service <<'EOF' [Unit] Description=pump.io -After=syslog.target network.target +After=syslog.target network.target mongodb.service +Requires=mongodb.service [Service] Type=simple @@ -498,41 +376,47 @@ Environment=NODE_PATH=/usr/lib/nodejs:/usr/lib/node_modules:/usr/share/javascrip [Install] WantedBy=multi-user.target EOF - ser daemon-reload - sgo pump - ########## end pump.io setup ############ + ser daemon-reload + sgo pump + ########## end pump.io setup ############ - ############# begin setup mastodon ############## + ############# begin setup mastodon ############## - # I'd like to try gnu social just cuz of gnu, but it's not being - # well maintained, for example, simple pull requests - # languishing: - # https://git.gnu.io/gnu/gnu-social/merge_requests/143 - # and I submitted my own bugs, basic docs are broken - # https://git.gnu.io/gnu/gnu-social/issues/269 + # main doc is Docker-Guide.md in docs repo - # note, docker required, but we installed it earlier + # I'd like to try gnu social just cuz of gnu, but it's not being + # well maintained, for example, simple pull requests + # languishing: + # https://git.gnu.io/gnu/gnu-social/merge_requests/143 + # and I submitted my own bugs, basic docs are broken + # https://git.gnu.io/gnu/gnu-social/issues/269 - # i subscrubed to https://github.com/docker/compose/releases.atom - # to deal with updates manually. 
So far, it means just reving the - # version number, then restarting docker-compose with - # cd ~/mastodon - # docker-compose up -d - curl -L https://github.com/docker/compose/releases/download/1.13.0/docker-compose-`uname -s`-`uname -m` | s dd of=/usr/local/bin/docker-compose - s chmod +x /usr/local/bin/docker-compose + # note, docker required, but we installed it earlier + # i subscrubed to https://github.com/docker/compose/releases.atom + # to see release notes. + # i had some problems upgrading. blew things away with + # docker-compose down + # docker rmi $(docker images -q) + # s reboot now + # when running docker-compose run, kernel stack traces are printed to the journal. + # things seem to succeed, google says nothing, so ignoring them. + curl -L https://github.com/docker/compose/releases/download/1.18.0/docker-compose-`uname -s`-`uname -m` | s dd of=/usr/local/bin/docker-compose + s chmod +x /usr/local/bin/docker-compose - cd ~ - i clone https://github.com/tootsuite/mastodon - cd mastodon - # subbed to atom feed to deal with updates - git checkout $(git tag | grep -v rc | tail -n1) - # per instructions, uncomment redis/postgres persistence in docker-compose.yml - sed -i 's/^#//' docker-compose.yml + cd ~ + s rm -rf mastodon + i clone https://github.com/tootsuite/mastodon + cd mastodon + # subbed to atom feed to deal with updates + git checkout $(git tag | grep -v rc | tail -n1) - cat >.env.production <<'EOF' + # per instructions, uncomment redis/postgres persistence in docker-compose.yml + sed -i 's/^#//' docker-compose.yml + + cat >.env.production <<'EOF' REDIS_HOST=redis REDIS_PORT=6379 DB_HOST=db @@ -546,7 +430,7 @@ LOCAL_HTTPS=true SINGLE_USER_MODE=true -SMTP_SERVER=10.8.0.4 +SMTP_SERVER=mail.iankelling.org SMTP_PORT=25 SMTP_LOGIN=li SMTP_FROM_ADDRESS=notifications@mast.iankelling.org @@ -554,25 +438,46 @@ SMTP_DOMAIN=mast.iankelling.org SMTP_DELIVERY_METHOD=smtp EOF - for key in PAPERCLIP_SECRET SECRET_KEY_BASE OTP_SECRET; do - printf "%s=%s" $key "$(docker-compose run --rm web rake secret)" >>.env.production - done - s cat /etc/mailpass| while read -r domain port pass; do - if [[ $domain == mail.iankelling.org ]]; then - printf "SMTP_PASSWORD=%s" "$pass" >>.env.production - break - fi - done - - - - docker-compose run --rm web rails assets:precompile - - # docker daemon takes care of starting on boot. - docker-compose up -d - - s a2enmod proxy_wstunnel headers - web-conf -f 3000 - apache2 mast.iankelling.org <<'EOF' + for key in PAPERCLIP_SECRET SECRET_KEY_BASE OTP_SECRET; do + # 1 minute 7 seconds to run this docker command + # to generate a secret, and it has ^M chars at the end. wtf. really dumb + printf "%s=%s\n" $key "$(docker-compose run --rm web rake secret|dos2unix|tail -n1)" >>.env.production + done + found=false + while read -r domain port pass; do + if [[ $domain == mail.iankelling.org ]]; then + found=true + # remove the username part + pass="${pass#*:}" + printf "SMTP_PASSWORD=%s\n" "$pass" >>.env.production + break + fi + done < <(s cat /etc/mailpass) + if ! $found; then + echo "$0: error, failed to find mailpass domain for mastadon" + exit 1 + fi + + # docker compose makes an interface named like br-8f3e208558f2. we need mail to + # get routed to us. + if ! 
s /sbin/iptables -t nat -C PREROUTING -i br-+ -p tcp -m tcp --dport 25 -j DNAT --to-destination 10.8.0.4:25; then + s /sbin/iptables -t nat -A PREROUTING -i br-+ -p tcp -m tcp --dport 25 -j DNAT --to-destination 10.8.0.4:25 + fi + + docker-compose run --rm web rake mastodon:webpush:generate_vapid_key | grep -E '^VAPID_PUBLIC_KEY=|^VAPID_PRIVATE_KEY=' >> .env.production + logq docker-compose run --rm web rake db:migrate + docker-compose run --rm web rails assets:precompile + + # avatar failed to upload, did + # docker logs mastodon_web_1 + # google lead me to this + s chown -R 991:991 public/system + + # docker daemon takes care of starting on boot. + docker-compose up -d + + s a2enmod proxy_wstunnel headers + web-conf -f 3000 - apache2 mast.iankelling.org <<'EOF' ProxyPreserveHost On RequestHeader set X-Forwarded-Proto "https" ProxyPass /500.html ! @@ -587,108 +492,171 @@ EOF EOF - ############### !!!!!!!!!!!!!!!!! - ############### manual steps: + ############### !!!!!!!!!!!!!!!!! + ############### manual steps: - # only following 2 people atm, so not bothering to figure out backups - # when mastodon has not documented it at all. - # - # fsf@status.fsf.org - # cwebber@toot.cat - # dbd@status.fsf.org - # johns@status.fsf.org - - # sign in page is at https://mast.iankelling.org/auth/sign_in - # register as iank, then - # https://github.com/tootsuite/documentation/blob/master/Running-Mastodon/Administration-guide.md - # docker-compose run --rm web bundle exec rails mastodon:make_admin USERNAME=iank + # only following a few people atm, so not bothering to figure out backups + # when mastodon has not documented it at all. + # + # fsf@status.fsf.org + # cwebber@toot.cat + # dbd@status.fsf.org + # johns@status.fsf.org + + # sign in page is at https://mast.iankelling.org/auth/sign_in + # register as iank, then + # https://github.com/tootsuite/documentation/blob/master/Running-Mastodon/Administration-guide.md + # docker-compose run --rm web bundle exec rails mastodon:make_admin USERNAME=iank + + ############# end setup mastodon ############## + + # we use nsupdate to update the ip of home + pi bind9 + + pi znc + # znc config generated by doing + # znc --makeconf + # selected port is also used in erc config + # comma separated channel list worked. + # while figuring things out, running znc -D for debug in foreground. + # to exit and save config: + # /msg *status shutdown + # configed auth on freenode by following + # https://wiki.znc.in/Sasl: + # /msg *sasl RequireAuth yes + # /msg *sasl Mechanism PLAIN + # /msg *sasl Set ident_name password + # created the system service after, and had to do + # mv /home/iank/.znc/* /var/lib/znc + # sed -i 's,/home/iank/.znc/,/var/lib/znc,' /var/lib/znc/config/znc.conf + # and made a copy of the config files into /p/c + # /msg *status LoadMod --type=global log -sanitize + # to get into the web interface, + # cat /etc/letsencrypt/live/iankelling.org/{privkey,cert,chain}.pem > /var/lib/znc/znc.pem + # then use non-main browser or else it doesn't allow it based on ocsp stapling from my main site. + # https://iankelling.org:12533/ + # i'm going to figure out how to automate this when it expires. i know i can hook a script into the renewal. https://wiki.znc.in/FAQ seems to imply that znc doesn\'t need restart. + # todo: in config file AllowWeb = true should be false. better security if that is off unless we need it. 
+ # /msg *status LoadMod --type=network perform + # /msg *perform add PRIVMSG ChanServ :invite #fsf-office + # /msg *perform add JOIN #fsf-office + # + # i set Buffer = 500 + # also ran /znc LoadMod clearbufferonmsg + # it would be nice if erc supported erc query buffers by doing + # /msg *status clearbuffer /dev/null <<'EOF' +[Unit] +Description=ZNC, an advanced IRC bouncer +After=network-online.target - ############# end setup mastodon ############## +[Service] +ExecStart=/usr/bin/znc -f --datadir=/var/lib/znc +User=znc - # we use nsupdate to update the ip of home - pi bind9 +[Install] +WantedBy=multi-user.target +EOF + ser daemon-reload + sgo znc - echo "$0: $(date): ending now)" - exit 0 - ;; + echo "$0: $(date): ending now)" + exit 0 + ;; esac - ########### end section including li/lj ############### -case $distro in - debian) spa gnome-session-flashback ;; - # flidas is missing dependency gnome-panel. others unknown -esac - - +# depends gcc is a way to install suggests. this is apparently the only +# way to install suggests even if the main package is already +# installed. reinstall doesn't work, uninstalling can cause removing +# dependent packages. +pi ${pall[@]} $(apt-cache search ruby[.0-9]+-doc| awk '{print $1}') $(apt-cache depends gcc|grep -i suggests:| awk '{print $2}') -case $distro in - trisquel|ubuntu|debian) spa ack-grep ;; - arch|fedora) spa ack ;; - # fedora unknown -esac - - -if isdeb; then - spa debian-goodies +if ! type pip; then + x=$(mktemp) + wget -O$x https://bootstrap.pypa.io/get-pip.py + python3 $x --user fi +sgo fsf-vpn-dns-cleanup + case $distro in - debian) - pi chromium ;; - xenial|ubuntu) - wget -qO - https://downloads.iridiumbrowser.de/ubuntu/iridium-release-sign-01.pub|sudo apt-key add - - cat <$t </dev/null <$t < /dev/null; then + case $distro in + arch) + s groupadd -g 450 debian-transmission + s useradd \ + --system \ + --create-home \ + --gid 450 \ + --uid 450 \ + --home-dir /var/lib/transmission-daemon \ + --shell /bin/false \ + debian-transmission + ;; + *) + s adduser --quiet \ + --gid 450 \ + --uid 450 \ + --system \ + --group \ + --no-create-home \ + --disabled-password \ + --home /var/lib/transmission-daemon \ + debian-transmission + ;; + esac +fi +# We want group writable stuff from transmission. +# However, after setting this, I learn that transmission sets it's +# own umask based on it's settings file. Well, no harm leaving this +# so it's set right from the beginning. +s chfn debian-transmission -o umask=0002 + +case $distro in + debian|trisquel|ubuntu) + # note i had to do this, which is persistent: + # cd /i/k + # s chgrp debian-transmission torrents partial-torrents + + # syslog says things like + # 'Failed to set receive buffer: requested 4194304, got 425984' + # google suggets giving it even more than that + tu /etc/sysctl.conf<<'EOF' net.core.rmem_max = 67108864 net.core.wmem_max = 16777216 EOF - s sysctl -p - - # some reason it doesn\'t seem to start automatically anyways - pi-nostart transmission-daemon - - # the folder was moved here after an install around 02/2017. - # it contains runtime data, - # plus a simple symlink to the config file which it\'s - # not worth separating out. 
- s lnf -T /i/transmission-daemon /var/lib/transmission-daemon/.config/transmission-daemon - # between comps, the uid can change - s chown -R debian-transmission:debian-transmission /i/transmission-daemon /var/lib/transmission-daemon - s chown -R debian-transmission:traci /i/k/partial-torrents /i/k/torrents - # - # config file documented here, and it\'s the same config - # for daemon vs client, so it\'s documented in the gui. - # https://trac.transmissionbt.com/wiki/EditConfigFiles#Options - # - # I originaly setup rpc-whitelist, but after using - # routing to a network namespace, it doesn\'t see the - # real source address, so it\'s disabled. - # - # Changed the cache-size to 256 mb, reduces disk use. - # It is a read & write cache. - # - s ruby <<'EOF' + s sysctl -p + + # some reason it doesn\'t seem to start automatically anyways + pi-nostart transmission-daemon + # be extra sure its not started + ser disable transmission-daemon + ser stop transmission-daemon + + # the folder was moved here after an install around 02/2017. + # it contains runtime data, + # plus a simple symlink to the config file which it\'s + # not worth separating out. + # between comps, the uid can change + f=/i/transmission-daemon + s lnf -T $f /var/lib/transmission-daemon/.config/transmission-daemon + if [[ -e $f ]]; then + s chown -R debian-transmission:debian-transmission $f + fi + for f in /i/k/partial-torrents /i/k/torrents; do + if [[ -e $f ]]; then + s chown -R debian-transmission:traci $f + fi + done + s chown -R debian-transmission:debian-transmission /var/lib/transmission-daemon + # + # config file documented here, and it\'s the same config + # for daemon vs client, so it\'s documented in the gui. + # https://trac.transmissionbt.com/wiki/EditConfigFiles#Options + # + # I originaly setup rpc-whitelist, but after using + # routing to a network namespace, it doesn\'t see the + # real source address, so it\'s disabled. + # + # Changed the cache-size to 256 mb, reduces disk use. + # It is a read & write cache. + # + s ruby <<'EOF' require 'json' p = '/etc/transmission-daemon/settings.json' File.write(p, JSON.pretty_generate(JSON.parse(File.read(p)).merge({ @@ -950,39 +1187,16 @@ File.write(p, JSON.pretty_generate(JSON.parse(File.read(p)).merge({ })) + "\n") EOF - # make sure its not enabled, not sure if this is needed - ser disable transmission-daemon - ;; - # todo: others unknown + ;; + # todo: others unknown esac -# adapted from /var/lib/dpkg/info/transmission-daemon.postinst -if ! getent passwd debian-transmission > /dev/null; then - case $distro in - arch) - s useradd \ - --system \ - --create-home \ - --home-dir /var/lib/transmission-daemon \ - --shell /bin/false \ - debian-transmission - ;; - *) - s adduser --quiet \ - --system \ - --group \ - --no-create-home \ - --disabled-password \ - --home /var/lib/transmission-daemon \ - debian-transmission - ;; - esac -fi + # trisquel 8 = openvpn, debian stretch = openvpn-client vpn_ser=openvpn-client if [[ ! 
-e /lib/systemd/system/openvpn-client@.service ]]; then - vpn_ser=openvpn + vpn_ser=openvpn fi s dd of=/etc/systemd/system/transmission-daemon-nn.service </dev/null; then + cd $(mktemp -d) + # just the latest stable at the time of writing + # TODO, maybe put this all into a build script, + # and do some automatic updates + wget https://dl.google.com/go/go1.12.4.linux-amd64.tar.gz + s tar -C /usr/local -xzf go1.12.4.linux-amd64.tar.gz + rm -f * + fi + go get -u mvdan.cc/fdroidcl + # a bit of googling, and added settings to bashrc + ;; esac - case $distro in - arch|debian|trisquel|ubuntu) spa pumpa ;; - # others unknown. do have a buildscript: - # /a/bin/buildscripts/pumpa ;; + arch) + # otherwise we get error about accessing kvm module. + # seems like there might be a better way, but google was a bit vague. + s $sed -ri '/^ *user *=/d' /etc/libvirt/qemu.conf + echo 'user = "root"' | s tee -a /etc/libvirt/qemu.conf + # https://bbs.archlinux.org/viewtopic.php?id=206206 + # # this should prolly go in the wiki + sgo virtlogd.socket + # guessing this is not needed + #sgo virtlogd.service + sgo libvirtd + ;; esac case $distro in - debian) pi adb ;; - debian|trisquel|ubuntu) spa android-tools-adbd ;; - # todo: not sure this is needed anymore, or if trisqel etc works even -# debian) spa android-tools-adbd/unstable ;; - arch) spa android-tools ;; - # other distros unknown + fedora) + cd $(mktemp -d) + wget ftp://ftp.gnu.org/pub/gnu/global/global-6.5.7.tar.gz + ex global* + cd global-6.5.7 + # based on https://github.com/leoliu/ggtags + ./configure --with-exuberant-ctags=/usr/bin/ctags + make + s make install + pip install pygments + ;; + *) + pi global + ;;& + arch) + pi python2-pygments + ;; + debian|trisquel|ubuntu) + pi python-pygments + ;; esac -if [[ $HOSTNAME == treetowl ]]; then - : - ## bitcoin disabled. fees too high - # case $distro in - # debian) - # if [[ `debian-archive` == testing ]]; then - # # has no unstable dependencies - # pi bitcoind/unstable - # src=/a/opt/bitcoin/contrib/init/bitcoind.service - # s cp $src /etc/systemd/system - # p=/etc/bitcoin/bitcoin - # dst=/etc/systemd/system/bitcoinjm.service - # # jm for joinmarket - # $sed -r "/^\s*ExecStart/s,${p}.conf,${p}jm.conf," $src \ - # >/etc/systemd/system/bitcoinjm.service - - # d=jm; jm=d # being clever for succinctness - # for s in d jm; do - # s $sed -ri "/^\s*\[Unit\]/a Conflicts=bitcoin${!s}.service" \ - # /etc/systemd/system/bitcoin${s}.service - # done - - # ser daemon-reload - - # dir=/nocow/.bitcoin - # s mkdir -p $dir - # s chown -R bitcoin:bitcoin $dir - # dir=/etc/bitcoin - # s mkdir -p $dir - # s chown -R root:bitcoin $dir - # s chmod 750 $dir - - # # pruning decreases the bitcoin dir to 2 gb, keeps - # # just the recent blocks. can\'t do a few things like - # # import a wallet dump. - # # pruning works, but people had to do - # # some manual stuff in joinmarket. I dun need the - # # disk space, so not bothering yet, maybe in a year or so. - # # https://github.com/JoinMarket-Org/joinmarket/issues/431 - # #https://bitcoin.org/en/release/v0.12.0#wallet-pruning - # #prune=550 - - # f=$dir/bitcoin.conf - # s dd of=$f </dev/null <
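
A few of the patterns above are easy to miss in diff form, so the sketches below restate them in isolation. First, both the docker and certbot hunks guard add-apt-repository behind a grep -xFq test so that reruns do not append duplicate sources. A minimal standalone version of that idiom; the repository line below is only an example, and plain sudo/apt-get stand in for this repo's s/p helpers:

#!/bin/bash
# Add an apt source line only if it is not already present, then refresh once.
set -e
line="deb [arch=amd64] https://download.docker.com/linux/debian stretch stable"  # example value

# -x matches whole lines, -F treats the pattern as a fixed string, -q stays quiet.
# 2>/dev/null covers the case where sources.list.d has no *.list files yet.
if ! grep -xFq "$line" /etc/apt/sources.list /etc/apt/sources.list.d/*.list 2>/dev/null; then
  sudo add-apt-repository "$line"
  sudo apt-get update
fi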
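
The mastodon hunk routes port 25 from the docker bridge back to the VPN address with a DNAT rule, and uses iptables -C so the rule is only appended when it is not already loaded. The same check-then-append idiom on its own, reusing the rule values from the diff, with sudo in place of the s wrapper:

#!/bin/bash
# iptables -C exits nonzero when no identical rule exists in the chain,
# so repeated runs never stack duplicate DNAT rules.
rule=(PREROUTING -i br-+ -p tcp -m tcp --dport 25 -j DNAT --to-destination 10.8.0.4:25)
if ! sudo iptables -t nat -C "${rule[@]}" 2>/dev/null; then
  sudo iptables -t nat -A "${rule[@]}"
fi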
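
The znc comments note that /var/lib/znc/znc.pem is currently rebuilt by hand from the letsencrypt files and that a renewal hook could automate it. One way to do that, assuming certbot's deploy-hook directory and the paths from those comments; the hook filename, ownership, and the restart are guesses, and the znc FAQ linked above suggests a restart may not even be required:

#!/bin/bash
# Hypothetical /etc/letsencrypt/renewal-hooks/deploy/znc-pem
# certbot runs deploy hooks after each successful renewal with RENEWED_LINEAGE
# set to the live directory of the certificate that was just renewed.
set -e
case $RENEWED_LINEAGE in
  */iankelling.org)
    cat "$RENEWED_LINEAGE"/{privkey,cert,chain}.pem > /var/lib/znc/znc.pem
    chown znc:znc /var/lib/znc/znc.pem   # assumes the znc user the service runs as
    chmod 600 /var/lib/znc/znc.pem
    systemctl restart znc
    ;;
esac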
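
The transmission unit, whose heredoc is cut off in this diff, depends on first picking the right OpenVPN template name: trisquel 8 only ships openvpn@.service while debian stretch has openvpn-client@.service. A generic sketch of that fallback plus a unit bound to the chosen instance; example.service, example-daemon, and the instance name "client" are placeholders:

#!/bin/bash
# Prefer the newer openvpn-client@ template, fall back to openvpn@.
vpn_ser=openvpn-client
if [[ ! -e /lib/systemd/system/openvpn-client@.service ]]; then
  vpn_ser=openvpn
fi

sudo tee /etc/systemd/system/example.service >/dev/null <<EOF
[Unit]
Description=example daemon that should only run while the VPN is up
BindsTo=$vpn_ser@client.service
After=$vpn_ser@client.service

[Service]
ExecStart=/usr/bin/example-daemon

[Install]
WantedBy=multi-user.target
EOF
sudo systemctl daemon-reload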
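
Finally, the fdroidcl hunk unpacks Go under /usr/local and only says that "a bit of googling" produced some bashrc settings. For that layout the additions are normally along these lines; this is a guess, not copied from the repo's bashrc:

# likely ~/.bashrc additions: the toolchain itself, plus the default GOPATH
# bin directory where `go get -u mvdan.cc/fdroidcl` installs its binary
export PATH=$PATH:/usr/local/go/bin:$HOME/go/bin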