Mirror of https://github.com/vimagick/dockerfiles
Synced 2024-06-16 11:58:47 +00:00

Compare commits: 629c9e930b ... 1c004636b6 (15 commits)
Commits (newest first):
1c004636b6, 6050c5f150, 3a84de4841, fa491f5231, 6b6d7d76ba,
1c39413107, 324258a705, a6fdbcd65b, 63d9c9f3e2, b53ae6ccae,
f1478baf34, 4651e02449, f996b2195b, 83b2702939, 9809ddeabf
README.md
@@ -154,6 +154,7 @@ A collection of delicious docker recipes.
 ## Utility
 
 - [x] registry-cli
+- [x] tldextract
 
 ## Media
 
browserless/README.md
@@ -88,11 +88,23 @@ $ pip install selenium
 $ python screenshot.py
 $ imgcat google.png
 
-$ http http://127.0.0.1:3000/screenshot \
+$ http :3000/screenshot \
     url=https://www.youtube.com \
     options[fullPage]:=true \
     gotoOptions[waitUntil]=networkidle2 > youtube.png
 $ imgcat youtube.png
 
+$ http :3000/scrape url=https://www.youtube.com elements[0][selector]=title debug[network]:=true |
+    jq -r '.debug.network.outbound[].url' |
+    xargs -r tldextract -j |
+    jq -r 'select(.fqdn|length>0).fqdn' |
+    sort -u
+accounts.google.com
+fonts.googleapis.com
+fonts.gstatic.com
+googleads.g.doubleclick.net
+i.ytimg.com
+www.youtube.com
 ```
 
 [1]: https://docs.browserless.io/
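A note on the HTTPie syntax above: `:3000` expands to `http://localhost:3000`, `key=value` sets a JSON string field, and `key:=true` sets a raw (non-string) JSON field, so `options[fullPage]:=true` builds a nested JSON body. A rough plain-curl equivalent of the `/screenshot` call, as a sketch (it assumes the same browserless endpoint and body shape shown above):

```bash
# Hypothetical curl equivalent of the HTTPie /screenshot request above
curl -s http://127.0.0.1:3000/screenshot \
  -H 'Content-Type: application/json' \
  -d '{"url": "https://www.youtube.com",
       "options": {"fullPage": true},
       "gotoOptions": {"waitUntil": "networkidle2"}}' \
  -o youtube.png
```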
ftldns/Dockerfile
@@ -5,9 +5,10 @@
 FROM alpine:3
 MAINTAINER EasyPi Software Foundation
 
-ARG FTL_VERSION=v5.23
+ARG FTL_VERSION=v5.25.1
 ARG FTL_FILE=pihole-FTL-musl-linux-x86_64
-ARG FTL_URL=https://github.com/pi-hole/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}
+ARG FTL_GIT=https://github.com/pi-hole
+ARG FTL_URL=${FTL_GIT}/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}
 
 RUN set -xe \
     && apk add --no-cache curl \
@@ -15,6 +16,15 @@ RUN set -xe \
     && curl -sSL ${FTL_URL} -o /usr/bin/pihole-FTL \
     && chmod +x /usr/bin/pihole-FTL \
     && pihole-FTL --version \
+    && mkdir -p /opt/pihole \
+    && cd /opt/pihole \
+    && curl -sSL -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/list.sh \
+                -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/utils.sh \
+                -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/chronometer.sh \
+                -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/COL_TABLE \
+    && cd /usr/local/bin \
+    && curl -sSL ${FTL_GIT}/pi-hole/raw/master/pihole -o /usr/local/bin/pihole \
+    && chmod +x /usr/local/bin/pihole /opt/pihole/*.sh \
     && apk del curl
 
 VOLUME /etc/pihole
ftldns/Dockerfile.debian — new file (38 lines)
@@ -0,0 +1,38 @@
#
# Dockerfile for FTLDNS (pihole-FTL)
#

FROM debian:12-slim
MAINTAINER EasyPi Software Foundation

ARG FTL_VERSION=v5.25.1
ARG FTL_FILE=pihole-FTL-aarch64-linux-gnu
ARG FTL_GIT=https://github.com/pi-hole
ARG FTL_URL=${FTL_GIT}/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}

RUN set -xe \
    && apt update -y \
    && apt install -y curl procps \
    && echo "conf-dir=/etc/pihole/dnsmasq.d,*.conf" >> /etc/dnsmasq.conf \
    && curl -sSL ${FTL_URL} -o /usr/bin/pihole-FTL \
    && chmod +x /usr/bin/pihole-FTL \
    && pihole-FTL --version \
    && mkdir -p /opt/pihole \
    && cd /opt/pihole \
    && curl -sSL -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/list.sh \
                -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/utils.sh \
                -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/chronometer.sh \
                -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/COL_TABLE \
    && cd /usr/local/bin \
    && curl -sSL ${FTL_GIT}/pi-hole/raw/master/pihole -o /usr/local/bin/pihole \
    && chmod +x /usr/local/bin/pihole /opt/pihole/*.sh \
    && apt remove -y curl \
    && rm -rf /var/lib/apt/lists/*

VOLUME /etc/pihole

EXPOSE 4711/tcp \
       53/tcp \
       53/udp

CMD ["pihole-FTL", "-f"]
ftldns/README.md
@@ -10,14 +10,29 @@ $ docker compose up -d
 
 $ dig @127.0.0.1 -p 53 www.youtube.com
 
-$ docker compose exec ftldns pihole-FTL sql -h gravity.db
->>> .schema domainlist
->>> insert into domainlist(type, domain) values (3, '(\.|^)youtube\.com$');
->>> .quit
+$ docker compose exec ftldns bash
+>>> pihole-FTL sql -h gravity.db
+.schema domainlist
+insert into domainlist(type, domain) values (3, '(\.|^)youtube\.com$');
+.quit
+>>> pihole --regex '(\.|^)baidu\.com$'
+>>> pihole --wild 'qq.com'
+>>> pihole-FTL regex-test www.baidu.com
+>>> pihole --wild -d youtube.com
+>>> pihole --regex -l
+Displaying regex blacklist:
+  1: (\.|^)baidu\.com$ (enabled, last modified Tue, 27 Feb 2024 11:17:59 +0000)
+  2: (\.|^)qq\.com$ (enabled, last modified Tue, 27 Feb 2024 11:22:17 +0000)
+>>> pihole -c
+|¯¯¯(¯)_|¯|_ ___|¯|___   Core: API Offline
+| ¯_/¯|_| ' \/ _ \ / -_)
+|_| |_| |_||_\___/_\___|
+——————————————————————————————————————————————————————————
+>>> exit
 
-$ docker compose exec ftldns kill -RTMIN 1
+$ docker compose kill -s RTMIN ftldns
 
-$ dig @127.0.0.1 -p 53 www.youtube.com
+$ dig @127.0.0.1 -p 53 www.baidu.com
 
 $ telnet 127.0.0.1 4711
 >version
@@ -28,7 +43,8 @@ $ telnet 127.0.0.1 4711
 >quit
 ```
 
-> Read more about [telnet-api][2].
+> Read more about [domainlist][2] and [telnet-api][3].
 
 [1]: https://github.com/pi-hole/FTL
-[2]: https://docs.pi-hole.net/ftldns/telnet-api/
+[2]: https://docs.pi-hole.net/database/gravity/#domain-tables-domainlist
+[3]: https://docs.pi-hole.net/ftldns/telnet-api/
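Side note on the `insert into domainlist` statement added above: in Pi-hole's gravity database, the `type` column selects which list a domain belongs to (see the gravity docs linked as [2]):

```sql
-- domainlist.type values:
--   0 = exact whitelist, 1 = exact blacklist,
--   2 = regex whitelist, 3 = regex blacklist
-- so the statement in the diff (type 3) adds a regex-blacklist entry:
insert into domainlist(type, domain) values (3, '(\.|^)youtube\.com$');
```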
memgraph/example/dnslookup — new executable file (36 lines)
@@ -0,0 +1,36 @@
#!/bin/bash

HOST=${1:?DOMAIN or IP is empty}

docker exec -i memgraph_memgraph_1 mgconsole -output-format=csv <<- _CQL_ | sed -e 's/"//g' | tail -n +2 | tr '[],[]' ' ' | gawk -f /dev/fd/3 3<<- "_AWK_" | column -t -i1 -p2 -r3 -H1,2 | sed 's/─/& /'
	MATCH p=(n)-[*]->(m)
	WHERE any(n in nodes(p) where n.name = '$HOST') AND not exists(()-->(n)) AND not exists((m)-->())
	UNWIND nodes(p) AS nn
	WITH DISTINCT nn
	CALL path.expand(nn,[">"],[],1,1) YIELD result
	RETURN extract(i in nodes(result)|i.name);
_CQL_
	BEGIN {
		split("", cache);
		split("", roots);
		idx=0;
	}
	{
		if(!($1 in cache)) {
			roots[$1] = cache[$1] = ++idx;
		}
		if(!($2 in cache)) {
			cache[$2] = ++idx;
		}
		delete roots[$2];
		print cache[$2], cache[$1], $2;
	}
	END {
		print "0 -1 ."
		for(root in roots) {
			print cache[root], 0, root;
		}
	}
_AWK_

# vim: set noai noet:
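Usage sketch for the script above (the graph must already be populated, e.g. via load-csv.sh below; the hostname is only an example). The awk stage assigns numeric ids to each node and emits (child-id, parent-id, name) triples, which `column --tree` options then render as an indented tree:

```bash
# Print the resolution chains touching the given name as a tree:
# Domain -> CNAME -> ... -> IPv4
./dnslookup www.youtube.com
```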
memgraph/example/dump-elk.sh — new executable file (16 lines)
@@ -0,0 +1,16 @@
#!/bin/bash

cd "$(dirname "${BASH_SOURCE[0]}")"

export PATH=/usr/local/bin:$PATH

date=${1:-$(date -d yesterday +%F)}
url=http://127.0.0.1:9200/
index=logstash-${date//-/.}
output=data/${date}.csv

mkdir -p ${output%/*}

elastic-query-export -c $url -i $index -o $output -q '+project:dns -_exists_:message +type:(A CNAME)' -fields '@timestamp,region,client,server,type,query,answer,ttl'

gzip $output
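Usage sketch for the export script (it assumes elastic-query-export is on PATH, Elasticsearch listens on 127.0.0.1:9200, and daily indices follow the `logstash-YYYY.MM.DD` convention used above):

```bash
# Yesterday's index (default) -> data/<yesterday>.csv.gz
./dump-elk.sh

# A specific day -> data/2024-06-01.csv.gz
./dump-elk.sh 2024-06-01
```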
memgraph/example/load-csv.cql — new file (27 lines)
@@ -0,0 +1,27 @@
LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.query AS query
MERGE (d:Domain {name: query});

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.answer AS answer WHERE row.type = 'CNAME'
MERGE (d:Domain {name: answer});

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.answer AS answer WHERE row.type = 'A'
MERGE (i:IPv4 {name: answer});

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH row WHERE row.type = 'A'
MATCH (d:Domain {name: row.query}), (i:IPv4 {name: row.answer})
MERGE (d)-[r:A]->(i)
ON CREATE SET r.created_at = timestamp()/1000000
CALL date.parse(replace(row.`@timestamp`, "Z", ""), "s", "%Y-%m-%dT%H:%M:%S.%f", "UTC") YIELD parsed
SET r.updated_at = parsed;

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH row WHERE row.type = 'CNAME'
MATCH (d1:Domain {name: row.query}), (d2:Domain {name: row.answer})
MERGE (d1)-[r:CNAME]->(d2)
ON CREATE SET r.created_at = timestamp()/1000000
CALL date.parse(replace(row.`@timestamp`, "Z", ""), "s", "%Y-%m-%dT%H:%M:%S.%f", "UTC") YIELD parsed
SET r.updated_at = parsed;
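After the five LOAD CSV passes, the graph holds `:Domain` and `:IPv4` nodes linked by `:A` and `:CNAME` edges. A minimal sanity-check sketch, reusing the container name from load-csv.sh:

```bash
# Spot-check a few Domain -> IPv4 edges created by load-csv.cql
docker exec -i memgraph_memgraph_1 mgconsole <<- _CQL_
	MATCH (d:Domain)-[r:A]->(i:IPv4)
	RETURN d.name, i.name LIMIT 5;
_CQL_
```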
memgraph/example/load-csv.sh — new executable file (15 lines)
@@ -0,0 +1,15 @@
#!/bin/bash

cd "$(dirname "${BASH_SOURCE[0]}")"

FILE=$(date -d yesterday +%F).csv.gz
SRC_FILE=/data/dns/data/$FILE
DST_FILE=/path/to/dns.csv.gz

echo "$(date +%FT%T) GET $FILE"
scp elk-us:$SRC_FILE $DST_FILE

echo "$(date +%FT%T) LOAD CSV"
docker exec -i memgraph_memgraph_1 mgconsole < load-csv.cql

echo "$(date +%FT%T) DONE"
outline-ss-server/data/keygen.sh — new executable file (14 lines)
@@ -0,0 +1,14 @@
#!/bin/bash

set -euo pipefail

# pwgen 12 1

NAME=${1:?name is empty}

SERVER="127.0.0.1:9443"
CIPHER="chacha20-ietf-poly1305"
SECRET=$(yq -o json config.yml | jq -e -r --arg name "$NAME" '.keys[]|select(.id==$name).secret')
DIGEST=$(echo -n "$CIPHER:$SECRET" | base64 | sed 's/=*$//')

echo "ss://$DIGEST@$SERVER/?outline=1"
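Usage sketch for the key generator (the key id `alice` is hypothetical; it must match an `id` under `keys:` in the `config.yml` next to the script, and the output below is illustrative, truncated with `...`):

```bash
# Emit an ss:// access URL for the named key
./keygen.sh alice
# -> ss://Y2hhY2hhMjAtaWV0Zi1wb2x5...@127.0.0.1:9443/?outline=1
```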
outline-ss-server/systemd/outline-ss-server.service — new file (16 lines)
@@ -0,0 +1,16 @@
[Unit]
Description=outline-ss-server
After=network.target

[Service]
User=vpn
Group=vpn
WorkingDirectory=/etc/outline-ss-server
ExecStart=/usr/local/bin/outline-ss-server -config=config.yml -metrics=127.0.0.1:9092 -ip_country_db=ip-country.mmdb -replay_history=10000
ExecReload=/bin/kill -HUP $MAINPID
LimitNOFILE=65536
RestartSec=5
Restart=always

[Install]
WantedBy=multi-user.target
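Installation sketch for the unit (standard systemd steps; it assumes the `vpn` user/group and the /etc/outline-ss-server working directory already exist):

```bash
sudo cp outline-ss-server.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl enable --now outline-ss-server
# ExecReload above sends SIGHUP, so this reloads config.yml in place
sudo systemctl reload outline-ss-server
```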
pihole/README.md
@@ -6,7 +6,7 @@ pihole
 The [Pi-hole®][1] is a DNS sinkhole that protects your devices from unwanted
 content, without installing any client-side software.
 
-- https://docs.pi-hole.net/
+- https://docs.pi-hole.net/core/pihole-command/
 - https://hub.docker.com/r/pihole/pihole/
 
 [1]: https://pi-hole.net/
tldextract/Dockerfile — new file (13 lines)
@@ -0,0 +1,13 @@
#
# Dockerfile for tldextract
#

FROM alpine:3

RUN apk add --no-cache py3-pip \
    && pip install tldextract \
    && tldextract --update \
    && tldextract --version

ENTRYPOINT ["/usr/bin/tldextract"]
CMD ["--help"]
tldextract/README.md — new file (17 lines)
@@ -0,0 +1,17 @@
tldextract
==========

[tldextract][1] accurately separates a URL's subdomain, domain, and public
suffix, using the Public Suffix List (PSL).

## Tutorial

```bash
# Create an alias
$ alias tldextract='docker run --rm -u $(id -u):$(id -g) vimagick/tldextract'

# Run it
$ tldextract -j www.google.com
```

[1]: https://github.com/john-kurkowski/tldextract
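For reference, `-j` emits one JSON object per input name; illustrative output for the command above (the exact field set varies by tldextract version, but `fqdn` is the field the browserless scrape pipeline earlier in this diff consumes):

```bash
$ tldextract -j www.google.com
{"subdomain": "www", "domain": "google", "suffix": "com", "fqdn": "www.google.com"}
```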
urlwatch/data/reporters.py — new file (69 lines)
@@ -0,0 +1,69 @@
class SlackReporter(TextReporter):
    """Send a message to a Slack channel"""

    __kind__ = 'slack'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.max_length = self.config.get('max_message_length', 40000)

    def submit(self):
        webhook_url = self.config['webhook_url']
        text = '\n'.join(super().submit())

        if not text:
            logger.debug('Not calling {} API (no changes)'.format(self.__kind__))
            return

        result = None
        for chunk in chunkstring(text, self.max_length, numbering=True):
            res = self.submit_chunk(webhook_url, chunk)
            # check for None first so a missing response can't raise
            # AttributeError on .status_code
            if res is None or res.status_code != requests.codes.ok:
                result = res

        return result

    def submit_chunk(self, webhook_url, text):
        logger.debug("Sending {} request with text: {}".format(self.__kind__, text))
        post_data = self.prepare_post_data(text)
        result = requests.post(webhook_url, json=post_data)
        try:
            if result.status_code == requests.codes.ok:
                logger.info("{} response: ok".format(self.__kind__))
            else:
                logger.error("{} error: {}".format(self.__kind__, result.text))
        except ValueError:
            logger.error(
                "Failed to parse {} response. HTTP status code: {}, content: {}".format(
                    self.__kind__, result.status_code, result.content))
        return result

    def prepare_post_data(self, text):
        if self.config.get('rich_text', False):
            return {
                "blocks": [
                    {
                        "type": "rich_text",
                        "elements": [
                            {
                                "type": "rich_text_preformatted",
                                "elements": [
                                    {"type": "text", "text": text}
                                ]
                            }
                        ]
                    }
                ]
            }
        else:
            return {"text": text}


class MattermostReporter(SlackReporter):
    """Send a message to a Mattermost channel"""

    __kind__ = 'mattermost'

    def prepare_post_data(self, text):
        return {"text": text}
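These classes plug into urlwatch's reporter machinery: `TextReporter`, `chunkstring`, `logger`, and `requests` come from urlwatch's own reporters module, into which this file is merged (the exact loading mechanism depends on the image setup). A hedged sketch of the matching `urlwatch.yaml` report section; the webhook URLs are placeholders:

```yaml
report:
  slack:
    enabled: true
    webhook_url: https://hooks.slack.com/services/T000/B000/XXXXXXXX
    rich_text: true            # use Slack rich_text blocks (option read by this class)
    max_message_length: 40000
  mattermost:
    enabled: true
    webhook_url: https://mattermost.example.com/hooks/xxxxxxxx
```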
urlwatch/systemd/urlwatch.service — new file (14 lines)
@@ -0,0 +1,14 @@
[Unit]
Description=lightweight incoming webhook server
After=network.target

[Service]
ExecStart=/usr/local/bin/webhook -hooks=hooks.json -hooks=hooks.yaml -hotreload -port=9000 -template -urlprefix=hooks
ExecReload=/bin/kill -HUP $MAINPID
WorkingDirectory=/etc/webhook/
LimitNOFILE=65536
RestartSec=5
Restart=always

[Install]
WantedBy=multi-user.target