mirror of https://github.com/vimagick/dockerfiles
synced 2024-06-16 11:58:47 +00:00
Compare commits: de230c4b90...94ce218f4f (1 commit)

Commit 94ce218f4f

README.md: 60 lines changed
@@ -154,7 +154,6 @@ A collection of delicious docker recipes.

## Utility

- [x] registry-cli
- [x] tldextract

## Media
@@ -313,13 +312,12 @@ A collection of delicious docker recipes.

- [x] 3proxy/3proxy
- [x] adguard/adguardhome
- [x] ghcr.io/linuxserver/airsonic :musical_note:
- [x] apify
  - [x] actor-node
  - [x] actor-node-playwright
  - [x] actor-node-playwright-chrome
  - [x] actor-node-playwright-firefox
  - [x] actor-node-playwright-webkit
  - [x] actor-node-puppeteer-chrome
- [x] apify/actor-node
- [x] apify/actor-node-puppeteer-chrome
- [x] apify/actor-node-playwright
- [x] apify/actor-node-playwright-chrome
- [x] apify/actor-node-playwright-firefox
- [x] apify/actor-node-playwright-webkit
- [x] archivebox/archivebox
- [x] docker.bintray.io/jfrog/artifactory-oss
- [x] jeffail/benthos
@@ -334,7 +332,7 @@ A collection of delicious docker recipes.

- [x] cp-kafka-rest
- [x] ksqldb-cli
- [x] ksqldb-server
- [x] couchdb :bucket:
- [x] couchdb
- [x] schollz/croc
- [x] streamsets/datacollector
- [x] daskdev
@@ -391,51 +389,48 @@ A collection of delicious docker recipes.

- [x] ipfs/kubo
- [x] heartexlabs/label-studio
- [x] martialblog/limesurvey
- [x] lldap/lldap
- [x] mailhog/mailhog
- [x] linuxserver/mastodon
- [x] 42wim/matterbridge :octocat:
- [x] matrixconduit/matrix-conduit
- [x] getmeili/meilisearch :mag:
- [x] mitmproxy/mitmproxy
- [x] mariadb :bucket:
- [x] deluan/navidrome :musical_note:
- [x] netdata/netdata
- [x] nextcloud
- [x] sonatype/nexus3
- [ ] jwilder/nginx-proxy
- [x] tiangolo/nginx-rtmp :camera:
- [x] jazzdd/phpvirtualbox
- [x] sonatype/nexus3
- [x] jupyter/notebook
- [x] mariadb
- [x] matomo
- [x] memgraph :bucket:
  - [x] lab
  - [x] memgraph
  - [x] memgraph-mage
  - [x] memgraph-platform
- [x] metabase/metabase
- [x] metasploitframework/metasploit-framework :skull:
- [x] minio/minio
- [x] mongo :bucket:
- [x] mongo
- [x] ccrisan/motioneye
- [x] deluan/navidrome :musical_note:
- [x] neo4j :bucket:
- [x] netdata/netdata
- [x] sonatype/nexus3
- [x] nextcloud
- [ ] jwilder/nginx-proxy
- [x] tiangolo/nginx-rtmp :camera:
- [x] jupyter/notebook
- [x] neo4j
- [x] lldap/lldap
- [x] luzifer/nginx-sso
- [x] n8nio/n8n
- [x] illuspas/node-media-server :cn:
- [x] jorijn/nostream
- [x] scsibug/nostr-rs-relay
- [x] notaitech/nudenet
- [x] odoo
- [x] ohmyform
  - [x] api
  - [x] ui
- [x] ohmyform/api
- [x] ohmyform/ui
- [x] osixia/openldap
- [x] openresty/openresty
- [x] opensearchproject/opensearch :bucket:
- [x] opensearchproject/opensearch
- [x] kylemanna/openvpn
- [x] campbellsoftwaresolutions/osticket
- [x] outlinewiki/outline
- [x] gabekangas/owncast
- [x] owncloud
- [x] jorijn/nostream
- [x] scsibug/nostr-rs-relay
- [x] owntracks
  - [x] frontend
  - [x] recorder
@@ -444,11 +439,10 @@ A collection of delicious docker recipes.

- [x] viktorstrate/photoview
- [x] phplist/phplist
- [x] phpmyadmin
- [x] jazzdd/phpvirtualbox
- [x] pihole/pihole
- [x] mcr.microsoft.com/playwright
- [x] portainer/portainer :+1:
- [x] postgres :bucket:
- [x] postgres
- [x] postgrest/postgrest
- [x] prefecthq/prefect
- [x] prom/prometheus
@@ -494,7 +488,7 @@ A collection of delicious docker recipes.

- [x] tensorflow
  - [x] serving
- [x] kitabisa/teler
- [x] tile38/tile38 :bucket:
- [x] tile38/tile38
- [x] traccar/traccar
- [x] traefik
- [x] trinodb/trino
@@ -88,23 +88,11 @@ $ pip install selenium

$ python screenshot.py
$ imgcat google.png

$ http :3000/screenshot \
$ http http://127.0.0.1:3000/screenshot \
    url=https://www.youtube.com \
    options[fullPage]:=true \
    gotoOptions[waitUntil]=networkidle2 > youtube.png
$ imgcat youtube.png

$ http :3000/scrape url=https://www.youtube.com elements[0][selector]=title debug[network]:=true |
    jq -r '.debug.network.outbound[].url' |
    xargs -r tldextract -j |
    jq -r 'select(.fqdn|length>0).fqdn' |
    sort -u
accounts.google.com
fonts.googleapis.com
fonts.gstatic.com
googleads.g.doubleclick.net
i.ytimg.com
www.youtube.com
```

[1]: https://docs.browserless.io/
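The scrape pipeline above hands each URL to `tldextract` through `xargs`, and `xargs` spawns the command directly, so a shell alias is not visible to it. If `tldextract` is only available as the docker image from this repo, a tiny wrapper script on `PATH` is one way to bridge the gap; a sketch, with illustrative paths and image tag:

```bash
# Wrapper so xargs (and other programs) can invoke the dockerized tldextract.
cat > /usr/local/bin/tldextract <<'EOF'
#!/bin/sh
exec docker run --rm -i vimagick/tldextract "$@"
EOF
chmod +x /usr/local/bin/tldextract
```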
@@ -5,10 +5,9 @@

FROM alpine:3
MAINTAINER EasyPi Software Foundation

ARG FTL_VERSION=v5.25.1
ARG FTL_VERSION=v5.23
ARG FTL_FILE=pihole-FTL-musl-linux-x86_64
ARG FTL_GIT=https://github.com/pi-hole
ARG FTL_URL=${FTL_GIT}/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}
ARG FTL_URL=https://github.com/pi-hole/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}

RUN set -xe \
    && apk add --no-cache curl \

@@ -16,15 +15,6 @@ RUN set -xe \
    && curl -sSL ${FTL_URL} -o /usr/bin/pihole-FTL \
    && chmod +x /usr/bin/pihole-FTL \
    && pihole-FTL --version \
    && mkdir -p /opt/pihole \
    && cd /opt/pihole \
    && curl -sSL -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/list.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/utils.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/chronometer.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/COL_TABLE \
    && cd /usr/local/bin \
    && curl -sSL ${FTL_GIT}/pi-hole/raw/master/pihole -o /usr/local/bin/pihole \
    && chmod +x /usr/local/bin/pihole /opt/pihole/*.sh \
    && apk del curl

VOLUME /etc/pihole
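Building the Alpine FTL image from this Dockerfile should look roughly like this; the tag is illustrative, and `FTL_VERSION` can be pinned at build time via the build arg shown above:

```bash
# Build the musl/x86_64 FTL image and confirm the downloaded binary runs.
docker build --build-arg FTL_VERSION=v5.25.1 -t vimagick/ftldns .
# Assumes the image defines no entrypoint that would shadow the command:
docker run --rm vimagick/ftldns pihole-FTL --version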
@@ -1,38 +0,0 @@

#
# Dockerfile for FTLDNS (pihole-FTL)
#

FROM debian:12-slim
MAINTAINER EasyPi Software Foundation

ARG FTL_VERSION=v5.25.1
ARG FTL_FILE=pihole-FTL-aarch64-linux-gnu
ARG FTL_GIT=https://github.com/pi-hole
ARG FTL_URL=${FTL_GIT}/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}

RUN set -xe \
    && apt update -y \
    && apt install -y curl procps \
    && echo "conf-dir=/etc/pihole/dnsmasq.d,*.conf" >> /etc/dnsmasq.conf \
    && curl -sSL ${FTL_URL} -o /usr/bin/pihole-FTL \
    && chmod +x /usr/bin/pihole-FTL \
    && pihole-FTL --version \
    && mkdir -p /opt/pihole \
    && cd /opt/pihole \
    && curl -sSL -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/list.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/utils.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/chronometer.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/COL_TABLE \
    && cd /usr/local/bin \
    && curl -sSL ${FTL_GIT}/pi-hole/raw/master/pihole -o /usr/local/bin/pihole \
    && chmod +x /usr/local/bin/pihole /opt/pihole/*.sh \
    && apt remove -y curl \
    && rm -rf /var/lib/apt/lists/*

VOLUME /etc/pihole

EXPOSE 4711/tcp \
       53/tcp \
       53/udp

CMD ["pihole-FTL", "-f"]
@@ -10,29 +10,14 @@ $ docker compose up -d

$ dig @127.0.0.1 -p 53 www.youtube.com

$ docker compose exec ftldns bash
>>> pihole-FTL sql -h gravity.db
.schema domainlist
insert into domainlist(type, domain) values (3, '(\.|^)youtube\.com$');
.quit
>>> pihole --regex '(\.|^)baidu\.com$'
>>> pihole --wild 'qq.com'
>>> pihole-FTL regex-test www.baidu.com
>>> pihole --wild -d youtube.com
>>> pihole --regex -l
Displaying regex blacklist:
  1: (\.|^)baidu\.com$ (enabled, last modified Tue, 27 Feb 2024 11:17:59 +0000)
  2: (\.|^)qq\.com$ (enabled, last modified Tue, 27 Feb 2024 11:22:17 +0000)
>>> pihole -c
|¯¯¯(¯)_|¯|_ ___|¯|___     Core: API Offline
| ¯_/¯|_| ' \/ _ \ / -_)
|_| |_| |_||_\___/_\___|
——————————————————————————————————————————————————————————
>>> exit
$ docker compose exec ftldns pihole-FTL sql -h gravity.db
>>> .schema domainlist
>>> insert into domainlist(type, domain) values (3, '(\.|^)youtube\.com$');
>>> .quit

$ docker compose kill -s RTMIN ftldns
$ docker compose exec ftldns kill -RTMIN 1

$ dig @127.0.0.1 -p 53 www.baidu.com
$ dig @127.0.0.1 -p 53 www.youtube.com

$ telnet 127.0.0.1 4711
>version

@@ -43,8 +28,7 @@ $ telnet 127.0.0.1 4711
>quit
```

> Read more about [domainlist][2] and [telnet-api][3].
> Read more about [telnet-api][2].

[1]: https://github.com/pi-hole/FTL
[2]: https://docs.pi-hole.net/database/gravity/#domain-tables-domainlist
[3]: https://docs.pi-hole.net/ftldns/telnet-api/
[2]: https://docs.pi-hole.net/ftldns/telnet-api/
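The `insert into domainlist` statement above uses `type = 3` because the `type` column of the gravity database encodes list membership. A quick way to inspect what is loaded, mirroring the interactive `pihole-FTL sql` session shown in the README:

```bash
# domainlist.type, per the Pi-hole gravity database documentation:
#   0 = exact whitelist, 1 = exact blacklist,
#   2 = regex whitelist, 3 = regex blacklist
docker compose exec ftldns pihole-FTL sql -h gravity.db
>>> select type, domain, enabled from domainlist;
>>> .quit
```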
@@ -1,7 +0,0 @@

memgraph
========

[Memgraph][1] is an open source graph database built for real-time streaming
and compatible with Neo4j.

[1]: https://github.com/memgraph/memgraph
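A minimal smoke test for this recipe, assuming the compose file removed below is up and its service is named `memgraph` (both are assumptions based on that file):

```bash
# Create and read back a tiny graph via mgconsole inside the container.
docker compose exec -T memgraph mgconsole <<'CQL'
CREATE (d:Domain {name: 'www.example.com'})-[:A]->(i:IPv4 {name: '93.184.216.34'});
MATCH (d:Domain)-[:A]->(i:IPv4) RETURN d.name, i.name;
CQL
```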
@@ -1,31 +0,0 @@

#
# See: https://memgraph.com/docs/configuration/configuration-settings
#

version: "3.8"

services:

  memgraph:
    image: memgraph/memgraph-mage:1.14.1-memgraph-2.14.1
    ports:
      - "7444:7444"  # Log
      - "7687:7687"  # Bolt
    volumes:
      # ./data/etc:/etc/memgraph
      - ./data/log:/var/log/memgraph
      - ./data/var:/var/lib/memgraph
    environment:
      - MEMGRAPH=--telemetry-enabled=false
    restart: unless-stopped

  lab:
    image: memgraph/lab:2.11.1
    ports:
      - "3000:3000"  # Web
    environment:
      - QUICK_CONNECT_MG_HOST=memgraph
      - QUICK_CONNECT_MG_PORT=7687
    depends_on:
      - memgraph
    restart: unless-stopped
@@ -1,36 +0,0 @@

#!/bin/bash

HOST=${1:?DOMAIN or IP is empty}

# Query Memgraph for every resolution path through $HOST, flatten the paths
# into child/parent edges with gawk, then render them as a tree with column.
docker exec -i memgraph_memgraph_1 mgconsole -output-format=csv <<- _CQL_ | sed -e 's/"//g' | tail -n +2 | tr '[],[]' ' ' | gawk -f /dev/fd/3 3<<- "_AWK_" | column -t -i1 -p2 -r3 -H1,2 | sed 's/─/& /'
	MATCH p=(n)-[*]->(m)
	WHERE any(n in nodes(p) where n.name = '$HOST') AND not exists(()-->(n)) AND not exists((m)-->())
	UNWIND nodes(p) AS nn
	WITH DISTINCT nn
	CALL path.expand(nn,[">"],[],1,1) YIELD result
	RETURN extract(i in nodes(result)|i.name);
_CQL_
	# Assign a numeric id to each node name, emit "child parent name" rows,
	# and attach any remaining roots to a synthetic "." node at the end.
	BEGIN {
		split("", cache);
		split("", roots);
		idx=0;
	}
	{
		if(!($1 in cache)) {
			roots[$1] = cache[$1] = ++idx;
		}
		if(!($2 in cache)) {
			cache[$2] = ++idx;
		}
		delete roots[$2];
		print cache[$2], cache[$1], $2;
	}
	END {
		print "0 -1 ."
		for(root in roots) {
			print cache[root], 0, root;
		}
	}
_AWK_

# vim: set noai noet:
@@ -1,16 +0,0 @@

#!/bin/bash

cd "$(dirname "${BASH_SOURCE[0]}")"

export PATH=/usr/local/bin:$PATH

date=${1:-$(date -d yesterday +%F)}
url=http://127.0.0.1:9200/
index=logstash-${date//-/.}
output=data/${date}.csv

mkdir -p ${output%/*}

elastic-query-export -c $url -i $index -o $output -q '+project:dns -_exists_:message +type:(A CNAME)' -fields '@timestamp,region,client,server,type,query,answer,ttl'

gzip $output
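The script takes an optional date argument and defaults to yesterday, so a backfill run looks like this; the script filename is illustrative, since the diff does not show it:

```bash
# Re-export a specific day from Elasticsearch, then confirm the gzipped CSV.
./export.sh 2024-02-26
ls data/    # -> 2024-02-26.csv.gz
```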
@@ -1,27 +0,0 @@

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.query AS query
MERGE (d:Domain {name: query});

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.answer AS answer WHERE row.type = 'CNAME'
MERGE (d:Domain {name: answer});

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.answer AS answer WHERE row.type = 'A'
MERGE (i:IPv4 {name: answer});

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH row WHERE row.type = 'A'
MATCH (d:Domain {name: row.query}), (i:IPv4 {name: row.answer})
MERGE (d)-[r:A]->(i)
ON CREATE SET r.created_at = timestamp()/1000000
CALL date.parse(replace(row.`@timestamp`, "Z", ""), "s", "%Y-%m-%dT%H:%M:%S.%f", "UTC") YIELD parsed
SET r.updated_at = parsed;

LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH row WHERE row.type = 'CNAME'
MATCH (d1:Domain {name: row.query}), (d2:Domain {name: row.answer})
MERGE (d1)-[r:CNAME]->(d2)
ON CREATE SET r.created_at = timestamp()/1000000
CALL date.parse(replace(row.`@timestamp`, "Z", ""), "s", "%Y-%m-%dT%H:%M:%S.%f", "UTC") YIELD parsed
SET r.updated_at = parsed;
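Every statement above MERGEs on the `name` property, so creating label/property indexes before the bulk load keeps the lookups from degrading into full scans. A sketch, assuming the same container name the repo's other scripts use:

```bash
# Memgraph label/property indexes for the MERGE lookups in load-csv.cql.
docker exec -i memgraph_memgraph_1 mgconsole <<'CQL'
CREATE INDEX ON :Domain(name);
CREATE INDEX ON :IPv4(name);
CQL
```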
@@ -1,15 +0,0 @@

#!/bin/bash

cd "$(dirname "${BASH_SOURCE[0]}")"

FILE=$(date -d yesterday +%F).csv.gz
SRC_FILE=/data/dns/data/$FILE
DST_FILE=/path/to/dns.csv.gz

echo "$(date +%FT%T) GET $FILE"
scp elk-us:$SRC_FILE $DST_FILE

echo "$(date +%FT%T) LOAD CSV"
docker exec -i memgraph_memgraph_1 mgconsole < load-csv.cql

echo "$(date +%FT%T) DONE"
@@ -1,14 +0,0 @@

#!/bin/bash

set -euo pipefail

# pwgen 12 1

NAME=${1:?name is empty}

SERVER="127.0.0.1:9443"
CIPHER="chacha20-ietf-poly1305"
SECRET=$(yq -o json config.yml | jq -e -r --arg name "$NAME" '.keys[]|select(.id==$name).secret')
DIGEST=$(echo -n "$CIPHER:$SECRET" | base64 | sed 's/=*$//')

echo "ss://$DIGEST@$SERVER/?outline=1"
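The access URL is just `base64("cipher:secret")` with the padding stripped, placed in the userinfo position of an `ss://` URL. The derivation can be checked by hand with illustrative values:

```bash
# Rebuild the userinfo digest from a hypothetical secret and compare.
SECRET='Secret123456'    # e.g. output of: pwgen 12 1
DIGEST=$(echo -n "chacha20-ietf-poly1305:$SECRET" | base64 | sed 's/=*$//')
echo "ss://$DIGEST@127.0.0.1:9443/?outline=1"
```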
@@ -1,16 +0,0 @@

[Unit]
Description=outline-ss-server
After=network.target

[Service]
User=vpn
Group=vpn
WorkingDirectory=/etc/outline-ss-server
ExecStart=/usr/local/bin/outline-ss-server -config=config.yml -metrics=127.0.0.1:9092 -ip_country_db=ip-country.mmdb -replay_history=10000
ExecReload=/bin/kill -HUP $MAINPID
LimitNOFILE=65536
RestartSec=5
Restart=always

[Install]
WantedBy=multi-user.target
@@ -6,7 +6,7 @@ pihole

The [Pi-hole®][1] is a DNS sinkhole that protects your devices from unwanted
content, without installing any client-side software.

- https://docs.pi-hole.net/core/pihole-command/
- https://docs.pi-hole.net/
- https://hub.docker.com/r/pihole/pihole/

[1]: https://pi-hole.net/
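For the official image referenced above, a minimal run looks roughly like this; the port mappings, timezone, and volume path are illustrative, and the Docker Hub page documents the full set of variables:

```bash
# Minimal Pi-hole container: DNS on 53, admin UI mapped to host port 8080.
docker run -d --name pihole \
  -p 53:53/tcp -p 53:53/udp -p 8080:80 \
  -e TZ=UTC \
  -v "$PWD/etc-pihole:/etc/pihole" \
  pihole/pihole
```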
@@ -1,13 +0,0 @@

#
# Dockerfile for tldextract
#

FROM alpine:3

RUN apk add --no-cache py3-pip \
    && pip install tldextract \
    && tldextract --update \
    && tldextract --version

ENTRYPOINT ["/usr/bin/tldextract"]
CMD ["--help"]
@@ -1,17 +0,0 @@

tldextract
==========

[tldextract][1] accurately separates a URL's subdomain, domain, and public
suffix, using the Public Suffix List (PSL).

## Tutorial

```bash
# Create an alias
$ alias tldextract='docker run --rm -u $(id -u):$(id -g) vimagick/tldextract'

# Run it
$ tldextract -j www.google.com
```

[1]: https://github.com/john-kurkowski/tldextract
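With `-j`, the tool emits one JSON object per input. The exact field set depends on the installed version, but the output is along these lines, and the `fqdn` key is what the browserless scrape pipeline's `jq` filter consumes:

```bash
$ tldextract -j www.google.com
{"subdomain": "www", "domain": "google", "suffix": "com", "fqdn": "www.google.com"}
```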
@@ -1,69 +0,0 @@

class SlackReporter(TextReporter):
    """Send a message to a Slack channel"""

    __kind__ = 'slack'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.max_length = self.config.get('max_message_length', 40000)

    def submit(self):
        webhook_url = self.config['webhook_url']
        text = '\n'.join(super().submit())

        if not text:
            logger.debug('Not calling {} API (no changes)'.format(self.__kind__))
            return

        result = None
        for chunk in chunkstring(text, self.max_length, numbering=True):
            res = self.submit_chunk(webhook_url, chunk)
            # Treat a missing response or non-200 status as failure; the None
            # check must come first to avoid touching .status_code on None.
            if res is None or res.status_code != requests.codes.ok:
                result = res

        return result

    def submit_chunk(self, webhook_url, text):
        logger.debug("Sending {} request with text: {}".format(self.__kind__, text))
        post_data = self.prepare_post_data(text)
        result = requests.post(webhook_url, json=post_data)
        try:
            if result.status_code == requests.codes.ok:
                logger.info("{} response: ok".format(self.__kind__))
            else:
                logger.error("{} error: {}".format(self.__kind__, result.text))
        except ValueError:
            logger.error(
                "Failed to parse {} response. HTTP status code: {}, content: {}".format(
                    self.__kind__, result.status_code, result.content))
        return result

    def prepare_post_data(self, text):
        if self.config.get('rich_text', False):
            return {
                "blocks": [
                    {
                        "type": "rich_text",
                        "elements": [
                            {
                                "type": "rich_text_preformatted",
                                "elements": [
                                    {"type": "text", "text": text}
                                ]
                            }
                        ]
                    }
                ]
            }
        else:
            return {"text": text}


class MattermostReporter(SlackReporter):
    """Send a message to a Mattermost channel"""

    __kind__ = 'mattermost'

    def prepare_post_data(self, text):
        return {"text": text}
@@ -1,14 +0,0 @@

[Unit]
Description=lightweight incoming webhook server
After=network.target

[Service]
ExecStart=/usr/local/bin/webhook -hooks=hooks.json -hooks=hooks.yaml -hotreload -port=9000 -template -urlprefix=hooks
ExecReload=/bin/kill -HUP $MAINPID
WorkingDirectory=/etc/webhook/
LimitNOFILE=65536
RestartSec=5
Restart=always

[Install]
WantedBy=multi-user.target
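Installing the unit follows the usual systemd workflow; a sketch assuming it is saved as `/etc/systemd/system/webhook.service`:

```bash
sudo systemctl daemon-reload
sudo systemctl enable --now webhook    # start now and enable at boot
systemctl status webhook               # confirm it is listening on :9000
sudo systemctl reload webhook          # SIGHUP via ExecReload reloads the hooks
```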
@@ -2,42 +2,23 @@
# Dockerfile for webhook
#

FROM golang:alpine AS build
ENV CGO_ENABLED=0
ENV GOOS=linux
ENV GOARCH=amd64
ENV GO111MODULE=off
WORKDIR /go/bin
RUN apk add --no-cache git
RUN go get -d -v github.com/adnanh/webhook
RUN go build --ldflags '-s -extldflags "-static"' -i -o webhook github.com/adnanh/webhook

FROM alpine:3
MAINTAINER EasyPi Software Foundation

ARG WEBHOOK_VERSION=2.8.1
ARG WEBHOOK_OS=linux
ARG WEBHOOK_ARCH=amd64
ARG WEBHOOK_FILE=webhook-$WEBHOOK_OS-$WEBHOOK_ARCH.tar.gz
ARG WEBHOOK_URL=https://github.com/adnanh/webhook/releases/download/$WEBHOOK_VERSION/$WEBHOOK_FILE

RUN set -xe \
    && apk add --no-cache \
        bash \
        bind-tools \
        ca-certificates \
        coreutils \
        curl \
        curlie \
        gawk \
        gcompat \
        grep \
        iputils-ping \
        jq \
        mosquitto-clients \
        openssh \
        sed \
        tar \
        wget \
        xsv \
    && curl -sSL $WEBHOOK_URL | tar xz -C /usr/local/bin ${WEBHOOK_FILE%.tar.gz}/webhook --strip 1 \
    && webhook -version

RUN apk add --no-cache bash coreutils curl jq
COPY --from=build /go/bin/webhook /usr/local/bin/
WORKDIR /etc/webhook
VOLUME /etc/webhook

EXPOSE 9000

ENTRYPOINT ["webhook"]
# See: https://github.com/adnanh/webhook/blob/master/docs/Webhook-Parameters.md
CMD ["-hooks=hooks.json", "-hooks=hooks.yaml", "-hotreload", "-port=9000", "-template", "-urlprefix=hooks"]
# Doc: https://github.com/adnanh/webhook/blob/master/docs/Webhook-Parameters.md
CMD ["-hooks", "hooks.json", "-hooks", "hooks.yaml", "-hotreload", "-template", "-verbose"]
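Building and smoke-testing the image locally should look like this (the tag is illustrative); since the entrypoint is `webhook`, any arguments replace the default CMD:

```bash
docker build -t vimagick/webhook .
docker run --rm vimagick/webhook -version   # prints the bundled webhook version
```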
@@ -9,7 +9,7 @@ execute configured commands.

## Directory Tree

```bash
```
~/fig/webhook/
├── docker-compose.yml
└── data/
@@ -17,24 +17,36 @@ execute configured commands.
    └── test.sh* (executable)
```

File: data/hooks.yaml
docker-compose.yml

```yaml
- id: test
  execute-command: /etc/webhook/test.sh
  pass-file-to-command:
    - source: entire-payload
      envname: HOOK_PAYLOAD
  command-working-directory: /etc/webhook
  include-command-output-in-response: true
webhook:
  image: vimagick/webhook
  ports:
    - "9000:9000"
  volumes:
    - "./data:/etc/webhook"
  restart: always
```

File: data/test.sh
hooks.json

```json
[
  {
    "id": "test",
    "execute-command": "/etc/webhook/test.sh",
    "command-working-directory": "/etc/webhook",
    "include-command-output-in-response": true
  }
]
```

test.sh

```bash
#!/bin/bash
echo 'hello world'
cat $HOOK_PAYLOAD
```

## Up and Running
@@ -47,27 +59,26 @@ $ chmod +x data/test.sh

$ docker-compose up -d
Creating webhook_webhook_1...

$ curl http://localhost:9000/hooks/test -d hello=world
$ curl http://localhost:9000/hooks/test
hello world
{"hello":"world"}

$ docker-compose logs
Attaching to webhook_webhook_1
webhook_1 | [webhook] 2024/02/20 04:26:52 version 2.8.1 starting
webhook_1 | [webhook] 2024/02/20 04:26:52 setting up os signal watcher
webhook_1 | [webhook] 2024/02/20 04:26:52 attempting to load hooks from hooks.json
webhook_1 | [webhook] 2024/02/20 04:26:52 loaded 1 hook(s) from file
webhook_1 | [webhook] 2024/02/20 04:26:52 > test
webhook_1 | [webhook] 2024/02/20 04:26:52 starting insecure (http) webhook on :9000
webhook_1 | [webhook] 2024/02/20 04:26:52 os signal watcher ready
webhook_1 | [webhook] 2024/02/20 04:27:11 Started GET /hooks/test
webhook_1 | [webhook] 2024/02/20 04:27:11 Completed 200 OK in 390.207µs
webhook_1 | [webhook] 2024/02/20 04:27:11 test got matched (1 time(s))
webhook_1 | [webhook] 2024/02/20 04:27:11 test hook triggered successfully
webhook_1 | [webhook] 2024/02/20 04:27:11 executing /etc/webhook/test.sh (/etc/webhook/test.sh) with arguments ["/etc/webhook/test.sh"] and environment [HOOK_PAYLOAD=/etc/webhook/HOOK_PAYLOAD967569167] using /etc/webhook as cwd
webhook_1 | [webhook] 2024/02/20 04:27:11 command output: hello world
webhook_1 | {"hello":"world"}
webhook_1 | [webhook] 2024/02/20 04:27:11 finished handling test
webhook_1 | [webhook] 2015/11/05 04:26:52 version 2.3.5 starting
webhook_1 | [webhook] 2015/11/05 04:26:52 setting up os signal watcher
webhook_1 | [webhook] 2015/11/05 04:26:52 attempting to load hooks from hooks.json
webhook_1 | [webhook] 2015/11/05 04:26:52 loaded 1 hook(s) from file
webhook_1 | [webhook] 2015/11/05 04:26:52 > test
webhook_1 | [webhook] 2015/11/05 04:26:52 starting insecure (http) webhook on :9000
webhook_1 | [webhook] 2015/11/05 04:26:52 os signal watcher ready
webhook_1 | [webhook] 2015/11/05 04:27:11 Started GET /hooks/test
webhook_1 | [webhook] 2015/11/05 04:27:11 Completed 200 OK in 390.207µs
webhook_1 | [webhook] 2015/11/05 04:27:11 test got matched (1 time(s))
webhook_1 | [webhook] 2015/11/05 04:27:11 test hook triggered successfully
webhook_1 | [webhook] 2015/11/05 04:27:11 executing /scripts/test.sh (/scripts/test.sh) with arguments [/scripts/test.sh] using /scripts as cwd
webhook_1 | [webhook] 2015/11/05 04:27:11 command output: hello world
webhook_1 |
webhook_1 | [webhook] 2015/11/05 04:27:11 finished handling test
```

[1]: https://github.com/adnanh/webhook
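The `pass-file-to-command` stanza is what makes `cat $HOOK_PAYLOAD` in `test.sh` work: webhook writes the request body to a temporary file and exports its path in that variable, as the 2024 log lines above show. Posting a JSON body exercises the whole path:

```bash
curl -s http://localhost:9000/hooks/test \
  -H 'Content-Type: application/json' \
  -d '{"hello":"world"}'
# -> hello world
#    {"hello":"world"}
```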
webhook/data/hooks.json (new file, 1 line)

@@ -0,0 +1 @@
[]
@@ -1,13 +0,0 @@

- id: test
  execute-command: /etc/webhook/test.sh
  pass-file-to-command:
    - source: entire-payload
      envname: HOOK_PAYLOAD
  command-working-directory: /etc/webhook
  include-command-output-in-response: true

- id: osmand
  execute-command: '/bin/echo'
  pass-arguments-to-command:
    - source: entire-query
  response-message: "OK\n"
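The `osmand` hook passes the raw query string to `/bin/echo` and always replies with the fixed `response-message`, which makes it a handy endpoint for GPS loggers. An illustrative request:

```bash
curl 'http://localhost:9000/hooks/osmand?lat=52.5&lon=13.4&timestamp=1700000000'
# -> OK
```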
@@ -1,3 +1,5 @@
#!/bin/bash

echo 'hello world'

cat $HOOK_PAYLOAD
@@ -1,9 +1,7 @@
version: "3.8"
services:
  webhook:
    image: vimagick/webhook
    ports:
      - "9000:9000"
    volumes:
      - "./data:/etc/webhook"
    restart: unless-stopped
  webhook:
    image: vimagick/webhook
    ports:
      - "9000:9000"
    volumes:
      - "./data:/etc/webhook"
    restart: always