Compare commits

...

20 Commits

Author SHA1 Message Date
Slava b239523426
Merge 1b7553b0d3 into 6050c5f150 2024-02-28 19:00:53 -07:00
kev 6050c5f150 update 2024-02-28 18:00:28 +08:00
kev 3a84de4841 update ftldns 2024-02-28 10:58:21 +08:00
kevin fa491f5231 update outline-ss-server 2024-02-27 23:12:58 +08:00
kev 6b6d7d76ba update ftldns 2024-02-27 19:21:03 +08:00
kev 1c39413107 update ftldns 2024-02-27 19:20:48 +08:00
kev 324258a705 update ftldns 2024-02-27 17:10:49 +08:00
kev a6fdbcd65b update urlwatch 2024-02-26 15:12:05 +08:00
kev 63d9c9f3e2 update urlwatch 2024-02-26 13:34:21 +08:00
kevin b53ae6ccae update tldextract 2024-02-25 13:52:48 +08:00
kevin f1478baf34 add tldextract 2024-02-25 12:55:14 +08:00
kevin 4651e02449 update browserless 2024-02-24 00:23:20 +08:00
kev f996b2195b update memgraph 2024-02-23 18:15:20 +08:00
kev 83b2702939 update memgraph 2024-02-23 17:16:21 +08:00
kev 9809ddeabf update ftldns 2024-02-23 13:04:10 +08:00
kevin de11914917 update memgraph 2024-02-22 00:16:35 +08:00
kev 84eb9a8130 add memgraph 2024-02-21 16:03:19 +08:00
kevin f45929a477 update webhook 2024-02-20 21:01:16 +08:00
kev a8a528b6b4 update webhook 2024-02-20 18:27:53 +08:00
Slava 1b7553b0d3
Change openssl to libressl 2019-11-20 13:40:21 -08:00
24 changed files with 479 additions and 102 deletions

View File

@ -154,6 +154,7 @@ A collection of delicious docker recipes.
## Utility
- [x] registry-cli
- [x] tldextract
## Media
@ -312,12 +313,13 @@ A collection of delicious docker recipes.
- [x] 3proxy/3proxy
- [x] adguard/adguardhome
- [x] ghcr.io/linuxserver/airsonic :musical_note:
- [x] apify/actor-node
- [x] apify/actor-node-puppeteer-chrome
- [x] apify/actor-node-playwright
- [x] apify/actor-node-playwright-chrome
- [x] apify/actor-node-playwright-firefox
- [x] apify/actor-node-playwright-webkit
- [x] apify
- [x] actor-node
- [x] actor-node-playwright
- [x] actor-node-playwright-chrome
- [x] actor-node-playwright-firefox
- [x] actor-node-playwright-webkit
- [x] actor-node-puppeteer-chrome
- [x] archivebox/archivebox
- [x] docker.bintray.io/jfrog/artifactory-oss
- [x] jeffail/benthos
@ -332,7 +334,7 @@ A collection of delicious docker recipes.
- [x] cp-kafka-rest
- [x] ksqldb-cli
- [x] ksqldb-server
- [x] couchdb
- [x] couchdb :bucket:
- [x] schollz/croc
- [x] streamsets/datacollector
- [x] daskdev
@ -389,48 +391,51 @@ A collection of delicious docker recipes.
- [x] ipfs/kubo
- [x] heartexlabs/label-studio
- [x] martialblog/limesurvey
- [x] lldap/lldap
- [x] mailhog/mailhog
- [x] linuxserver/mastodon
- [x] 42wim/matterbridge :octocat:
- [x] matrixconduit/matrix-conduit
- [x] getmeili/meilisearch :mag:
- [x] mitmproxy/mitmproxy
- [x] deluan/navidrome :musical_note:
- [x] netdata/netdata
- [x] nextcloud
- [x] sonatype/nexus3
- [ ] jwilder/nginx-proxy
- [x] tiangolo/nginx-rtmp :camera:
- [x] jazzdd/phpvirtualbox
- [x] sonatype/nexus3
- [x] jupyter/notebook
- [x] mariadb
- [x] mariadb :bucket:
- [x] matomo
- [x] memgraph :bucket:
- [x] lab
- [x] memgraph
- [x] memgraph-mage
- [x] memgraph-platform
- [x] metabase/metabase
- [x] metasploitframework/metasploit-framework :skull:
- [x] minio/minio
- [x] mongo
- [x] mongo :bucket:
- [x] ccrisan/motioneye
- [x] neo4j
- [x] lldap/lldap
- [x] deluan/navidrome :musical_note:
- [x] neo4j :bucket:
- [x] netdata/netdata
- [x] sonatype/nexus3
- [x] nextcloud
- [ ] jwilder/nginx-proxy
- [x] tiangolo/nginx-rtmp :camera:
- [x] jupyter/notebook
- [x] luzifer/nginx-sso
- [x] n8nio/n8n
- [x] illuspas/node-media-server :cn:
- [x] jorijn/nostream
- [x] scsibug/nostr-rs-relay
- [x] notaitech/nudenet
- [x] odoo
- [x] ohmyform
- [x] ohmyform/api
- [x] ohmyform/ui
- [x] api
- [x] ui
- [x] osixia/openldap
- [x] openresty/openresty
- [x] opensearchproject/opensearch
- [x] opensearchproject/opensearch :bucket:
- [x] kylemanna/openvpn
- [x] campbellsoftwaresolutions/osticket
- [x] outlinewiki/outline
- [x] gabekangas/owncast
- [x] owncloud
- [x] jorijn/nostream
- [x] scsibug/nostr-rs-relay
- [x] owntracks
- [x] frontend
- [x] recorder
@ -439,10 +444,11 @@ A collection of delicious docker recipes.
- [x] viktorstrate/photoview
- [x] phplist/phplist
- [x] phpmyadmin
- [x] jazzdd/phpvirtualbox
- [x] pihole/pihole
- [x] mcr.microsoft.com/playwright
- [x] portainer/portainer :+1:
- [x] postgres
- [x] postgres :bucket:
- [x] postgrest/postgrest
- [x] prefecthq/prefect
- [x] prom/prometheus
@ -488,7 +494,7 @@ A collection of delicious docker recipes.
- [x] tensorflow
- [x] serving
- [x] kitabisa/teler
- [x] tile38/tile38
- [x] tile38/tile38 :bucket:
- [x] traccar/traccar
- [x] traefik
- [x] trinodb/trino

View File

@ -88,11 +88,23 @@ $ pip install selenium
$ python screenshot.py
$ imgcat google.png
$ http http://127.0.0.1:3000/screenshot \
$ http :3000/screenshot \
url=https://www.youtube.com \
options[fullPage]:=true \
gotoOptions[waitUntil]=networkidle2 > youtube.png
$ imgcat youtube.png
$ http :3000/scrape url=https://www.youtube.com elements[0][selector]=title debug[network]:=true |
jq -r '.debug.network.outbound[].url' |
xargs -r tldextract -j |
jq -r 'select(.fqdn|length>0).fqdn' |
sort -u
accounts.google.com
fonts.googleapis.com
fonts.gstatic.com
googleads.g.doubleclick.net
i.ytimg.com
www.youtube.com
```
[1]: https://docs.browserless.io/

View File

@ -5,9 +5,10 @@
FROM alpine:3
MAINTAINER EasyPi Software Foundation
ARG FTL_VERSION=v5.23
ARG FTL_VERSION=v5.25.1
ARG FTL_FILE=pihole-FTL-musl-linux-x86_64
ARG FTL_URL=https://github.com/pi-hole/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}
ARG FTL_GIT=https://github.com/pi-hole
ARG FTL_URL=${FTL_GIT}/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}
RUN set -xe \
&& apk add --no-cache curl \
@ -15,6 +16,15 @@ RUN set -xe \
&& curl -sSL ${FTL_URL} -o /usr/bin/pihole-FTL \
&& chmod +x /usr/bin/pihole-FTL \
&& pihole-FTL --version \
&& mkdir -p /opt/pihole \
&& cd /opt/pihole \
&& curl -sSL -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/list.sh \
-O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/utils.sh \
-O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/chronometer.sh \
-O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/COL_TABLE \
&& cd /usr/local/bin \
&& curl -sSL ${FTL_GIT}/pi-hole/raw/master/pihole -o /usr/local/bin/pihole \
&& chmod +x /usr/local/bin/pihole /opt/pihole/*.sh \
&& apk del curl
VOLUME /etc/pihole

38
ftldns/Dockerfile.debian Normal file
View File

@ -0,0 +1,38 @@
#
# Dockerfile for FTLDNS (pihole-FTL)
#
FROM debian:12-slim
MAINTAINER EasyPi Software Foundation

# Release artifact to install (aarch64 glibc build by default).
ARG FTL_VERSION=v5.25.1
ARG FTL_FILE=pihole-FTL-aarch64-linux-gnu
ARG FTL_GIT=https://github.com/pi-hole
ARG FTL_URL=${FTL_GIT}/FTL/releases/download/${FTL_VERSION}/${FTL_FILE}

RUN set -xe \
    # apt-get (not apt) has a stable CLI for scripts; skip recommended pkgs.
    # ca-certificates is installed explicitly so curl can fetch over HTTPS
    # and so autoremove keeps it after curl is purged below.
    && apt-get update -y \
    && apt-get install -y --no-install-recommends ca-certificates curl procps \
    # load extra dnsmasq config fragments from the /etc/pihole volume
    && echo "conf-dir=/etc/pihole/dnsmasq.d,*.conf" >> /etc/dnsmasq.conf \
    && curl -sSL ${FTL_URL} -o /usr/bin/pihole-FTL \
    && chmod +x /usr/bin/pihole-FTL \
    && pihole-FTL --version \
    # helper scripts required by the `pihole` CLI wrapper
    && mkdir -p /opt/pihole \
    && cd /opt/pihole \
    && curl -sSL -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/list.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/utils.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/chronometer.sh \
                 -O ${FTL_GIT}/pi-hole/raw/master/advanced/Scripts/COL_TABLE \
    && curl -sSL ${FTL_GIT}/pi-hole/raw/master/pihole -o /usr/local/bin/pihole \
    && chmod +x /usr/local/bin/pihole /opt/pihole/*.sh \
    # drop the download tool and its now-unused deps; clean apt metadata
    && apt-get purge -y curl \
    && apt-get autoremove -y \
    && rm -rf /var/lib/apt/lists/*

VOLUME /etc/pihole

EXPOSE 4711/tcp \
       53/tcp \
       53/udp

CMD ["pihole-FTL", "-f"]

View File

@ -10,14 +10,29 @@ $ docker compose up -d
$ dig @127.0.0.1 -p 53 www.youtube.com
$ docker compose exec ftldns pihole-FTL sql -h gravity.db
>>> .schema domainlist
>>> insert into domainlist(type, domain) values (3, '(\.|^)youtube\.com$');
>>> .quit
$ docker compose exec ftldns bash
>>> pihole-FTL sql -h gravity.db
.schema domainlist
insert into domainlist(type, domain) values (3, '(\.|^)youtube\.com$');
.quit
>>> pihole --regex '(\.|^)baidu\.com$'
>>> pihole --wild 'qq.com'
>>> pihole-FTL regex-test www.baidu.com
>>> pihole --wild -d youtube.com
>>> pihole --regex -l
Displaying regex blacklist:
1: (\.|^)baidu\.com$ (enabled, last modified Tue, 27 Feb 2024 11:17:59 +0000)
2: (\.|^)qq\.com$ (enabled, last modified Tue, 27 Feb 2024 11:22:17 +0000)
>>> pihole -c
|¯¯¯(¯)_|¯|_ ___|¯|___ Core: API Offline
| ¯_/¯|_| ' \/ _ \ / -_)
|_| |_| |_||_\___/_\___|
——————————————————————————————————————————————————————————
>>> exit
$ docker compose exec ftldns kill -RTMIN 1
$ docker compose kill -s RTMIN ftldns
$ dig @127.0.0.1 -p 53 www.youtube.com
$ dig @127.0.0.1 -p 53 www.baidu.com
$ telnet 127.0.0.1 4711
>version
@ -28,7 +43,8 @@ $ telnet 127.0.0.1 4711
>quit
```
> Read more about [telnet-api][2].
> Read more about [domainlist][2] and [telnet-api][3].
[1]: https://github.com/pi-hole/FTL
[2]: https://docs.pi-hole.net/ftldns/telnet-api/
[2]: https://docs.pi-hole.net/database/gravity/#domain-tables-domainlist
[3]: https://docs.pi-hole.net/ftldns/telnet-api/

7
memgraph/README.md Normal file
View File

@ -0,0 +1,7 @@
memgraph
========
[Memgraph][1] is an open-source graph database built for real-time streaming
data and compatible with Neo4j.
[1]: https://github.com/memgraph/memgraph

View File

@ -0,0 +1,31 @@
#
# Memgraph graph database (with MAGE algorithms) plus the Lab web UI.
# See: https://memgraph.com/docs/configuration/configuration-settings
#
version: "3.8"

services:

  memgraph:
    image: memgraph/memgraph-mage:1.14.1-memgraph-2.14.1
    ports:
      - "7444:7444"  # Log
      - "7687:7687"  # Bolt
    volumes:
      # - ./data/etc:/etc/memgraph
      - ./data/log:/var/log/memgraph
      - ./data/var:/var/lib/memgraph
    environment:
      # extra command-line flags passed to memgraph on startup
      - MEMGRAPH=--telemetry-enabled=false
    restart: unless-stopped

  lab:
    image: memgraph/lab:2.11.1
    ports:
      - "3000:3000"  # Web
    environment:
      # auto-connect the UI to the memgraph service above
      - QUICK_CONNECT_MG_HOST=memgraph
      - QUICK_CONNECT_MG_PORT=7687
    depends_on:
      - memgraph
    restart: unless-stopped

36
memgraph/example/dnslookup Executable file
View File

@ -0,0 +1,36 @@
#!/bin/bash
#
# dnslookup - print the DNS resolution chain for a DOMAIN (or IP) as an
# indented tree, read from the Domain/IPv4 graph stored in memgraph.
#
# Pipeline: mgconsole emits CSV node-name pairs for each edge on the paths
# through $HOST -> sed strips the CSV quoting -> tail drops the header row
# -> tr flattens list punctuation to spaces -> gawk (program supplied on
# fd 3 via the second heredoc) assigns numeric ids so each line becomes
# "child-id parent-id name" -> column renders that as a tree -> the final
# sed pads the tree glyph with a space.
# NOTE(review): the tree rendering relies on `column -t -i1 -p2 -r3 -H1,2`
# (util-linux tree options) -- confirm the host's column supports them.
HOST=${1:?DOMAIN or IP is empty}
docker exec -i memgraph_memgraph_1 mgconsole -output-format=csv <<- _CQL_ | sed -e 's/"//g' | tail -n +2 | tr '[],[]' ' ' | gawk -f /dev/fd/3 3<<- "_AWK_" | column -t -i1 -p2 -r3 -H1,2 | sed 's/─/& /'
MATCH p=(n)-[*]->(m)
WHERE any(n in nodes(p) where n.name = '$HOST') AND not exists(()-->(n)) AND not exists((m)-->())
UNWIND nodes(p) AS nn
WITH DISTINCT nn
CALL path.expand(nn,[">"],[],1,1) YIELD result
RETURN extract(i in nodes(result)|i.name);
_CQL_
BEGIN {
split("", cache);
split("", roots);
idx=0;
}
{
if(!($1 in cache)) {
roots[$1] = cache[$1] = ++idx;
}
if(!($2 in cache)) {
cache[$2] = ++idx;
}
delete roots[$2];
print cache[$2], cache[$1], $2;
}
END {
print "0 -1 ."
for(root in roots) {
print cache[root], 0, root;
}
}
_AWK_
# vim: set noai noet:

16
memgraph/example/dump-elk.sh Executable file
View File

@ -0,0 +1,16 @@
#!/bin/bash
#
# Export one day of DNS query logs from Elasticsearch into a gzipped CSV.
# Usage: dump-elk.sh [YYYY-MM-DD]    (defaults to yesterday)
#
# Fail fast on errors, unset variables, and broken pipes.
set -euo pipefail

cd "$(dirname "${BASH_SOURCE[0]}")"
export PATH=/usr/local/bin:$PATH

date=${1:-$(date -d yesterday +%F)}
url=http://127.0.0.1:9200/
index=logstash-${date//-/.}   # daily index: dashes in the date become dots
output=data/${date}.csv

mkdir -p "${output%/*}"

# Export A/CNAME answers for the dns project (rows with a `message` field
# are parse failures and are excluded), then compress the result.
elastic-query-export -c "$url" -i "$index" -o "$output" -q '+project:dns -_exists_:message +type:(A CNAME)' -fields '@timestamp,region,client,server,type,query,answer,ttl'
gzip "$output"

View File

@ -0,0 +1,27 @@
// Load one day of DNS answers (gzipped CSV with header) into the graph.
// Pass 1: a Domain node for every distinct queried name.
LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.query AS query
MERGE (d:Domain {name: query});
// Pass 2: CNAME answers are also domain names.
LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.answer AS answer WHERE row.type = 'CNAME'
MERGE (d:Domain {name: answer});
// Pass 3: A answers are IPv4 addresses.
LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH DISTINCT row.answer AS answer WHERE row.type = 'A'
MERGE (i:IPv4 {name: answer});
// Pass 4: Domain-[:A]->IPv4 edges. created_at is set once (epoch seconds);
// updated_at is refreshed from the record's @timestamp on every load.
LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH row WHERE row.type = 'A'
MATCH (d:Domain {name: row.query}), (i:IPv4 {name: row.answer})
MERGE (d)-[r:A]->(i)
ON CREATE SET r.created_at = timestamp()/1000000
CALL date.parse(replace(row.`@timestamp`, "Z", ""), "s", "%Y-%m-%dT%H:%M:%S.%f", "UTC") YIELD parsed
SET r.updated_at = parsed;
// Pass 5: Domain-[:CNAME]->Domain edges, same timestamp bookkeeping.
LOAD CSV FROM "/path/to/dns.csv.gz" WITH HEADER AS row
WITH row WHERE row.type = 'CNAME'
MATCH (d1:Domain {name: row.query}), (d2:Domain {name: row.answer})
MERGE (d1)-[r:CNAME]->(d2)
ON CREATE SET r.created_at = timestamp()/1000000
CALL date.parse(replace(row.`@timestamp`, "Z", ""), "s", "%Y-%m-%dT%H:%M:%S.%f", "UTC") YIELD parsed
SET r.updated_at = parsed;

15
memgraph/example/load-csv.sh Executable file
View File

@ -0,0 +1,15 @@
#!/bin/bash
#
# Fetch yesterday's DNS CSV export from the ELK host and load it into
# memgraph via load-csv.cql.
#
# Abort on any failed step so a missing remote file doesn't silently
# reload a stale local copy.
set -euo pipefail

cd "$(dirname "${BASH_SOURCE[0]}")"

FILE=$(date -d yesterday +%F).csv.gz
SRC_FILE=/data/dns/data/$FILE
DST_FILE=/path/to/dns.csv.gz

echo "$(date +%FT%T) GET $FILE"
scp "elk-us:$SRC_FILE" "$DST_FILE"

echo "$(date +%FT%T) LOAD CSV"
docker exec -i memgraph_memgraph_1 mgconsole < load-csv.cql

echo "$(date +%FT%T) DONE"

View File

@ -0,0 +1,14 @@
#!/bin/bash
#
# Print an ss:// access URL for a named key in the outline-ss-server
# config.yml.  Usage: <script> <key-id>
# (key ids/secrets can be generated with: pwgen 12 1)
set -euo pipefail

NAME=${1:?name is empty}
SERVER="127.0.0.1:9443"
CIPHER="chacha20-ietf-poly1305"

# Look up the key's secret; jq -e makes the script fail for unknown ids.
SECRET=$(yq -o json config.yml | jq -e -r --arg name "$NAME" '.keys[]|select(.id==$name).secret')

# BUG FIX: base64 wraps output at 76 columns by default, so a long
# "method:secret" string produced a multi-line digest and a corrupt URL.
# Strip newlines along with the trailing '=' padding (SIP002 userinfo is
# unpadded base64).
DIGEST=$(echo -n "$CIPHER:$SECRET" | base64 | tr -d '\n=')
echo "ss://$DIGEST@$SERVER/?outline=1"

View File

@ -0,0 +1,16 @@
# systemd unit for outline-ss-server (Outline's Shadowsocks server).
[Unit]
Description=outline-ss-server
After=network.target
[Service]
# Runs unprivileged; config.yml and the GeoIP database are resolved
# relative to the working directory.
User=vpn
Group=vpn
WorkingDirectory=/etc/outline-ss-server
ExecStart=/usr/local/bin/outline-ss-server -config=config.yml -metrics=127.0.0.1:9092 -ip_country_db=ip-country.mmdb -replay_history=10000
# SIGHUP makes the server reload its key config without dropping sessions.
ExecReload=/bin/kill -HUP $MAINPID
LimitNOFILE=65536
RestartSec=5
Restart=always
[Install]
WantedBy=multi-user.target

View File

@ -6,7 +6,7 @@ pihole
The [Pi-hole®][1] is a DNS sinkhole that protects your devices from unwanted
content, without installing any client-side software.
- https://docs.pi-hole.net/
- https://docs.pi-hole.net/core/pihole-command/
- https://hub.docker.com/r/pihole/pihole/
[1]: https://pi-hole.net/

13
tldextract/Dockerfile Normal file
View File

@ -0,0 +1,13 @@
#
# Dockerfile for tldextract
#
FROM alpine:3

RUN apk add --no-cache py3-pip \
    # --no-cache-dir keeps pip's download cache out of the image layer
    && pip install --no-cache-dir tldextract \
    # pre-fetch the Public Suffix List at build time; NOTE(review): the
    # cache lands in root's home -- confirm it is readable when the
    # container runs with `-u $(id -u)` as the README suggests
    && tldextract --update \
    && tldextract --version

ENTRYPOINT ["/usr/bin/tldextract"]
CMD ["--help"]

17
tldextract/README.md Normal file
View File

@ -0,0 +1,17 @@
tldextract
==========
[tldextract][1] accurately separates a URL's subdomain, domain, and public
suffix, using the Public Suffix List (PSL).
## Tutorial
```bash
# Create an alias
$ alias tldextract='docker run --rm -u $(id -u):$(id -g) vimagick/tldextract'
# Run it
$ tldextract -j www.google.com
```
[1]: https://github.com/john-kurkowski/tldextract

View File

@ -0,0 +1,69 @@
class SlackReporter(TextReporter):
    """Send a report to a Slack channel through an incoming-webhook URL.

    Config keys:
      webhook_url        -- required, the Slack incoming-webhook endpoint
      max_message_length -- optional chunk size (default 40000)
      rich_text          -- optional, post as a preformatted rich-text block
    """

    __kind__ = 'slack'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Slack limits message size, so long reports are sent in chunks.
        self.max_length = self.config.get('max_message_length', 40000)

    def submit(self):
        """Post the rendered report; return the last failing response, or None."""
        webhook_url = self.config['webhook_url']
        text = '\n'.join(super().submit())

        if not text:
            logger.debug('Not calling {} API (no changes)'.format(self.__kind__))
            return

        result = None
        for chunk in chunkstring(text, self.max_length, numbering=True):
            res = self.submit_chunk(webhook_url, chunk)
            # BUG FIX: test `res is None` before touching res.status_code;
            # the original order dereferenced res first and would raise
            # AttributeError instead of recording the failure.
            if res is None or res.status_code != requests.codes.ok:
                result = res
        return result

    def submit_chunk(self, webhook_url, text):
        """POST a single chunk and return the requests.Response."""
        logger.debug("Sending {} request with text: {}".format(self.__kind__, text))
        post_data = self.prepare_post_data(text)
        result = requests.post(webhook_url, json=post_data)
        try:
            if result.status_code == requests.codes.ok:
                logger.info("{} response: ok".format(self.__kind__))
            else:
                logger.error("{} error: {}".format(self.__kind__, result.text))
        except ValueError:
            logger.error(
                "Failed to parse {} response. HTTP status code: {}, content: {}".format(self.__kind__,
                                                                                        result.status_code,
                                                                                        result.content))
        return result

    def prepare_post_data(self, text):
        """Build the JSON payload; honors the optional 'rich_text' config flag."""
        if self.config.get('rich_text', False):
            return {
                "blocks": [
                    {
                        "type": "rich_text",
                        "elements": [
                            {
                                "type": "rich_text_preformatted",
                                "elements": [
                                    {"type": "text", "text": text}
                                ]
                            }
                        ]
                    }
                ]
            }
        else:
            return {"text": text}
class MattermostReporter(SlackReporter):
    """Send a message to a Mattermost channel (Slack-compatible webhook)."""

    __kind__ = 'mattermost'

    def prepare_post_data(self, text):
        # Mattermost only accepts the plain-text payload shape, so the
        # parent's rich_text handling is bypassed entirely.
        return dict(text=text)

View File

@ -0,0 +1,14 @@
# systemd unit for adnanh/webhook running outside a container.
[Unit]
Description=lightweight incoming webhook server
After=network.target
[Service]
# Hook definitions are read from hooks.json/hooks.yaml in the working
# directory; -hotreload picks up edits without a restart.
ExecStart=/usr/local/bin/webhook -hooks=hooks.json -hooks=hooks.yaml -hotreload -port=9000 -template -urlprefix=hooks
ExecReload=/bin/kill -HUP $MAINPID
WorkingDirectory=/etc/webhook/
LimitNOFILE=65536
RestartSec=5
Restart=always
[Install]
WantedBy=multi-user.target

View File

@ -2,23 +2,42 @@
# Dockerfile for webhook
#
FROM golang:alpine AS build
ENV CGO_ENABLED=0
ENV GOOS=linux
ENV GOARCH=amd64
ENV GO111MODULE=off
WORKDIR /go/bin
RUN apk add --no-cache git
RUN go get -d -v github.com/adnanh/webhook
RUN go build --ldflags '-s -extldflags "-static"' -i -o webhook github.com/adnanh/webhook
FROM alpine:3
MAINTAINER EasyPi Software Foundation
RUN apk add --no-cache bash coreutils curl jq
COPY --from=build /go/bin/webhook /usr/local/bin/
ARG WEBHOOK_VERSION=2.8.1
ARG WEBHOOK_OS=linux
ARG WEBHOOK_ARCH=amd64
ARG WEBHOOK_FILE=webhook-$WEBHOOK_OS-$WEBHOOK_ARCH.tar.gz
ARG WEBHOOK_URL=https://github.com/adnanh/webhook/releases/download/$WEBHOOK_VERSION/$WEBHOOK_FILE
RUN set -xe \
&& apk add --no-cache \
bash \
bind-tools \
ca-certificates \
coreutils \
curl \
curlie \
gawk \
gcompat \
grep \
iputils-ping \
jq \
mosquitto-clients \
openssh \
sed \
tar \
wget \
xsv \
&& curl -sSL $WEBHOOK_URL | tar xz -C /usr/local/bin ${WEBHOOK_FILE%.tar.gz}/webhook --strip 1 \
&& webhook -version
WORKDIR /etc/webhook
VOLUME /etc/webhook
EXPOSE 9000
ENTRYPOINT ["webhook"]
# Doc: https://github.com/adnanh/webhook/blob/master/docs/Webhook-Parameters.md
CMD ["-hooks", "hooks.json", "-hooks", "hooks.yaml", "-hotreload", "-template", "-verbose"]
# See: https://github.com/adnanh/webhook/blob/master/docs/Webhook-Parameters.md
CMD ["-hooks=hooks.json", "-hooks=hooks.yaml", "-hotreload", "-port=9000", "-template", "-urlprefix=hooks"]

View File

@ -9,7 +9,7 @@ execute configured commands.
## Directory Tree
```
```bash
~/fig/webhook/
├── docker-compose.yml
└── data/
@ -17,36 +17,24 @@ execute configured commands.
└── test.sh* (executable)
```
docker-compose.yml
File: data/hooks.yaml
```yaml
webhook:
image: vimagick/webhook
ports:
- "9000:9000"
volumes:
- "./data:/etc/webhook"
restart: always
- id: test
execute-command: /etc/webhook/test.sh
pass-file-to-command:
- source: entire-payload
envname: HOOK_PAYLOAD
command-working-directory: /etc/webhook
include-command-output-in-response: true
```
hooks.json
```json
[
{
"id": "test",
"execute-command": "/etc/webhook/test.sh",
"command-working-directory": "/etc/webhook",
"include-command-output-in-response": true
}
]
```
test.sh
File: data/test.sh
```bash
#!/bin/bash
echo 'hello world'
cat $HOOK_PAYLOAD
```
## Up and Running
@ -59,26 +47,27 @@ $ chmod +x data/test.sh
$ docker-compose up -d
Creating webhook_webhook_1...
$ curl http://localhost:9000/hooks/test
$ curl http://localhost:9000/hooks/test -d hello=world
hello world
{"hello":"world"}
$ docker-compose logs
Attaching to webhook_webhook_1
webhook_1 | [webhook] 2015/11/05 04:26:52 version 2.3.5 starting
webhook_1 | [webhook] 2015/11/05 04:26:52 setting up os signal watcher
webhook_1 | [webhook] 2015/11/05 04:26:52 attempting to load hooks from hooks.json
webhook_1 | [webhook] 2015/11/05 04:26:52 loaded 1 hook(s) from file
webhook_1 | [webhook] 2015/11/05 04:26:52 > test
webhook_1 | [webhook] 2015/11/05 04:26:52 starting insecure (http) webhook on :9000
webhook_1 | [webhook] 2015/11/05 04:26:52 os signal watcher ready
webhook_1 | [webhook] 2015/11/05 04:27:11 Started GET /hooks/test
webhook_1 | [webhook] 2015/11/05 04:27:11 Completed 200 OK in 390.207µs
webhook_1 | [webhook] 2015/11/05 04:27:11 test got matched (1 time(s))
webhook_1 | [webhook] 2015/11/05 04:27:11 test hook triggered successfully
webhook_1 | [webhook] 2015/11/05 04:27:11 executing /scripts/test.sh (/scripts/test.sh) with arguments [/scripts/test.sh] using /scripts as cwd
webhook_1 | [webhook] 2015/11/05 04:27:11 command output: hello world
webhook_1 |
webhook_1 | [webhook] 2015/11/05 04:27:11 finished handling test
webhook_1 | [webhook] 2024/02/20 04:26:52 version 2.8.1 starting
webhook_1 | [webhook] 2024/02/20 04:26:52 setting up os signal watcher
webhook_1 | [webhook] 2024/02/20 04:26:52 attempting to load hooks from hooks.json
webhook_1 | [webhook] 2024/02/20 04:26:52 loaded 1 hook(s) from file
webhook_1 | [webhook] 2024/02/20 04:26:52 > test
webhook_1 | [webhook] 2024/02/20 04:26:52 starting insecure (http) webhook on :9000
webhook_1 | [webhook] 2024/02/20 04:26:52 os signal watcher ready
webhook_1 | [webhook] 2024/02/20 04:27:11 Started GET /hooks/test
webhook_1 | [webhook] 2024/02/20 04:27:11 Completed 200 OK in 390.207µs
webhook_1 | [webhook] 2024/02/20 04:27:11 test got matched (1 time(s))
webhook_1 | [webhook] 2024/02/20 04:27:11 test hook triggered successfully
webhook_1 | [webhook] 2024/02/20 04:27:11 executing /scripts/test.sh (/scripts/test.sh) with arguments ["/etc/webhook/test.sh"] and environment [HOOK_PAYLOAD=/etc/webhook/HOOK_PAYLOAD967569167] using /etc/webhook as cwd
webhook_1 | [webhook] 2024/02/20 04:27:11 command output: hello world
webhook_1 | {"hello":"world"}
webhook_1 | [webhook] 2024/02/20 04:27:11 finished handling test
```
[1]: https://github.com/adnanh/webhook

View File

@ -1 +0,0 @@
[]

13
webhook/data/hooks.yaml Normal file
View File

@ -0,0 +1,13 @@
# Hook definitions loaded by webhook (-hooks=hooks.yaml).
#
# test:   runs test.sh; the full request payload is written to a temp file
#         whose path is exported as $HOOK_PAYLOAD, and the script's output
#         is returned in the HTTP response.
- id: test
  execute-command: /etc/webhook/test.sh
  pass-file-to-command:
    - source: entire-payload
      envname: HOOK_PAYLOAD
  command-working-directory: /etc/webhook
  include-command-output-in-response: true

# osmand: echoes the raw query string; always responds "OK".
- id: osmand
  execute-command: '/bin/echo'
  pass-arguments-to-command:
    - source: entire-query
  response-message: "OK\n"

View File

@ -1,5 +1,3 @@
#!/bin/bash
echo 'hello world'
cat $HOOK_PAYLOAD

View File

@ -1,7 +1,9 @@
webhook:
image: vimagick/webhook
ports:
- "9000:9000"
volumes:
- "./data:/etc/webhook"
restart: always
version: "3.8"
services:
webhook:
image: vimagick/webhook
ports:
- "9000:9000"
volumes:
- "./data:/etc/webhook"
restart: unless-stopped