pagination-logs (#1)

bugfix: pagination, URL rewriting

chore: update dockerfile

chore: update package-lock.json

feat: add default upstream dns server as CLI flag

bugfix: better parsing of client IP for query log

got key loading to work, removed config file for now

got dashboard api call to work

tstorage sucks

wip

pagination

Co-authored-by: Adam Veldhousen <adam.veldhousen@liveauctioneers.com>
Co-authored-by: Adam Veldhousen <adamveld12@gmail.com>
trunk
Adam Veldhousen 3 years ago
parent 39b0b866d0
commit 977fd7cb2f

@ -0,0 +1,10 @@
.bin
.idea
.DS_Store
client/node_modules
client/public/build
insomnia_collecton.json
config.example.json

@ -0,0 +1,48 @@
name: Lint, Build, Deploy
on:
schedule:
- cron: '0 14 * * *'
push:
branches: [ trunk ]
pull_request:
branches: [ trunk ]
jobs:
gopherhole-verify:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: golangci-lint
uses: golangci/golangci-lint-action@v2
with:
# Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
version: latest
# - name: Verify gopherhole
# run: |
# go get -u .
# make lint
client-verify:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Verify client
env:
NODE_ENV: production
run: |
cd ./client;
npm run verify
gopherhole-build:
runs-on: ubuntu-latest
needs: [client-verify, gopherhole-verify]
steps:
- uses: actions/checkout@v2
- name: Build gopherhole image
env:
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
docker build \
--label="" \
-t vdhsn/gopherhole .

@ -6,14 +6,17 @@ WORKDIR /build/client
RUN apk add --no-cache make
COPY . /build/
RUN npm install && npm run build && mkdir -p /build/.bin/static && cp -R ./public /build/.bin/static
RUN npm install \
&& npm run build \
&& mkdir -p /build/.bin/static \
&& cp -R ./public /build/.bin/static
# DNS/API SERVER BUILD
FROM golang:alpine as build-server
WORKDIR /build/
RUN apk add --no-cache --update make gcc musl-dev
RUN apk add --no-cache --update make gcc musl-dev git
COPY --from=build-client /build /build
RUN make .bin/gopherhole
@ -25,8 +28,7 @@ WORKDIR /opt
RUN apk add --no-cache ca-certificates
RUN addgroup gopherhole \
&& adduser -H -D gopherhole gopherhole \
RUN adduser -H -D gopherhole \
&& mkdir -p /data \
&& chown -R gopherhole /data
@ -34,6 +36,10 @@ COPY --chown=gopherhole:gopherhole --from=build-server /build/.bin/gopherhole /o
USER gopherhole
ENV GOPHERHOLE_UPSTREAM="1.1.1.1:53"
EXPOSE 53/udp 53/tcp 80/tcp
VOLUME "/data"
ENTRYPOINT /opt/gopherhole
CMD /opt/gopherhole -http-address=0.0.0.0:80 -dns-address=0.0.0.0:53 -upstream ${GOPHERHOLE_UPSTREAM} -db-path /data

@ -5,27 +5,27 @@
"requires": true,
"dependencies": {
"@babel/code-frame": {
"version": "7.12.13",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz",
"integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==",
"version": "7.14.5",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.14.5.tgz",
"integrity": "sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw==",
"dev": true,
"requires": {
"@babel/highlight": "^7.12.13"
"@babel/highlight": "^7.14.5"
}
},
"@babel/helper-validator-identifier": {
"version": "7.14.0",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz",
"integrity": "sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==",
"version": "7.14.5",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz",
"integrity": "sha512-5lsetuxCLilmVGyiLEfoHBRX8UCFD+1m2x3Rj97WrW3V7H3u4RWRXA4evMjImCsin2J2YT0QaVDGf+z8ondbAg==",
"dev": true
},
"@babel/highlight": {
"version": "7.14.0",
"resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.0.tgz",
"integrity": "sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg==",
"version": "7.14.5",
"resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz",
"integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==",
"dev": true,
"requires": {
"@babel/helper-validator-identifier": "^7.14.0",
"@babel/helper-validator-identifier": "^7.14.5",
"chalk": "^2.0.0",
"js-tokens": "^4.0.0"
}
@ -53,35 +53,35 @@
}
},
"@nodelib/fs.scandir": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz",
"integrity": "sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA==",
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
"integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
"dev": true,
"requires": {
"@nodelib/fs.stat": "2.0.4",
"@nodelib/fs.stat": "2.0.5",
"run-parallel": "^1.1.9"
}
},
"@nodelib/fs.stat": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz",
"integrity": "sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q==",
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
"integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
"dev": true
},
"@nodelib/fs.walk": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.6.tgz",
"integrity": "sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow==",
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.7.tgz",
"integrity": "sha512-BTIhocbPBSrRmHxOAJFtR18oLhxTtAFDAvL8hY1S3iU8k+E60W/YFs4jrixGzQjMpF4qPXxIQHcjVD9dz1C2QA==",
"dev": true,
"requires": {
"@nodelib/fs.scandir": "2.1.4",
"@nodelib/fs.scandir": "2.1.5",
"fastq": "^1.6.0"
}
},
"@polka/url": {
"version": "1.0.0-next.12",
"resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.12.tgz",
"integrity": "sha512-6RglhutqrGFMO1MNUXp95RBuYIuc8wTnMAV5MUhLmjTOy78ncwOw7RgeQ/HeymkKXRhZd0s2DNrM1rL7unk3MQ==",
"version": "1.0.0-next.15",
"resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.15.tgz",
"integrity": "sha512-15spi3V28QdevleWBNXE4pIls3nFZmBbUGrW9IVPwiQczuSb9n76TCB4bsk8TSel+I1OkHEdPhu5QKMfY6rQHA==",
"dev": true
},
"@popperjs/core": {
@ -158,9 +158,9 @@
}
},
"@tsconfig/svelte": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/@tsconfig/svelte/-/svelte-1.0.10.tgz",
"integrity": "sha512-EBrpH2iXXfaf/9z81koiDYkp2mlwW2XzFcAqn6qh7VKyP8zBvHHAQzNhY+W9vH5arAjmGAm5g8ElWq6YmXm3ig==",
"version": "1.0.13",
"resolved": "https://registry.npmjs.org/@tsconfig/svelte/-/svelte-1.0.13.tgz",
"integrity": "sha512-5lYJP45Xllo4yE/RUBccBT32eBlRDbqN8r1/MIvQbKxW3aFqaYPCNgm8D5V20X4ShHcwvYWNlKg3liDh1MlBoA==",
"dev": true
},
"@types/estree": {
@ -170,9 +170,9 @@
"dev": true
},
"@types/node": {
"version": "15.6.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-15.6.1.tgz",
"integrity": "sha512-7EIraBEyRHEe7CH+Fm1XvgqU6uwZN8Q7jppJGcqjROMT29qhAuuOxYB1uEY5UMYQKEmA5D+5tBnhdaPXSsLONA==",
"version": "15.12.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-15.12.2.tgz",
"integrity": "sha512-zjQ69G564OCIWIOHSXyQEEDpdpGl+G348RAKY0XXy9Z5kU9Vzv1GMNnkar/ZJ8dzXB3COzD9Mo9NtRZ4xfgUww==",
"dev": true
},
"@types/pug": {
@ -248,15 +248,15 @@
"dev": true
},
"autoprefixer": {
"version": "10.2.5",
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.2.5.tgz",
"integrity": "sha512-7H4AJZXvSsn62SqZyJCP+1AWwOuoYpUfK6ot9vm0e87XD6mT8lDywc9D9OTJPMULyGcvmIxzTAMeG2Cc+YX+fA==",
"version": "10.2.6",
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.2.6.tgz",
"integrity": "sha512-8lChSmdU6dCNMCQopIf4Pe5kipkAGj/fvTMslCsih0uHpOrXOPUEVOmYMMqmw3cekQkSD7EhIeuYl5y0BLdKqg==",
"dev": true,
"requires": {
"browserslist": "^4.16.3",
"caniuse-lite": "^1.0.30001196",
"browserslist": "^4.16.6",
"caniuse-lite": "^1.0.30001230",
"colorette": "^1.2.2",
"fraction.js": "^4.0.13",
"fraction.js": "^4.1.1",
"normalize-range": "^0.1.2",
"postcss-value-parser": "^4.1.0"
}
@ -274,9 +274,9 @@
"dev": true
},
"bootstrap": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-5.0.0.tgz",
"integrity": "sha512-tmhPET9B9qCl8dCofvHeiIhi49iBt0EehmIsziZib65k1erBW1rHhj2s/2JsuQh5Pq+xz2E9bEbzp9B7xHG+VA=="
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-5.0.1.tgz",
"integrity": "sha512-Fl79+wsLOZKoiU345KeEaWD0ik8WKRI5zm0YSPj2oF1Qr+BO7z0fco6GbUtqjoG1h4VI89PeKJnMsMMVQdKKTw=="
},
"brace-expansion": {
"version": "1.1.11",
@ -341,9 +341,9 @@
"dev": true
},
"caniuse-lite": {
"version": "1.0.30001223",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001223.tgz",
"integrity": "sha512-k/RYs6zc/fjbxTjaWZemeSmOjO0JJV+KguOBA3NwPup8uzxM1cMhR2BD9XmO86GuqaqTCO8CgkgH9Rz//vdDiA==",
"version": "1.0.30001236",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001236.tgz",
"integrity": "sha512-o0PRQSrSCGJKCPZcgMzl5fUaj5xHe8qA2m4QRvnyY4e1lITqoNkr7q/Oh1NcpGSy0Th97UZ35yoKcINPoq7YOQ==",
"dev": true
},
"chalk": {
@ -358,9 +358,9 @@
}
},
"chart.js": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-3.2.1.tgz",
"integrity": "sha512-XsNDf3854RGZkLCt+5vWAXGAtUdKP2nhfikLGZqud6G4CvRE2ts64TIxTTfspOin2kEZvPgomE29E6oU02dYjQ=="
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-3.3.2.tgz",
"integrity": "sha512-H0hSO7xqTIrwxoACqnSoNromEMfXvfuVnrbuSt2TuXfBDDofbnto4zuZlRtRvC73/b37q3wGAWZyUU41QPvNbA=="
},
"chartjs-color": {
"version": "2.4.1",
@ -435,9 +435,9 @@
"dev": true
},
"commander": {
"version": "2.20.3",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz",
"integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==",
"dev": true
},
"commondir": {
@ -471,9 +471,9 @@
"dev": true
},
"date-fns": {
"version": "2.21.3",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.21.3.tgz",
"integrity": "sha512-HeYdzCaFflc1i4tGbj7JKMjM4cKGYoyxwcIIkHzNgCkX8xXDNJDZXgDDVchIWpN4eQc3lH37WarduXFZJOtxfw=="
"version": "2.22.1",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.22.1.tgz",
"integrity": "sha512-yUFPQjrxEmIsMqlHhAhmxkuH769baF21Kk+nZwZGyrMoyLA+LugaQtC0+Tqf9CBUUULWwUJt6Q5ySI3LJDDCGg=="
},
"dedent-js": {
"version": "1.0.1",
@ -493,9 +493,9 @@
"dev": true
},
"detect-indent": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.0.0.tgz",
"integrity": "sha512-oSyFlqaTHCItVRGK5RmrmjB+CmaMOW7IaNA/kdxqhoa6d17j/5ce9O9eWXmV/KEdRwqpQA+Vqe8a8Bsybu4YnA==",
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz",
"integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==",
"dev": true
},
"detective": {
@ -522,9 +522,9 @@
"dev": true
},
"electron-to-chromium": {
"version": "1.3.727",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.727.tgz",
"integrity": "sha512-Mfz4FIB4FSvEwBpDfdipRIrwd6uo8gUDoRDF4QEYb4h4tSuI3ov594OrjU6on042UlFHouIJpClDODGkPcBSbg==",
"version": "1.3.752",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.752.tgz",
"integrity": "sha512-2Tg+7jSl3oPxgsBsWKh5H83QazTkmWG/cnNwJplmyZc7KcN61+I10oUgaXSVk/NwfvN3BdkKDR4FYuRBQQ2v0A==",
"dev": true
},
"escalade": {
@ -583,9 +583,9 @@
}
},
"fraction.js": {
"version": "4.0.13",
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.0.13.tgz",
"integrity": "sha512-E1fz2Xs9ltlUp+qbiyx9wmt2n9dRzPsS11Jtdb8D2o+cC7wr9xkkKsVKJuBX0ST+LVS+LhLO+SbLJNtfWcJvXA==",
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.1.1.tgz",
"integrity": "sha512-MHOhvvxHTfRFpF1geTK9czMIZ6xclsEor2wkIGYYq+PxcQqT7vStJqjhe6S1TenZrMZzo+wlqOufBDVepUEgPg==",
"dev": true
},
"fs-extra": {
@ -753,9 +753,9 @@
}
},
"is-core-module": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.3.0.tgz",
"integrity": "sha512-xSphU2KG9867tsYdLD4RWQ1VqdFl4HTO9Thf3I/3dLEfr0dbPTWKsuCKrgqMljg4nPE+Gq0VCnzT3gr0CyBmsw==",
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.4.0.tgz",
"integrity": "sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A==",
"dev": true,
"requires": {
"has": "^1.0.3"
@ -979,9 +979,9 @@
"dev": true
},
"nanoid": {
"version": "3.1.22",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.22.tgz",
"integrity": "sha512-/2ZUaJX2ANuLtTvqTlgqBQNJoQO398KyJgZloL0PZkC0dpysjncRUPsFe3DUPzz/y3h+u7C46np8RMuvF3jsSQ==",
"version": "3.1.23",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.23.tgz",
"integrity": "sha512-FiB0kzdP0FFVGDKlRLEQ1BgDzU87dy5NnzjeW9YZNt+/c3+q82EQDUwniSAUxp/F0gFNI1ZhKU1FqYsMuqZVnw==",
"dev": true
},
"no-case": {
@ -1003,9 +1003,9 @@
}
},
"node-releases": {
"version": "1.1.71",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.71.tgz",
"integrity": "sha512-zR6HoT6LrLCRBwukmrVbHv0EpEQjksO6GmFcZQQuCAy139BEsoVKPYnf3jongYW83fAa1torLGYwxxky/p28sg==",
"version": "1.1.73",
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.73.tgz",
"integrity": "sha512-uW7fodD6pyW2FZNZnp/Z3hvWKeEW1Y8R1+1CnErE8cXFXzl5blBOoVB41CvMer6P6Q0S5FXDwcHgFd1Wj0U9zg==",
"dev": true
},
"normalize-path": {
@ -1032,9 +1032,9 @@
"dev": true
},
"object-hash": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.1.1.tgz",
"integrity": "sha512-VOJmgmS+7wvXf8CjbQmimtCnEx3IAoLxI3fp2fbWehxrWBcAQFbk+vcwb6vzR0VZv/eNCJ/27j151ZTwqW/JeQ==",
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz",
"integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==",
"dev": true
},
"once": {
@ -1106,26 +1106,26 @@
"dev": true
},
"path-parse": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
"integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==",
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
"dev": true
},
"picomatch": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.3.tgz",
"integrity": "sha512-KpELjfwcCDUb9PeigTs2mBJzXUPzAuP2oPcA989He8Rte0+YUAjw1JVedDhuTKPkHjSYzMN3npC9luThGYEKdg==",
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz",
"integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==",
"dev": true
},
"postcss": {
"version": "8.2.14",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.2.14.tgz",
"integrity": "sha512-+jD0ZijcvyCqPQo/m/CW0UcARpdFylq04of+Q7RKX6f/Tu+dvpUI/9Sp81+i6/vJThnOBX09Quw0ZLOVwpzX3w==",
"version": "8.3.1",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.3.1.tgz",
"integrity": "sha512-9qH0MGjsSm+fjxOi3GnwViL1otfi7qkj+l/WX5gcRGmZNGsIcqc+A5fBkE6PUobEQK4APqYVaES+B3Uti98TCw==",
"dev": true,
"requires": {
"colorette": "^1.2.2",
"nanoid": "^3.1.22",
"source-map": "^0.6.1"
"nanoid": "^3.1.23",
"source-map-js": "^0.6.2"
}
},
"postcss-functions": {
@ -1156,6 +1156,12 @@
"resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz",
"integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==",
"dev": true
},
"source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
"dev": true
}
}
},
@ -1179,9 +1185,9 @@
}
},
"postcss-selector-parser": {
"version": "6.0.5",
"resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.5.tgz",
"integrity": "sha512-aFYPoYmXbZ1V6HZaSvat08M97A8HqO6Pjz+PiNpw/DhuRrC72XWAdp3hL6wusDCN31sSmcZyMGa2hZEuX+Xfhg==",
"version": "6.0.6",
"resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.6.tgz",
"integrity": "sha512-9LXrvaaX3+mcv5xkg5kFwqSzSH1JIObIx51PrndZwlmznwXRfxMddDvo9gve3gVR8ZTKgoFDdWkbRFmEhT4PMg==",
"dev": true,
"requires": {
"cssesc": "^3.0.0",
@ -1210,14 +1216,6 @@
"glob": "^7.0.0",
"postcss": "^8.2.1",
"postcss-selector-parser": "^6.0.2"
},
"dependencies": {
"commander": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz",
"integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==",
"dev": true
}
}
},
"queue-microtask": {
@ -1302,9 +1300,9 @@
"dev": true
},
"rollup": {
"version": "2.47.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.47.0.tgz",
"integrity": "sha512-rqBjgq9hQfW0vRmz+0S062ORRNJXvwRpzxhFXORvar/maZqY6za3rgQ/p1Glg+j1hnc1GtYyQCPiAei95uTElg==",
"version": "2.51.1",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.51.1.tgz",
"integrity": "sha512-8xfDbAtBleXotb6qKEHWuo/jkn94a9dVqGc7Rwl3sqspCVlnCfbRek7ldhCARSi7h32H0xR4QThm1t9zHN+3uw==",
"dev": true,
"requires": {
"fsevents": "~2.3.1"
@ -1428,20 +1426,20 @@
}
},
"sirv": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/sirv/-/sirv-1.0.11.tgz",
"integrity": "sha512-SR36i3/LSWja7AJNRBz4fF/Xjpn7lQFI30tZ434dIy+bitLYSP+ZEenHg36i23V2SGEz+kqjksg0uOGZ5LPiqg==",
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/sirv/-/sirv-1.0.12.tgz",
"integrity": "sha512-+jQoCxndz7L2tqQL4ZyzfDhky0W/4ZJip3XoOuxyQWnAwMxindLl3Xv1qT4x1YX/re0leShvTm8Uk0kQspGhBg==",
"dev": true,
"requires": {
"@polka/url": "^1.0.0-next.9",
"@polka/url": "^1.0.0-next.15",
"mime": "^2.3.1",
"totalist": "^1.0.0"
}
},
"sirv-cli": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/sirv-cli/-/sirv-cli-1.0.11.tgz",
"integrity": "sha512-L8NILoRSBd38VcfFcERYCaVCnWPBLo9G6u/a37UJ8Ysv4DfjizMbFBcM+SswNnndJienhR6qy8KFuAEaeL4g8Q==",
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/sirv-cli/-/sirv-cli-1.0.12.tgz",
"integrity": "sha512-Rs5PvF3a48zuLmrl8vcqVv9xF/WWPES19QawVkpdzqx7vD5SMZS07+ece1gK4umbslXN43YeIksYtQM5csgIzQ==",
"dev": true,
"requires": {
"console-clear": "^1.1.0",
@ -1450,14 +1448,20 @@
"local-access": "^1.0.1",
"sade": "^1.6.0",
"semiver": "^1.0.0",
"sirv": "^1.0.11",
"sirv": "^1.0.12",
"tinydate": "^1.0.0"
}
},
"source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
"dev": true
},
"source-map-js": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-0.6.2.tgz",
"integrity": "sha512-/3GptzWzu0+0MBQFrDKzw/DvvMTUORvgY6k6jd/VS6iCR4RDTKWH6v6WPwQoUO8667uQEf9Oe38DxAYWY5F/Ug==",
"dev": true
},
"source-map-support": {
@ -1468,6 +1472,14 @@
"requires": {
"buffer-from": "^1.0.0",
"source-map": "^0.6.0"
},
"dependencies": {
"source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
"dev": true
}
}
},
"sourcemap-codec": {
@ -1500,9 +1512,9 @@
"integrity": "sha512-q5Dq0/QHh4BLJyEVWGe7Cej5NWs040LWjMbicBGZ+3qpFWJ1YObRmUDZKbbovddLC9WW7THTj3kYbTOFmU9fbg=="
},
"svelte-awesome": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/svelte-awesome/-/svelte-awesome-2.3.1.tgz",
"integrity": "sha512-n+6u0hMTUHvDR+pBbVghEr7TxA1lLoTE3ZuySteDChNGxpW1GMjN2cm6sZ1yr+868HOzoSS529YG02YuwFpxbw==",
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/svelte-awesome/-/svelte-awesome-2.3.2.tgz",
"integrity": "sha512-odkwkVSYEARI/4CskjwoI7KIhds1Ui74H1VFAifWuDoczY+JpPeJuBwDqgo8imM7Bd76U+ns7Yf1iQqveHnx5Q==",
"requires": {
"svelte": "^3.15.0"
}
@ -1527,9 +1539,9 @@
}
},
"svelte-check": {
"version": "1.5.2",
"resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-1.5.2.tgz",
"integrity": "sha512-x9Pc13r814TKrMXY70IyqDEmPzuFiqNSpBmsrMKrFpi995MiG+lmqYnyw8iQC+DGh7H3eUt3LIFXbNd396XIFw==",
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-1.6.0.tgz",
"integrity": "sha512-nQTlbFJWhwoeLY5rkhgbjzGQSwk5F1pRdEXait0EFaQSrE/iJF+PIjrQlk0BjL/ogk9HaR9ZI0DQSYrl7jl3IQ==",
"dev": true,
"requires": {
"chalk": "^4.0.0",
@ -1537,6 +1549,7 @@
"glob": "^7.1.6",
"import-fresh": "^3.2.1",
"minimist": "^1.2.5",
"sade": "^1.7.4",
"source-map": "^0.7.3",
"svelte-preprocess": "^4.0.0",
"typescript": "*"
@ -1582,12 +1595,6 @@
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"dev": true
},
"source-map": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
"dev": true
},
"supports-color": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
@ -1620,9 +1627,9 @@
}
},
"svelte2tsx": {
"version": "0.1.189",
"resolved": "https://registry.npmjs.org/svelte2tsx/-/svelte2tsx-0.1.189.tgz",
"integrity": "sha512-Mo4Sei1tNYthzSZx6biGSK7pI6/vj7nGvvmSevmLIiws/o1hyj1UIHn+AwqogeA9L46fcvy6WU3t7HxDg+LbLg==",
"version": "0.1.193",
"resolved": "https://registry.npmjs.org/svelte2tsx/-/svelte2tsx-0.1.193.tgz",
"integrity": "sha512-vzy4YQNYDnoqp2iZPnJy7kpPAY6y121L0HKrSBjU/IWW7DQ6T7RMJed2VVHFmVYm0zAGYMDl9urPc6R4DDUyhg==",
"requires": {
"dedent-js": "^1.0.1",
"pascal-case": "^3.1.1"
@ -1637,9 +1644,9 @@
}
},
"tailwindcss": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-2.1.2.tgz",
"integrity": "sha512-T5t+wwd+/hsOyRw2HJuFuv0LTUm3MUdHm2DJ94GPVgzqwPPFa9XxX0KlwLWupUuiOUj6uiKURCzYPHFcuPch/w==",
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-2.1.4.tgz",
"integrity": "sha512-fh1KImDLg6se/Suaelju/5oFbqq1b0ntagmGLu0aG9LlnNPGHgO1n/4E57CbKcCtyz/VYnvVXUiWmfyfBBZQ6g==",
"dev": true,
"requires": {
"@fullhuman/postcss-purgecss": "^3.1.3",
@ -1733,10 +1740,10 @@
"source-map-support": "~0.5.19"
},
"dependencies": {
"source-map": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
"commander": {
"version": "2.20.3",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
"dev": true
}
}
@ -1768,9 +1775,9 @@
"integrity": "sha512-gS9GVHRU+RGn5KQM2rllAlR3dU6m7AcpJKdtH8gFvQiC4Otgk98XnmMU+nZenHt/+VhnBPWwgrJsyrdcw6i23w=="
},
"typescript": {
"version": "4.2.4",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.2.4.tgz",
"integrity": "sha512-V+evlYHZnQkaz8TRBuxTA92yZBPotr5H+WhQ7bD3hZUndx5tGOa1fuCgeSjxAzM1RiN5IzvadIXTVefuuwZCRg==",
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.2.tgz",
"integrity": "sha512-zZ4hShnmnoVnAHpVHWpTcxdv7dWP60S2FsydQLV8V5PbS3FifjWFFRiHSWpDJahly88PRyV5teTSLoq4eG7mKw==",
"dev": true
},
"universalify": {
@ -1792,9 +1799,9 @@
"dev": true
},
"ws": {
"version": "7.4.5",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.4.5.tgz",
"integrity": "sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g==",
"version": "7.4.6",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz",
"integrity": "sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==",
"dev": true
},
"xtend": {

@ -4,7 +4,7 @@
"scripts": {
"build": "NODE_ENV=production rollup -c",
"dev": "NODE_ENV=development rollup -c -w",
"start": "sirv public --single -G -D",
"start": "sirv public --single -G -D --host",
"validate": "svelte-check"
},
"devDependencies": {

@ -33,6 +33,7 @@ export interface LogSearchOptions {
start: Date
end: Date
page: number
pageSize: number
filter: string
}
@ -40,12 +41,14 @@ export const getLogs = async({
start = sub(new Date(), { hours: 24 }),
end = new Date(),
page = 0,
pageSize = 25,
filter = ""
}: LogSearchOptions) => await apiCall<LogPayload>('metrics/log', 'GET', {
filter,
page,
start: getUnixTime(start),
end: getUnixTime(end),
start: getUnixTime(start || sub(new Date(), { hours: 24 })),
end: getUnixTime(end || new Date()),
pageSize
});
export interface StatsSearchOptions {
@ -62,11 +65,20 @@ export enum StatSearchKey {
Protocol = "protocol"
}
export interface Stat {
export interface StatDataset {
label: string
data: DataPoint[]
}
export interface DataPoint {
Header: string
AverageTotalTime: Number
Value: Number
Count: number,
Time: string
}
export interface Stat {
labels: string[]
datasets: StatDataset[]
};
export const getStats = async ({
@ -74,9 +86,9 @@ export const getStats = async ({
end = new Date(),
key = StatSearchKey.Domain,
interval = 30,
}: StatsSearchOptions) => await apiCall<Stat[]>('metrics/stats', 'GET', {
start: getUnixTime(start),
end: getUnixTime(end),
}: StatsSearchOptions) => await apiCall<Stat>('metrics/stats', 'GET', {
start: getUnixTime(start || sub(new Date(), { hours: 24 })),
end: getUnixTime(end || new Date()),
key,
interval
});

@ -29,9 +29,14 @@
export let defaultValue: Date = new Date();
export let value: Date = defaultValue;
$: dateTimeParts = toDateTimeParts(value);
$: dateTimeParts = toDateTimeParts(value || defaultValue || new Date());
const update = ({ target: { value: v } }) => {
if (!dateTimeParts) {
dateTimeParts = toDateTimeParts(defaultValue);
return;
}
const { date, time } = dateTimeParts;
let dateTimePartsInput = isDate(v)

@ -6,6 +6,9 @@
PaginationLink,
} from "sveltestrap";
const pageSizes = [25, 50, 100, 250];
const pagerElementsCount = 10;
export let page: number = 0;
export let pages: number = 0;
export let total: number = 0;
@ -19,41 +22,58 @@
$: {
pagesList = [];
for (
let i = Math.max(page - 5, 0);
i < Math.min(pages, page + 5);
let i = Math.max(page - pagerElementsCount, 0);
i < Math.min(pages, page + pagerElementsCount);
i++
) {
pagesList.push(i);
}
}
const handlePageSizeChange = ({ target: { value } }) => {
pageSize = Number(value);
};
const handlePageSizeChange = ({ target: { value } }) =>
(pageSize = Number(value));
const pageSizes = [25, 50, 100, 250];
const handlePageChange = (p) => (e) => {
e.preventDefault();
page = p;
console.debug(`changing to page ${page}`);
};
</script>
<section class="flex flex-row my-2 justify-between items-center">
<p>Page {pageIndex}/{pageCount}</p>
<div>
<Pagination size="sm">
<PaginationItem>
<PaginationLink first href="#" />
<PaginationItem disabled={pageIndex === 1}>
<PaginationLink first href="#" on:click={handlePageChange(0)} />
</PaginationItem>
<PaginationItem>
<PaginationLink previous href="#" />
<PaginationItem disabled={pageIndex === 1}>
<PaginationLink
previous
href="#"
on:click={handlePageChange(page - 1)}
/>
</PaginationItem>
{#each pagesList as p}
<PaginationItem>
<PaginationLink href="#">{p + 1}</PaginationLink>
<PaginationItem active={pageIndex - 1 === p}>
<PaginationLink on:click={handlePageChange(p)} href="#"
>{p + 1}</PaginationLink
>
</PaginationItem>
{/each}
<PaginationItem>
<PaginationLink next href="#" />
<PaginationItem disabled={pageIndex === pageCount}>
<PaginationLink
next
href="#"
on:click={handlePageChange(page + 1)}
/>
</PaginationItem>
<PaginationItem>
<PaginationLink last href="#" />
<PaginationItem disabled={pageIndex === pageCount}>
<PaginationLink
last
href="#"
on:click={handlePageChange(pageCount)}
/>
</PaginationItem>
</Pagination>
</div>

@ -6,15 +6,16 @@
export let logs: Log[] = [];
export let page: number = 0;
export let pages: number = 0;
export let pageSize: number = 0;
export let total: number = 0;
$: pageSize = logs.length;
// $: pageSize = logs.length;
$: hasData = !!(logs && logs.length > 0);
</script>
<section class="flex flex-column text-sm">
{#if hasData}
<LogPager {page} {pages} pageLimit{pageSize} {total} />
<LogPager bind:page {pages} bind:pageSize {total} />
<Table rows={logs} let:row hover bordered>
<Column header="Started">
{row.Started}
@ -46,7 +47,7 @@
{row.TotalTimeMs}
</Column>
</Table>
<LogPager {page} {pages} pageLimit{pageSize} {total} />
<LogPager bind:page {pages} bind:pageSize {total} />
{:else}
<p>No Logs yet!</p>
<p>

@ -50,12 +50,20 @@
</InputGroup>
</FormGroup>
<FormGroup class="flex flex-row">
<DatetimePicker
label="Start"
defaultValue={start}
bind:value={start}
/>
<DatetimePicker label="End" defaultValue={end} bind:value={end} />
<section class="px-1">
<DatetimePicker
label="Start"
defaultValue={start}
bind:value={start}
/>
</section>
<section class="px-1">
<DatetimePicker
label="End"
defaultValue={end}
bind:value={end}
/>
</section>
</FormGroup>
<FormGroup class="mx-2">
<Dropdown>

@ -2,45 +2,23 @@
import { onMount } from "svelte";
import type { Stat } from "../api";
import randomColor from "randomcolor";
import { Chart, registerables } from "chart.js";
Chart.register(...registerables);
export let stats: Stat[] = [];
const transformStats = (ostats) => {
const chartData = ostats.reduce((agg, x) => {
let root = agg[x.Header] || {
labels: [],
dataset: {
label: x.Header,
borderColor: randomColor({
luminosity: "dark",
}), //"rgb(75,192,192)",
data: [],
},
};
root.dataset.data = root.dataset.data.concat(x.Count);
root.labels = root.labels.concat(x.Time);
agg[x.Header] = root;
return agg;
}, {});
export let stats: Stat = null;
export let column: string = null;
const finalChartData = Object.keys(chartData).map((x) => chartData[x]);
const finalChartLabels =
finalChartData.length > 0 ? finalChartData[0].labels : [];
return {
labels: finalChartLabels,
datasets: finalChartData.map((x) => x.dataset),
};
};
const generateChartOptions = (s: [], empty: Boolean = false) => {
const generateChartOptions = (s: Stat = null, empty: Boolean = false) => {
let labels = [];
let datasets = [];
if (s && s.length > 0) {
({ labels, datasets } = transformStats(s));
if (s) {
labels = s.labels;
datasets = s.datasets.map(({ label, data }) => ({
label,
data: data.map((x) => x.Count),
borderColor: randomColor(),
}));
}
var delayed;
@ -54,17 +32,25 @@
responsive: true,
maintainAspectRatio: false,
scales: {
// x: {
// type: "time",
// ticks: {
// source: "auto",
// // Disabled rotation for performance
// maxRotation: 0,
// autoSkip: true,
// },
// },
x: {
title: {
label: "time",
display: true,
},
// type: "time",
// ticks: {
// source: "auto",
// // Disabled rotation for performance
// maxRotation: 0,
// autoSkip: true,
// },
},
y: {
stacked: true,
min: 0,
ticks: {
stepSize: 5,
},
},
},
hoverRadius: 5,
@ -79,6 +65,10 @@
algorithm: "lttb",
samples: 60,
},
title: {
display: true,
text: `Count by ${column}`,
},
},
animations: {
radius: {
@ -117,7 +107,7 @@
chartInstance = new Chart(ctx, generateChartOptions(stats, true));
});
const update = (s) => {
const update = (s: Stat) => {
if (chartInstance) {
const { options, data } = generateChartOptions(s, false);
chartInstance.options = options;

@ -1,7 +1,6 @@
<script lang="ts">
import { onMount } from "svelte";
import { navigate } from "svelte-routing";
import { getUnixTime, isEqual, sub } from "date-fns";
import { getUnixTime, sub } from "date-fns";
import { buildQueryParams, fromUnixTimeSafe } from "../api/util";
import { getLogs, getStats, LogPayload, StatSearchKey } from "../api";
@ -17,24 +16,28 @@
const { search } = location;
let params = new URLSearchParams(search.substring(1));
export let start: Date =
fromUnixTimeSafe(params.get("start")) || sub(new Date(), { hours: 24 });
export let end: Date = fromUnixTimeSafe(params.get("end")) || new Date();
const aggKey = params.get("key");
const fixed = aggKey
? aggKey[0].toUpperCase() + aggKey.substr(1)
: "Domain";
export let filter: string = params.get("filter") || "";
export let chartKey: StatSearchKey =
StatSearchKey[params.get("key")] || StatSearchKey.Domain;
export let chartInterval: number = 30;
export let logPage: number = 0;
let start: Date = fromUnixTimeSafe(params.get("start")) || null;
let end: Date = fromUnixTimeSafe(params.get("end")) || null;
let logErrorMsg: string = null;
let filter: string = params.get("filter") || "";
let chartKey: StatSearchKey = StatSearchKey[fixed] || StatSearchKey.Domain;
let chartInterval: number = 30;
let page: number = Number(params.get("page") || 0);
let chartData: Stat = null;
let chartErrorMsg: string = null;
let chartDataLoading: Boolean = false;
let logDataLoading: Boolean = false;
let chartData: Stat[] = [];
let logs: Log[] = [];
let pageSize: number = 50;
let logErrorMsg: string = null;
let logDataLoading: Boolean = false;
let pageSize: number = Number(params.get("pageSize") || 25);
let pageCount: number = 0;
let logCount: number = 0;
@ -49,7 +52,8 @@
const { error, payload } = await getLogs({
start,
end,
page: logPage,
page,
pageSize,
filter,
});
logDataLoading = false;
@ -62,7 +66,7 @@
return payload;
};
const fetchStats = async () => {
const fetchStats = async (): Promise<Stat> => {
if (chartDataLoading) {
console.warn("tried loading stats while already loading");
return;
@ -80,49 +84,62 @@
if (error) {
chartErrorMsg = error;
return [];
return null;
}
return payload;
};
let done = true;
const updateData = async (evt) => {
if (chartDataLoading || logDataLoading) {
console.warn("SKIPPED DATA FETCH");
if (chartDataLoading || logDataLoading || !done) {
console.debug("SKIPPED DATA FETCH");
return;
}
console.groupCollapsed("Stats Data Update");
const { filter: eFilter, start: eStart, end: eEnd, key: eKey } = evt;
console.info("handled search, fetching new data:", evt);
const {
filter: eFilter,
start: eStart,
end: eEnd,
key: eKey,
page: ePage,
pageSize: eps,
} = evt;
console.debug("handled search, fetching new data:", evt);
console.groupEnd();
let truePage =
logCount <= pageSize * page ? 0 : ePage === 0 ? null : ePage;
if (logCount <= pageSize * page) {
console.warn(
"adjusting log page because logCount is too small for current settings"
);
page = 0;
}
navigate(
`${location?.pathname}${buildQueryParams({
start: getUnixTime(eStart),
end: getUnixTime(eEnd),
filter: eFilter,
key: eKey,
start: eStart ? getUnixTime(eStart) : null,
end: eEnd ? getUnixTime(eEnd) : null,
filter: eFilter === "" ? eFilter : null,
key: eKey !== "domain" ? eKey : null,
page: truePage,
pageSize: eps && eps !== 25 ? eps : null,
})}`,
{ replace: true }
);
const [logPayload, chartPayload] = await Promise.all([
fetchLogs(),
fetchStats(),
]);
chartData = chartPayload;
[{ page, total: logCount, pageCount, pageSize, logs }, chartData] =
await Promise.all([fetchLogs(), fetchStats()]);
({
page: logPage,
total: logCount,
pageCount,
pageSize,
logs,
} = logPayload);
done = true;
};
$: updateData({ start, end, key: chartKey, filter });
// $: updateChart({ start, end, key: chartKey });
// $: updateLog({ start, end, page, pageSize });
$: updateData({ start, end, key: chartKey, filter, pageSize, page });
</script>
<PageContainer
@ -137,7 +154,7 @@
{:else if chartErrorMsg}
<p>{chartErrorMsg}</p>
{:else}
<TimeChart stats={chartData} />
<TimeChart stats={chartData} column={chartKey} />
{/if}
</section>
<section class="my-5">
@ -148,10 +165,10 @@
{:else}
<LogViewer
pages={pageCount}
page={logPage}
total={logCount}
bind:pageSize
{logs}
bind:page
bind:pageSize
/>
{/if}
</section>

@ -1,5 +1,5 @@
{
"database": "./db.sqlite",
"database": ".",
"cache": "in-memory",
"http-addr": "localhost:8000",
"dns-addr": "localhost:5353",

@ -1,10 +1,6 @@
package main
import (
"encoding/json"
"fmt"
"os"
"github.com/adamveld12/gopherhole/internal"
)
@ -16,16 +12,3 @@ type StartupConfig struct {
Recursors []string `json:"recursors"`
Rules []internal.Rule `json:"rules"`
}
// LoadStartupConfig reads the JSON configuration at `file` and decodes it
// into conf.
//
// Returns a wrapped error when the file cannot be opened or does not
// contain valid JSON for StartupConfig.
func LoadStartupConfig(conf *StartupConfig, file string) error {
	data, err := os.Open(file)
	if err != nil {
		return fmt.Errorf("could not open file: %w", err)
	}
	// Fix: the original never closed the file handle, leaking a descriptor
	// on every call.
	defer data.Close()

	if err := json.NewDecoder(data).Decode(conf); err != nil {
		return fmt.Errorf("could not read json file: %w", err)
	}

	return nil
}

@ -12,6 +12,7 @@ github.com/miekg/dns v1.1.41 h1:WMszZWJG0XmzbK9FEmzH2TVcqYzFesusSIB41b8KHxY=
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210303074136-134d130e1a04 h1:cEhElsAv9LUt9ZUUocxzWe05oFLVd+AA2nstydTeI8g=

@ -20,10 +20,11 @@ func (dm *DomainManager) ServeDNS(w dns.ResponseWriter, r *dns.Msg) {
start := time.Now()
q := r.Question[0]
responseMessage := new(dns.Msg)
ql := QueryLog{
Started: start.UTC(),
Protocol: w.RemoteAddr().Network(),
ClientIP: strings.Split(w.RemoteAddr().String(), ":")[0],
ClientIP: w.RemoteAddr().String()[:strings.LastIndex(w.RemoteAddr().String(), ":")],
Domain: q.Name,
Status: NoAnswer,
}
@ -55,12 +56,13 @@ func (dm *DomainManager) ServeDNS(w dns.ResponseWriter, r *dns.Msg) {
responseMessage.Compress = true
ql.TotalTimeMs = int(time.Since(start).Milliseconds())
log.Printf("%+v", ql)
go func() {
if err := dm.Storage.Log(ql); err != nil {
go func(q QueryLog) {
if err := dm.Storage.Log(q); err != nil {
log.Printf("ERROR WRITING LOG: %v", err)
}
}()
}(ql)
if err := w.WriteMsg(responseMessage); err != nil {
log.Println(err)
@ -88,3 +90,22 @@ type QueryLog struct {
Error string
Status ResponseStatus
}
// GetAggregateColumnHeader selects the QueryLog field that acts as the
// grouping key for aggregate column h. Any unrecognized column falls back
// to the domain name.
func GetAggregateColumnHeader(ql QueryLog, h LogAggregateColumn) string {
	switch h {
	case Domain:
		return ql.Domain
	case ClientIP:
		return ql.ClientIP
	case RecurseIP:
		return ql.RecurseUpstreamIP
	case Protocol:
		return ql.Protocol
	case Status:
		return string(ql.Status)
	case LookupError:
		return ql.Error
	default:
		return ql.Domain
	}
}

@ -42,24 +42,8 @@ func NewAdminHandler(c Cache, s Storage, re *RuleEngine, content fs.FS) http.Han
handler.Use(middleware.RealIP)
handler.Use(middleware.Logger)
handler.Use(middleware.Recoverer)
handler.Use(middleware.Timeout(time.Second * 5))
// TODO: smarter way https://github.com/go-chi/chi/issues/403
// handler.Handle("/build/", http.StripPrefix("/build/", http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
// file, err := content.Open(fmt.Sprintf("client/public/build/%s", r.URL.Path))
// if err != nil {
// rw.WriteHeader(http.StatusNotFound)
// return
// }
// defer file.Close()
// if _, err := io.Copy(rw, file); err != nil {
// rw.WriteHeader(http.StatusInternalServerError)
// return
// }
// })))
handler.Route("/api/v1", func(r chi.Router) {
r.Use(middleware.AllowContentType("application/json; utf-8", "application/json"))
r.Use(cors.Handler(cors.Options{
@ -69,6 +53,10 @@ func NewAdminHandler(c Cache, s Storage, re *RuleEngine, content fs.FS) http.Han
AllowCredentials: false,
MaxAge: 300,
}))
r.Use(middleware.SetHeader("Content-Type", "application/json; utf-8"))
r.Use(middleware.Timeout(time.Second * 5))
r.Get("/metrics/log", RestHandler(a.getLog).ToHF())
r.Get("/metrics/stats", RestHandler(a.getStats).ToHF())
@ -83,6 +71,7 @@ func NewAdminHandler(c Cache, s Storage, re *RuleEngine, content fs.FS) http.Han
r.Get("/recursors/{id:[0-9]+}", RestHandler(a.getRecursor).ToHF())
r.Delete("/recursor/{id:[0-9]+}", RestHandler(a.deleteRecursor).ToHF())
r.HandleFunc("/signal", a.signal)
// r.Put("/rules/lists", a.addRulelist)
// r.Get("/rules/lists", a.getRuleLists)
// r.Post("/rules/lists/reload/{id}", a.reloadRuleLists)
@ -92,7 +81,6 @@ func NewAdminHandler(c Cache, s Storage, re *RuleEngine, content fs.FS) http.Han
// r.Delete("/cache/purgeall", RestHandler(a.purgeAll).ToHF())
// r.Delete("/cache/purge", a.purgeKey)
// r.Get("/cache", a.getCacheContents)
r.HandleFunc("/signal", a.signal)
})
fs := http.FS(content)

@ -53,7 +53,6 @@ func (a *adminHandler) getStats(r *http.Request) (*RestResponse, error) {
}
return BasicResponse(true, la), nil
}
type LogFilter struct {
@ -64,11 +63,13 @@ func (a *adminHandler) getLog(r *http.Request) (*RestResponse, error) {
q := r.URL.Query()
startFilter := q.Get("start")
endFilter := q.Get("end")
pageSizeStr := q.Get("pageSize")
// filter := LogFilter{Expression: q.Get("filter")}
pageStr := q.Get("page")
var err error
var page int
pageSize := 25
startTime := time.Now().Add(time.Hour * -86400)
endTime := time.Now()
@ -96,11 +97,17 @@ func (a *adminHandler) getLog(r *http.Request) (*RestResponse, error) {
}
}
if pageSizeStr != "" {
if pageSize, err = strconv.Atoi(pageSizeStr); err != nil {
return BasicResponse(false, "pageSize: must be a valid integer"), nil
}
}
gli := GetLogInput{
// Filter: filter,
Start: startTime,
End: endTime,
Limit: 250,
Limit: pageSize,
Page: page,
}

@ -21,7 +21,7 @@ type Resolved struct {
func (r Recursor) Resolve(request *dns.Msg) (Resolved, error) {
var result Resolved
errs := make([]error, len(r.Upstreams))
errs := make([]error, 0, len(r.Upstreams))
var err error
var upstreamsTried int

@ -50,6 +50,8 @@ func getType(rt RuleType) uint16 {
return dns.TypeA
case CNAME:
return dns.TypeCNAME
case Recurse:
break
}
return 0

@ -2,8 +2,9 @@ package internal
import (
"database/sql"
"errors"
"fmt"
"io"
"log"
"net"
"strconv"
"strings"
@ -12,32 +13,174 @@ import (
_ "github.com/mattn/go-sqlite3"
)
const ISO8601 = "2006-01-02 15:04:05.999"
type Storage interface {
io.Closer
Open() error
AddRecursors(net.IP, int, int, int) error
GetRecursors() ([]RecursorRow, error)
UpdateRecursor(int, RecursorRow) error
DeleteRecursors(int) error
AddRule(RuleRow) error
GetRule(int) (RuleRow, error)
GetRules() ([]RuleRow, error)
UpdateRule(int, RuleRow) error
DeleteRule(int) error
Log(QueryLog) error
GetLog(GetLogInput) (GetLogResult, error)
GetLogAggregate(LogAggregateInput) ([]LogAggregateDataPoint, error)
}
const (
defaultSamples = 64
maxSamples = 128
)
type Sqlite struct {
Path string
*sql.DB
}
// Open creates or opens the sqlite database file `db.sqlite` under ss.Path
// (shared cache, WAL journal) and initializes the schema. It must be called
// before any other method on Sqlite.
func (ss *Sqlite) Open() error {
	dsn := fmt.Sprintf("%s/db.sqlite?cache=shared&_journal=WAL", ss.Path)

	db, err := sql.Open("sqlite3", dsn)
	if err != nil {
		return fmt.Errorf("could not open db: %w", err)
	}

	// Serialize all access through a single connection.
	db.SetMaxOpenConns(1)
	ss.DB = db

	return initTable(db)
}
// Close releases the underlying database handle.
//
// Fix: the original discarded the error returned by (*sql.DB).Close and
// always reported success; propagate it to the caller instead.
func (ss *Sqlite) Close() error {
	return ss.DB.Close()
}
// GetLogAggregate buckets query-log rows into fixed-width time slots and
// returns one labelled dataset per distinct value of la.Column.
//
// Defaults: End -> now (UTC), Start -> now-12h, Column -> domain. The slot
// width is la.IntervalSeconds when positive, otherwise derived from the
// timespan; the slot count is capped at maxSamples to bound response size.
func (ss *Sqlite) GetLogAggregate(la LogAggregateInput) (LogAggregate, error) {
	if la.End.IsZero() || la.End.After(time.Now()) {
		la.End = time.Now().UTC()
	}

	if la.Start.After(la.End) {
		// Fix: this branch fires when Start is AFTER End; the original
		// message said "before".
		return LogAggregate{}, errors.New("Start time cannot be after end time")
	}

	if la.Start.IsZero() {
		la.Start = time.Now().UTC().Add(time.Hour * -12)
	}

	timespanSecs := int(la.End.Sub(la.Start) / time.Second)

	// how many data points to show on the line plot
	sampleCount := defaultSamples
	if la.IntervalSeconds <= 0 {
		la.IntervalSeconds = timespanSecs / sampleCount
	}
	if la.IntervalSeconds <= 0 {
		// Fix: timespans shorter than defaultSamples seconds previously
		// yielded IntervalSeconds == 0 and a divide-by-zero panic below.
		la.IntervalSeconds = 1
	}
	sampleCount = timespanSecs / la.IntervalSeconds

	// cap to prevent performance issues
	if sampleCount > maxSamples {
		sampleCount = maxSamples
		la.IntervalSeconds = timespanSecs / sampleCount
	}

	log.Printf("%+v - samples: %v - timespan (seconds): %v", la, sampleCount, timespanSecs)

	// Whitelist of groupable columns; anything else falls back to domain.
	switch la.Column {
	case string(Domain), string(Status), string(ClientIP), string(Protocol):
	default:
		la.Column = string(Domain)
	}

	logs, err := ss.GetLog(GetLogInput{
		Start: la.Start,
		End:   la.End,
		Limit: 10000,
		Page:  0,
	})
	if err != nil {
		return LogAggregate{}, err
	}

	if logs.PageCount > 1 {
		return LogAggregate{}, fmt.Errorf("more than one page available: %v", logs.PageCount)
	}

	buckets := map[string][]StatsDataPoint{}
	for _, l := range logs.Logs {
		k := GetAggregateColumnHeader(l, LogAggregateColumn(la.Column))
		if _, ok := buckets[k]; !ok {
			buckets[k] = make([]StatsDataPoint, sampleCount+1)
		}

		dataset := buckets[k]
		timeIndex := int(l.Started.Sub(la.Start)/time.Second) / la.IntervalSeconds
		// Fix: after sampleCount is capped above, integer rounding can make
		// timeIndex exceed sampleCount and overrun the slice (previously an
		// index-out-of-range panic); clamp it to the valid range.
		if timeIndex < 0 {
			timeIndex = 0
		} else if timeIndex > sampleCount {
			timeIndex = sampleCount
		}

		ladp := dataset[timeIndex]
		ladp.Header = k
		offsetSecs := (timeIndex * la.IntervalSeconds)
		ladp.Time = la.Start.Add(time.Duration(offsetSecs) * time.Second)
		ladp.Count += 1
		ladp.Value += float64(l.TotalTimeMs)
		buckets[k][timeIndex] = ladp
	}

	laResult := LogAggregate{
		Labels:   make([]string, sampleCount),
		Datasets: make([]LogAggregateDataset, len(buckets)),
	}

	// Build the shared slot labels and backfill zero-valued slot timestamps.
	// Fix: the original re-declared `idx := 0` inside this loop, shadowing
	// the time-slot index and writing timestamps at dataset positions
	// instead of the matching time slot.
	for idx := 0; idx < sampleCount; idx++ {
		offsetSecs := (idx * la.IntervalSeconds)
		ts := la.Start.Add(time.Duration(offsetSecs) * time.Second)
		laResult.Labels[idx] = ts.Format("01-02 15:04:05")

		for _, v := range buckets {
			if v[idx].Time.IsZero() {
				v[idx].Time = ts
			}
		}
	}

	// Attach each bucket's series once (the original reassigned the dataset
	// list on every time-slot iteration).
	dsIdx := 0
	for k, v := range buckets {
		laResult.Datasets[dsIdx] = LogAggregateDataset{Label: k, Dataset: v}
		dsIdx++
	}

	return laResult, nil
}
// LogAggregate is the chart-ready aggregation result: one shared list of
// time-slot labels plus one dataset per grouped column value.
type LogAggregate struct {
	Labels   []string              `json:"labels"`
	Datasets []LogAggregateDataset `json:"datasets"`
}

// LogAggregateDataset is a single labelled time series, one entry per
// time slot in the parent LogAggregate's Labels.
type LogAggregateDataset struct {
	Label   string           `json:"label"`
	Dataset []StatsDataPoint `json:"data"`
}
// Log persists a single DNS query-log entry into the log table.
// The start timestamp is normalized to UTC in the ISO8601 layout.
func (ss *Sqlite) Log(ql QueryLog) error {
	const insertStmt = `
	INSERT INTO log
	(started, clientIp, protocol, domain, totalTimeMs, error, recurseRoundTripTimeMs, recurseUpstreamIp, status)
	VALUES
	(?, ?, ?, ?, ?, ?, ?, ?, ?);
	`

	_, err := ss.DB.Exec(insertStmt,
		ql.Started.UTC().Format(ISO8601),
		ql.ClientIP,
		ql.Protocol,
		ql.Domain,
		ql.TotalTimeMs,
		ql.Error,
		ql.RecurseRoundTripTimeMs,
		ql.RecurseUpstreamIP,
		ql.Status,
	)
	return err
}
func (ss *Sqlite) GetRecursors() ([]RecursorRow, error) {
sql := `
SELECT id, ipAddress, timeoutMs, weight FROM recursors ORDER BY weight ASC;
@ -47,9 +190,12 @@ func (ss *Sqlite) GetRecursors() ([]RecursorRow, error) {
if err != nil {
return nil, fmt.Errorf("could not execute select for recursors: %w", err)
}
defer rows.Close()
if err := rows.Err(); err != nil {
return nil, err
}
results := []RecursorRow{}
for rows.Next() {
var row RecursorRow
@ -73,13 +219,6 @@ func (ss *Sqlite) DeleteRecursors(id int) error {
return nil
}
type RecursorRow struct {
ID int `json:"id"`
IpAddress string `json:"ipAddress"`
TimeoutMs int `json:"timeoutMs"`
Weight int `json:"weight"`
}
func (rr RecursorRow) ValidIp() (net.IP, int, bool) {
ipAddrFrags := strings.Split(rr.IpAddress, ":")
if len(ipAddrFrags) == 0 || len(ipAddrFrags) > 2 {
@ -118,22 +257,6 @@ func (ss *Sqlite) AddRecursors(ip net.IP, port, timeout, weight int) error {
return nil
}
type GetLogInput struct {
Start time.Time `json:"start"`
End time.Time `json:"end"`
DomainFilter string `json:"rawfilter"`
Limit int `json:"pageSize"`
Page int `json:"page"`
}
type RuleRow struct {
ID int `json:"id"`
Weight int `json:"weight"`
Enabled bool `json:"enabled"`
Created time.Time `json:"created"`
Rule
}
func (ss *Sqlite) UpdateRule(id int, in RuleRow) error {
sql := `UPDATE rules SET
name = ?,
@ -234,16 +357,9 @@ func (ss *Sqlite) GetRules() ([]RuleRow, error) {
return results, nil
}
type GetLogResult struct {
GetLogInput
TotalResults int `json:"total"`
PageCount int `json:"pageCount"`
Logs []QueryLog `json:"logs"`
}
func (ss *Sqlite) GetLog(in GetLogInput) (GetLogResult, error) {
if in.Limit <= 0 {
in.Limit = 100
in.Limit = 25
}
if in.Start.IsZero() {
@ -259,21 +375,37 @@ func (ss *Sqlite) GetLog(in GetLogInput) (GetLogResult, error) {
Logs: []QueryLog{},
}
lpi, err := ss.GetPagingInfo(in)
if err != nil {
return glr, err
}
glr.TotalResults = lpi.Total
glr.PageCount = lpi.PageCount + 1
sql := `
SELECT
started, clientIp, protocol, domain, totalTimeMs,
error, recurseRoundTripTimeMs, recurseUpstreamIp, status
FROM
log
WHERE
id > ?
AND strftime('%s', started) > strftime('%s', ?)
AND strftime('%s', started) < strftime('%s', ?)
ORDER BY started DESC
LIMIT ?;
started, clientIp, protocol, domain, totalTimeMs,
error, recurseRoundTripTimeMs, recurseUpstreamIp, status
FROM (
SELECT id,
started,
clientIp,
protocol,
domain,
totalTimeMs,
error,
recurseRoundTripTimeMs,
recurseUpstreamIp,
status
FROM log
WHERE strftime('%s', started) >= strftime('%s', ?)
AND strftime('%s', started) <= strftime('%s', ?)
ORDER BY started DESC
) WHERE id <= ? ORDER BY id DESC LIMIT ?;
`
rows, err := ss.DB.Query(sql, in.Page*in.Limit, in.Start.UTC().Format(ISO8601), in.End.UTC().Format(ISO8601), in.Limit)
rows, err := ss.DB.Query(sql, in.Start.UTC().Format(ISO8601), in.End.UTC().Format(ISO8601), lpi.FirstItemID, in.Limit)
if err != nil {
return glr, fmt.Errorf("issue with GetLog sql query: %w", err)
}
@ -308,164 +440,40 @@ func (ss *Sqlite) GetLog(in GetLogInput) (GetLogResult, error) {
glr.Logs = append(glr.Logs, q)
}
total, pageCount, err := ss.GetPagingInfo(in)
if err != nil {
return glr, err
}
glr.TotalResults = total
glr.PageCount = pageCount
return glr, nil
}
func (ss *Sqlite) GetPagingInfo(in GetLogInput) (totalItems, pageCount int, err error) {
type LogPageInfo struct {
Total int
PageCount int
FirstItemID int
}
func (ss *Sqlite) GetPagingInfo(in GetLogInput) (lpi LogPageInfo, err error) {
sql := `
SELECT
COUNT(*) as totalLogsEntries,
COUNT(*) / ? as pageCount
COUNT(*) / ? as pageCount,
MAX(id) - ? as firstItemId
FROM
log
WHERE
strftime('%s', started) > strftime('%s', ?)
strftime('%s', started) > strftime('%s', ?)
AND strftime('%s', started) < strftime('%s', ?)
ORDER BY id DESC
`
row := ss.QueryRow(sql, in.Limit, in.Start.UTC().Format(ISO8601), in.End.UTC().Format(ISO8601))
if err = row.Scan(&totalItems, &pageCount); err != nil {
pageOffset := in.Limit * in.Page
row := ss.QueryRow(sql, in.Limit, pageOffset, in.Start.UTC().Format(ISO8601), in.End.UTC().Format(ISO8601))
if err = row.Scan(&lpi.Total, &lpi.PageCount, &lpi.FirstItemID); err != nil {
return
}
return
}
type LogAggregateColumn string
var (
Domain = LogAggregateColumn("domain")
ClientIP = LogAggregateColumn("clientIp")
RecurseIP = LogAggregateColumn("recurseUpStreamIP")
Protocol = LogAggregateColumn("protocol")
Status = LogAggregateColumn("status")
AggregateKeys = map[string]LogAggregateColumn{
"domain": Domain,
"clientIp": ClientIP,
"recurseUpStreamIP": RecurseIP,
"protocol": Protocol,
"status": Status,
if pageOffset > lpi.Total {
err = errors.New("page number too high")
}
)
type LogAggregateInput struct {
IntervalSeconds int
Start time.Time
End time.Time
Column string
}
type LogAggregateDataPoint struct {
Header string
AverageTotalTime float64
Count int
Time time.Time
}
func (ss *Sqlite) GetLogAggregate(la LogAggregateInput) ([]LogAggregateDataPoint, error) {
timeWindow := int64(5 * 60)
column := "domain"
if lac, ok := AggregateKeys[la.Column]; ok {
column = string(lac)
}
if la.IntervalSeconds > 0 {
timeWindow = int64(la.IntervalSeconds)
}
sql := `
SELECT
%s,
ROUND(AVG(totalTimeMs), 3) as averageTotalTime,
COUNT(*) as requests,
strftime('%%s', started)/(%d) as "timeWindow"
FROM log
GROUP BY %s, strftime('%%s', started) / (%d)
ORDER BY started ASC;
`
sql = fmt.Sprintf(sql, column, timeWindow, column, timeWindow)
rows, err := ss.Query(sql)
if err != nil {
return nil, err
}
defer rows.Close()
if err := rows.Err(); err != nil {
return nil, err
}
var results []LogAggregateDataPoint
for rows.Next() {
var ladp LogAggregateDataPoint
var timeInterval int64
if err := rows.Scan(
&ladp.Header,
&ladp.AverageTotalTime,
&ladp.Count,
&timeInterval,
); err != nil {
return nil, err
}
ladp.Time = time.Unix(timeInterval*timeWindow, 0)
results = append(results, ladp)
}
return results, nil
}
func (ss *Sqlite) Log(ql QueryLog) error {
sql := `
INSERT INTO log
(started, clientIp, protocol, domain, totalTimeMs, error, recurseRoundTripTimeMs, recurseUpstreamIp, status)
VALUES
(?, ?, ?, ?, ?, ?, ?, ?, ?);
`
if _, err := ss.DB.Exec(sql,
ql.Started.UTC().Format(ISO8601),
ql.ClientIP,
ql.Protocol,
ql.Domain,
ql.TotalTimeMs,
ql.Error,
ql.RecurseRoundTripTimeMs,
ql.RecurseUpstreamIP,
ql.Status,
); err != nil {
return err
}
return nil
}
func (ss *Sqlite) Open() error {
db, err := sql.Open("sqlite3", fmt.Sprintf("%s?cache=shared&_journal=WAL", ss.Path))
if err != nil {
return fmt.Errorf("could not open db: %w", err)
}
db.SetMaxOpenConns(1)
ss.DB = db
if err := initTable(db); err != nil {
return err
}
return nil
return
}
func initTable(db *sql.DB) error {

@ -0,0 +1,96 @@
package internal
import (
"io"
"net"
"time"
)
// ISO8601 is the timestamp layout used for values stored in sqlite.
const ISO8601 = "2006-01-02 15:04:05.999"

var (
	// Columns a query-log aggregation may group by.
	Domain      = LogAggregateColumn("domain")
	ClientIP    = LogAggregateColumn("clientIp")
	RecurseIP   = LogAggregateColumn("recurseUpStreamIP")
	Protocol    = LogAggregateColumn("protocol")
	Status      = LogAggregateColumn("status")
	LookupError = LogAggregateColumn("error")

	// AggregateKeys maps external key names to aggregate columns.
	// NOTE(review): LookupError ("error") is declared above and handled by
	// GetAggregateColumnHeader but is absent from this map — confirm
	// whether clients should be able to select it.
	AggregateKeys = map[string]LogAggregateColumn{
		"domain":            Domain,
		"clientIp":          ClientIP,
		"recurseUpStreamIP": RecurseIP,
		"protocol":          Protocol,
		"status":            Status,
	}

	// Metric name constants.
	CountMetric       = "Count"
	TotalTimeMetric   = "TotalTimeMs"
	ErrorMetric       = "Errors"
	RecurseTimeMetric = "RecurseTimeMs"
)

// LogAggregateInput describes an aggregation request over the query log.
type LogAggregateInput struct {
	IntervalSeconds int // width of each time bucket in seconds; <= 0 means auto-derive
	Start           time.Time
	End             time.Time
	Column          string // column to group by; see AggregateKeys
}

// StatsDataPoint is one time-bucket sample for a single grouped value.
type StatsDataPoint struct {
	Header string    // grouping key (e.g. a domain or client IP)
	Value  float64   // accumulated TotalTimeMs within the bucket
	Count  int       // number of log rows that fell into the bucket
	Time   time.Time // bucket start timestamp
}

// Storage is the persistence contract for the server: recursor management,
// rule CRUD, and query-log writing, reading, and aggregation.
type Storage interface {
	io.Closer

	// Open prepares the backing store; call before any other method.
	Open() error

	AddRecursors(net.IP, int, int, int) error
	GetRecursors() ([]RecursorRow, error)
	UpdateRecursor(int, RecursorRow) error
	DeleteRecursors(int) error

	AddRule(RuleRow) error
	GetRule(int) (RuleRow, error)
	GetRules() ([]RuleRow, error)
	UpdateRule(int, RuleRow) error
	DeleteRule(int) error

	Log(QueryLog) error
	GetLog(GetLogInput) (GetLogResult, error)
	GetLogAggregate(LogAggregateInput) (LogAggregate, error)
}

// RecursorRow is a stored upstream DNS server entry.
type RecursorRow struct {
	ID        int    `json:"id"`
	IpAddress string `json:"ipAddress"`
	TimeoutMs int    `json:"timeoutMs"`
	Weight    int    `json:"weight"`
}

// GetLogInput is a paged, time-bounded query against the query log.
type GetLogInput struct {
	Start time.Time `json:"start"`
	End   time.Time `json:"end"`
	// DomainFilter is a raw filter expression; presumably matched against
	// the domain column — TODO confirm where it is applied.
	DomainFilter string `json:"rawfilter"`
	Limit        int    `json:"pageSize"` // rows per page
	Page         int    `json:"page"`     // zero-based page index
}

// RuleRow is a stored rule plus its database bookkeeping fields.
type RuleRow struct {
	ID      int       `json:"id"`
	Weight  int       `json:"weight"`
	Enabled bool      `json:"enabled"`
	Created time.Time `json:"created"`
	Rule
}

// GetLogResult echoes the query input alongside the matching page of logs
// and overall paging totals.
type GetLogResult struct {
	GetLogInput
	TotalResults int        `json:"total"`
	PageCount    int        `json:"pageCount"`
	Logs         []QueryLog `json:"logs"`
}

// LogAggregateColumn names a query-log column usable as a grouping key.
type LogAggregateColumn string

@ -3,11 +3,8 @@ package main
import (
"context"
"flag"
"fmt"
"log"
"net/http"
"regexp"
"strings"
"time"
"github.com/adamveld12/gopherhole/client/public"
@ -16,27 +13,29 @@ import (
)
var (
configFilePath = flag.String("config", "./config.json", "Config file")
// dbPath = flag.String("db-path", ".", "Directory to write database files to")
// httpAddr = flag.String("http-address", ":8080", "Bind address for http server")
// dnsAddr = flag.String("dns-address", ":53", "Bind address for dns server")
dbPath = flag.String("db-path", ".", "Directory to write database files to")
httpAddr = flag.String("http-address", "0.0.0.0:80", "Bind address for http server")
dnsAddr = flag.String("dns-address", "0.0.0.0:53", "Bind address for dns server")
defaultUpstream = flag.String("upstream", "1.1.1.1:53", "default upstream DNS server when no others are specified")
)
func main() {
flag.Parse()
log.SetFlags(log.LUTC | log.Lshortfile)
var conf StartupConfig
if err := LoadStartupConfig(&conf, *configFilePath); err != nil {
log.Fatalf("%+v", err)
conf := StartupConfig{
HTTPAddr: *httpAddr,
DNSAddr: *dnsAddr,
DatabaseURL: *dbPath,
}
// conf.HTTPAddr = *httpAddr
// conf.DNSAddr = *dnsAddr
log.Printf("%+v", conf)
store := &internal.Sqlite{
Path: conf.DatabaseURL,
}
defer store.Close()
if err := store.Open(); err != nil {
log.Fatalf("COULD NOT OPEN SQLITE DB: %v", err)
}
@ -59,7 +58,7 @@ func main() {
Storage: store,
RuleEvaluator: re,
Recursors: internal.Recursor{
Upstreams: cleanRecursors(conf.Recursors),
Upstreams: []string{*defaultUpstream},
Client: dnsClient,
},
}
@ -76,23 +75,3 @@ func main() {
log.Fatal(err)
}
}
// cleanRecursors validates a list of upstream DNS server addresses and
// normalizes entries that lack an explicit port to use port 53.
//
// An entry that does not look like an IPv4 address (optionally with a port)
// terminates the process via log.Fatalf.
func cleanRecursors(recursors []string) []string {
	cr := []string{}
	reg := regexp.MustCompile(`^((?:\d{1,4}\.?){4})(?::(\d{0,5}))?`)
	for _, r := range recursors {
		if !reg.MatchString(r) {
			// Fix: the message previously said "ip:addr format"; the
			// expected shape is ip[:port].
			log.Fatalf("%s is not a valid DNS server. Must be in ip:port format.", r)
		}

		// Append the default DNS port when none was supplied.
		cleanedIPAddr := r
		if !strings.Contains(r, ":") {
			cleanedIPAddr = fmt.Sprintf("%s:53", r)
		}

		cr = append(cr, cleanedIPAddr)
	}

	log.Println(cr)
	return cr
}

@ -1,16 +1,36 @@
LINTBIN := $(shell go env GOPATH)/bin/golangci-lint
COMMIT_SHA := $(shell git rev-parse HEAD | cut -c 1-11)
build: clobber .bin/client/public .bin/gopherhole
dev: clean .bin/gopherhole .bin/config.json
cd .bin && ./gopherhole -config config.json
dev: clean .bin/gopherhole
cd .bin && ./gopherhole -dns-address=:5353 -http-address=:8000
client-dev:
docker run -it --rm --name='client-dev' \
--workdir /opt/client \
-v $$PWD/client:/opt/client \
--entrypoint ash \
--user $${UID}:$${GID} \
--memory=4g \
-p 5000:5000 \
node:lts-alpine
clean:
@rm -rf .bin/gopherhole .bin/config.json .bin/client
@rm -rf .bin/gopherhole .bin/client
clobber: clean
@rm -rf .bin ./client/node_modules ./client/public/build
vdhsn/gopherhole:
docker build -t vdhsn/gopherhole:latest .
docker build \
--label="org.opencontainers.image.created=$(shell date +'%FT%T%:z')" \
--label="org.opencontainers.image.source=https://github.com/adamveld12/gopherhole.git" \
--label="org.opencontainers.image.url=https://github.com/adamveld12/gopherhole" \
--label="org.opencontainers.image.revision=$(COMMIT_SHA)" \
--label="org.opencontainers.image.licenses=MIT" \
--label="org.opencontainers.image.authors=Adam Veldhousen <adam@vdhsn.com>" \
-t vdhsn/gopherhole:latest .
test:
dig -p 5353 twitter.com @localhost
@ -18,22 +38,33 @@ test:
dig -p 5353 loki.veldhousen.ninja @localhost
dig -p 5353 www.liveauctioneers.com @localhost
lint: tidy
# lint: $(LINTBIN) tidy
# golangci-lint run -p bugs \
# -p performance \
# -p unused \
# -p complexity \
# --sort-results \
# --max-same-issues 5 \
# --no-config \
# --verbose \
# --timeout 30s \
# --concurrency 4 \
# --issues-exit-code 1
tidy:
go mod tidy
.PHONY: build clean clobber client-dev dev lint tidy test vdhsn/gopherhole
.PHONY: build clean clobber client-dev dev test vdhsn/gopherhole
.bin:
mkdir -p .bin
.bin/gopherhole: .bin
# @go build --tags "sqlite_foreign_keys fts5" -v -o .bin/gopherhole .
@go build --tags "fts5" -v -o .bin/gopherhole .
.bin/config.json:
@cp ./config.example.json .bin/config.json
client-dev: client/node_modules
cd ./client && npm run dev
.bin/client/public: .bin client/public/build
mkdir -p .bin/client/public
cp -R ./client/public/ .bin/client/
@ -43,3 +74,9 @@ client/public/build: client/node_modules
client/node_modules:
cd ./client && npm install
# Install golangci-lint into $(shell go env GOPATH)/bin via the upstream
# install script (referenced by the commented-out lint target above).
$(LINTBIN):
	echo "installing golangci-lint to $(LINTBIN)..."
	curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(shell go env GOPATH)/bin latest
	chmod +x $(LINTBIN)

Loading…
Cancel
Save