pull/1/head
Adam Veldhousen 3 years ago
parent 8b76101107
commit 39b0b866d0
Signed by: adam
GPG Key ID: 6DB29003C6DD1E4B

@@ -7,9 +7,16 @@ export interface Recursor {
weight: number
}
export const getRecursors = async () => await apiCall<Recursor>('recursors')
export const getRecursors = async () => await apiCall<Recursor[]>('recursors')
export class LogPayload {
pageSize: number = 0;
pageCount: number = 0;
page: number = 0;
total: number = 0;
logs: Log[] = [];
}
export interface Log {
Started: string
Domain: string
@@ -34,7 +41,7 @@ export const getLogs = async({
end = new Date(),
page = 0,
filter = ""
}: LogSearchOptions) => await apiCall<Log>('metrics/log', 'GET', {
}: LogSearchOptions) => await apiCall<LogPayload>('metrics/log', 'GET', {
filter,
page,
start: getUnixTime(start),
@@ -67,7 +74,7 @@ export const getStats = async ({
end = new Date(),
key = StatSearchKey.Domain,
interval = 30,
}: StatsSearchOptions) => await apiCall<Stat>('metrics/stats', 'GET', {
}: StatsSearchOptions) => await apiCall<Stat[]>('metrics/stats', 'GET', {
start: getUnixTime(start),
end: getUnixTime(end),
key,
@@ -91,4 +98,4 @@ export interface Rule {
ttl: number
}
export const getRules = () => apiCall<Rule>('rules');
export const getRules = () => apiCall<Rule[]>('rules');
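With getLogs now typed against LogPayload instead of Log, callers receive the paging metadata together with the rows. A minimal consumer sketch (hypothetical, not part of this change; it assumes LogSearchOptions accepts plain Date values, as the defaults above suggest):

// Hypothetical call site for the retyped getLogs.
const showFirstPage = async () => {
  const { error, payload } = await getLogs({
    start: new Date(Date.now() - 24 * 60 * 60 * 1000),
    end: new Date(),
    page: 0,
    filter: "",
  });
  if (error || !payload) return;
  // payload is a LogPayload, so paging metadata arrives with the rows.
  console.log(`page ${payload.page + 1}/${payload.pageCount}, ${payload.total} logs total`);
  for (const log of payload.logs) console.log(log.Started, log.Domain);
};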

@@ -1,8 +1,7 @@
import { sub, format, fromUnixTime } from 'date-fns';
interface APIResponse<T> {
success?: boolean
payload?: T[]
payload?: T
error?: string
}
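Since payload is now a bare T rather than T[], each call site decides the exact payload shape. A small type-level illustration (these aliases are only illustrative and do not exist in the codebase):

// Array-ness is now chosen per call:
type RecursorsResponse = APIResponse<Recursor[]>; // payload?: Recursor[]
type LogsResponse = APIResponse<LogPayload>;      // payload?: LogPayload, a single object carrying paging metadata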

@@ -0,0 +1,87 @@
<script lang="ts">
import {
Input,
Pagination,
PaginationItem,
PaginationLink,
} from "sveltestrap";
export let page: number = 0;
export let pages: number = 0;
export let total: number = 0;
export let pageSize: number = 50;
$: pageIndex = page + 1;
$: pageCount = pages === 0 ? 1 : pages;
$: pageSizeLabel = pageSize + "";
let pagesList = [];
$: {
pagesList = [];
for (
let i = Math.max(page - 5, 0);
i < Math.min(pages, page + 5);
i++
) {
pagesList.push(i);
}
}
const handlePageSizeChange = ({ target: { value } }) => {
pageSize = Number(value);
};
const pageSizes = [25, 50, 100, 250];
</script>
<section class="flex flex-row my-2 justify-between items-center">
<p>Page {pageIndex}/{pageCount}</p>
<div>
<Pagination size="sm">
<PaginationItem>
<PaginationLink first href="#" />
</PaginationItem>
<PaginationItem>
<PaginationLink previous href="#" />
</PaginationItem>
{#each pagesList as p}
<PaginationItem>
<PaginationLink href="#">{p + 1}</PaginationLink>
</PaginationItem>
{/each}
<PaginationItem>
<PaginationLink next href="#" />
</PaginationItem>
<PaginationItem>
<PaginationLink last href="#" />
</PaginationItem>
</Pagination>
</div>
<div class="mx-2 flex flex-row items-center">
Showing&nbsp;
<Input
type="select"
name="page-size"
bsSize="sm"
id="page-size-selector"
value={pageSizeLabel}
on:change={handlePageSizeChange}
>
{#each pageSizes as pg}
<option selected={pageSize === Number(pg)}>{pg}</option>
{/each}
</Input>
<p class="m-0">&nbsp;of {total}</p>
</div>
</section>
<style lang="postcss">
p {
margin: 0;
flex: 0 0 auto;
}
div {
flex: 0 0 auto;
}
</style>
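As a worked example (not part of the component), the reactive block above yields a window of at most ten page indices around the current page, which the markup renders one-based:

// Same windowing logic as the reactive block in LogPager.svelte.
const windowFor = (page: number, pages: number): number[] => {
  const list: number[] = [];
  for (let i = Math.max(page - 5, 0); i < Math.min(pages, page + 5); i++) {
    list.push(i);
  }
  return list;
};

windowFor(0, 3);   // [0, 1, 2]       -> rendered as links "1".."3"
windowFor(12, 40); // [7, 8, ..., 16] -> rendered as links "8".."17"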

@@ -1,13 +1,20 @@
<script lang="ts">
import { Column, Table } from "sveltestrap";
import LogPager from "./LogPager.svelte";
import type { Log } from "../api";
// export let page: number = 0;
export let logs: Log[] = [];
export let page: number = 0;
export let pages: number = 0;
export let total: number = 0;
export let pageSize: number = 50;
$: hasData = !!(logs && logs.length > 0);
</script>
<div class="flex flex-column text-sm">
{#if logs && logs.length > 0}
<section class="flex flex-column text-sm">
{#if hasData}
<LogPager {page} {pages} bind:pageSize {total} />
<Table rows={logs} let:row hover bordered>
<Column header="Started">
{row.Started}
@@ -39,10 +46,11 @@
{row.TotalTimeMs}
</Column>
</Table>
<LogPager {page} {pages} bind:pageSize {total} />
{:else}
<p>No Logs yet!</p>
<p>
<em>TODO:</em> Link to docs on how to point your router at this server
</p>
{/if}
</div>
</section>

@@ -4,7 +4,7 @@
import { getUnixTime, isEqual, sub } from "date-fns";
import { buildQueryParams, fromUnixTimeSafe } from "../api/util";
import { getLogs, getStats, StatSearchKey } from "../api";
import { getLogs, getStats, LogPayload, StatSearchKey } from "../api";
import type { Stat, Log } from "../api";
import PageContainer from "./PageContainer.svelte";
@@ -29,16 +29,21 @@
let logErrorMsg: string = null;
let chartErrorMsg: string = null;
let chartDataLoading: Boolean = true;
let logDataLoading: Boolean = true;
let chartDataLoading: Boolean = false;
let logDataLoading: Boolean = false;
let chartData: Stat[] = [];
let logData: Log[] = [];
let logs: Log[] = [];
let pageSize: number = 50;
let pageCount: number = 0;
let logCount: number = 0;
const fetchLogs = async () => {
if (logDataLoading) {
console.warn("tried loading logs while already loading");
return;
}
logErrorMsg = null;
logDataLoading = true;
const { error, payload } = await getLogs({
@@ -51,7 +56,7 @@
if (error) {
logErrorMsg = error;
return [];
return new LogPayload();
}
return payload;
@@ -60,7 +65,9 @@
const fetchStats = async () => {
if (chartDataLoading) {
console.warn("tried loading stats while already loading");
return;
}
chartErrorMsg = null;
chartDataLoading = true;
const { error, payload } = await getStats({
@@ -80,8 +87,15 @@
};
const updateData = async (evt) => {
if (chartDataLoading || logDataLoading) {
console.warn("SKIPPED DATA FETCH");
return;
}
console.groupCollapsed("Stats Data Update");
const { filter: eFilter, start: eStart, end: eEnd, key: eKey } = evt;
console.info("handled search, fetching new data:", evt);
console.groupEnd();
navigate(
`${location?.pathname}${buildQueryParams({
@@ -93,9 +107,19 @@
{ replace: true }
);
[logData, chartData] = await Promise.all([fetchLogs(), fetchStats()]);
console.info("handled search, fetching new data:", evt);
console.groupEnd();
const [logPayload, chartPayload] = await Promise.all([
fetchLogs(),
fetchStats(),
]);
chartData = chartPayload;
({
page: logPage,
total: logCount,
pageCount,
pageSize,
logs,
} = logPayload);
};
$: updateData({ start, end, key: chartKey, filter });
@@ -122,7 +146,13 @@
{:else if logErrorMsg}
<p>{logErrorMsg}</p>
{:else}
<LogViewer logs={logData} />
<LogViewer
pages={pageCount}
page={logPage}
total={logCount}
bind:pageSize
{logs}
/>
{/if}
</section>
</PageContainer>
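One syntactic detail in the update above: destructuring into variables that are already declared (logPage, logCount, pageCount, pageSize, logs) requires wrapping the assignment in parentheses, otherwise the leading brace is parsed as a block statement. A standalone illustration with made-up values:

let page = 0;
let total = 0;

// Parentheses make this an expression, so the destructuring assigns to the existing variables.
({ page, total } = { page: 2, total: 483 });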

@@ -6,6 +6,8 @@
import { getRecursors } from "../api";
import type { Recursor } from "../api";
export let location: Location;
let rows: Recursor[] = [];
onMount(async () => {
try {
@@ -19,6 +21,7 @@
</script>
<PageContainer
{location}
header="Recursors"
description="List of upstreams servers to use for resolving DNS records"
>

@@ -1,8 +1,11 @@
<script lang="ts">
import PageContainer from "./PageContainer.svelte";
export let location: Location;
</script>
<PageContainer
{location}
header="Rule Lists"
description="Import rule lists for maintenance free Ad blocking"
/>

@@ -1,9 +1,12 @@
<script lang="ts">
import RulesViewer from "../components/RulesViewer.svelte";
import PageContainer from "./PageContainer.svelte";
export let location: Location;
</script>
<PageContainer
{location}
header="Rules"
description="Specify rules to alter DNS resolution behavior"
>

@@ -4,7 +4,6 @@ import (
"database/sql"
"fmt"
"io"
"log"
"net"
"strconv"
"strings"
@@ -30,7 +29,7 @@ type Storage interface {
DeleteRule(int) error
Log(QueryLog) error
GetLog(GetLogInput) ([]QueryLog, error)
GetLog(GetLogInput) (GetLogResult, error)
GetLogAggregate(LogAggregateInput) ([]LogAggregateDataPoint, error)
}
@@ -120,11 +119,11 @@ func (ss *Sqlite) AddRecursors(ip net.IP, port, timeout, weight int) error {
}
type GetLogInput struct {
Start time.Time
End time.Time
DomainFilter string
Limit int
Page int
Start time.Time `json:"start"`
End time.Time `json:"end"`
DomainFilter string `json:"rawfilter"`
Limit int `json:"pageSize"`
Page int `json:"page"`
}
type RuleRow struct {
@@ -235,7 +234,14 @@ func (ss *Sqlite) GetRules() ([]RuleRow, error) {
return results, nil
}
func (ss *Sqlite) GetLog(in GetLogInput) ([]QueryLog, error) {
type GetLogResult struct {
GetLogInput
TotalResults int `json:"total"`
PageCount int `json:"pageCount"`
Logs []QueryLog `json:"logs"`
}
func (ss *Sqlite) GetLog(in GetLogInput) (GetLogResult, error) {
if in.Limit <= 0 {
in.Limit = 100
}
@@ -248,6 +254,11 @@ func (ss *Sqlite) GetLog(in GetLogInput) ([]QueryLog, error) {
in.End = time.Now()
}
glr := GetLogResult{
GetLogInput: in,
Logs: []QueryLog{},
}
sql := `
SELECT
started, clientIp, protocol, domain, totalTimeMs,
@@ -264,15 +275,14 @@ func (ss *Sqlite) GetLog(in GetLogInput) ([]QueryLog, error) {
rows, err := ss.DB.Query(sql, in.Page*in.Limit, in.Start.UTC().Format(ISO8601), in.End.UTC().Format(ISO8601), in.Limit)
if err != nil {
return nil, fmt.Errorf("issue with GetLog sql query: %w", err)
return glr, fmt.Errorf("issue with GetLog sql query: %w", err)
}
defer rows.Close()
if rerr := rows.Err(); rerr != nil {
return nil, fmt.Errorf("issue with rows object: %w", rerr)
return glr, fmt.Errorf("issue with rows object: %w", rerr)
}
ql := []QueryLog{}
for rows.Next() {
var q QueryLog
var started string
@@ -288,19 +298,45 @@ func (ss *Sqlite) GetLog(in GetLogInput) ([]QueryLog, error) {
&q.RecurseUpstreamIP,
&q.Status,
); err != nil {
return nil, fmt.Errorf("issues scanning rows: %w", err)
return glr, fmt.Errorf("issues scanning rows: %w", err)
}
if q.Started, err = time.Parse(ISO8601, started); err != nil {
return nil, fmt.Errorf("could not parse time '%s': %w", started, err)
return glr, fmt.Errorf("could not parse time '%s': %w", started, err)
}
log.Printf("%+v", q)
ql = append(ql, q)
glr.Logs = append(glr.Logs, q)
}
total, pageCount, err := ss.GetPagingInfo(in)
if err != nil {
return glr, err
}
glr.TotalResults = total
glr.PageCount = pageCount
return glr, nil
}
func (ss *Sqlite) GetPagingInfo(in GetLogInput) (totalItems, pageCount int, err error) {
sql := `
SELECT
COUNT(*) as totalLogsEntries,
COUNT(*) / ? as pageCount
FROM
log
WHERE
strftime('%s', started) > strftime('%s', ?)
AND strftime('%s', started) < strftime('%s', ?)
`
row := ss.DB.QueryRow(sql, in.Limit, in.Start.UTC().Format(ISO8601), in.End.UTC().Format(ISO8601))
if err = row.Scan(&totalItems, &pageCount); err != nil {
return
}
log.Printf("%+v", ql)
return ql, nil
return
}
type LogAggregateColumn string
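For reference, because GetLogResult embeds GetLogInput, the metrics/log response roughly serializes to the shape the frontend's LogPayload class expects, with the echoed input fields (start, end, rawfilter) riding along in the same object. A hypothetical body, assuming the handler wraps results in the {success, payload, error} envelope that APIResponse models (all values invented):

// Hypothetical response body for the metrics/log endpoint.
const exampleLogResponse = {
  success: true,
  payload: {
    // echoed from the embedded GetLogInput (json tags above)
    start: "2021-10-01T00:00:00Z",
    end: "2021-10-02T00:00:00Z",
    rawfilter: "",
    pageSize: 50,
    page: 0,
    // added by GetLogResult
    total: 421,
    pageCount: 8,
    logs: [],
  },
};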
