feat/admin-panel-pagination (#12)

- [x] Add pagination support to the API
- [x] Add pagination to the API call on the frontend
- [x] Add pagination controls (see the end-to-end sketch below)
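
For reference, a rough end-to-end sketch of the flow this checklist describes, assuming the `/sync` path, `page`/`limit` query parameters, and response fields that appear in the diff below. This is illustrative glue code, not code from the PR; `API_HOST` is a placeholder for the constant the loader already uses.

```typescript
// Illustrative sketch of the paging flow: the page builds ?page/?limit, the
// SvelteKit loader forwards them to the runner API, and the response splits
// jobs into active and completed sets with totals.
const API_HOST = 'http://localhost:8080'; // placeholder; the loader reads its own API_HOST

async function fetchSyncStatus(page: number, limit: number) {
  const res = await fetch(`${API_HOST}/sync?page=${page}&limit=${limit}`);
  const { active, complete, total, activeTotal, completeTotal } = await res.json();
  console.log(`${activeTotal} running, ${completeTotal} finished, ${total} total`);
  return { active, complete };
}

void fetchSyncStatus(2, 16);
```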

Co-authored-by: Adam Veldhousen <adamveld12@gmail.com>
Reviewed-on: #12
feat/swagger
Adam Veldhousen 11 months ago
parent 92c1fd6f55
commit 898ec6ec3d

@@ -11,13 +11,13 @@ GOBIN = $(shell go env GOPATH)/bin
setup: $(GOBIN)/sqlc $(GOBIN)/buf
@awk '{ print $$1 }' .tool-versions | xargs -I {} asdf plugin add {} || true
@asdf install || true
@cd ./src/web-client && npm i
@cd ./src/admin-client && npm i
.PHONY: gen
gen: $(GOBIN)/sqlc buf.lock
@$(GOBIN)/sqlc generate -f ./src/sqlc.yaml
@cd ./src && $(GOBIN)/buf generate
@cd ./src/web-client && npm i
@cd ./src/admin-client && npm i
.PHONY: dev
dev: .kubeconfig

@@ -59,7 +59,7 @@ def bh_client(service="", port_forwards=[], labels=['2-services'], deps=['ingres
dockerfile='./src/Dockerfile.frontend'.format(service),
target='development',
build_args={
"service": service
"service": '{}-client'.format(service)
},
entrypoint='vite dev --port=80 --host=0.0.0.0 --strictPort --logLevel info',
live_update=[

@@ -0,0 +1,38 @@
<script lang="ts">
import { fade } from 'svelte/transition';
export let page: number = 1;
export let itemCount: number = 0;
export let pageSize: number = 1;
interface PagerParams {
page: number;
pageCount: number;
pageSize: number;
}
export let createUrl: (
//<reference types="svelte" />
arg0: PagerParams
) => string = (): string => '';
$: pageCount = Math.max(1, Math.ceil(itemCount / pageSize));
</script>
<ol class="flex justify-between w-full center text-lg" style="padding: 0 10%;">
{#if page > 1}
<li in:fade>
<a href={createUrl({ page: page - 1, pageCount, pageSize })}> &lt;Previous </a>
</li>
{:else}
<li />
{/if}
<li>Page {page} of {pageCount}</li>
{#if page < pageCount}
<li in:fade>
<a href={createUrl({ page: page + 1, pageCount, pageSize })}> Next&gt; </a>
</li>
{:else}
<li />
{/if}
</ol>
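
The new `Pager.svelte` above derives `pageCount` from `itemCount` and `pageSize` and leaves URL construction to a `createUrl` prop. A minimal sketch of that prop's contract follows; the example callback body is illustrative (the admin page's real `buildQueryString` appears later in this diff).

```typescript
// Shape of the argument Pager.svelte passes to createUrl when building
// the Previous/Next links.
interface PagerParams {
  page: number;      // target page for the link
  pageCount: number; // Math.max(1, Math.ceil(itemCount / pageSize))
  pageSize: number;
}

// Example createUrl prop: omit page=1 so the first page keeps a clean URL.
const createUrl = ({ page, pageSize }: PagerParams): string =>
  page <= 1 ? `/?limit=${pageSize}` : `/?page=${page}&limit=${pageSize}`;

console.log(createUrl({ page: 2, pageCount: 5, pageSize: 16 })); // "/?page=2&limit=16"
```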

@@ -1,12 +1,14 @@
<script lang="ts">
import { createEventDispatcher } from 'svelte';
export let disable: boolean = false;
let target: string = 'All';
const dispatch = createEventDispatcher();
function execScrape() {
dispatch('scrape', { target });
if (!disable) dispatch('scrape', { target });
}
</script>
@@ -15,6 +17,7 @@
<option>All</option>
<option>liveauctioneers</option>
</select>
<button class="border-none rounded-r-md bg-bh-gold text-bh-black py-1 px-2">Start Sync</button>
<button class="border-none rounded-r-md bg-bh-gold text-bh-black py-1 px-2" disabled={disable}
>Start Sync</button
>
</form>
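
With the new `disable` prop, `StartScrapeForm` both disables the Start Sync button and suppresses the `scrape` dispatch while a request is in flight. A minimal sketch of the parent-side guard, mirroring the `onScrape` handler later in this diff; the POST method is an assumption, since the hunk truncates before the request options.

```typescript
// Parent-side double-submit guard (illustrative; mirrors onScrape below).
let disableSync = false;

async function onScrape(detail: { target: string }): Promise<void> {
  disableSync = true; // passed to <StartScrapeForm disable={disableSync} />
  try {
    await fetch('/api/v1/sync', {
      method: 'POST', // assumed; the hunk cuts off before the request options
      body: JSON.stringify({ targetSite: detail.target })
    });
  } finally {
    disableSync = false; // re-enable even if the request throws
  }
}
```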

@@ -1,18 +1,27 @@
<script lang="ts">
import type { PageData } from './$types';
import { fade } from 'svelte/transition';
import { invalidateAll } from '$app/navigation';
import StartScrapeForm from '$lib/StartScrapeForm.svelte';
import ScrapeJobResult from '$lib/ScrapeJobResult.svelte';
import Pager from '$lib/Pager.svelte';
import { fade } from 'svelte/transition';
import { onDestroy, onMount } from 'svelte';
import { invalidateAll } from '$app/navigation';
export let data: PageData;
$: completedJobs = data.jobs.filter(({ completedTs }) => completedTs !== null);
$: activeJobs = data.jobs.filter(({ completedTs }) => completedTs === null);
$: activeJobCount = activeJobs.length;
$: completedJobCount = completedJobs.length;
$: activeJobCount = data.active.length;
let disableSync = false;
var intervalId: any;
onMount(() => {
intervalId = setInterval(invalidateAll, 2000);
});
onDestroy(() => clearInterval(intervalId));
async function onScrape({ detail }) {
disableSync = true;
try {
const response = await fetch(
new Request('/api/v1/sync', {
@@ -23,39 +32,63 @@
body: JSON.stringify({ targetSite: detail.target })
})
);
} catch (error) {
} finally {
disableSync = false;
}
}
function buildQueryString({ page, pageSize }: { page: number; pageSize: number }): string {
const qs = [
data.query === '' ? data.query : `query=${data.query}`,
page <= 1 ? '' : `page=${page}`,
`limit=${pageSize}`
]
.filter((x) => x !== '')
.join('&');
const scrapeJob = await response.json();
data.jobs.push(scrapeJob);
console.log(scrapeJob);
} catch (error) {}
return qs !== '' ? `/?${qs}` : qs;
}
const limit = 16;
</script>
<section class="flex w-full flex-col justify-center" in:fade out:fade>
<h1 class="text-2xl mb-8">Sync Status</h1>
<div class="mb-8">
<StartScrapeForm on:scrape={onScrape} />
</div>
<section in:fade>
<h1 class="text-2xl pb-5">Sync Status: {data.total} jobs</h1>
</section>
<section class="pb-5">
<h2 class="text-2xl">{activeJobCount} In Progress</h2>
{#key data.activeTotal}
<ul class="flex">
{#each data.active as job, i}
<li id="job-{job.id}" in:fade={{ delay: i * 90 }}>
<ScrapeJobResult {job} />
</li>
{/each}
</ul>
{/key}
</section>
<StartScrapeForm on:scrape={onScrape} disable={disableSync} />
<section class="py-10">
<h2 class="text-2xl">Completed</h2>
<Pager
page={data.page}
itemCount={data.completeTotal}
pageSize={limit}
createUrl={buildQueryString}
/>
{#key data.completeTotal + data.page}
<ul class="flex flex-wrap justify-between">
{#each data.complete as job, i}
<li id="job-{job.id}" in:fade={{ delay: i * 90 }}>
<ScrapeJobResult {job} />
</li>
{/each}
</ul>
{/key}
</section>
<section>
<h2>{activeJobCount} In Progress Jobs</h2>
<ul class="flex">
{#each activeJobs as j, i}
<li in:fade>
<ScrapeJobResult job={j} />
</li>
<!-- {#if i < 10}
{/if} -->
{/each}
</ul>
<h2>{completedJobCount} Complete</h2>
<ul class="flex flex-wrap justify-between">
{#each completedJobs as j, i}
<li in:fade>
<ScrapeJobResult job={j} />
</li>
<!-- {#if i < 5}
{/if} -->
{/each}
</ul>
<Pager page={data.page} itemCount={data.total} pageSize={limit} createUrl={buildQueryString} />
</section>
</section>
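
The admin page now polls `invalidateAll` every 2 s, renders `data.active` and `data.complete` as separate lists, and hands `buildQueryString` to the Pager. A few sample outputs of that function, restated standalone here with `data.query` lifted into an explicit parameter for illustration:

```typescript
// Standalone restatement of buildQueryString for illustration only;
// in the page it closes over data.query instead of taking it as a parameter.
function buildQueryString(
  query: string,
  { page, pageSize }: { page: number; pageSize: number }
): string {
  const qs = [
    query === '' ? query : `query=${query}`,
    page <= 1 ? '' : `page=${page}`,
    `limit=${pageSize}`
  ]
    .filter((x) => x !== '')
    .join('&');
  return qs !== '' ? `/?${qs}` : qs;
}

console.log(buildQueryString('', { page: 1, pageSize: 16 }));     // "/?limit=16"
console.log(buildQueryString('', { page: 3, pageSize: 16 }));     // "/?page=3&limit=16"
console.log(buildQueryString('rugs', { page: 2, pageSize: 16 })); // "/?query=rugs&page=2&limit=16"
```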

@@ -14,18 +14,76 @@ interface Job {
}
interface ScrapeStatusPageData {
jobs: Job[]
page: number;
total: number;
activeTotal: number;
completeTotal: number;
limit: number;
active: Job[];
complete: Job[];
}
export const load = (async ({ fetch, url }): Promise<ScrapeStatusPageData> => {
const searchParams = new SearchParameters(url);
const limit = searchParams.getLimit();
try {
const response = await fetch(API_HOST + `/sync`);
const {results } = await response.json();
const response = await fetch(API_HOST + `/sync${searchParams.toQueryString()}`);
const { active, complete, activeTotal, completeTotal, total, page } = await response.json();
return {
jobs: results || []
active,
complete,
activeTotal,
completeTotal,
total,
page,
limit
};
} catch (e) {
console.log(e);
return { jobs:[] };
return {
activeTotal: 0,
completeTotal: 0,
active: [],
complete: [],
total: 0,
page: 1,
limit
};
}
}) satisfies PageLoad;
class SearchParameters {
page: number;
limit: number;
constructor(url: URL) {
this.page = Number(url.searchParams.get('page') || 1);
if (this.page < 0) {
this.page = 0;
}
this.limit = Number(url.searchParams.get('limit') || 12);
if (this.limit > 32) {
this.limit = 32;
} else if (this.limit < 12) {
this.limit = 12;
}
}
getPage(): number {
return this.page || 1;
}
getLimit(): number {
return this.limit || 12;
}
toQueryString(): string {
const qs = Object.entries(this)
.filter((t) => t.length > 0 && t[1])
.map((t) => `${t[0]}=${t[1]}`)
.join('&');
return qs === '' ? '' : `?${qs}`;
}
}
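
The loader's new `SearchParameters` helper clamps `limit` to the 12–32 range, defaults `page` to 1, and serializes both back into a query string for the API call; on failure the catch branch now returns an empty but fully typed payload. The clamping behaviour, restated standalone (the hostname below is a placeholder):

```typescript
// Standalone restatement of SearchParameters' clamping (illustrative;
// the real class lives in the +page.ts hunk above).
function clampParams(url: URL): { page: number; limit: number } {
  let page = Number(url.searchParams.get('page') || 1);
  if (page < 0) page = 0;

  let limit = Number(url.searchParams.get('limit') || 12);
  if (limit > 32) limit = 32;
  else if (limit < 12) limit = 12;

  return { page, limit };
}

console.log(clampParams(new URL('https://admin.example.test/?page=4&limit=100'))); // { page: 4, limit: 32 }
console.log(clampParams(new URL('https://admin.example.test/')));                  // { page: 1, limit: 12 }
```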

@@ -7,6 +7,6 @@ tests:
statusCodes: [200]
- description: "get list of jobs"
request:
path: "/v1/sync"
path: "/v1/sync?page=1&limit=6"
response:
statusCodes: [200]

@@ -37,11 +37,14 @@ message SyncStatus {
message StatusFilter {
int32 page = 1;
int32 id = 2;
int32 limit = 2;
}
message SyncStatusList {
repeated SyncStatus results = 1;
int32 page = 2;
int32 total = 3;
repeated SyncStatus active = 1;
repeated SyncStatus complete = 2;
int32 page = 3;
int32 total = 4;
int32 activeTotal = 5;
int32 completeTotal = 6;
}
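
With `SyncStatusList` split into `active`/`complete` plus per-bucket totals, the JSON the SvelteKit loader destructures looks roughly like this. Field names follow the proto message and the loader above; the `Job` fields shown are abbreviated and illustrative.

```typescript
// Approximate shape of the /sync response consumed by the admin loader
// (illustrative; only the fields the loader destructures are spelled out).
interface JobJson {
  id: number;
  completedTs: string | null; // null while the job is still running
  // ...remaining SyncStatus fields (createdTs, targetSiteName, auctionsFound, errors)
}

interface SyncStatusListJson {
  active: JobJson[];     // running jobs (the handler fetches up to 64 of them)
  complete: JobJson[];   // finished jobs for the requested page
  page: number;          // 1-based page echoed back by the handler
  total: number;         // all jobs
  activeTotal: number;   // size of the active set
  completeTotal: number; // size of the completed set
}
```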

@@ -1,4 +1,4 @@
-- name: GetJobs :many
-- name: GetCompletedJobs :many
SELECT id,
startedTs,
completedTs,
@@ -6,7 +6,31 @@ SELECT id,
auctionsFound,
errors
FROM runner.scrapejob
ORDER BY startedTs DESC;
WHERE completedts is not null OR NOW() >= startedts + (30 ||' minutes')::interval
ORDER BY startedTs DESC
OFFSET (sqlc.arg(page)::INTEGER * sqlc.arg(pageSize)::INTEGER)
LIMIT sqlc.arg(pageSize)::INTEGER;
-- name: GetActiveJobs :many
SELECT id,
startedTs,
completedTs,
targetSiteName,
auctionsFound,
errors
FROM runner.scrapejob
WHERE completedTs is null AND NOW() < startedTs + (30 ||' minutes')::interval
ORDER BY startedTs DESC
OFFSET (sqlc.arg(page)::INTEGER * sqlc.arg(pageSize)::INTEGER)
LIMIT sqlc.arg(pageSize)::INTEGER;
-- name: GetJobCounts :one
SELECT COUNT(*) AS total,
(SELECT COUNT(*) FROM runner.scrapejob
WHERE completedts is not null OR NOW() >= startedts + (30 ||' minutes')::interval) AS completed,
(SELECT COUNT(*) FROM runner.scrapejob
WHERE completedts is null AND NOW() < startedts + (30 ||' minutes')::interval) AS active
FROM runner.scrapejob;
-- name: GetJobByID :one
SELECT id,

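The split between these queries hinges on a 30-minute window: a job with no `completedTs` that started less than 30 minutes ago is active, anything else is treated as completed (or timed out), and the completed set is paged with `OFFSET page * pageSize` where the `page` argument is already 0-based. The same rules, restated:

```typescript
// TypeScript restatement of the SQL classification and offset math above
// (illustrative only; the queries are the source of truth).
const ACTIVE_WINDOW_MS = 30 * 60 * 1000;

function isActive(startedTs: Date, completedTs: Date | null, now = new Date()): boolean {
  return completedTs === null && now.getTime() < startedTs.getTime() + ACTIVE_WINDOW_MS;
}

// OFFSET handed to the query: 0-based page times page size.
const offsetFor = (zeroBasedPage: number, pageSize: number): number => zeroBasedPage * pageSize;

console.log(offsetFor(0, 16)); // 0  -> first page
console.log(offsetFor(2, 16)); // 32 -> skip the first two pages
```
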
@@ -64,22 +64,54 @@ func (db *PGRunnerStorage) CompleteScrapeJob(ctx context.Context, ID int, status
return
}
func (db *PGRunnerStorage) GetJobs(ctx context.Context) (results []domain.ScrapeJob, err error) {
var jobs []postgres.RunnerScrapejob
if jobs, err = db.Queries.GetJobs(ctx); err != nil {
func (db *PGRunnerStorage) GetJobs(ctx context.Context, page int32, limit int32) (out domain.GetJobsResult, err error) {
var completeJobs []postgres.RunnerScrapejob
if completeJobs, err = db.Queries.GetCompletedJobs(ctx, postgres.GetCompletedJobsParams{
Page: page,
Pagesize: limit,
}); err != nil {
err = fmt.Errorf("Couldn't get jobs from DB: %w", err)
return
}
for _, j := range jobs {
results = append(results, domain.ScrapeJob{
var activeJobs []postgres.RunnerScrapejob
if activeJobs, err = db.Queries.GetActiveJobs(ctx, postgres.GetActiveJobsParams{
Page: 0,
Pagesize: 64,
}); err != nil {
err = fmt.Errorf("Couldn't get jobs from DB: %w", err)
return
}
var jobCounts postgres.GetJobCountsRow
if jobCounts, err = db.Queries.GetJobCounts(ctx); err != nil {
err = fmt.Errorf("couldn't get total job count: %w", err)
return
}
out.Total = int(jobCounts.Total)
out.ActiveTotal = int(jobCounts.Active)
out.CompletedTotal = int(jobCounts.Completed)
out.Active = mapScrapeJob(activeJobs)
out.Complete = mapScrapeJob(completeJobs)
return
}
func mapScrapeJob(jobs []postgres.RunnerScrapejob) (result []domain.ScrapeJob) {
result = make([]domain.ScrapeJob, len(jobs))
for i, j := range jobs {
result[i] = domain.ScrapeJob{
ID: int(j.ID),
Started: j.Startedts,
Completed: j.Completedts.Time,
TargetSite: j.Targetsitename,
AuctionsFound: int(j.Auctionsfound),
Errors: j.Errors,
})
}
if j.Completedts.Valid {
result[i].Completed = j.Completedts.Time
}
}
return
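
The storage layer now fetches one page of completed jobs, a capped set of active jobs (first 64), and the counts in a single `GetJobs` call. Note that `mapScrapeJob` only copies `Completedts` when it is valid, so still-running jobs keep a zero completed time, which the gRPC layer later serializes as a null `completedTs`. That null-propagation, sketched in TypeScript with illustrative names:

```typescript
// Illustrative restatement of the completed-timestamp handling: storage keeps
// no value for running jobs, and the API layer only serializes completedTs
// when a real completion time exists.
interface StoredJob {
  id: number;
  startedTs: Date;
  completedTs?: Date; // undefined while the job is still running
}

function toApiJob(j: StoredJob): { id: number; completedTs: string | null } {
  return {
    id: j.id,
    completedTs: j.completedTs ? j.completedTs.toISOString() : null
  };
}

console.log(toApiJob({ id: 1, startedTs: new Date() }));                          // completedTs: null
console.log(toApiJob({ id: 2, startedTs: new Date(), completedTs: new Date() })); // completedTs: "...Z"
```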

@@ -41,7 +41,7 @@ type (
Storage interface {
CreateScrapeJob(context.Context, string) (ScrapeJob, error)
CompleteScrapeJob(context.Context, int, CompleteScrapeJobStatus) (ScrapeJob, error)
GetJobs(context.Context) ([]ScrapeJob, error)
GetJobs(context.Context, int32, int32) (GetJobsResult, error)
}
CatalogService interface {
@@ -75,21 +75,25 @@ func (domain Domain) StartSync(ctx context.Context, in FindNewUpcomingInput) (ou
}
type (
GetJobsInput struct{}
GetJobsOutput struct {
Jobs []ScrapeJob
GetJobsInput struct {
Page int32
Limit int32
}
GetJobsResult struct {
Total int
ActiveTotal int
Active []ScrapeJob
CompletedTotal int
Complete []ScrapeJob
}
)
func (domain Domain) Status(ctx context.Context, in GetJobsInput) (out GetJobsOutput, err error) {
scrapeJobs, err := domain.Storage.GetJobs(ctx)
if err != nil {
func (domain Domain) Status(ctx context.Context, in GetJobsInput) (out GetJobsResult, err error) {
if out, err = domain.Storage.GetJobs(ctx, in.Page, in.Limit); err != nil {
err = fmt.Errorf("could not fetch jobs from storage: %w", err)
return
}
out = GetJobsOutput{Jobs: scrapeJobs}
return
}

@@ -3,6 +3,7 @@ package internal
import (
"context"
"git.vdhsn.com/barretthousen/barretthousen/src/lib/kernel"
api "git.vdhsn.com/barretthousen/barretthousen/src/runner/api/grpc"
"git.vdhsn.com/barretthousen/barretthousen/src/runner/internal/domain"
"google.golang.org/grpc"
@@ -41,32 +42,52 @@ func (rh *runnerHandler) StartSync(ctx context.Context, cmd *api.SyncParameters)
}
func (rh *runnerHandler) Status(ctx context.Context, cmd *api.StatusFilter) (*api.SyncStatusList, error) {
out, err := rh.domain.Status(ctx, domain.GetJobsInput{})
kernel.TraceLog.Printf("paging options: page %d - limit %d", cmd.Page, cmd.Limit)
if cmd.Limit <= 0 {
cmd.Limit = 64
}
if cmd.Page <= 1 {
cmd.Page = 1
}
out, err := rh.domain.Status(ctx, domain.GetJobsInput{
Page: cmd.Page - 1,
Limit: cmd.Limit,
})
if err != nil {
return nil, status.Errorf(codes.Internal, "method GetJobs failed: %q", err.Error())
}
result := &api.SyncStatusList{
Results: []*api.SyncStatus{},
Page: cmd.Page,
Total: int32(out.Total),
ActiveTotal: int32(out.ActiveTotal),
CompleteTotal: int32(out.CompletedTotal),
Active: mapSyncStatus(out.Active),
Complete: mapSyncStatus(out.Complete),
}
for _, j := range out.Jobs {
return result, nil
}
func mapSyncStatus(in []domain.ScrapeJob) (result []*api.SyncStatus) {
result = make([]*api.SyncStatus, len(in))
for i, j := range in {
var completedTime *timestamppb.Timestamp
if !j.Completed.IsZero() {
completedTime = timestamppb.New(j.Completed)
}
result.Results = append(result.Results, &api.SyncStatus{
result[i] = &api.SyncStatus{
Id: int32(j.ID),
AuctionsFound: int32(j.AuctionsFound),
CreatedTs: timestamppb.New(j.Started),
CompletedTs: completedTime,
TargetSiteName: j.TargetSite,
Errors: j.Errors,
})
}
}
return result, nil
return
}
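
The gRPC handler is where the 1-based page from the client meets the 0-based page the storage layer expects: it defaults `limit` to 64, clamps `page` up to 1, and passes `page - 1` down. The same normalization, sketched with illustrative names:

```typescript
// Sketch of the handler's page/limit normalization (TypeScript restatement
// of the Go code above).
function normalizePaging(page: number, limit: number): { zeroBasedPage: number; limit: number } {
  if (limit <= 0) limit = 64; // default page size when the client sends none
  if (page <= 1) page = 1;    // 0, negative, and 1 all mean the first page
  return { zeroBasedPage: page - 1, limit };
}

console.log(normalizePaging(0, 0));  // { zeroBasedPage: 0, limit: 64 }
console.log(normalizePaging(3, 16)); // { zeroBasedPage: 2, limit: 16 }
```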

@@ -0,0 +1,38 @@
<script lang="ts">
import { fade } from 'svelte/transition';
export let page: number = 1;
export let itemCount: number = 0;
export let pageSize: number = 1;
interface PagerParams {
page: number;
pageCount: number;
pageSize: number;
}
export let createUrl: (
//<reference types="svelte" />
arg0: PagerParams
) => string = (): string => '';
$: pageCount = Math.max(1, Math.ceil(itemCount / pageSize));
</script>
<ol class="flex justify-between w-full center text-lg" style="padding: 0 10%;">
{#if page > 1}
<li in:fade>
<a href={createUrl({ page: page - 1, pageCount, pageSize })}> &lt;Previous </a>
</li>
{:else}
<li />
{/if}
<li>Page {page} of {pageCount}</li>
{#if page < pageCount}
<li in:fade>
<a href={createUrl({ page: page + 1, pageCount, pageSize })}> Next&gt; </a>
</li>
{:else}
<li />
{/if}
</ol>

@@ -1,43 +1,35 @@
<script lang="ts">
import type { PageData } from './$types';
import AuctionResult from '$lib/AuctionResult.svelte';
import Pager from '$lib/Pager.svelte';
import { fade } from 'svelte/transition';
export let data: PageData;
function buildQueryString({ query, page }: { query: string; page: number }): string {
const qs = [query === '' ? query : `query=${query}`, page <= 1 ? '' : `page=${page}`]
function buildQueryString({ page, pageSize }: { page: number; pageSize: number }): string {
const qs = [
data.query === '' ? data.query : `query=${data.query}`,
page <= 1 ? '' : `page=${page}`,
`limit=${pageSize}`
]
.filter((x) => x !== '')
.join('&');
return qs !== '' ? `?${qs}` : qs;
return qs !== '' ? `/?${qs}` : qs;
}
$: hasResults = (data.results || []).length > 0;
$: currentPage = data.page + 1;
$: pageCount = Math.max(1, Math.ceil(data.found / data.limit));
$: hasResults = data?.results?.length || 0;
</script>
{#if hasResults}
<section in:fade>
<h1 class="pb-5 text-lg">Found {data.found} Upcoming & Live Auctions</h1>
<ol class="flex justify-between w-full center" style="padding: 0 10%;">
{#if currentPage > 1}
<li in:fade>
<a href="/{buildQueryString({ query: data.query, page: currentPage - 1 })}"
>&lt;Previous</a
>
</li>
{:else}
<li />
{/if}
<li>Page {currentPage} of {pageCount}</li>
{#if currentPage < pageCount}
<li in:fade>
<a href="/{buildQueryString({ query: data.query, page: currentPage + 1 })}">Next&gt;</a>
</li>
{/if}
</ol>
<Pager
page={data.page + 1}
itemCount={data.found}
pageSize={data.limit}
createUrl={buildQueryString}
/>
</section>
{#key data.results}
@@ -49,6 +41,13 @@
{/each}
</ul>
{/key}
<Pager
page={data.page + 1}
itemCount={data.found}
pageSize={data.limit}
createUrl={buildQueryString}
/>
{:else}
<section class="flex w-full flex-col justify-center text-center" in:fade out:fade>
<h1 class="text-2xl">No auctions found.</h1>
