fixed pagination

pull/1/head
Adam Veldhousen 1 year ago
parent fb95d41695
commit be6bae7dcf
Signed by: adam
GPG Key ID: 6DB29003C6DD1E4B

@ -8,7 +8,28 @@
| [Catawiki](https://www.catawiki.com/en/) | | JSON | |
| [ProxiBid](https://www.proxibid.com/) | | None | |
| [The Saleroom](https://www.the-saleroom.com) | | None | |
| [Bonhams](https://www.bonhams.com) | | JSON | |
- Bonhams
```
curl 'https://www.bonhams.com/_next/data/AaGLQwUcnyWHq-PEz9Th6/default/auctions/upcoming.json' \
-H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/113.0' \
-H 'Accept: */*' \
-H 'Accept-Language: en-US,en;q=0.5' \
-H 'Accept-Encoding: gzip, deflate, br' \
-H 'Referer: https://www.bonhams.com/auctions/upcoming/' \
-H 'x-nextjs-data: 1' \
-H 'sentry-trace: 1ad7f55ff4dd4760b0ad3e6cca968714-823c0cb4ef9058f5-0' \
-H 'baggage: sentry-environment=production,sentry-release=0783195e,sentry-transaction=%2Fauctions%2Fupcoming,sentry-public_key=614543fea9dc4eee80879fcc1c88e34a,sentry-trace_id=1ad7f55ff4dd4760b0ad3e6cca968714,sentry-sample_rate=0' \
-H 'DNT: 1' \
-H 'Connection: keep-alive' \
-H 'Cookie: sessionid=m2crnnyyduu2tm6hvx3aa1e6wzy5oez9; ld_id=2420; xm_id=c22a8252-23ba-4b2e-8161-35ae5b8b56ff' \
-H 'Sec-Fetch-Dest: empty' \
-H 'Sec-Fetch-Mode: cors' \
-H 'Sec-Fetch-Site: same-origin' \
-H 'Pragma: no-cache' \
-H 'Cache-Control: no-cache'
```

@ -4,13 +4,32 @@ FROM catalog.upcoming_auctions ua
LEFT JOIN catalog.upcoming_auctions_fts fts on ua.id = fts.auctionid
WHERE
ua.endts >= DATE(NOW()) AND
fts.ts @@ websearch_to_tsquery(sqlc.arg(searchTerm))
ORDER BY ts_rank(fts.ts, websearch_to_tsquery(sqlc.arg(searchTerm))) DESC
OFFSET sqlc.arg(page)::INTEGER * sqlc.arg(pageSize)::INTEGER
LIMIT sqlc.arg(pageSize);
(case when sqlc.arg(searchTerm) = '' then
ua.id > 0
else
fts.ts @@ websearch_to_tsquery(sqlc.arg(searchTerm))
end)
ORDER BY
ua.endts ASC,
(case when sqlc.arg(searchTerm) != '' then
ts_rank(fts.ts, websearch_to_tsquery(sqlc.arg(searchTerm)))
end) DESC
OFFSET (sqlc.arg(page)::INTEGER * sqlc.arg(pageSize)::INTEGER)
LIMIT sqlc.arg(pageSize)::INTEGER;
-- name: GetTotal :one
-- Counts every row in catalog.upcoming_auctions, with no date or search
-- filtering. Superseded by GetTotals for the search page, but kept for callers
-- that only need the raw row count.
SELECT COUNT(*) FROM catalog.upcoming_auctions;
-- name: GetTotals :one
-- Returns two counts in a single round trip:
--   total: all auctions in catalog.upcoming_auctions ending today or later.
--   found: the subset of those matching sqlc.arg(searchTerm) via full-text
--          search on catalog.upcoming_auctions_fts. When searchTerm is '',
--          the CASE falls back to ua.id > 0, which matches every joined row,
--          so found equals total.
SELECT COUNT(*) as total,
(SELECT COUNT(*) FROM catalog.upcoming_auctions ua
LEFT JOIN catalog.upcoming_auctions_fts fts on ua.id = fts.auctionid
WHERE
ua.endts >= DATE(NOW()) AND
(case when sqlc.arg(searchTerm) = '' then
ua.id > 0
else
fts.ts @@ websearch_to_tsquery(sqlc.arg(searchTerm))
end)) as found
FROM catalog.upcoming_auctions
WHERE endts >= DATE(NOW());
-- name: ImportAuction :one
SELECT catalog.bh_import_auction(

@ -15,7 +15,7 @@ type PGCatalogStorage struct {
Queries *postgres.Queries
}
func (ps *PGCatalogStorage) GetUpcoming(ctx context.Context, q domain.UpcomingQuery) (results []domain.Auction, total int64, err error) {
func (ps *PGCatalogStorage) GetUpcoming(ctx context.Context, q domain.UpcomingQuery) (results []domain.Auction, total int64, found int64, err error) {
var pgResults []postgres.CatalogUpcomingAuction
if pgResults, err = ps.Queries.GetUpcoming(ctx, postgres.GetUpcomingParams{
@ -27,11 +27,19 @@ func (ps *PGCatalogStorage) GetUpcoming(ctx context.Context, q domain.UpcomingQu
return
}
if total, err = ps.Queries.GetTotal(ctx); err != nil {
var totalsRow postgres.GetTotalsRow
if totalsRow, err = ps.Queries.GetTotals(ctx, q.Term); err != nil {
err = fmt.Errorf("could not get total auction count: %w", err)
return
}
total = totalsRow.Total
if q.Term == "" {
found = total
} else {
found = totalsRow.Found
}
results = make([]domain.Auction, len(pgResults))
for idx, row := range pgResults {
results[idx] = domain.Auction{

@ -9,7 +9,7 @@ import (
"git.vdhsn.com/barretthousen/barretthousen/src/lib/kernel"
)
var ErrDuplicateAuctionImported = errors.New("this auction's fingerprint matches one that has already been imported")
// ErrDuplicateAuctionImported reports that an auction with the same
// fingerprint has already been stored. Callers may treat it as a non-fatal
// signal (e.g. ImportAuction flags the event as a duplicate) rather than a
// hard failure.
var ErrDuplicateAuctionImported = errors.New("another auction with the same fingerprint exists")
type (
Usecase struct {
@ -17,7 +17,7 @@ type (
}
Storage interface {
GetUpcoming(context.Context, UpcomingQuery) ([]Auction, int64, error)
GetUpcoming(context.Context, UpcomingQuery) ([]Auction, int64, int64, error)
CreateUpcoming(context.Context, Auction) (string, error)
}
@ -40,12 +40,13 @@ type (
UpcomingResults struct {
Page int
Total int64
Found int64
Results []Auction
}
)
func (d *Usecase) GetUpcoming(ctx context.Context, q UpcomingQuery) (results UpcomingResults, err error) {
if results.Results, results.Total, err = d.Storage.GetUpcoming(ctx, q); err != nil {
if results.Results, results.Total, results.Found, err = d.Storage.GetUpcoming(ctx, q); err != nil {
err = fmt.Errorf("could not get upcoming from storage: %w", err)
return
}
@ -53,19 +54,20 @@ func (d *Usecase) GetUpcoming(ctx context.Context, q UpcomingQuery) (results Upc
return
}
// TODO: tests
func (d *Usecase) ImportAuction(ctx context.Context, in ImportAuctionMessage) (event AuctionCreated, err error) {
if in.Fingerprint, err = d.Storage.CreateUpcoming(ctx, in.Auction); err != nil && !errors.Is(err, ErrDuplicateAuctionImported) {
err = fmt.Errorf("could not import auction: %w", err)
return
}
// Duplicates aren't an error, instead let the requester know
err = nil
event = AuctionCreated{
Auction: in.Auction,
Duplicate: errors.Is(err, ErrDuplicateAuctionImported),
}
err = nil
status := "✅"
if event.Duplicate {
status = "🚫"

@ -12,12 +12,6 @@ import (
"google.golang.org/protobuf/types/known/timestamppb"
)
func NewCatalogServer(d *domain.Usecase) func(grpcServer grpc.ServiceRegistrar, endpoint string) {
return func(grpcServer grpc.ServiceRegistrar, endpoint string) {
api.RegisterCatalogServer(grpcServer, &catalogHandler{domain: d})
}
}
type Domain interface {
GetUpcoming(context.Context, domain.UpcomingQuery) (domain.UpcomingResults, error)
ImportAuction(context.Context, domain.ImportAuctionMessage) (domain.AuctionCreated, error)
@ -28,9 +22,15 @@ type catalogHandler struct {
domain Domain
}
// NewCatalogServer builds the registration callback used when starting the
// gRPC service: it wires a catalogHandler backed by the supplied
// domain.Usecase into whatever registrar it is handed. The endpoint argument
// exists only to satisfy the registration signature and is unused here.
func NewCatalogServer(d *domain.Usecase) func(grpcServer grpc.ServiceRegistrar, endpoint string) {
	return func(reg grpc.ServiceRegistrar, _ string) {
		handler := &catalogHandler{domain: d}
		api.RegisterCatalogServer(reg, handler)
	}
}
func (rh *catalogHandler) GetUpcoming(ctx context.Context, cmd *api.AuctionSearchCriteria) (out *api.GetUpcomingResult, err error) {
page := int(math.Max(0, float64(cmd.GetPage())))
pageSize := int(cmd.GetLimit())
pageSize := int(math.Max(32, float64(cmd.GetLimit())))
if pageSize < 32 {
pageSize = 32
@ -52,7 +52,7 @@ func (rh *catalogHandler) GetUpcoming(ctx context.Context, cmd *api.AuctionSearc
out = &api.GetUpcomingResult{
Page: int32(page),
Total: int32(queryResp.Total),
Found: int32(pageSize),
Found: int32(queryResp.Found),
Results: make([]*api.Auction, len(queryResp.Results)),
}

@ -10,6 +10,7 @@ import (
api "git.vdhsn.com/barretthousen/barretthousen/src/runner/api/grpc"
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
"google.golang.org/grpc"
"google.golang.org/grpc/backoff"
"google.golang.org/grpc/credentials/insecure"
)
@ -25,6 +26,12 @@ func (app *ProxyAdminApp) Start(ctx context.Context) error {
grpcMux := runtime.NewServeMux()
err := api.RegisterRunnerHandlerFromEndpoint(ctx, grpcMux, app.Endpoints.Runner, []grpc.DialOption{
grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithConnectParams(grpc.ConnectParams{
Backoff: backoff.Config{
MaxDelay: time.Second * 3,
},
MinConnectTimeout: time.Second,
}),
})
if err != nil {
return err

@ -10,6 +10,7 @@ import (
"git.vdhsn.com/barretthousen/barretthousen/src/lib/kernel"
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
"google.golang.org/grpc"
"google.golang.org/grpc/backoff"
"google.golang.org/grpc/credentials/insecure"
)
@ -25,6 +26,12 @@ func (app *ProxyClientApp) Start(ctx context.Context) error {
grpcMux := runtime.NewServeMux()
err := api.RegisterCatalogHandlerFromEndpoint(ctx, grpcMux, app.Endpoints.Catalog, []grpc.DialOption{
grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithConnectParams(grpc.ConnectParams{
Backoff: backoff.Config{
MaxDelay: time.Second * 3,
},
MinConnectTimeout: time.Second,
}),
})
if err != nil {
return err
@ -37,6 +44,7 @@ func (app *ProxyClientApp) Start(ctx context.Context) error {
ReadHeaderTimeout: time.Second,
Handler: http.StripPrefix("/api", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
kernel.TraceLog.Printf("{ \"Client\": \"%s\", \"Path\":\"%s\", \"User-Agent\":\"%s\" } ", r.RemoteAddr, r.URL, r.UserAgent())
// TODO: pull the allowed origin host names from the config file
w.Header().Set("Access-Control-Allow-Origin", "*")
grpcMux.ServeHTTP(w, r)
})),

@ -93,6 +93,7 @@ func (domain Domain) Status(ctx context.Context, in GetJobsInput) (out GetJobsOu
return
}
// TODO: tests
func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int) {
ctx, cancel := context.WithDeadline(context.TODO(), time.Now().Add(time.Minute))
defer cancel()

@ -26,7 +26,6 @@
sourceSiteURL,
sourceURL,
start,
end,
country,
province
} = auction;
@ -48,7 +47,9 @@
<li class="text-sm">
Available at <a href={sourceSiteURL} target="_blank">{sourceSiteName}</a>
</li>
<li class="py-2"><p>{description}</p></li>
<li class="py-2">
<p>{@html description}</p>
</li>
</ul>
</section>
</article>

@ -8,6 +8,7 @@
async function onSubmit(evt: CustomEvent) {
let { query } = evt.detail;
// TODO: refactor to one source of truth for building query string parameters
await goto(query ? `/?query=${query}` : '/', {
invalidateAll: true
});

@ -2,6 +2,7 @@ import type { LayoutLoad } from './$types';
export const load = (({ url }) => {
return {
// TODO: refactor to one source of truth for all query param values
query: url.searchParams.get('query') || '',
page: parseInt(url.searchParams.get('page') || '0'),
limit: parseInt(url.searchParams.get('pageSize') || '64'),

@ -1,18 +1,19 @@
<script lang="ts">
import type { PageData } from './$types';
import { fade } from 'svelte/transition';
import AuctionResult from '$lib/AuctionResult.svelte';
import { fade } from 'svelte/transition';
export let data: PageData;
$: hasResults = (data.results || []).length > 0;
$: currentPage = data.page + 1;
$: pageCount = Math.floor(data.total / 64);
$: pageCount = Math.max(1, Math.floor(data.found / data.limit));
$: console.log(data.found, ' / ', data.limit, ' = ', data.found / data.limit);
</script>
{#if hasResults}
<section in:fade>
<h1 class="pb-5 text-lg">{data.results.length} of {data.total} Upcoming & Live Auctions</h1>
<h1 class="pb-5 text-lg">Found {data.found} Upcoming & Live Auctions</h1>
<ol class="flex justify-between w-full center" style="padding: 0 10%;">
{#if currentPage > 1}
<li in:fade>
@ -42,6 +43,9 @@
{:else}
<section class="flex w-full flex-col justify-center text-center" in:fade out:fade>
<h1 class="text-2xl">No auctions found.</h1>
{#if data.query !== ''}
<a href="/">Start Over</a>
{/if}
{#if data.query !== 'watch'}
<p>
Try searching <em><a href="/?query=watch">Watch</a></em>

@ -1,35 +1,44 @@
import type { PageLoad } from './$types';
const API_HOST = 'http://localhost:8000/api/v1'
// TODO: change to env var
const API_HOST = 'http://localhost:8000/api/v1';
export const load = (async ({ fetch, url, depends }) => {
const searchTerm = url.searchParams.get('query') || '';
const currentPage = url.searchParams.get('page') || 0;
const currentLimit = url.searchParams.get('limit') || 64;
// SearchPageData is the resolved shape of the search page's load function:
// pagination state, the active search term, and the result rows.
interface SearchPageData {
// zero-based page index (from the `page` query param)
page: number
// page size (from the `limit` query param; defaults to 64 in load)
limit: number
// count of auctions matching the current search term (from the API response)
found: number
// total count of upcoming auctions, ignoring the search term
total: number
// raw search term taken from the `query` query param ('' when absent)
query: string
// auction rows as returned by the API; shape not typed here — TODO confirm
results: any[]
}
depends('search');
export const load = (async ({ fetch, url }): Promise<SearchPageData> => {
// TODO: refactor to one source of truth for all query param value fetching
const searchTerm = url.searchParams.get('query') || '';
const currentPage = Number(url.searchParams.get('page') || 0);
const currentLimit = Number(url.searchParams.get('limit') || 64);
try {
// TODO: refactor to one source of truth for all query string building
const response = await fetch(API_HOST + `/upcoming?searchTerm=${searchTerm}&page=${currentPage}&limit=${currentLimit}`);
const { page, total, results } = await response.json() || {};
const { page, total, found, results } = await response.json() || {};
// TODO: return found results so we can do upperbound on pagination
return {
pagination: {
page,
limit: 64,
},
page,
limit: currentLimit,
query: searchTerm,
found,
total,
results
};
} catch (e) {
console.log(e);
return {
pagination: {
page: 0,
limit: 64,
},
page: 0,
limit: currentLimit,
query: searchTerm,
found: 0,
total: 0,
results: []
};

Loading…
Cancel
Save