sync button in tilt, support syncing all targets simultaneously
ci.vdhsn.com/push: Build was killed

feat/add-catawiki
Adam Veldhousen 10 months ago
parent 75e73a1171
commit 2d45234e7b
Signed by: adam
GPG Key ID: 6DB29003C6DD1E4B

@@ -168,12 +168,33 @@ bh_backend_service(service="proxy-web", port_forwards=[
bh_client(service='web', deps=["proxy-web-local"])
bh_client(service='admin', deps=["proxy-admin-local"])
createAdminCmd = ['./hack/create_user.sh', 'admin@barretthousen.com', 'test', 'ADMINISTRATOR']
local(createAdminCmd, quiet=False, echo_off=False)
cmd_button(name='Create Admin User',
argv=createAdminCmd,
text='Create Admin Account',
location=location.NAV,
icon_name='shield_person')
createAdminCmd = [
'./hack/create_user.sh', 'admin@barretthousen.com', 'admin', 'ADMINISTRATOR'
]
loginAdminCmd = [
'curl', '-vvvv', '-X', 'POST',
'-H', 'Content-Type: application/json',
'-d', '{\"email\":\"admin@barretthousen.com\", \"password\":\"admin\"}',
'http://bh.localhost:8000/api/v1/user'
]
syncCatalogsCmd = [
'curl', '-vvvv', '-X', 'PUT',
'-H', 'Content-Type: application/json',
'-H', 'bh-session-id: 2',
'-d', '{\"targetSite\":\"All\"}',
'http://admin.localhost:8000/api/v1/sync'
]
local(createAdminCmd, quiet=False, echo_off=True)
local(loginAdminCmd, quiet=False, echo_off=True)
local(syncCatalogsCmd, quiet=False, echo_off=True)
cmd_button(
name='Sync Catalogs',
argv=syncCatalogsCmd,
text='Sync Catalogs',
location=location.NAV,
icon_name='sync')

@@ -61,22 +61,32 @@ type (
func (domain Domain) StartSync(ctx context.Context, in FindNewUpcomingInput) (out FindNewUpcomingOutput, err error) {
kernel.TraceLog.Printf("%+v", in)
finder, ok := targetsImpls[in.TargetSite]
if !ok {
if in.TargetSite == "All" || in.TargetSite == "" {
for k, v := range targetsImpls {
if out.Job, err = domain.Storage.CreateScrapeJob(ctx, k); err != nil {
err = fmt.Errorf("could not create new scrape job record: %w", err)
continue
}
kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
go domain.executeScrapeJob(v, out.Job.ID)
}
} else if finder, ok := targetsImpls[in.TargetSite]; ok {
if out.Job, err = domain.Storage.CreateScrapeJob(ctx, in.TargetSite); err != nil {
err = fmt.Errorf("could not create new scrape job record: %w", err)
return
}
kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
go domain.executeScrapeJob(finder, out.Job.ID)
} else {
kernel.TraceLog.Println("could not find target")
err = errors.New("No scrape job found by name")
return
}
if out.Job, err = domain.Storage.CreateScrapeJob(ctx, in.TargetSite); err != nil {
err = fmt.Errorf("could not create new scrape job record: %w", err)
return
}
kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
// TODO: make everything after this line async and run after return
go domain.executeScrapeJob(finder, out.Job.ID)
return
}
@@ -128,7 +138,7 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
ace, err := domain.CatalogService.UpdateUpcomingAuction(ctx, auction)
if err != nil {
kernel.ErrorLog.Printf("could not import upcoming auction: %s", err.Error())
kernel.ErrorLog.Printf("[%s] could not import upcoming auction: %s", finder.String(), err.Error())
fmt.Fprintf(errs, "{ \"AuctionFingerprint\": \"%s\", \"error\": \"%s\" }\n", ace.Fingerprint, err.Error())
continue
}
@@ -138,10 +148,10 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
}
}
kernel.TraceLog.Println("waiting for results...")
kernel.TraceLog.Printf("[%s] waiting for results...", finder.String())
if err := errGroup.Wait(); err != nil {
err = fmt.Errorf("an issue occurred while finding upcoming items iteration: %w", err)
fmt.Fprintf(errs, "{ \"error\": \"%s\" }", err.Error())
fmt.Fprintf(errs, "{\"error\": \"%s\" }", err.Error())
}
var completedJob ScrapeJob
@@ -150,9 +160,9 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
AuctionCount: count,
Errors: errs.String(),
}); err != nil {
kernel.ErrorLog.Printf("Could not complete scrape job, failing: %v", err)
kernel.ErrorLog.Printf("[%s] Could not complete scrape job, failing: %v", finder.String(), err)
}
kernel.InfoLog.Printf("Scrape Job %d completed in %v. Successfully imported %d/%d", jobID, completedJob.Completed.Sub(completedJob.Started), count, total)
kernel.InfoLog.Printf("[%s] Scrape Job %d completed in %v. Successfully imported %d/%d", finder.String(), jobID, completedJob.Completed.Sub(completedJob.Started), count, total)
return
}
