|
|
|
@@ -61,22 +61,32 @@ type (
|
|
|
|
|
func (domain Domain) StartSync(ctx context.Context, in FindNewUpcomingInput) (out FindNewUpcomingOutput, err error) {
|
|
|
|
|
kernel.TraceLog.Printf("%+v", in)
|
|
|
|
|
|
|
|
|
|
finder, ok := targetsImpls[in.TargetSite]
|
|
|
|
|
if !ok {
|
|
|
|
|
if in.TargetSite == "All" || in.TargetSite == "" {
|
|
|
|
|
for k, v := range targetsImpls {
|
|
|
|
|
|
|
|
|
|
if out.Job, err = domain.Storage.CreateScrapeJob(ctx, k); err != nil {
|
|
|
|
|
err = fmt.Errorf("could not create new scrape job record: %w", err)
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
|
|
|
|
|
|
|
|
|
|
go domain.executeScrapeJob(v, out.Job.ID)
|
|
|
|
|
}
|
|
|
|
|
} else if finder, ok := targetsImpls[in.TargetSite]; ok {
|
|
|
|
|
if out.Job, err = domain.Storage.CreateScrapeJob(ctx, in.TargetSite); err != nil {
|
|
|
|
|
err = fmt.Errorf("could not create new scrape job record: %w", err)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
|
|
|
|
|
go domain.executeScrapeJob(finder, out.Job.ID)
|
|
|
|
|
} else {
|
|
|
|
|
kernel.TraceLog.Println("could not find target")
|
|
|
|
|
err = errors.New("No scrape job found by name")
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if out.Job, err = domain.Storage.CreateScrapeJob(ctx, in.TargetSite); err != nil {
|
|
|
|
|
err = fmt.Errorf("could not create new scrape job record: %w", err)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
|
|
|
|
|
|
|
|
|
|
// TODO: make everything after this line async and run after return
|
|
|
|
|
go domain.executeScrapeJob(finder, out.Job.ID)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
@@ -128,7 +138,7 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
|
|
|
|
|
|
|
|
|
|
ace, err := domain.CatalogService.UpdateUpcomingAuction(ctx, auction)
|
|
|
|
|
if err != nil {
|
|
|
|
|
kernel.ErrorLog.Printf("could not import upcoming auction: %s", err.Error())
|
|
|
|
|
kernel.ErrorLog.Printf("[%s] could not import upcoming auction: %s", finder.String(), err.Error())
|
|
|
|
|
fmt.Fprintf(errs, "{ \"AuctionFingerprint\": \"%s\", \"error\": \"%s\" }\n", ace.Fingerprint, err.Error())
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
@@ -138,10 +148,10 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
kernel.TraceLog.Println("waiting for results...")
|
|
|
|
|
kernel.TraceLog.Printf("[%s] waiting for results...", finder.String())
|
|
|
|
|
if err := errGroup.Wait(); err != nil {
|
|
|
|
|
err = fmt.Errorf("an issue occurred while finding upcoming items iteration: %w", err)
|
|
|
|
|
fmt.Fprintf(errs, "{ \"error\": \"%s\" }", err.Error())
|
|
|
|
|
fmt.Fprintf(errs, "{\"error\": \"%s\" }", err.Error())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var completedJob ScrapeJob
|
|
|
|
@@ -150,9 +160,9 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
|
|
|
|
|
AuctionCount: count,
|
|
|
|
|
Errors: errs.String(),
|
|
|
|
|
}); err != nil {
|
|
|
|
|
kernel.ErrorLog.Printf("Could not complete scrape job, failing: %v", err)
|
|
|
|
|
kernel.ErrorLog.Printf("[%s] Could not complete scrape job, failing: %v", finder.String(), err)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
kernel.InfoLog.Printf("Scrape Job %d completed in %v. Successfully imported %d/%d", jobID, completedJob.Completed.Sub(completedJob.Started), count, total)
|
|
|
|
|
kernel.InfoLog.Printf("[%s] Scrape Job %d completed in %v. Successfully imported %d/%d", finder.String(), jobID, completedJob.Completed.Sub(completedJob.Started), count, total)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|