Compare commits

2 Commits

@@ -7,6 +7,7 @@ print("""
load('ext://helm_resource', 'helm_resource', 'helm_repo')
load('ext://deployment', 'deployment_create')
load('ext://restart_process', 'docker_build_with_restart')
load('ext://uibutton', 'cmd_button', 'location', 'text_input')
helm_repo('bitnami', 'https://charts.bitnami.com/bitnami', labels=["9-repos"])
helm_repo('traefik', 'https://traefik.github.io/charts', labels=["9-repos"])
@@ -167,5 +168,33 @@ bh_backend_service(service="proxy-web", port_forwards=[
bh_client(service='web', deps=["proxy-web-local"])
bh_client(service='admin', deps=["proxy-admin-local"])
createAdminCmd = [
    './hack/create_user.sh', 'admin@barretthousen.com', 'admin', 'ADMINISTRATOR'
]
loginAdminCmd = [
    'curl', '-vvvv', '-X', 'POST',
    '-H', 'Content-Type: application/json',
    '-d', '{"email":"admin@barretthousen.com", "password":"admin"}',
    'http://bh.localhost:8000/api/v1/user'
]
syncCatalogsCmd = [
    'curl', '-vvvv', '-X', 'PUT',
    '-H', 'Content-Type: application/json',
    '-H', 'bh-session-id: 2',
    '-d', '{"targetSite":"All"}',
    'http://admin.localhost:8000/api/v1/sync'
]
local(createAdminCmd, quiet=False, echo_off=True)
local(loginAdminCmd, quiet=False, echo_off=True)
local(syncCatalogsCmd, quiet=False, echo_off=True)
cmd_button(
    name='Sync Catalogs',
    argv=syncCatalogsCmd,
    text='Sync Catalogs',
    location=location.NAV,
    icon_name='sync')
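
Since cmd_button passes syncCatalogsCmd straight through as argv, the button can be sanity-checked from a shell with the equivalent command below, assuming the stack is serving admin.localhost:8000 and session id 2 belongs to the admin user logged in above:

# trigger a scrape of every registered target site
curl -vvvv -X PUT \
    -H 'Content-Type: application/json' \
    -H 'bh-session-id: 2' \
    -d '{"targetSite":"All"}' \
    http://admin.localhost:8000/api/v1/sync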

@@ -0,0 +1,26 @@
#!/bin/bash
main(){
    local EMAIL=${1};
    if [ -z "${EMAIL}" ]; then
        echo "first argument must be an EMAIL address"
        exit 1;
    fi
    local PASSWORD=${2};
    if [ -z "${PASSWORD}" ]; then
        echo "second argument must be a PASSWORD"
        exit 1;
    fi
    local ROLE=${3:-USER};
    local HOST=${4:-"http://bh.localhost:8000"};
    curl -X PUT -H 'Content-Type: application/json' \
        -d "{ \"email\":\"${EMAIL}\", \"password\":\"${PASSWORD}\", \"role\":\"${ROLE}\" }" \
        "${HOST}/api/v1/user";
}
main "$@";
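
A usage sketch for the new script: EMAIL and PASSWORD are required, ROLE defaults to USER, and a fourth argument overrides the default host of http://bh.localhost:8000. The Tiltfile's createAdminCmd therefore boils down to:

# creates the seed admin account via PUT ${HOST}/api/v1/user
./hack/create_user.sh admin@barretthousen.com admin ADMINISTRATOR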

@@ -3,7 +3,7 @@
main(){
    local REPOSITORY=${1};
    if [ -z "${REPOSITORY}" ]; then
        echo "First argument must be container repository";
        exit 1;
    fi
@@ -20,5 +20,7 @@ main(){
    curl -u "${DOCKER_USERNAME}:${DOCKER_PASSWORD}" \
        -X PUT -H "Content-Type: ${CONTENT_TYPE}" -d "${MANIFEST}" "${REGISTRY_URL}/${TO}";
    echo "";
}
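
For context, this retag pattern leans on the Docker Registry HTTP API v2: a manifest fetched for one tag can be PUT back under another reference, creating the new tag without pulling or pushing any layers. A minimal sketch of the fetch half, assuming a FROM variable names the source tag by analogy with TO (any variable not shown in the hunk above is an assumption):

# fetch the source manifest; its media type must be sent back verbatim
# as Content-Type when the manifest is PUT under the new tag
CONTENT_TYPE="application/vnd.docker.distribution.manifest.v2+json"
MANIFEST=$(curl -u "${DOCKER_USERNAME}:${DOCKER_PASSWORD}" \
    -H "Accept: ${CONTENT_TYPE}" "${REGISTRY_URL}/${FROM}")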

@@ -61,22 +61,32 @@ type (
func (domain Domain) StartSync(ctx context.Context, in FindNewUpcomingInput) (out FindNewUpcomingOutput, err error) {
	kernel.TraceLog.Printf("%+v", in)
	finder, ok := targetsImpls[in.TargetSite]
	if !ok {
	if in.TargetSite == "All" || in.TargetSite == "" {
		for k, v := range targetsImpls {
			if out.Job, err = domain.Storage.CreateScrapeJob(ctx, k); err != nil {
				err = fmt.Errorf("could not create new scrape job record: %w", err)
				continue
			}
			kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
			go domain.executeScrapeJob(v, out.Job.ID)
		}
	} else if finder, ok := targetsImpls[in.TargetSite]; ok {
		if out.Job, err = domain.Storage.CreateScrapeJob(ctx, in.TargetSite); err != nil {
			err = fmt.Errorf("could not create new scrape job record: %w", err)
			return
		}
		kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
		go domain.executeScrapeJob(finder, out.Job.ID)
	} else {
		kernel.TraceLog.Println("could not find target")
		err = errors.New("no scrape job found by name")
		return
	}
	if out.Job, err = domain.Storage.CreateScrapeJob(ctx, in.TargetSite); err != nil {
		err = fmt.Errorf("could not create new scrape job record: %w", err)
		return
	}
	kernel.InfoLog.Printf("Scrape Job %d starting", out.Job.ID)
	// TODO: make everything after this line async and run after return
	go domain.executeScrapeJob(finder, out.Job.ID)
	return
}
@@ -128,7 +138,7 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
			ace, err := domain.CatalogService.UpdateUpcomingAuction(ctx, auction)
			if err != nil {
				kernel.ErrorLog.Printf("could not import upcoming auction: %s", err.Error())
				kernel.ErrorLog.Printf("[%s] could not import upcoming auction: %s", finder.String(), err.Error())
				fmt.Fprintf(errs, "{ \"AuctionFingerprint\": \"%s\", \"error\": \"%s\" }\n", ace.Fingerprint, err.Error())
				continue
			}
@@ -138,10 +148,10 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
		}
	}
	kernel.TraceLog.Println("waiting for results...")
	kernel.TraceLog.Printf("[%s] waiting for results...", finder.String())
	if err := errGroup.Wait(); err != nil {
		err = fmt.Errorf("an issue occurred while finding upcoming items iteration: %w", err)
		fmt.Fprintf(errs, "{ \"error\": \"%s\" }", err.Error())
		fmt.Fprintf(errs, "{\"error\": \"%s\" }", err.Error())
	}
	var completedJob ScrapeJob
@@ -150,9 +160,9 @@ func (domain *Domain) executeScrapeJob(finder UpcomingAuctionFinder, jobID int)
		AuctionCount: count,
		Errors:       errs.String(),
	}); err != nil {
		kernel.ErrorLog.Printf("Could not complete scrape job, failing: %v", err)
		kernel.ErrorLog.Printf("[%s] Could not complete scrape job, failing: %v", finder.String(), err)
	}
	kernel.InfoLog.Printf("Scrape Job %d completed in %v. Successfully imported %d/%d", jobID, completedJob.Completed.Sub(completedJob.Started), count, total)
	kernel.InfoLog.Printf("[%s] Scrape Job %d completed in %v. Successfully imported %d/%d", finder.String(), jobID, completedJob.Completed.Sub(completedJob.Started), count, total)
	return
}
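
Given the new dispatch in StartSync, a request naming a single registered target should start exactly one job, while "All" or an empty targetSite fans out one job per entry in targetsImpls. A sketch of the single-target case, with a placeholder target name (real names are the keys of targetsImpls):

# start one scrape job for one registered target (name is hypothetical)
curl -X PUT -H 'Content-Type: application/json' -H 'bh-session-id: 2' \
    -d '{"targetSite":"SomeAuctionHouse"}' \
    http://admin.localhost:8000/api/v1/sync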
