Skip to content

Commit

Permalink
manager error refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
BLCK-B committed May 28, 2024
1 parent 92630d3 commit 013ac22
Show file tree
Hide file tree
Showing 4 changed files with 30 additions and 35 deletions.
Original file line number Diff line number Diff line change
/**
 * Creates the exception, storing the offending URL as the exception message.
 *
 * @param url the URL whose scrape failed; forwarded to the superclass message
 */
public ScraperGenericException(String url) {
    super(url);
}

/**
 * Returns a short, stable label for log output.
 * NOTE(review): log assertions appear to match on the substring "generic"
 * (see the verify(...) call in the changed test file) — keep this text stable.
 */
@Override
public String toString() {
    final String label = "generic exception";
    return label;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -40,42 +40,12 @@ public int loadWithScrapers() {
public int scrapeNext() {
double startTime = System.currentTimeMillis();
Scraper scraper = scrapers.getFirst();
for (int i = 0; i <= 2; i++) {
for (int i = 0; i <= 2; ++i) {
try {
scraper.scrape(jsoupTimeout);
break; // exception = will not break
}
catch (ScraperTimeoutException e) {
switch (i) {
case 0 -> log.error(e, ErrorLogging.Severity.INFO, scraper + " time out");
case 1 -> log.error(e, ErrorLogging.Severity.INFO, scraper + " second time out");
default -> {
log.error(e, ErrorLogging.Severity.INFO, "final time out, disabling source " + scraper);
// remove all scrapers of a faulty source
scrapers.removeIf(s -> s.getClass().equals(scraper.getClass()));
}
}
try {
Thread.sleep(minDelay);
} catch (InterruptedException ex) {
throw new RuntimeException(ex);
}
}
catch (Exception e) {
switch (i) {
case 0 -> log.error(e, ErrorLogging.Severity.WARNING, scraper + " error of scraper");
case 1 -> log.error(e, ErrorLogging.Severity.WARNING, scraper + " second error of scraper");
default -> {
log.error(e, ErrorLogging.Severity.WARNING, "final error of scraper, disabling source " + scraper);
// remove all scrapers of a faulty source
scrapers.removeIf(s -> s.getClass().equals(scraper.getClass()));
}
}
try {
Thread.sleep(minDelay);
} catch (InterruptedException ex) {
throw new RuntimeException(ex);
}
break;
} catch (Exception e) {
scrapeErrorLaunder(i, scraper, e);
}
if (scrapers.isEmpty())
return 0;
Expand All @@ -94,6 +64,21 @@ public int scrapeNext() {
return scrapers.size();
}

/**
 * Logs a scraper failure, disables the source after the final retry, and
 * waits before the next attempt.
 *
 * <p>Timeouts ({@link ScraperTimeoutException}) are logged at INFO severity;
 * any other failure at WARNING. On the last attempt ({@code i == 2}, matching
 * the retry loop bound in {@code scrapeNext}) every scraper belonging to the
 * failing source is removed so the source is not tried again.
 *
 * @param i       zero-based attempt index (0..2)
 * @param scraper the scraper that threw
 * @param e       the failure raised by the scrape attempt
 * @throws RuntimeException if the delay sleep is interrupted
 */
private void scrapeErrorLaunder(int i, Scraper scraper, Exception e) {
    if (e instanceof ScraperTimeoutException)
        log.error(e, ErrorLogging.Severity.INFO, scraper + " scraper threw " + e + " " + i + " times");
    else
        log.error(e, ErrorLogging.Severity.WARNING, scraper + " scraper threw " + e + " " + i + " times");
    // remove scrapers of a faulty source
    if (i == 2)
        scrapers.removeIf(s -> s.getClass().equals(scraper.getClass()));
    try {
        Thread.sleep(minDelay);
    } catch (InterruptedException ex) {
        // Restore the interrupt flag so callers up the stack can still
        // observe the interruption (sleep() clears it before throwing).
        Thread.currentThread().interrupt();
        throw new RuntimeException(ex);
    }
}

public long delays(String source) {
long waitTime = (long) (minDelay - sourceTimes.get(source));
if (waitTime > 0) {
Expand Down
Original file line number Diff line number Diff line change
/**
 * Creates the exception, storing the URL that timed out as the message.
 *
 * @param url the URL whose scrape timed out; forwarded to the superclass message
 */
public ScraperTimeoutException(String url) {
    super(url);
}

/**
 * Returns a short, stable label for log output.
 * NOTE(review): timeout log lines are built by concatenating this exception,
 * so this text ends up in INFO-severity log messages — keep it stable.
 */
@Override
public String toString() {
    final String label = "time out exception";
    return label;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ void erroringSourceCausesWarningLogAndIsRemoved() throws ScraperTimeoutException
assertEquals(4, scraperManager.loadWithScrapers());
assertEquals(1, scraperManager.scrapeNext());
assertEquals(0, scraperManager.scrapeNext());
verify(log, times(6)).error(any(), eq(ErrorLogging.Severity.WARNING), contains("error"));
verify(log, times(6)).error(any(), eq(ErrorLogging.Severity.WARNING), contains("generic"));
}

@Test
Expand Down

0 comments on commit 013ac22

Please sign in to comment.