diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/Annotator.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/Annotator.java
index 6cfe540..1689ced 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/Annotator.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/Annotator.java
@@ -23,7 +23,7 @@
  */
 public class Annotator {
 
-    private static final Logger logger = LoggerFactory.getLogger(Annotator.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(Annotator.class);
 
     private MongoFileManager mm;
 
@@ -44,7 +44,7 @@ public void annotate(Processings annotator_type) {
                 annotateTeiCollection(annotator_type);
             }
         } catch (UnreachableAnnotateServiceException | AnnotatorNotAvailableException e) {
-            logger.error("Error when annotating. ", e);
+            LOGGER.error("Error when annotating. ", e);
         }
     }
 
@@ -65,7 +65,7 @@ private void annotateTeiCollection(Processings annotator_type)
             while (mm.hasMore()) {
                 BiblioObject biblioObject = mm.nextBiblioObject();
                 if (!AnnotateProperties.isReset() && mm.isProcessed(annotator_type)) {
-                    logger.info("\t\t Already annotated by " + annotator_type + ", Skipping...");
+                    LOGGER.info("\t\t Already annotated by " + annotator_type + ", Skipping...");
                     continue;
                 }
                 Runnable worker = null;
@@ -74,7 +74,7 @@ private void annotateTeiCollection(Processings annotator_type)
                         biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
                         worker = new NerdAnnotatorWorker(mm, biblioObject);
                     } else {
-                        logger.info("\t\t No TEI available for " + biblioObject.getRepositoryDocId());
+                        LOGGER.info("\t\t No TEI available for " + biblioObject.getRepositoryDocId());
                     }
                 } else if (annotator_type == Processings.KEYTERM) {
                     if (biblioObject.getIsProcessedByPub2TEI()) {
@@ -83,7 +83,7 @@ private void annotateTeiCollection(Processings annotator_type)
                         biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
                         worker = new KeyTermAnnotatorWorker(mm, biblioObject);
                     } else {
-                        logger.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
+                        LOGGER.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
                     }
                 } else if (annotator_type == Processings.QUANTITIES) {
                     if (biblioObject.getIsProcessedByPub2TEI()) {
@@ -92,7 +92,7 @@ private void annotateTeiCollection(Processings annotator_type)
                         biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
                         worker = new QuantitiesAnnotatorWorker(mm, biblioObject);
                     } else {
-                        logger.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
+                        LOGGER.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
                     }
                 } else if (annotator_type == Processings.PDFQUANTITIES) {
                     if (biblioObject.getIsWithFulltext()) {
@@ -101,7 +101,7 @@ private void annotateTeiCollection(Processings annotator_type)
                         biblioObject.setPdf(bf);
                         worker = new PDFQuantitiesAnnotatorWorker(mm, biblioObject);
                     } else {
-                        logger.info("\t\t No fulltext available for " + biblioObject.getRepositoryDocId());
+                        LOGGER.info("\t\t No fulltext available for " + biblioObject.getRepositoryDocId());
                     }
                 }
                 if (worker != null) {
@@ -111,7 +111,7 @@ private void annotateTeiCollection(Processings annotator_type)
                 }
             }
         }
-        logger.info("Total: " + nb + " documents annotated.");
+        LOGGER.info("Total: " + nb + " documents annotated.");
         } finally {
             mm.close();
         }
@@ -132,18 +132,18 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
         ThreadPoolExecutor executor = getThreadsExecutor(annotator_type);
         if (mm.initObjects(null,
                 getQuery(AnnotateProperties.isReset(), annotator_type))) {
-            //logger.info("processing teis for :" + date);
+            //LOGGER.info("processing teis for :" + date);
             while (mm.hasMore()) {
                 BiblioObject biblioObject = mm.nextBiblioObject();
                 if (!AnnotateProperties.isReset() && mm.isProcessed(annotator_type)) {
-                    logger.info("\t\t Already annotated by " + annotator_type + ", Skipping...");
+                    LOGGER.info("\t\t Already annotated by " + annotator_type + ", Skipping...");
                     continue;
                 }
                 // filter based on document size... we should actually annotate only
                 // a given length and then stop
                 if (biblioObject.getTeiCorpus().length() > 300000) {
-                    logger.info("skipping " + biblioObject.getRepositoryDocId() + ": file too large");
+                    LOGGER.info("skipping " + biblioObject.getRepositoryDocId() + ": file too large");
                     continue;
                 }
                 Runnable worker = null;
@@ -152,7 +152,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
                         biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
                         worker = new NerdAnnotatorWorker(mm, biblioObject);
                     } else {
-                        logger.info("\t\t No TEI available for " + biblioObject.getRepositoryDocId());
+                        LOGGER.info("\t\t No TEI available for " + biblioObject.getRepositoryDocId());
                     }
                 } else if (annotator_type == Processings.KEYTERM) {
                     if (biblioObject.getIsProcessedByPub2TEI()) {
@@ -161,7 +161,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
                         biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
                         worker = new KeyTermAnnotatorWorker(mm, biblioObject);
                     } else {
-                        logger.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
+                        LOGGER.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
                     }
                 } else if (annotator_type == Processings.QUANTITIES) {
                     if (biblioObject.getIsProcessedByPub2TEI()) {
@@ -170,7 +170,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
                         biblioObject.setTeiCorpus(mm.getTEICorpus(biblioObject));
                         worker = new QuantitiesAnnotatorWorker(mm, biblioObject);
                     } else {
-                        logger.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
+                        LOGGER.info("\t\t No Grobid TEI available for " + biblioObject.getRepositoryDocId());
                    }
                 } else if (annotator_type == Processings.PDFQUANTITIES) {
                     BinaryFile bf = new BinaryFile();
@@ -180,7 +180,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
                         biblioObject.setPdf(bf);
                         worker = new PDFQuantitiesAnnotatorWorker(mm, biblioObject);
                     } else {
-                        logger.info("\t\t No fulltext available for " + biblioObject.getRepositoryDocId());
+                        LOGGER.info("\t\t No fulltext available for " + biblioObject.getRepositoryDocId());
                     }
                 }
                 if (worker != null) {
@@ -190,7 +190,7 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
                 }
             }
             executor.shutdown();
-            logger.info("Jobs done, shutting down thread pool. The executor will wait 2 minutes before forcing the shutdown.");
+            LOGGER.info("Jobs done, shutting down thread pool. The executor will wait 2 minutes before forcing the shutdown.");
             try {
                 if (!executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES)) {
                     executor.shutdownNow();
@@ -198,8 +198,8 @@ private void annotateTeiCollectionMultiThreaded(Processings annotator_type)
             } catch (InterruptedException e) {
                 executor.shutdownNow();
             }
-            logger.info("Finished all threads");
-            logger.info("Total: " + nb + " documents annotated.");
+            LOGGER.info("Finished all threads");
+            LOGGER.info("Total: " + nb + " documents annotated.");
             }
         } finally {
             mm.close();
@@ -248,7 +248,7 @@ private ThreadPoolExecutor getThreadsExecutor(Processings annotator_type) {
         } else if (annotator_type == annotator_type.PDFQUANTITIES) {
             nbThreads = AnnotateProperties.getQuantitiesNbThreads();
         }
-        logger.info("Number of threads: " + nbThreads);
+        LOGGER.info("Number of threads: " + nbThreads);
         ThreadPoolExecutor executor = new ThreadPoolExecutor(nbThreads, nbThreads, 60000,
                 TimeUnit.MILLISECONDS, blockingQueue);
@@ -257,15 +257,15 @@ private ThreadPoolExecutor getThreadsExecutor(Processings annotator_type) {
             @Override
             public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
-                logger.info("Task Rejected : "
+                LOGGER.info("Task Rejected : "
                         + ((AnnotatorWorker) r).getRepositoryDocId());
-                logger.info("Waiting for 60 second !!");
+                LOGGER.info("Waiting for 60 seconds.");
                 try {
                     Thread.sleep(60000);
                 } catch (InterruptedException e) {
-                    logger.error("Error when interrupting the thread. ", e);
+                    LOGGER.error("Thread interrupted while waiting. ", e);
                 }
-                logger.info("Lets add another time : "
+                LOGGER.info("Resubmitting task : "
                         + ((AnnotatorWorker) r).getRepositoryDocId());
                 executor.execute(r);
             }
diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/AnnotatorWorker.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/AnnotatorWorker.java
index aa77c48..fa55383 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/AnnotatorWorker.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/AnnotatorWorker.java
@@ -13,7 +13,7 @@
  */
 public abstract class AnnotatorWorker implements Runnable {
 
-    private static final Logger logger = LoggerFactory.getLogger(AnnotatorWorker.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(AnnotatorWorker.class);
     protected MongoFileManager mm = null;
     protected BiblioObject biblioObject = null;
     protected String annotationsCollection;
@@ -29,10 +29,10 @@ public AnnotatorWorker(MongoFileManager mongoManager,
     @Override
     public void run() {
         long startTime = System.nanoTime();
-        logger.info("\t\t " + Thread.currentThread().getName() + " Start. Processing = "+biblioObject.getRepositoryDocId());
+        LOGGER.info("\t\t " + Thread.currentThread().getName() + " Start. Processing = "+biblioObject.getRepositoryDocId());
         processCommand();
         long endTime = System.nanoTime();
-        logger.info("\t\t " + Thread.currentThread().getName() + " End. :" + (endTime - startTime) / 1000000 + " ms");
+        LOGGER.info("\t\t " + Thread.currentThread().getName() + " End. :" + (endTime - startTime) / 1000000 + " ms");
:" + (endTime - startTime) / 1000000 + " ms"); } protected abstract void processCommand() ; protected abstract String annotateDocument() ; diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/KeyTermAnnotatorWorker.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/KeyTermAnnotatorWorker.java index 7e79fa9..7aa9b33 100644 --- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/KeyTermAnnotatorWorker.java +++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/KeyTermAnnotatorWorker.java @@ -22,7 +22,7 @@ */ public class KeyTermAnnotatorWorker extends AnnotatorWorker { - private static final Logger logger = LoggerFactory.getLogger(KeyTermAnnotatorWorker.class); + private static final Logger LOGGER = LoggerFactory.getLogger(KeyTermAnnotatorWorker.class); public KeyTermAnnotatorWorker(MongoFileManager mongoManager, BiblioObject biblioObject) { @@ -36,14 +36,14 @@ protected void processCommand() { if (inserted) { mm.updateBiblioObjectStatus(biblioObject, Processings.KEYTERM, false); - logger.info("\t\t " + Thread.currentThread().getName() + ": " + biblioObject.getRepositoryDocId() + " annotated by the KeyTerm extraction and disambiguation service."); + LOGGER.info("\t\t " + Thread.currentThread().getName() + ": " + biblioObject.getRepositoryDocId() + " annotated by the KeyTerm extraction and disambiguation service."); } else { - logger.info("\t\t " + Thread.currentThread().getName() + ": " + LOGGER.info("\t\t " + Thread.currentThread().getName() + ": " + biblioObject.getRepositoryDocId() + " error occured trying to annotate Keyterms."); } } catch (Exception ex) { - logger.error("\t\t " + Thread.currentThread().getName() + ": TEI could not be processed by the keyterm extractor: " + biblioObject.getRepositoryDocId()); - ex.printStackTrace(); + LOGGER.error("\t\t " + Thread.currentThread().getName() + ": TEI could not be processed by the keyterm extractor: " + biblioObject.getRepositoryDocId()); + LOGGER.error("Error: ", ex); } } @@ -93,7 +93,7 @@ protected String annotateDocument() { json.append("{} }"); } } catch (IOException e) { - logger.error(Thread.currentThread().getName() + ": TEI could not be processed by the keyterm extractor: " + biblioObject.getRepositoryDocId(), e); + LOGGER.error(Thread.currentThread().getName() + ": TEI could not be processed by the keyterm extractor: " + biblioObject.getRepositoryDocId(), e); return null; } return json.toString(); diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/NerdAnnotatorWorker.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/NerdAnnotatorWorker.java index bede329..fa74a7e 100644 --- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/NerdAnnotatorWorker.java +++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/NerdAnnotatorWorker.java @@ -40,7 +40,7 @@ */ public class NerdAnnotatorWorker extends AnnotatorWorker { - private static final Logger logger = LoggerFactory.getLogger(NerdAnnotatorWorker.class); + private static final Logger LOGGER = LoggerFactory.getLogger(NerdAnnotatorWorker.class); public NerdAnnotatorWorker(MongoFileManager mongoManager, BiblioObject biblioObject) { @@ -53,9 +53,9 @@ protected void processCommand() { boolean inserted = mm.insertAnnotation(annotateDocument(), annotationsCollection); if (inserted) { mm.updateBiblioObjectStatus(biblioObject, Processings.NERD, false); - logger.info("\t\t " + Thread.currentThread().getName() + ": " + biblioObject.getRepositoryDocId() + " 
         } else {
-            logger.info("\t\t " + Thread.currentThread().getName() + ": "
+            LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
                     + biblioObject.getRepositoryDocId() + " error occured trying to annotate with NERD.");
         }
     }
@@ -89,7 +89,7 @@ protected String annotateDocument() {
             annotateNode(metadata, true, json, null);
             json.append("] }");
         } catch (Exception ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
             return null;
         }
@@ -143,11 +143,11 @@ private boolean annotateNode(Node node,
                 NerdClient nerdService = new NerdClient(AnnotateProperties.getNerdHost());
                 jsonText = nerdService.disambiguateText(text.trim(), language).toString();
             } catch (Exception ex) {
-                logger.error("\t\t " + Thread.currentThread().getName() + ": Text could not be annotated by NERD: " + text);
-                ex.printStackTrace();
+                LOGGER.error("\t\t " + Thread.currentThread().getName() + ": Text could not be annotated by NERD: " + text);
+                LOGGER.error("Error: ", ex);
             }
             if (jsonText == null) {
-                logger.error("\t\t " + Thread.currentThread().getName() + ": NERD failed annotating text : " + text);
+                LOGGER.error("\t\t " + Thread.currentThread().getName() + ": NERD failed annotating text : " + text);
             }
             if (jsonText != null) {
                 // resulting annotations, with the corresponding id
diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/PDFQuantitiesAnnotatorWorker.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/PDFQuantitiesAnnotatorWorker.java
index 75e7ad3..28af19c 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/PDFQuantitiesAnnotatorWorker.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/PDFQuantitiesAnnotatorWorker.java
@@ -15,7 +15,7 @@
  */
 public class PDFQuantitiesAnnotatorWorker extends AnnotatorWorker {
 
-    private static final Logger logger = LoggerFactory.getLogger(PDFQuantitiesAnnotatorWorker.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(PDFQuantitiesAnnotatorWorker.class);
 
     public PDFQuantitiesAnnotatorWorker(MongoFileManager mongoManager, BiblioObject biblioObject) {
@@ -28,10 +28,10 @@ protected void processCommand() {
         boolean inserted = mm.insertAnnotation(annotateDocument(), annotationsCollection);
         if (inserted) {
             mm.updateBiblioObjectStatus(biblioObject, Processings.PDFQUANTITIES, false);
-            logger.info("\t\t " + Thread.currentThread().getName() + ": "
+            LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
                     + biblioObject.getRepositoryDocId() + " annotated by the QUANTITIES service.");
         } else {
-            logger.info("\t\t " + Thread.currentThread().getName() + ": "
+            LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
                     + biblioObject.getRepositoryDocId() + " error occured trying to annotate with QUANTITIES.");
         }
@@ -64,8 +64,8 @@ protected String annotateDocument() {
             }
             biblioObject.getPdf().getStream().close();
         } catch (Exception ex) {
-            logger.error("\t\t " + Thread.currentThread().getName() + ": PDF could not be processed by the quantities extractor: ");
-            ex.printStackTrace();
+            LOGGER.error("\t\t " + Thread.currentThread().getName() + ": PDF could not be processed by the quantities extractor: ");
+            LOGGER.error("Error: ", ex);
             return null;
         }
         return json.toString();
diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/QuantitiesAnnotatorWorker.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/QuantitiesAnnotatorWorker.java
index 9e4585c..57db336 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/QuantitiesAnnotatorWorker.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/QuantitiesAnnotatorWorker.java
@@ -32,7 +32,7 @@
  */
 public class QuantitiesAnnotatorWorker extends AnnotatorWorker {
 
-    private static final Logger logger = LoggerFactory.getLogger(QuantitiesAnnotatorWorker.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(QuantitiesAnnotatorWorker.class);
 
     public QuantitiesAnnotatorWorker(MongoFileManager mongoManager, BiblioObject biblioObject) {
@@ -45,10 +45,10 @@ protected void processCommand() {
         boolean inserted = mm.insertAnnotation(annotateDocument(), annotationsCollection);
         if (inserted) {
             mm.updateBiblioObjectStatus(biblioObject, Processings.QUANTITIES, false);
-            logger.info("\t\t " + Thread.currentThread().getName() + ": "
+            LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
                     + biblioObject.getRepositoryDocId() + " annotated by the QUANTITIES service.");
         } else {
-            logger.info("\t\t " + Thread.currentThread().getName() + ": "
+            LOGGER.info("\t\t " + Thread.currentThread().getName() + ": "
                     + biblioObject.getRepositoryDocId() + " error occured trying to annotate with QUANTITIES.");
         }
     }
@@ -66,7 +66,7 @@ protected String annotateDocument() {
             // parse the TEI
             docTei = docBuilder.parse(new InputSource(new ByteArrayInputStream(tei.getBytes("UTF-8"))));
         } catch (Exception ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         }
         StringBuffer json = new StringBuffer();
@@ -107,11 +107,11 @@ private boolean annotateNode(Node node,
                 QuantitiesService quantitiesService = new QuantitiesService(IOUtils.toInputStream(text, "UTF-8"));
                 jsonText = quantitiesService.processTextQuantities();
             } catch (Exception ex) {
-                logger.error("\t\t " + Thread.currentThread().getName() + ": Text could not be annotated by QUANTITIES: " + text);
-                ex.printStackTrace();
+                LOGGER.error("\t\t " + Thread.currentThread().getName() + ": Text could not be annotated by QUANTITIES: " + text);
+                LOGGER.error("Error: ", ex);
             }
             if (jsonText == null) {
-                logger.error("\t\t " + Thread.currentThread().getName() + ": QUANTITIES failed annotating text : " + text);
+                LOGGER.error("\t\t " + Thread.currentThread().getName() + ": QUANTITIES failed annotating text : " + text);
             }
             if (jsonText != null) {
                 // resulting annotations, with the corresponding id
diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/main/Main.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/main/Main.java
index bf75ad8..51fb6f2 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/main/Main.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/main/Main.java
@@ -20,7 +20,7 @@
  */
 public class Main {
 
-    private static final Logger logger = LoggerFactory.getLogger(Main.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(Main.class);
 
     private static List availableCommands = new ArrayList() {
         {
@@ -36,7 +36,7 @@ public static void main(String[] args) throws UnknownHostException {
         try {
             AnnotateProperties.init("anhalytics.properties");
         } catch (PropertyException e) {
-            logger.error(e.getMessage());
+            LOGGER.error(e.getMessage());
             return;
         }
@@ -68,7 +68,7 @@ private void processCommand() {
                 annotator.annotate(Processings.PDFQUANTITIES);
             }
         } catch (ServiceException se) {
-            logger.error(se.getMessage());
+            LOGGER.error(se.getMessage());
         }
     }
diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/AnnotateService.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/AnnotateService.java
index 51303d1..d30ec78 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/AnnotateService.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/AnnotateService.java
@@ -18,7 +18,7 @@
  */
 public abstract class AnnotateService {
 
-    private static final Logger logger = LoggerFactory.getLogger(AnnotateService.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(AnnotateService.class);
 
     //protected String input = null;
     protected InputStream input = null;
@@ -33,7 +33,7 @@ public AnnotateService(InputStream input) {
      * @return boolean
      */
     public static boolean isAnnotateServiceReady(Processings annotator_type) throws UnreachableAnnotateServiceException {
-        logger.info("Checking " + annotator_type + " service...");
+        LOGGER.info("Checking " + annotator_type + " service...");
         int responseCode = 0;
         HttpURLConnection conn = null;
         try {
@@ -46,12 +46,12 @@ public static boolean isAnnotateServiceReady(Processings annotator_type) throws
                         + (AnnotateProperties.getQuantitiesPort().isEmpty() ? "" : ":" + AnnotateProperties.getQuantitiesPort()) + "/isalive";
             } else {
                 // keyterm isalive checking not implemented yet.
-                logger.info(annotator_type + " service is ok and can be used.");
+                LOGGER.info(annotator_type + " service is ok and can be used.");
                 return true;
             }
             URL url = new URL(urlString);
             conn = (HttpURLConnection) url.openConnection();
-            logger.info(urlString);
+            LOGGER.info(urlString);
             conn.setDoOutput(true);
             conn.setRequestMethod("GET");
             responseCode = conn.getResponseCode();
@@ -59,11 +59,11 @@ public static boolean isAnnotateServiceReady(Processings annotator_type) throws
                 throw new UnreachableAnnotateServiceException(responseCode, annotator_type.toString());
             }
             if (responseCode != 200) {
-                logger.error(annotator_type + " service is not alive.");
+                LOGGER.error(annotator_type + " service is not alive.");
                 throw new UnreachableAnnotateServiceException(responseCode, annotator_type.toString());
             }
             conn.disconnect();
-            logger.info(annotator_type + " service is ok and can be used.");
+            LOGGER.info(annotator_type + " service is ok and can be used.");
             return true;
         }
 }
diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/KeyTermExtractionService.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/KeyTermExtractionService.java
index 220a49c..20287bb 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/KeyTermExtractionService.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/KeyTermExtractionService.java
@@ -32,7 +32,7 @@
  */
 public class KeyTermExtractionService extends AnnotateService {
 
-    private static final Logger logger = LoggerFactory.getLogger(KeyTermExtractionService.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(KeyTermExtractionService.class);
 
     static private String RESOURCEPATH = "processKeyTermArticleTEI";
@@ -88,7 +88,7 @@ public String runKeyTermExtraction() {
             br.close();
             conn.disconnect();
         } catch (ConnectException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
             try {
                 Thread.sleep(20000);
                 runKeyTermExtraction();
@@ -96,7 +96,7 @@ public String runKeyTermExtraction() {
                 Thread.currentThread().interrupt();
             }
         } catch (HttpRetryException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
             try {
                 Thread.sleep(20000);
                 runKeyTermExtraction();
@@ -104,9 +104,9 @@ public String runKeyTermExtraction() {
                 Thread.currentThread().interrupt();
             }
         } catch (MalformedURLException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         } catch (IOException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         }
         return output.toString().trim();
     }
diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/PDFQuantitiesService.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/PDFQuantitiesService.java
index b690b2c..ceadc89 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/PDFQuantitiesService.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/PDFQuantitiesService.java
@@ -40,7 +40,7 @@
  */
 public class PDFQuantitiesService extends AnnotateService {
 
-    private static final Logger logger = LoggerFactory.getLogger(QuantitiesService.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(PDFQuantitiesService.class);
 
     static private String REQUEST_PDF_QUANTITIES = "annotateQuantityPDF";
@@ -58,7 +58,7 @@ public String processPDFQuantities() {
         try {
             URL url = new URL(AnnotateProperties.getQuantitiesHost()
                     + (AnnotateProperties.getQuantitiesPort().isEmpty() ? "" : ":" + AnnotateProperties.getQuantitiesPort()) + "/" + REQUEST_PDF_QUANTITIES);
-            logger.info("http://" + AnnotateProperties.getQuantitiesHost()
+            LOGGER.info("http://" + AnnotateProperties.getQuantitiesHost()
                     + (AnnotateProperties.getQuantitiesPort().isEmpty() ? "" : ":" + AnnotateProperties.getQuantitiesPort()) + "/" + REQUEST_PDF_QUANTITIES);
             HttpURLConnection conn = (HttpURLConnection) url.openConnection();
             conn.setDoOutput(true);
@@ -88,7 +88,7 @@ public String processPDFQuantities() {
                 throw new RuntimeException("Failed : HTTP error code : "
                         + conn.getResponseCode() + " " + IOUtils.toString(conn.getErrorStream(), "UTF-8"));
             }
-            logger.info("Response "+conn.getResponseCode());
+            LOGGER.info("Response "+conn.getResponseCode());
             InputStream in = conn.getInputStream();
             BufferedReader br = new BufferedReader(new InputStreamReader((in)));
@@ -102,9 +102,9 @@ public String processPDFQuantities() {
             conn.disconnect();
         } catch (MalformedURLException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         } catch (IOException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         }
         //System.out.println(output.toString().trim());
         return output.toString().trim();
diff --git a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/QuantitiesService.java b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/QuantitiesService.java
index 11a0419..85810e9 100644
--- a/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/QuantitiesService.java
+++ b/anhalytics-annotate/src/main/java/fr/inria/anhalytics/annotate/services/QuantitiesService.java
@@ -17,7 +17,7 @@
  */
 public class QuantitiesService extends AnnotateService {
 
-    private static final Logger logger = LoggerFactory.getLogger(QuantitiesService.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(QuantitiesService.class);
 
     static private String REQUEST_TEXT_QUANTITIES = "processQuantityText";
@@ -52,9 +52,9 @@ public String processTextQuantities() {
             OutputStream os = conn.getOutputStream();
             os.write(postDataBytes);
             os.flush();
-            logger.info("Response "+conn.getResponseCode());
+            LOGGER.info("Response "+conn.getResponseCode());
             if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
-                logger.error("Failed annotating text segment: HTTP error code : "
+                LOGGER.error("Failed annotating text segment: HTTP error code : "
                         + conn.getResponseCode());
                 return null;
             }
@@ -68,9 +68,9 @@ public String processTextQuantities() {
             conn.disconnect();
         } catch (MalformedURLException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         } catch (IOException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         }
         //System.out.println(output.toString().trim());
         return output.toString().trim();
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/AddressDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/AddressDAO.java
index 89b4764..db1f894 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/AddressDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/AddressDAO.java
@@ -2,6 +2,7 @@
 import fr.inria.anhalytics.commons.entities.Address;
 import fr.inria.anhalytics.commons.entities.Country;
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.*;
@@ -13,7 +14,7 @@
  */
 public class AddressDAO extends DAO {
 
-    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(AddressDAO.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(AddressDAO.class);
 
     private static final String SQL_INSERT = "INSERT INTO ADDRESS (addrLine, postBox, postCode, settlement, region, countryID) VALUES (?, ?, ?, ?, ?, ?)";
@@ -112,7 +113,7 @@ public Address find(Long id) throws SQLException {
                 );
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -135,7 +136,7 @@ public Country findCountry(String iso) {
                 );
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -157,7 +158,7 @@ public Address getOrganisationAddress(Long orgId) throws SQLException {
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(ps);
         }
@@ -199,7 +200,7 @@ private Address findAddressIfAlreadyStored(Address obj) throws SQLException {
                 );
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(ps);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/Conference_EventDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/Conference_EventDAO.java
index 87205ca..d4c3c96 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/Conference_EventDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/Conference_EventDAO.java
@@ -5,6 +5,9 @@
 import fr.inria.anhalytics.commons.entities.Conference_Event;
 import fr.inria.anhalytics.commons.entities.Country;
 import fr.inria.anhalytics.commons.entities.Monograph;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -17,6 +20,8 @@
  */
 public class Conference_EventDAO extends DAO {
 
+    protected static final Logger LOGGER = LoggerFactory.getLogger(Conference_EventDAO.class);
+
     private static final String SQL_INSERT = "INSERT INTO CONFERENCE_EVENT (conferenceID, addressID, start_date, end_date, monographID) VALUES (?, ?, ?, ?, ?)";
@@ -124,7 +129,7 @@ public Conference_Event find(Long id) throws SQLException {
                 );
             }
         } catch (SQLException sqle) {
-            sqle.printStackTrace();
+            LOGGER.error("Error: ", sqle);
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -143,7 +148,7 @@ public Conference findConferenceByTitle(String title) throws SQLException {
                 conference = new Conference(result.getLong("conferenceID"), result.getString("title"));
             }
         } catch (SQLException sqle) {
-            sqle.printStackTrace();
+            LOGGER.error("Error: ", sqle);
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -169,7 +174,7 @@ public Conference_Event findByMonograph(Long id) throws SQLException {
                 );
             }
         } catch (SQLException sqle) {
-            sqle.printStackTrace();
+            LOGGER.error("Error: ", sqle);
         } finally {
             closeQuietly(ps);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DatabaseConnection.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DatabaseConnection.java
index 16906fc..6c7d420 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DatabaseConnection.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DatabaseConnection.java
@@ -14,7 +14,7 @@
  */
 public class DatabaseConnection {
 
-    private static final Logger logger = LoggerFactory.getLogger(DatabaseConnection.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseConnection.class);
 
     private static Connection connectDB;
     private static Connection connectBiblioDB;
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DocumentDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DocumentDAO.java
index 5d7a659..186006e 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DocumentDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/DocumentDAO.java
@@ -17,7 +17,7 @@
  */
 public class DocumentDAO extends DAO {
 
-    private static final Logger logger = LoggerFactory.getLogger(DocumentDAO.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDAO.class);
 
     private static final String SQL_INSERT = "INSERT INTO DOCUMENT (docID, version) VALUES (?, ?)";
@@ -115,7 +115,7 @@ public Document find(String doc_id) {
             }
         } catch (SQLException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         } finally {
             closeQuietly(preparedStatement);
             closeQuietly(preparedStatement1);
@@ -166,7 +166,7 @@ public List findAllDocuments() {
                 documents.add(document);
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -186,7 +186,7 @@ public List getDocumentsByOrgId(Long organisationId) {
                 documents.add(find(rs.getString("docID")));
             }
         } catch (SQLException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -206,7 +206,7 @@ public List getDocumentsByAuthorId(Long personId) {
                 docs.add(find(rs.getString("docID")));
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(ps);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/In_SerialDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/In_SerialDAO.java
index 316d178..4f1c908 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/In_SerialDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/In_SerialDAO.java
@@ -5,6 +5,9 @@
 import fr.inria.anhalytics.commons.entities.Journal;
 import fr.inria.anhalytics.commons.entities.Monograph;
 import fr.inria.anhalytics.commons.entities.Serial_Identifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -16,6 +19,8 @@
  * @author azhar
 */
 public class In_SerialDAO extends DAO {
+
+    protected static final Logger LOGGER = LoggerFactory.getLogger(In_SerialDAO.class);
 
     private static final String SQL_INSERT = "INSERT INTO IN_SERIAL (monographID, collectionID, journalID, volume, number) VALUES (?, ?, ?, ?, ?)";
@@ -172,7 +177,7 @@ public Collection findCollectionByTitle(String title) throws SQLException {
                 collection = new Collection(result.getLong("collectionID"), result.getString("title"));
             }
         } catch (SQLException sqle) {
-            sqle.printStackTrace();
+            LOGGER.error("Error: ", sqle);
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -191,7 +196,7 @@ public Journal findJournalByTitle(String title) throws SQLException {
                 journal = new Journal(result.getLong("journalID"), result.getString("title"));
             }
         } catch (SQLException sqle) {
-            sqle.printStackTrace();
+            LOGGER.error("Error: ", sqle);
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -216,7 +221,7 @@ public In_Serial find(Long id) throws SQLException {
                         result.getString("IN_SERIAL.number"));
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(preparedStatement);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/MonographDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/MonographDAO.java
index 61d4022..41b5301 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/MonographDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/MonographDAO.java
@@ -1,16 +1,19 @@
 package fr.inria.anhalytics.commons.dao;
 
 import fr.inria.anhalytics.commons.entities.Monograph;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.sql.*;
 import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * @author azhar
 */
 public class MonographDAO extends DAO {
 
+    protected static final Logger LOGGER = LoggerFactory.getLogger(MonographDAO.class);
+
     private static final String SQL_INSERT = "INSERT INTO MONOGRAPH (type, title, shortname) VALUES (?, ?, ?)";
@@ -44,7 +47,7 @@ public boolean create(Monograph obj) {
             result = true;
         } catch (SQLException ex) {
-            Logger.getLogger(DocumentDAO.class.getName()).log(Level.SEVERE, null, ex);
+            LOGGER.error("SQL Exception: ", ex);
         } finally {
             closeQuietly(statement);
         }
@@ -78,7 +81,7 @@ public Monograph find(Long monographID) {
                         result.getString("shortname"));
             }
         } catch (SQLException e) {
-            e.printStackTrace();
+            LOGGER.error("Error: ", e);
         } finally {
             closeQuietly(preparedStatement);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PersonDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PersonDAO.java
index d38c8b9..d702667 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PersonDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PersonDAO.java
@@ -2,6 +2,7 @@
 import fr.inria.anhalytics.commons.entities.*;
 import fr.inria.anhalytics.commons.utilities.Utilities;
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.*;
@@ -11,14 +12,13 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * @author azhar
 */
 public class PersonDAO extends DAO {
 
-    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PersonDAO.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(PersonDAO.class);
 
     private static final String SQL_INSERT_PERSON = "INSERT INTO PERSON (title, photo, url, email, phone) VALUES (?, ?, ?, ?, ?)";
@@ -220,7 +220,7 @@ public boolean update(Person obj) throws SQLException {
                 setPersonNameUpdateParameters(obj, preparedStatement2, pn);
                 int code2 = preparedStatement2.executeUpdate();
             } catch (com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException e) {
-                //e.printStackTrace();
+                //LOGGER.error("Error: ", e);
             }
         }
@@ -307,7 +307,7 @@ public Person find(Long id) throws SQLException {
                     if (!person_names.contains(pn))
                         person_names.add(pn);
                 } catch (ParseException ex) {
-                    Logger.getLogger(PersonDAO.class.getName()).log(Level.SEVERE, null, ex);
+                    LOGGER.error("Error: ", ex);
                 }
             }
@@ -322,7 +322,7 @@ public Person find(Long id) throws SQLException {
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
             closeQuietly(preparedStatement1);
@@ -342,7 +342,7 @@ public Map findAllAuthors() throws SQLException {
                 persons.put(rs.getLong("personID"), find(rs.getLong("personID")));
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -363,7 +363,7 @@ public Map getAuthorsByDocId(String docId) throws SQLException {
                 persons.put(rs.getLong("personID"), person);
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(ps);
         }
@@ -385,7 +385,7 @@ public Map getEditorsByPubId(Long pubId) throws SQLException {
                 }
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(ps);
         }
@@ -424,7 +424,7 @@ public Map getPersonsByOrgID(Long orgID) throws SQLException {
                 persons.put(rs.getLong("personID"), person);
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(ps);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PublicationDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PublicationDAO.java
index d93a247..c9bb021 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PublicationDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PublicationDAO.java
@@ -2,6 +2,7 @@
 import fr.inria.anhalytics.commons.entities.*;
 import fr.inria.anhalytics.commons.utilities.Utilities;
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.*;
@@ -9,14 +10,13 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * @author azhar
*/
 public class PublicationDAO extends DAO {
 
-    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PublicationDAO.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(PublicationDAO.class);
 
     private static final String SQL_INSERT = "INSERT INTO PUBLICATION (docID, monographID, publisherID, type, doc_title, date_printed, date_electronic, first_page, last_page, language) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
@@ -125,11 +125,11 @@ public Publication find(Long publication_id) throws SQLException {
                         rs.getString("language")
                 );
             } catch (ParseException ex) {
-                Logger.getLogger(PublicationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -160,12 +160,12 @@ public List findByDocId(String doc_id) throws SQLException {
                         )
                 );
             } catch (ParseException ex) {
-                Logger.getLogger(PublicationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
         }
         rs.close();
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PublisherDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PublisherDAO.java
index 2cf16f6..e894503 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PublisherDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/PublisherDAO.java
@@ -6,6 +6,8 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
@@ -14,7 +16,7 @@
*/
 public class PublisherDAO extends DAO {
 
-    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(PublisherDAO.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(PublisherDAO.class);
 
     private static final String SQL_INSERT = "INSERT INTO PUBLISHER (name) VALUES (?)";
@@ -83,7 +85,7 @@ public Publisher find(Long publisher_id) throws SQLException {
                 );
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -104,7 +106,7 @@ private Publisher findPublisherIfAlreadyStored(Publisher obj) throws SQLExceptio
                 );
             }
         } catch (SQLException ex) {
-            logger.error(ex.getMessage());
+            LOGGER.error(ex.getMessage());
         } finally {
             closeQuietly(preparedStatement);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/AffiliationDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/AffiliationDAO.java
index d799388..947382d 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/AffiliationDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/AffiliationDAO.java
@@ -14,7 +14,8 @@
 import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
@@ -23,7 +24,7 @@
*/
 public class AffiliationDAO extends DAO {
 
-    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(AffiliationDAO.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(AffiliationDAO.class);
 
     private static final String SQL_INSERT = "INSERT INTO AFFILIATION (organisationID, personID, from_date, until_date) VALUES (?, ?, ?, ?)";
@@ -59,11 +60,11 @@ private Affiliation getAffiliationIfAlreadyStored(Person pers, Organisation org)
                 affiliation.addOrganisation(org);
             } catch (ParseException ex) {
-                Logger.getLogger(LocationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(statement);
         }
@@ -125,7 +126,7 @@ public boolean create(Affiliation obj) throws SQLException {
             result = true;
         } catch (MySQLIntegrityConstraintViolationException e) {
-            //e.printStackTrace();
//LOGGER.error("Error: ", e); }finally{ closeQuietly(statement); } @@ -165,11 +166,11 @@ public Affiliation find(Long id) throws SQLException { Utilities.parseStringDate(result.getString("until_date")) ); } catch (ParseException ex) { - Logger.getLogger(LocationDAO.class.getName()).log(Level.SEVERE, null, ex); + LOGGER.error("Error SQL: ", ex); } } } catch (SQLException ex) { - logger.error(ex.getMessage()); + LOGGER.error(ex.getMessage()); } finally { closeQuietly(preparedStatement); } diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/DAOFactory.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/DAOFactory.java index 2186f4c..9b6cc00 100644 --- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/DAOFactory.java +++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/DAOFactory.java @@ -16,7 +16,7 @@ import fr.inria.anhalytics.commons.dao.PersonDAO; import java.sql.Connection; import java.sql.SQLException; -import java.util.logging.Level; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -26,7 +26,7 @@ */ public class DAOFactory extends AbstractDAOFactory { - private static final Logger logger = LoggerFactory.getLogger(DAOFactory.class); + private static final Logger LOGGER = LoggerFactory.getLogger(DAOFactory.class); protected static Connection conn = null; @@ -88,18 +88,18 @@ public DAO getPublisherDAO() { public void openTransaction() { try { conn.setAutoCommit(false); - logger.info("Storing entry"); + LOGGER.info("Storing entry"); } catch (SQLException e) { - logger.error("There was an error disabling autocommit"); + LOGGER.error("There was an error disabling autocommit"); } } public void endTransaction() { try { conn.commit(); - logger.info("Stored"); + LOGGER.info("Stored"); } catch (SQLException ex) { - logger.error("Error happened while commiting the changes."); + LOGGER.error("Error happened while commiting the changes."); } } @@ -107,9 +107,9 @@ public void rollback() { try { // We rollback the transaction, to the last SavePoint! 
             conn.rollback();
-            logger.info("The transaction was rollback.");
+            LOGGER.info("The transaction was rolled back.");
         } catch (SQLException e1) {
-            logger.error("There was an error making a rollback");
+            LOGGER.error("There was an error making a rollback");
         }
     }
 
@@ -118,7 +118,7 @@ public static void closeConnection() {
         try {
             conn.close();
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         }
     }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/LocationDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/LocationDAO.java
index 5ec5cea..2224dbd 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/LocationDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/LocationDAO.java
@@ -6,17 +6,20 @@
 import fr.inria.anhalytics.commons.entities.Location;
 import fr.inria.anhalytics.commons.entities.Organisation;
 import fr.inria.anhalytics.commons.utilities.Utilities;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.sql.*;
 import java.text.ParseException;
 import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * @author azhar
*/
 public class LocationDAO extends DAO {
 
+    protected static final Logger LOGGER = LoggerFactory.getLogger(LocationDAO.class);
+
     private static final String SQL_INSERT = "INSERT INTO LOCATION (organisationID, addressID, from_date, until_date) VALUES (?, ?, ?, ?)";
@@ -53,11 +56,11 @@ private Location getLocationIfAlreadyStored(Location obj) throws SQLException {
                         Utilities.parseStringDate(rs.getString("until_date"))
                 );
             } catch (ParseException ex) {
-                Logger.getLogger(LocationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(statement);
         }
@@ -111,7 +114,7 @@ public boolean create(Location obj) throws SQLException {
             int code = statement.executeUpdate();
             result = true;
         } catch (MySQLIntegrityConstraintViolationException e) {
-            //e.printStackTrace();
+            //LOGGER.error("Error: ", e);
         } finally {
             closeQuietly(statement);
         }
@@ -149,11 +152,11 @@ public Location find(Long id) throws SQLException {
                         Utilities.parseStringDate(result.getString("until_date"))
                 );
             } catch (ParseException ex) {
-                Logger.getLogger(LocationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -172,7 +175,7 @@ public Long findAddressIdByOrganisationId(Long orgId) throws SQLException {
                 addressId = result.getLong("addressID");
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(preparedStatement);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/OrganisationDAO.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/OrganisationDAO.java
index 8e097e7..fbcb8af 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/OrganisationDAO.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/anhalytics/OrganisationDAO.java
@@ -2,21 +2,25 @@
 import com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException;
 import fr.inria.anhalytics.commons.dao.DAO;
+import fr.inria.anhalytics.commons.dao.In_SerialDAO;
 import fr.inria.anhalytics.commons.entities.*;
 import fr.inria.anhalytics.commons.utilities.Utilities;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.sql.*;
 import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * @author azhar
*/
 public class OrganisationDAO extends DAO {
 
+    protected static final Logger LOGGER = LoggerFactory.getLogger(OrganisationDAO.class);
+
     private static final String SQL_INSERT = "INSERT INTO ORGANISATION (type, url, status) VALUES (?, ?, ?)";
@@ -113,7 +117,7 @@ public boolean create(Organisation obj) throws SQLException {
                 }
                 int code1 = statement1.executeUpdate();
             } catch (MySQLIntegrityConstraintViolationException e) {
-                //e.printStackTrace();
+                //LOGGER.error("Error: ", e);
             }
         }
@@ -138,7 +142,7 @@ public boolean create(Organisation obj) throws SQLException {
                 int code1 = statement2.executeUpdate();
                 }
             } catch (MySQLIntegrityConstraintViolationException e) {
-                //e.printStackTrace();
+                //LOGGER.error("Error: ", e);
             }
         }
@@ -210,7 +214,7 @@ public boolean update(Organisation obj) throws SQLException {
                 preparedStatement1.setLong(2, existingName.getOrganisation_nameid());
                 int code2 = preparedStatement1.executeUpdate();
             } catch (MySQLIntegrityConstraintViolationException e) {
-                //e.printStackTrace();
+                //LOGGER.error("Error: ", e);
             }
         }
         //update pub date
@@ -225,7 +229,7 @@ public boolean update(Organisation obj) throws SQLException {
                 }
                 int code2 = statement1.executeUpdate();
             } catch (MySQLIntegrityConstraintViolationException e) {
-                //e.printStackTrace();
+                //LOGGER.error("Error: ", e);
             }
         }
     }
@@ -279,7 +283,7 @@ public boolean update(Organisation obj) throws SQLException {
                 }
             }
         } catch (MySQLIntegrityConstraintViolationException e) {
-            //e.printStackTrace();
+            //LOGGER.error("Error: ", e);
         }
     }
     //update end_date if no relation is found yet
@@ -323,11 +327,11 @@ private PART_OF getPartOfIfAlreadyStored(Organisation org, Organisation mother_o
                         Utilities.parseStringDate(rs.getString("until_date"))
                 );
             } catch (ParseException ex) {
-                Logger.getLogger(LocationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(statement);
         }
@@ -378,10 +382,10 @@ public Organisation find(Long id) {
                 organisation = setOrganisationParents(organisation);
                 }
             } catch (ParseException ex) {
-                Logger.getLogger(LocationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(preparedStatement1);
             closeQuietly(preparedStatement2);
@@ -415,7 +419,7 @@ private Organisation setOrganisationParents(Organisation org) {
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(statement);
         }
@@ -469,11 +473,11 @@ public List findMothers(Long id) {
                 rels.add(new PART_OF(org, Utilities.parseStringDate(rs.getString("from_date")), Utilities.parseStringDate(rs.getString("until_date"))));
             }
             } catch (ParseException ex) {
-                Logger.getLogger(OrganisationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(preparedStatement);
             closeQuietly(preparedStatement1);
@@ -497,11 +501,11 @@ public List getOrganisationNames(Long id) {
                 names.add(new Organisation_Name(rs.getLong("organisation_nameID"), rs.getString("name"), Utilities.parseStringDate(rs.getString("lastupdate_date"))));
             }
             } catch (ParseException ex) {
-                Logger.getLogger(OrganisationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                LOGGER.error("Error: ", ex);
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -518,7 +522,7 @@ public List findAllOrganisations() {
                 organisations.add(find(rs.getLong("org.organisationID")));
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(preparedStatement);
         }
@@ -548,7 +552,7 @@ public List getOrganisationsByDocId(Long docId) {
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(ps);
             closeQuietly(ps1);
@@ -572,7 +576,7 @@ protected Long getOrgEntityIfAlreadyStored(Organisation obj) {
                 }
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(ps);
         }
@@ -602,12 +606,12 @@ public List getAffiliationByPersonID(Person person) {
                     org = find(rs.getLong("organisationID"));
                     affiliation.addOrganisation(org);
                 } catch (ParseException ex) {
-                    Logger.getLogger(LocationDAO.class.getName()).log(Level.SEVERE, null, ex);
+                    LOGGER.error("Error: ", ex);
                 }
                 affiliations.add(affiliation);
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(ps);
         }
@@ -627,7 +631,7 @@ public Document_Organisation getOrganisationByDocumentID(String docID) {
                 dorg.addOrg(find(result.getLong("organisationID")));
             }
         } catch (SQLException ex) {
-            ex.printStackTrace();
+            LOGGER.error("Error: ", ex);
         } finally {
             closeQuietly(preparedStatement);
         }
diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/biblio/BiblioDAOFactory.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/biblio/BiblioDAOFactory.java
index 89bcd30..794c575 100644
--- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/biblio/BiblioDAOFactory.java
+++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/dao/biblio/BiblioDAOFactory.java
@@ -28,7 +28,7 @@
*/
 public class BiblioDAOFactory extends AbstractBiblioDAOFactory {
 
-    private static final Logger logger = LoggerFactory.getLogger(BiblioDAOFactory.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(BiblioDAOFactory.class);
 
     protected static Connection conn = null;
 
     public static void initConnection() {
@@ -77,18 +77,18 @@ public DAO getPublisherDAO() {
     public void openTransaction() {
         try {
             conn.setAutoCommit(false);
-            logger.info("Storing entry");
+            LOGGER.info("Storing entry");
         } catch (SQLException e) {
-            logger.error("There was an error disabling autocommit");
+            LOGGER.error("There was an error disabling autocommit");
         }
     }
 
     public void endTransaction() {
         try {
             conn.commit();
-            logger.info("Entry stored");
+            LOGGER.info("Entry stored");
         } catch (SQLException ex) {
-            logger.error("Error happened while commiting the changes.");
+            LOGGER.error("Error happened while committing the changes.");
         }
     }
 
@@ -96,9 +96,9 @@ public void rollback() {
         try {
             // We rollback the transaction, to the last SavePoint!
conn.rollback(); - logger.info("The transaction was rollback."); + LOGGER.info("The transaction was rolled back."); } catch (SQLException e1) { - logger.error("There was an error making a rollback"); + LOGGER.error("There was an error rolling back the transaction"); } } @@ -107,7 +107,7 @@ public static void closeConnection() { try { conn.close(); } catch (SQLException ex) { - ex.printStackTrace(); + LOGGER.error("Error: ", ex); } } } diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/managers/MongoFileManager.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/managers/MongoFileManager.java index de91af4..58365d9 100644 --- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/managers/MongoFileManager.java +++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/managers/MongoFileManager.java @@ -23,7 +23,6 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; -import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; @@ -38,7 +37,7 @@ */ public class MongoFileManager extends MongoManager implements MongoCollectionsInterface { - private static final Logger logger = LoggerFactory.getLogger(MongoFileManager.class); + private static final Logger LOGGER = LoggerFactory.getLogger(MongoFileManager.class); public static final DBObject ONLY_WITH_FULLTEXT_PROCESS = new BasicDBObjectBuilder() .add("isWithFulltext", true) @@ -194,7 +193,7 @@ public boolean initObjects(String source, DBObject query) throws MongoException collection.createIndex(ensureIndexQuery, "index_" + StringUtils.join(ensureIndexQuery.keySet(), "_")); cursor = collection.find(query); cursor.addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); - logger.info(cursor.size() + " objects found."); + LOGGER.info(cursor.size() + " objects found."); if (cursor.hasNext()) { return true; } else { @@ -218,7 +217,7 @@ public boolean initObjects(String source) throws MongoException { } cursor = collection.find(bdbo); cursor.addOption(com.mongodb.Bytes.QUERYOPTION_NOTIMEOUT); - logger.info(cursor.size() + " objects found."); + LOGGER.info(cursor.size() + " objects found."); if (cursor.hasNext()) { return true; } else { @@ -318,11 +317,8 @@ public void insertBiblioObject(BiblioObject biblioObject) { } } } - } catch (MongoException me) { - me.printStackTrace(); - //rollback - } catch (IOException ioe) { - ioe.printStackTrace(); + } catch (MongoException | IOException me) { + LOGGER.error("Error: ", me); //rollback } collection.insert(document); @@ -334,7 +330,7 @@ public InputStream getFulltext(BiblioObject biblioObject) { try { fulltext = getFulltextByAnhalyticsId(biblioObject.getAnhalyticsId()); } catch (DataException de) { - logger.error("No PDF document was found for : " + biblioObject.getAnhalyticsId(), de); + LOGGER.error("No PDF document was found for : " + biblioObject.getAnhalyticsId(), de); } return fulltext; } @@ -359,7 +355,7 @@ public String getMetadata(BiblioObject biblioObject) { try { metadata = this.getTei(biblioObject.getAnhalyticsId(), MongoCollectionsInterface.METADATAS_TEIS); } catch (DataException de) { - logger.error("No metadata was found for " + biblioObject, de); + LOGGER.error("No metadata was found for " + biblioObject, de); } return metadata; } @@ -369,7 +365,7 @@ public String getTEICorpus(BiblioObject biblioObject) { try { teiCorpus = this.getTei(biblioObject.getAnhalyticsId(), MongoCollectionsInterface.TEI_CORPUS); } catch (DataException de) { - logger.error("No TEI corpus was found for " + 
biblioObject, de); + LOGGER.error("No TEI corpus was found for " + biblioObject, de); } return teiCorpus; } @@ -379,7 +375,7 @@ public String getGrobidTei(BiblioObject biblioObject) { try { grobidTei = this.getTei(biblioObject.getAnhalyticsId(), MongoCollectionsInterface.GROBID_TEIS); } catch (DataException de) { - logger.error("No corresponding fulltext TEI was found for " + biblioObject); + LOGGER.error("No corresponding fulltext TEI was found for " + biblioObject, de); } return grobidTei; } @@ -449,7 +445,7 @@ public boolean insertAnnotation(String json, String annotationsCollection) { done = false; } } catch (Exception e) { - logger.error(e.getMessage(), e.getCause()); + LOGGER.error(e.getMessage(), e.getCause()); } return done; } @@ -471,7 +467,7 @@ public boolean insertCrossRefMetadata(String currentAnhalyticsId, String current done = false; } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } return done; } @@ -489,7 +485,7 @@ public boolean insertPDFDocument(InputStream file, String anhalyticsId) throws M gfsFile.save(); file.close(); // } catch (ParseException e) { -// logger.error(e.getMessage(), e.getCause()); +// LOGGER.error(e.getMessage(), e.getCause()); // } return true; } @@ -506,7 +502,7 @@ public boolean insertTEIcorpus(String TEIcorpus, String anhalyticsId) throws Mon gfsFile.put("anhalyticsId", anhalyticsId); gfsFile.save(); // } catch (ParseException e) { -// logger.error(e.getMessage(), e.getCause()); +// LOGGER.error(e.getMessage(), e.getCause()); // } return true; } @@ -523,7 +519,7 @@ public boolean insertGrobidTei(String grobidTei, String anhalyticsId) throws Mon gfsFile.put("anhalyticsId", anhalyticsId); gfsFile.save(); // } catch (ParseException e) { -// logger.error(e.getMessage(), e.getCause()); +// LOGGER.error(e.getMessage(), e.getCause()); // } return true; } @@ -540,7 +536,7 @@ public boolean insertMetadataDocument(String metadata, String anhalyticsId) thro gfsFile.put("anhalyticsId", anhalyticsId); gfsFile.save(); // } catch (ParseException e) { -// logger.error(e.getMessage(), e.getCause()); +// LOGGER.error(e.getMessage(), e.getCause()); // } return true; @@ -600,7 +596,7 @@ private Annotation getAnnotations(String anhalyticsId, String annotationsCollect // result = annotations.toString(); // } } else { - logger.warn("The annotations for doc " + anhalyticsId + " was not found in " + annotationsCollection); + LOGGER.warn("The annotations for doc " + anhalyticsId + " were not found in " + annotationsCollection); } } finally { if (curs != null) { @@ -634,7 +630,7 @@ public void save(String repositoryDocId, String process, String desc) { // document.put("date", date); collection.insert(document); } catch (Exception e) { - logger.error(e.getMessage(), e.getCause()); + LOGGER.error(e.getMessage(), e.getCause()); } } @@ -662,7 +658,7 @@ public void log(String repositoryDocId, String anhalyticsId, String url, String document.put("embragoDate", date); collection.insert(document); } catch (Exception e) { - logger.error(e.getMessage(), e.getCause()); + LOGGER.error(e.getMessage(), e.getCause()); } } @@ -674,7 +670,7 @@ public void removeEmbargoRecord(String repositoryDocId, String url) { whereQuery.put("url", url); collection.remove(whereQuery); } catch (Exception e) { - logger.error(e.getMessage(), e.getCause()); + LOGGER.error(e.getMessage(), e.getCause()); } } diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/AnnotateProperties.java 
b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/AnnotateProperties.java index 07b3ab4..50b9a19 100644 --- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/AnnotateProperties.java +++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/AnnotateProperties.java @@ -1,6 +1,9 @@ package fr.inria.anhalytics.commons.properties; import fr.inria.anhalytics.commons.exceptions.PropertyException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.io.FileInputStream; import java.util.Properties; @@ -11,6 +14,7 @@ * @author Achraf, Patrice */ public class AnnotateProperties { + protected static final Logger LOGGER = LoggerFactory.getLogger(AnnotateProperties.class); private static String processName; @@ -62,7 +66,7 @@ public static void init(String properties_filename) { try { setNerdNbThreads(Integer.parseInt(threads)); } catch (java.lang.NumberFormatException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } setKeytermHost(props.getProperty("annotate.keyterm_host")); @@ -71,7 +75,7 @@ public static void init(String properties_filename) { try { setKeytermNbThreads(Integer.parseInt(threads)); } catch (java.lang.NumberFormatException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } setQuantitiesHost(props.getProperty("annotate.quantities_host")); @@ -80,7 +84,7 @@ public static void init(String properties_filename) { try { setQuantitiesNbThreads(Integer.parseInt(threads)); } catch (java.lang.NumberFormatException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } setTmp(props.getProperty("annotate.quantities.tmp")); } diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/CommonsProperties.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/CommonsProperties.java index 771f714..4b19da5 100644 --- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/CommonsProperties.java +++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/CommonsProperties.java @@ -1,5 +1,8 @@ package fr.inria.anhalytics.commons.properties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -11,6 +14,8 @@ */ public class CommonsProperties { + protected static final Logger LOGGER = LoggerFactory.getLogger(CommonsProperties.class); + private static String mongodbServer; private static int mongodbPort; private static String mongodbDb; @@ -44,7 +49,7 @@ public static void init(String properties_filename, boolean isTest) { setMysql_pass(prop.getProperty("kb.mysql_pass")); } } catch (IOException e) { - e.printStackTrace(); + LOGGER.error("Error initializing commons properties", e); } } diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/HarvestProperties.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/HarvestProperties.java index f038350..a3ce17f 100644 --- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/HarvestProperties.java +++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/properties/HarvestProperties.java @@ -2,6 +2,9 @@ import fr.inria.anhalytics.commons.exceptions.PropertyException; import fr.inria.anhalytics.commons.utilities.Utilities; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.io.FileInputStream; import java.util.Properties; @@ -12,6 +15,7 @@ * 
@author achraf */ public class HarvestProperties { + protected static final Logger LOGGER = LoggerFactory.getLogger(HarvestProperties.class); private static String processName; @@ -60,7 +64,7 @@ public static void init(String properties_filename) { try { setNbThreads(Integer.parseInt(threads)); } catch (java.lang.NumberFormatException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } diff --git a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/utilities/Utilities.java b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/utilities/Utilities.java index a91873b..11ba42f 100644 --- a/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/utilities/Utilities.java +++ b/anhalytics-commons/src/main/java/fr/inria/anhalytics/commons/utilities/Utilities.java @@ -44,7 +44,7 @@ */ public class Utilities { - private static final Logger logger = LoggerFactory.getLogger(Utilities.class); + private static final Logger LOGGER = LoggerFactory.getLogger(Utilities.class); private static Set dates = new LinkedHashSet(); private static String tmpPath; @@ -293,9 +293,9 @@ public static void clearTmpDirectory() { try { File tmpDirectory = new File(tmpPath); FileUtils.cleanDirectory(tmpDirectory); - logger.info("Temporary directory is cleaned."); + LOGGER.info("Temporary directory is cleaned."); } catch (IOException exp) { - logger.error("Error while deleting the temporary directory: " + exp); + LOGGER.error("Error while deleting the temporary directory: " + exp); } } @@ -414,7 +414,7 @@ public static void unzipIt(String file, String outPath) { zis.closeEntry(); } catch (IOException e) { - logger.error("Error when unzipping the file " + file + ".", e); + LOGGER.error("Error when unzipping the file " + file + ".", e); } finally { IOUtils.closeQuietly(zis); } @@ -450,7 +450,7 @@ public static InputStream request(String request) throws MalformedURLException { throw new ServiceException("Can't get data stream.", e); } endTime = System.currentTimeMillis(); - logger.info("spend:" + (endTime - startTime) + " ms"); + LOGGER.info("spent: " + (endTime - startTime) + " ms"); return in; } @@ -472,24 +472,24 @@ private static final HttpURLConnection getConnection(URL url) { connection.setRequestProperty("accept-charset", "UTF-8"); switch (connection.getResponseCode()) { case HttpURLConnection.HTTP_OK: - logger.info(url + " **OK**"); + LOGGER.info(url + " **OK**"); return connection; // **EXIT POINT** fine, go on case HttpURLConnection.HTTP_GATEWAY_TIMEOUT: - logger.info(url + ":" + connection.getResponseCode()); + LOGGER.info(url + ":" + connection.getResponseCode()); break;// retry case HttpURLConnection.HTTP_UNAVAILABLE: - logger.info(url + "**unavailable**" + " :" + connection.getResponseCode()); + LOGGER.info(url + "**unavailable**" + " :" + connection.getResponseCode()); break;// retry, server is unstable default: //stop - logger.info(url + ":" + connection.getResponseCode()); + LOGGER.info(url + ":" + connection.getResponseCode()); throw new ServiceException(url + ":" + connection.getResponseCode()); } // we did not succeed with connection (or we would have returned the connection). 
connection.disconnect(); // retry retry++; - logger.warn("Failed retry " + retry + "/" + retries); + LOGGER.warn("Failed retry " + retry + "/" + retries); delay = true; if (retry == retries) { throw new ServiceException(url + ":" + connection.getResponseCode()); @@ -531,7 +531,7 @@ public static String formatXMLString(String xmlString) { formatedXml = stringWriter.toString(); } catch (ParserConfigurationException | SAXException | IOException | XPathExpressionException | DOMException | IllegalArgumentException | TransformerException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } return formatedXml; } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/converters/IstexTEIConverter.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/converters/IstexTEIConverter.java index 2f1fcbe..f0929bc 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/converters/IstexTEIConverter.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/converters/IstexTEIConverter.java @@ -13,6 +13,9 @@ import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; + +import fr.inria.anhalytics.harvest.harvesters.Harvester; +import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; @@ -25,6 +28,8 @@ */ public class IstexTEIConverter implements MetadataConverter { + protected static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(IstexTEIConverter.class); + @Override public Element convertMetadataToTEIHeader(Document metadata, Document newTEIcorpus, BiblioObject biblio) { XPath xPath = XPathFactory.newInstance().newXPath(); @@ -40,7 +45,7 @@ public Element convertMetadataToTEIHeader(Document metadata, Document newTEIcorp addDomains(metadata, biblio); teiHeader = (Element) newTEIcorpus.importNode(teiHeader, true); } catch (XPathExpressionException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } return teiHeader; } @@ -60,7 +65,7 @@ private void updatePublicationDate(Document metadata) { } } catch (XPathExpressionException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } @@ -82,7 +87,7 @@ private void updatePublicationType(Document metadata) { textClass.appendChild(classCode); } } catch (XPathExpressionException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } @@ -105,7 +110,7 @@ private void updateKeywords(Document metadata) { existingKeywords.getParentNode().removeChild(existingKeywords); } } catch (XPathExpressionException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } @@ -237,7 +242,7 @@ private void addDomains(Document newTEICorpus, BiblioObject biblioObj) { } } } catch (XPathExpressionException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/crossref/CrossRef.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/crossref/CrossRef.java index fc900c9..87591b1 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/crossref/CrossRef.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/crossref/CrossRef.java @@ -31,7 +31,7 @@ */ public class CrossRef { - private static final Logger logger = LoggerFactory.getLogger(CrossRef.class); + private static final Logger LOGGER = LoggerFactory.getLogger(CrossRef.class); /** * Lookup by DOI - 3 parameters are id, password, doi. 
@@ -112,7 +112,7 @@ public void findDois() { // Document metadata = null; // // try { -// logger.info("###################" + currentRepositoryDocId + "#######################"); +// LOGGER.info("###################" + currentRepositoryDocId + "#######################"); // doi = mm.getDocumentDoi(currentAnhalyticsId); // if (doi == null || doi.isEmpty()) { // metadata = docBuilder.parse(metadataStream); @@ -136,8 +136,8 @@ public void findDois() { // // if (StringUtils.isNotBlank(title) // && StringUtils.isNotBlank(aut)) { -// logger.info("test retrieval per title, author"); -// logger.info(String.format("persName=%s, title=%s", aut, title)); +// LOGGER.info("test retrieval per title, author"); +// LOGGER.info(String.format("persName=%s, title=%s", aut, title)); // subpath = String.format(TITLE_BASE_QUERY, // HarvestProperties.getCrossrefId(), // HarvestProperties.getCrossrefPwd(), @@ -180,8 +180,8 @@ public void findDois() { // //&& StringUtils.isNotBlank(aut) // && StringUtils.isNotBlank(firstPage)) { // // retrieval per journal title, author, volume, first page -// logger.info("test retrieval per journal title, author, volume, first page"); -// logger.info(String.format("aut=%s, firstPage=%s, journalTitle=%s, volume=%s", +// LOGGER.info("test retrieval per journal title, author, volume, first page"); +// LOGGER.info(String.format("aut=%s, firstPage=%s, journalTitle=%s, volume=%s", // aut, firstPage, journalTitle, volume)); // if (StringUtils.isNotBlank(aut)) { // subpath = String.format(JOURNAL_AUTHOR_BASE_QUERY, @@ -218,16 +218,16 @@ public void findDois() { // } // } // } catch (Exception e) { -// e.printStackTrace(); +// LOGGER.error("Error: ", e); // } // } // if (!HarvestProperties.isProcessByDate()) { // break; // } // } -// logger.info("Done"); +// LOGGER.info("Done"); // } -// logger.info("nb of found doi : " + i); +// LOGGER.info("nb of found doi : " + i); } @@ -238,8 +238,8 @@ private String getMetadataByDoi(String doi) throws Exception { String metadata = ""; ObjectMapper mapper = new ObjectMapper(); URL url = new URL("http://api.crossref.org/works/" + doi); - logger.info("Fetching for metadata: " + url.toString()); - logger.info("Sending: " + url.toString()); + LOGGER.info("Fetching for metadata: " + url.toString()); + LOGGER.info("Sending: " + url.toString()); HttpURLConnection urlConn = null; urlConn = openConnection(url); if (urlConn != null) { @@ -266,10 +266,10 @@ private String getMetadataByDoi(String doi) throws Exception { metadata = " \"metadata\": " + metadata; } in.close(); - logger.info("DOI : " + doi); + LOGGER.info("DOI : " + doi); urlConn.disconnect(); } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } @@ -302,7 +302,7 @@ private String queryCrossref(String query) throws Exception { URL url = new URL("http://" + HarvestProperties.getCrossrefHost() + "/" + query); - logger.info("Sending: " + url.toString()); + LOGGER.info("Sending: " + url.toString()); HttpURLConnection urlConn = openConnection(url); if (urlConn != null) { try { @@ -322,10 +322,10 @@ private String queryCrossref(String query) throws Exception { doi = nl.item(0).getTextContent(); } in.close(); - logger.info("DOI : " + doi); + LOGGER.info("DOI : " + doi); urlConn.disconnect(); } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/crossref/OpenUrl.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/crossref/OpenUrl.java index d82fe3c..62804ae 100644 
--- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/crossref/OpenUrl.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/crossref/OpenUrl.java @@ -10,7 +10,7 @@ */ public class OpenUrl { - private static final Logger logger = LoggerFactory.getLogger(OpenUrl.class); + private static final Logger LOGGER = LoggerFactory.getLogger(OpenUrl.class); private MongoFileManager mm; private static final String IstexURL @@ -26,9 +26,9 @@ public void getIstexUrl() { // try { // Identifier id = mm.nextIdentifier(); // String currentAnhalyticsId = id.getAnhalyticsId(); -// logger.info("################################" + currentAnhalyticsId + "####################"); +// LOGGER.info("################################" + currentAnhalyticsId + "####################"); // URL url = new URL(String.format(IstexURL, id.getDoi())); -// logger.info("Sending: " + url.toString()); +// LOGGER.info("Sending: " + url.toString()); // HttpURLConnection urlConn = null; // try { // urlConn = (HttpURLConnection) url.openConnection(); @@ -49,21 +49,21 @@ public void getIstexUrl() { // urlConn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); // if (urlConn.getResponseCode() == 200) { // String foundurl = urlConn.getURL().toString(); -// logger.info("URL found : " + foundurl); +// LOGGER.info("URL found : " + foundurl); // //mm.updateIdentifier(doi); // //mm.insertBinary(); // } // urlConn.disconnect(); // } catch (Exception e) { -// e.printStackTrace(); +// LOGGER.error("Error: ", e); // } // // } // } catch (Exception e) { -// e.printStackTrace(); +// LOGGER.error("Error: ", e); // } // } -// logger.info("Done."); +// LOGGER.info("Done."); // } } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/AssetLegendExtracter.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/AssetLegendExtracter.java index 1abdefa..0d2b64f 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/AssetLegendExtracter.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/AssetLegendExtracter.java @@ -1,5 +1,7 @@ package fr.inria.anhalytics.harvest.grobid; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; @@ -13,6 +15,7 @@ */ public class AssetLegendExtracter { + private static final Logger LOGGER = LoggerFactory.getLogger(AssetLegendExtracter.class); static String extractLegendFromTei(String filename, InputStream teiStream) { String legend = null; DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); @@ -43,7 +46,7 @@ static String extractLegendFromTei(String filename, InputStream teiStream) { } } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } return legend; } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidAnnexWorker.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidAnnexWorker.java index 580e72a..824f5f1 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidAnnexWorker.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidAnnexWorker.java @@ -1,6 +1,9 @@ package fr.inria.anhalytics.harvest.grobid; import fr.inria.anhalytics.commons.data.BiblioObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import javax.xml.parsers.ParserConfigurationException; @@ -10,6 +13,7 @@ * @author 
Achraf */ public class GrobidAnnexWorker extends GrobidWorker { + private static final Logger LOGGER = LoggerFactory.getLogger(GrobidAnnexWorker.class); public GrobidAnnexWorker(BiblioObject biblioObject, String date, int start, int end) throws ParserConfigurationException { super(biblioObject, start, end); @@ -33,7 +37,7 @@ protected void saveExtractions(String zipDirectoryPath) { } } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidFulltextWorker.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidFulltextWorker.java index 7bae595..6079abe 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidFulltextWorker.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidFulltextWorker.java @@ -12,7 +12,7 @@ */ public class GrobidFulltextWorker extends GrobidWorker { - private static final Logger logger = LoggerFactory.getLogger(GrobidFulltextWorker.class); + private static final Logger LOGGER = LoggerFactory.getLogger(GrobidFulltextWorker.class); public GrobidFulltextWorker(BiblioObject biblioObject, String date, int start, int end) throws ParserConfigurationException { super(biblioObject, start, end); @@ -42,7 +42,7 @@ public GrobidFulltextWorker(BiblioObject biblioObject, String date, int start, i // } // } // } catch (Exception ex) { -// logger.error(ex.getMessage(), ex.getCause()); +// LOGGER.error(ex.getMessage(), ex.getCause()); // } // } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidProcess.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidProcess.java index a9f5ce6..3f701aa 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidProcess.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidProcess.java @@ -23,7 +23,7 @@ */ public class GrobidProcess { - private static final Logger logger = LoggerFactory.getLogger(GrobidProcess.class); + private static final Logger LOGGER = LoggerFactory.getLogger(GrobidProcess.class); private MongoFileManager mm; @@ -58,11 +58,11 @@ public void processFulltexts() { // if (toBeGrobidified.contains(biblioObject.getPublicationType().split("_")[0])) { if (!biblioObject.getIsWithFulltext()) { - logger.info("\t\t No fulltext available for : "+biblioObject.getRepositoryDocId()+", Skipping..."); + LOGGER.info("\t\t No fulltext available for : "+biblioObject.getRepositoryDocId()+", Skipping..."); continue; } if (!HarvestProperties.isReset() && mm.isProcessed(Processings.GROBID)) { - logger.info("\t\t Already grobidified, Skipping..."); + LOGGER.info("\t\t Already grobidified, Skipping..."); continue; } @@ -81,10 +81,10 @@ public void processFulltexts() { Runnable worker = new GrobidSimpleFulltextWorker(biblioObject, start, end); executor.execute(worker); } catch (ParserConfigurationException exp) { - logger.error("An error occured while processing the file " + bf.getRepositoryDocId() + LOGGER.error("An error occurred while processing the file " + bf.getRepositoryDocId() + ". 
Continuing the process for the other files.", exp); } catch (DataException dataexp) { - logger.error("Can't get the fulltext PDF for " + bf.getRepositoryDocId() + LOGGER.error("Can't get the fulltext PDF for " + bf.getRepositoryDocId() + ".", dataexp); } // } @@ -92,7 +92,7 @@ public void processFulltexts() { } executor.shutdown(); - logger.info("Jobs done, shutting down thread pool. The executor will wait 2 minutes before forcing the shutdown."); + LOGGER.info("Jobs done, shutting down thread pool. The executor will wait 2 minutes before forcing the shutdown."); try { if (!executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES)) { executor.shutdownNow(); @@ -101,9 +101,9 @@ executor.shutdownNow(); } } - logger.info("Finished all threads"); + LOGGER.info("Finished all threads"); } catch (UnreachableGrobidServiceException ugse) { - logger.error(ugse.getMessage()); + LOGGER.error(ugse.getMessage()); } } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidSimpleFulltextWorker.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidSimpleFulltextWorker.java index 7d3ea72..ab1df12 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidSimpleFulltextWorker.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidSimpleFulltextWorker.java @@ -17,7 +17,7 @@ */ class GrobidSimpleFulltextWorker extends GrobidWorker { - private static final Logger logger = LoggerFactory.getLogger(GrobidSimpleFulltextWorker.class); + private static final Logger LOGGER = LoggerFactory.getLogger(GrobidSimpleFulltextWorker.class); public GrobidSimpleFulltextWorker(BiblioObject biblioObject, int start, int end) throws ParserConfigurationException { super(biblioObject, start, end); @@ -41,7 +41,7 @@ protected void processCommand() { // for now we extract just files with less size (avoid thesis..which may take long time) if (mb <= 15) { - logger.info("\t\t "+Thread.currentThread().getName() +": TEI extraction for : " + biblioObject.getRepositoryDocId() + " sizing :" + mb + "mb"); + LOGGER.info("\t\t "+Thread.currentThread().getName() +": TEI extraction for : " + biblioObject.getRepositoryDocId() + " sizing :" + mb + "mb"); String tei = grobidService.runFullTextGrobid(filepath).trim(); tei = generateIdsGrobidTeiDoc(tei); @@ -49,32 +49,32 @@ if (inserted) { this.saveExtractedDOI(tei); mm.updateBiblioObjectStatus(biblioObject, Processings.GROBID, false); - logger.info("\t\t "+Thread.currentThread().getName() +": " + biblioObject.getRepositoryDocId() + " processed."); + LOGGER.info("\t\t "+Thread.currentThread().getName() +": " + biblioObject.getRepositoryDocId() + " processed."); } else - logger.error("\t\t "+Thread.currentThread().getName() +": Problem occured while saving " + biblioObject.getRepositoryDocId() + " grobid TEI."); + LOGGER.error("\t\t "+Thread.currentThread().getName() +": Problem occurred while saving " + biblioObject.getRepositoryDocId() + " grobid TEI."); } else { - logger.info("\t\t "+Thread.currentThread().getName() +": can't extract TEI for : " + biblioObject.getRepositoryDocId() + "size too large : " + mb + "mb"); + LOGGER.info("\t\t "+Thread.currentThread().getName() +": can't extract TEI for " + biblioObject.getRepositoryDocId() + ", size too large: " + mb + "mb"); } } catch (GrobidTimeoutException e) { mm.save(biblioObject.getRepositoryDocId(), "processGrobid", "timed out"); - 
logger.warn(Thread.currentThread().getName() +"Processing of " + biblioObject.getRepositoryDocId() + " timed out"); + LOGGER.warn(Thread.currentThread().getName() + ": Processing of " + biblioObject.getRepositoryDocId() + " timed out"); } catch (RuntimeException e) { - e.printStackTrace(); - logger.error("\t\t "+Thread.currentThread().getName() +": error occurred while processing " + biblioObject.getRepositoryDocId()); + LOGGER.error("Error: ", e); + LOGGER.error("\t\t "+Thread.currentThread().getName() +": error occurred while processing " + biblioObject.getRepositoryDocId()); mm.save(biblioObject.getRepositoryDocId(), "processGrobid", e.getMessage()); - logger.error(e.getMessage(), e.getCause()); + LOGGER.error(e.getMessage(), e.getCause()); } catch (IOException ex) { - logger.error(ex.getMessage(), ex.getCause()); + LOGGER.error(ex.getMessage(), ex.getCause()); } boolean success = false; if(file.exists()) { success = file.delete(); if (!success) { - logger.error( + LOGGER.error( Thread.currentThread().getName() +": Deletion of temporary image files failed for file '" + file.getAbsolutePath() + "'"); }else - logger.info("\t\t "+Thread.currentThread().getName() +" :"+ file.getAbsolutePath() +" deleted."); + LOGGER.info("\t\t "+Thread.currentThread().getName() +" :"+ file.getAbsolutePath() +" deleted."); } } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidWorker.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidWorker.java index 6b87ac2..4d32fc5 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidWorker.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/grobid/GrobidWorker.java @@ -99,7 +99,7 @@ protected String generateIdsGrobidTeiDoc(String tei) { teiDoc = docBuilder.parse(new ByteArrayInputStream(tei.getBytes())); } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } Utilities.generateIDs(teiDoc); tei = Utilities.toString(teiDoc); @@ -163,8 +163,6 @@ protected void finalize() throws Throwable { protected void saveExtractions(String resultPath) { } - ; - @Override public String toString() { return this.biblioObject.getRepositoryDocId(); diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/HALOAIPMHHarvester.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/HALOAIPMHHarvester.java index 41a851f..9c8c65d 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/HALOAIPMHHarvester.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/HALOAIPMHHarvester.java @@ -62,7 +62,7 @@ protected void fetchDocumentsByDate(String date) throws MalformedURLException { if (tokenn != null) { request = String.format("%s/?verb=ListRecords&resumptionToken=%s", this.oai_url, tokenn); } - logger.info("\t Sending: " + request); + LOGGER.info("\t Sending: " + request); InputStream in = Utilities.request(request); grabbedObjects = this.oaiDom.getGrabbedObjects(in); @@ -86,14 +86,14 @@ public void fetchAllDocuments() { String currentDate = ""; try { for (String date : Utilities.getDates()) { - logger.info("Extracting publications TEIs for : " + date); + LOGGER.info("Extracting publications TEIs for : " + date); currentDate = date; fetchDocumentsByDate(date); } } catch (MalformedURLException mue) { - logger.error(mue.getMessage(), mue); + LOGGER.error(mue.getMessage(), mue); } catch (ServiceException se) { - logger.error(se.getMessage(), se); + 
LOGGER.error(se.getMessage(), se); mm.save(currentDate, "blockedHarvestProcess", se.getMessage()); } } @@ -116,7 +116,7 @@ public void fetchListDocuments() { continue; } if (!HarvestProperties.isReset() && mm.isSavedObject(docID, null)) { - logger.info("\t\t Already grabbed, Skipping..."); + LOGGER.info("\t\t Already grabbed, Skipping..."); continue; } String teiUrl = halUrl + docID + "/tei"; @@ -130,7 +130,7 @@ public void fetchListDocuments() { DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); teiDoc = docBuilder.parse(new ByteArrayInputStream(teiString.getBytes())); } catch (SAXException | ParserConfigurationException | IOException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } Element rootElement = teiDoc.getDocumentElement(); BiblioObject biblioObject = this.oaiDom.processRecord((Element) rootElement); @@ -140,19 +140,19 @@ public void fetchListDocuments() { } saveObjects(); } catch (MalformedURLException mue) { - logger.error(mue.getMessage(), mue); + LOGGER.error(mue.getMessage(), mue); } catch (ServiceException se) { - logger.error(se.getMessage(), se); + LOGGER.error(se.getMessage(), se); mm.save("", "blockedHarvestProcess", se.getMessage()); } catch (IOException e) { - logger.error(e.getMessage(), e); + LOGGER.error(e.getMessage(), e); } finally { try { if (br != null) { br.close(); } } catch (IOException e) { - logger.error(e.getMessage(), e); + LOGGER.error(e.getMessage(), e); } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/Harvester.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/Harvester.java index ca13071..a773a34 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/Harvester.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/Harvester.java @@ -31,7 +31,7 @@ */ public abstract class Harvester { - protected static final Logger logger = LoggerFactory.getLogger(Harvester.class); + protected static final Logger LOGGER = LoggerFactory.getLogger(Harvester.class); protected List grabbedObjects = new ArrayList(); @@ -99,22 +99,22 @@ protected void saveObjects() { pdfUrl = object.getPdf().getUrl(); } String repositoryDocId = object.getRepositoryDocId(); - logger.info("\t\t Processing metadata from " + object.getSource() + " document :" + repositoryDocId); + LOGGER.info("\t\t Processing metadata from " + object.getSource() + " document :" + repositoryDocId); if (metadataString.length() > 0) { if (!HarvestProperties.isReset() && mm.isSavedObject(repositoryDocId, object.getRepositoryDocVersion())) { - logger.info("\t\t Already grabbed, Skipping..."); + LOGGER.info("\t\t Already grabbed, Skipping..."); continue; } try { if (object.getPdf() != null) { - logger.info("\t\t\t\t downloading PDF file."); + LOGGER.info("\t\t\t\t downloading PDF file."); requestFile(object.getPdf()); if(object.getPdf().getStream() == null) object.setIsWithFulltext(Boolean.FALSE); } else { object.setIsWithFulltext(Boolean.FALSE); mm.save(object.getRepositoryDocId(), "harvestProcess", "no URL for binary"); - logger.info("\t\t\t\t PDF not found !"); + LOGGER.info("\t\t\t\t PDF not found !"); } if (object.getAnnexes() != null) { for (BinaryFile file : object.getAnnexes()) { @@ -122,17 +122,17 @@ protected void saveObjects() { } } } catch (BinaryNotAvailableException bna) { - logger.error(bna.getMessage()); + LOGGER.error(bna.getMessage()); mm.save(object.getRepositoryDocId(), "harvestProcess", "file not downloaded"); } catch (ParseException | IOException e) 
{ - logger.error("\t\t Error occured while processing TEI for " + object.getRepositoryDocId(), e); + LOGGER.error("\t\t Error occurred while processing TEI for " + object.getRepositoryDocId(), e); mm.save(object.getRepositoryDocId(), "harvestProcess", "harvest error"); } - logger.info("\t\t\t\t Storing object " + repositoryDocId); + LOGGER.info("\t\t\t\t Storing object " + repositoryDocId); mm.insertBiblioObject(object); } else { - logger.info("\t\t\t No TEI metadata !!!"); + LOGGER.info("\t\t\t No TEI metadata !!!"); } } } @@ -145,11 +145,11 @@ protected void requestFile(BinaryFile bf) throws ParseException, IOException { Date embDate = Utilities.parseStringDate(bf.getEmbargoDate()); Date today = new Date(); if (embDate == null || embDate.before(today) || embDate.equals(today)) { - logger.info("\t\t\t Downloading: " + bf.getUrl()); + LOGGER.info("\t\t\t Downloading: " + bf.getUrl()); try { bf.setStream(Utilities.request(bf.getUrl())); } catch (MalformedURLException | ServiceException se) { - logger.error(se.getMessage()); + LOGGER.error(se.getMessage()); throw new BinaryNotAvailableException(); } @@ -160,12 +160,12 @@ protected void requestFile(BinaryFile bf) throws ParseException, IOException { int n = bf.getUrl().lastIndexOf("/"); String filename = bf.getUrl().substring(n + 1); bf.setFileName(filename); - logger.info("\t\t\t\t Getting annex file " + filename + " for pub ID :" + bf.getRepositoryDocId()); + LOGGER.info("\t\t\t\t Getting annex file " + filename + " for pub ID :" + bf.getRepositoryDocId()); } } } else { mm.log(bf.getRepositoryDocId(), bf.getAnhalyticsId(), bf.getUrl(), bf.getDocumentType(), bf.isIsAnnexFile(), "embargo", bf.getEmbargoDate()); - logger.info("\t\t\t file under embargo !"); + LOGGER.info("\t\t\t file under embargo !"); } } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/IstexHarvester.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/IstexHarvester.java index 96431f1..ba48594 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/IstexHarvester.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/harvesters/IstexHarvester.java @@ -36,7 +36,6 @@ */ public class IstexHarvester extends Harvester { - protected static final Logger logger = LoggerFactory.getLogger(IstexHarvester.class); private static IstexHarvester harvester = null; private static final String istexApiUrl = "https://api.istex.fr/document"; @@ -74,12 +73,12 @@ public void fetchListDocuments() { continue; } if (!HarvestProperties.isReset() && mm.isSavedObject(docID, null)) { - logger.info("\t\t Already grabbed, Skipping..."); + LOGGER.info("\t\t Already grabbed, Skipping..."); continue; } String request = "https://api.istex.fr/document/?q=id:" + docID + "&size=1&output=*"; URL url = new URL(request); - logger.info(request); + LOGGER.info(request); urlConn = (HttpURLConnection) url.openConnection(); if (urlConn != null) { urlConn.setDoInput(true); @@ -100,21 +99,21 @@ public void fetchListDocuments() { } saveObjects(); } catch (MalformedURLException mue) { - logger.error(mue.getMessage(), mue); + LOGGER.error(mue.getMessage(), mue); } catch (ServiceException se) { - logger.error(se.getMessage(), se); + LOGGER.error(se.getMessage(), se); mm.save("", "blockedHarvestProcess", se.getMessage()); } catch (IOException e) { - logger.error(e.getMessage(), e); + LOGGER.error(e.getMessage(), e); } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } finally { try { if (br != 
null) { br.close(); } } catch (IOException e) { - logger.error(e.getMessage(), e); + LOGGER.error(e.getMessage(), e); } } } @@ -152,7 +151,7 @@ private BiblioObject getBiblioObjectFromHit(JSONObject hit) throws MalformedURLE //get Metadata string from id String request = istexApiUrl + "/" + bo.getRepositoryDocId() + "/fulltext/tei"; - logger.info("Downloading fulltext TEI document :" + bo.getRepositoryDocId()); + LOGGER.info("Downloading fulltext TEI document :" + bo.getRepositoryDocId()); bo.setMetadataURL(request); bo.setMetadata(IOUtils.toString(new URL(request), "UTF-8")); @@ -186,7 +185,7 @@ public void sample() { try { for (String category : categories) { String[] cat = category.split("\\."); - logger.info("\t\t\t\t Sampling " + sampleSize + " documents from category : " + cat[1]); + LOGGER.info("\t\t\t\t Sampling " + sampleSize + " documents from category : " + cat[1]); //count = getCategoryDocCount(cat); // Use scroll // count = 10000; @@ -199,7 +198,7 @@ request = istexApiUrl + "/" + params; URL url = new URL(request); - logger.info(request); + LOGGER.info(request); urlConn = (HttpURLConnection) url.openConnection(); if (urlConn != null) { urlConn.setDoInput(true); @@ -217,14 +216,14 @@ grabbedObjects.add(bo); } } -// logger.info("Saving IDs for download :" + ids); +// LOGGER.info("Saving IDs for download :" + ids); //Download / store docs } saveObjects(); } catch (UnsupportedEncodingException ex) { - java.util.logging.Logger.getLogger(IstexHarvester.class.getName()).log(Level.SEVERE, null, ex); + LOGGER.error("Error: ", ex); } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } @@ -243,7 +242,7 @@ private int getCategoryDocCount(String[] cat) { Long total = null; try { URL url = new URL(request); - logger.info(request); + LOGGER.info(request); urlConn = (HttpURLConnection) url.openConnection(); if (urlConn != null) { urlConn.setDoInput(true); @@ -255,10 +254,10 @@ JSONObject jsonObject = (JSONObject) jsonParser.parse(json); total = (Long) jsonObject.get("total"); - logger.info(String.format("Found %d entries for %s class.", total, cat[0] + "." 
+ cat[1])); } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } return total.intValue(); } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/main/Main.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/main/Main.java index 0163063..bc16f65 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/main/Main.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/main/Main.java @@ -14,6 +14,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.net.InetSocketAddress; +import java.net.Proxy; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -31,7 +33,7 @@ */ public class Main { - private static final Logger logger = LoggerFactory.getLogger(Main.class); + private static final Logger LOGGER = LoggerFactory.getLogger(Main.class); private static List availableCommands = new ArrayList() { { @@ -55,7 +57,7 @@ public static void main(String[] args) throws Exception { try { HarvestProperties.init("anhalytics.properties"); } catch (PropertyException exp) { - logger.error("Something wrong when opening anhalytics.properties", exp); + LOGGER.error("Something wrong when opening anhalytics.properties", exp); return; } @@ -89,10 +91,10 @@ private void processCommand() throws Exception { OpenUrl ou = new OpenUrl(); Harvester harvester = null; - + Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("wwwout.nims.go.jp", 8888)); if (process.equals("harvestAll")) { if (HarvestProperties.getSource().toLowerCase().equals(Harvester.Source.HAL.getName())) { - harvester = new HALOAIPMHHarvester(); + harvester = new HALOAIPMHHarvester(proxy); } else { harvester = new IstexHarvester(); } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/parsers/HALOAIPMHDomParser.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/parsers/HALOAIPMHDomParser.java index d429aaa..0b8f5bc 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/parsers/HALOAIPMHDomParser.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/parsers/HALOAIPMHDomParser.java @@ -35,7 +35,7 @@ */ public class HALOAIPMHDomParser { - protected static final Logger logger = LoggerFactory.getLogger(HALOAIPMHDomParser.class); + protected static final Logger LOGGER = LoggerFactory.getLogger(HALOAIPMHDomParser.class); private final static String source = Harvester.Source.HAL.getName(); @@ -55,7 +55,7 @@ public List getGrabbedObjects(InputStream in) { //XPath xPath = XPathFactory.newInstance().newXPath();//play with it //NodeList nodes = (NodeList)xPath.evaluate("/OAI-PMH/ListRecords/record/metadata", rootElement, XPathConstants.NODESET); setToken(rootElement); - logger.info("\t \t " + listRecords.getLength() + " records found. processing..."); + LOGGER.info("\t \t " + listRecords.getLength() + " records found. 
processing..."); if (listRecords.getLength() >= 1) { for (int i = listRecords.getLength() - 1; i >= 0; i--) { @@ -114,9 +114,9 @@ public BiblioObject processRecord(Element record) { // List annexes = getAnnexes(record, repositoryDocId, currentVersion, "", type); // biblioObj.setAnnexes(annexes); - logger.info("\t \t \t tei of " + repositoryDocId + " extracted."); + LOGGER.info("\t \t \t tei of " + repositoryDocId + " extracted."); // } else { -// logger.info("\t \t \t skipping " + completeRepositoryDocId + " , it's not a current version."); +// LOGGER.info("\t \t \t skipping " + completeRepositoryDocId + " , it's not a current version."); // } } return biblioObj; @@ -129,7 +129,7 @@ public String getCurrentVersion(Node record) { Element node = (Element) xPath.compile(OAIPMHPathsItf.EditionElement).evaluate(record, XPathConstants.NODE); currentVersion = node.getAttribute("n"); } catch (DataException | XPathExpressionException ex) { - logger.info("\t \t \t \t No current edition found ."); + LOGGER.info("\t \t \t \t No current edition found ."); } return currentVersion; } @@ -146,7 +146,7 @@ public String getRef(Node ref) { throw new DataException(); } } catch (DataException | XPathExpressionException | DOMException ex) { - logger.info("\t \t \t \t hal ref not found"); + LOGGER.info("\t \t \t \t hal ref not found"); } return reference; } @@ -162,7 +162,7 @@ public String getDoi(Node ref) { throw new DataException(); } } catch (DataException | XPathExpressionException | DOMException ex) { - logger.info("\t \t \t \t doi not found"); + LOGGER.info("\t \t \t \t doi not found"); } return doi; } @@ -184,7 +184,7 @@ public List getDomains(Node ref) { throw new DataException(); } } catch (DataException | XPathExpressionException | DOMException ex) { - logger.info("\t \t \t \t no publication type found"); + LOGGER.info("\t \t \t \t no publication type found"); } return domains; } @@ -200,7 +200,7 @@ public String getPublicationType(Node ref) { throw new DataException(); } } catch (DataException | XPathExpressionException | DOMException ex) { - logger.info("\t \t \t \t no publication type found"); + LOGGER.info("\t \t \t \t no publication type found"); } return type; } @@ -227,7 +227,7 @@ private Document parse(InputStream in) { DocumentBuilder db = dbf.newDocumentBuilder(); return db.parse(in); } catch (DataException | IOException | ParserConfigurationException | SAXException e) {// - logger.error("Could not parse document because " + LOGGER.error("Could not parse document because " + e.getMessage()); } return null; @@ -252,7 +252,7 @@ public BinaryFile getFile(Node record, String repositoryDocId, String repository throw new DataException(); } } catch (DataException | XPathExpressionException ex) { - logger.info("\t \t \t \t No file attached ."); + LOGGER.info("\t \t \t \t No file attached ."); } return file; } @@ -264,7 +264,7 @@ public List getAnnexes(Node record, String repositoryDocId, String r try { nodes = (NodeList) xPath.compile(OAIPMHPathsItf.AnnexesUrlsElement).evaluate(record, XPathConstants.NODESET); } catch (XPathExpressionException ex) { - logger.info("\t \t \t \t No annex files attached ."); + LOGGER.info("\t \t \t \t No annex files attached ."); } String url = null; String embargoDate = null; @@ -293,7 +293,7 @@ public String getTei(Node tei) { DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); teiDoc = docBuilder.parse(new ByteArrayInputStream(teiString.getBytes())); } catch (SAXException | ParserConfigurationException | IOException e) { - e.printStackTrace(); + 
LOGGER.error("Error: ", e); } // teiDoc = createTEICorpus(teiDoc); @@ -302,7 +302,7 @@ public String getTei(Node tei) { try { teiString = Utilities.toString(teiDoc); } catch (DataException de) { - de.printStackTrace(); + LOGGER.error("Error: ", de); } return teiString; } @@ -331,7 +331,7 @@ public String getRepositoryDocId(Node record) { throw new DataException(); } } catch (DataException | XPathExpressionException | DOMException ex) { - logger.info("\t \t \t \t no publication repository id found"); + LOGGER.info("\t \t \t \t no publication repository id found"); } return repositoryDocId; } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/teibuild/TeiBuilderWorker.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/teibuild/TeiBuilderWorker.java index 8cb5383..b42e7a1 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/teibuild/TeiBuilderWorker.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/teibuild/TeiBuilderWorker.java @@ -46,7 +46,7 @@ */ public class TeiBuilderWorker implements Runnable { - private static final Logger logger = LoggerFactory.getLogger(TeiBuilderWorker.class); + private static final Logger LOGGER = LoggerFactory.getLogger(TeiBuilderWorker.class); private Steps step; @@ -63,11 +63,11 @@ public TeiBuilderWorker(BiblioObject biblioObject, Steps step) { public void run() { this.mm = MongoFileManager.getInstance(false); long startTime = System.nanoTime(); - logger.info(Thread.currentThread().getName() + " Start. Processing = " + biblioObject.getRepositoryDocId()); + LOGGER.info(Thread.currentThread().getName() + " Start. Processing = " + biblioObject.getRepositoryDocId()); if(step == Steps.TRANSFORM) { try { - logger.info("\t\t transforming :" + biblioObject.getRepositoryDocId()); + LOGGER.info("\t\t transforming :" + biblioObject.getRepositoryDocId()); Document generatedTEIcorpus = createTEICorpus(); if (generatedTEIcorpus != null) { boolean inserted = mm.insertTEIcorpus(Utilities.toString(generatedTEIcorpus), biblioObject.getAnhalyticsId()); @@ -78,18 +78,18 @@ public void run() { biblioObject.setIsMined(Boolean.FALSE); biblioObject.setIsIndexed(Boolean.FALSE); mm.updateBiblioObjectStatus(biblioObject, null, true); - logger.info("\t\t " + biblioObject.getRepositoryDocId()+ " transformed."); + LOGGER.info("\t\t " + biblioObject.getRepositoryDocId()+ " transformed."); } else { - logger.error("\t\t Problem occured while saving " + biblioObject.getRepositoryDocId() + " corpus TEI."); + LOGGER.error("\t\t Problem occured while saving " + biblioObject.getRepositoryDocId() + " corpus TEI."); } } } catch (Exception e) { - logger.error(e.getMessage(), e); + LOGGER.error(e.getMessage(), e); } } else if(step == Steps.APPEND_FULLTEXT) { Document generatedTEIcorpus = null; - logger.info("\t Building TEI for: " + biblioObject.getRepositoryDocId()); + LOGGER.info("\t Building TEI for: " + biblioObject.getRepositoryDocId()); //tei.setTei(Utilities.trimEncodedCharaters(tei.getTei())); try { String grobidTei = mm.getGrobidTei(biblioObject); @@ -101,19 +101,19 @@ public void run() { if (inserted) { biblioObject.setIsFulltextAppended(Boolean.TRUE); mm.updateBiblioObjectStatus(biblioObject, null, true); - logger.info("\t\t " + biblioObject.getRepositoryDocId()+ " built."); + LOGGER.info("\t\t " + biblioObject.getRepositoryDocId()+ " built."); } else { - logger.error("\t\t Problem occured while saving " + biblioObject.getRepositoryDocId() + " corpus TEI."); + LOGGER.error("\t\t Problem occured while saving " 
+ biblioObject.getRepositoryDocId() + " corpus TEI."); } } catch (DataException de) { - logger.error("No corresponding fulltext TEI was found for " + biblioObject.getRepositoryDocId() + "."); + LOGGER.error("No corresponding fulltext TEI was found for " + biblioObject.getRepositoryDocId() + "."); } catch (Exception e) { - logger.error(e.getMessage(), e); + LOGGER.error(e.getMessage(), e); } } long endTime = System.nanoTime(); - logger.info(Thread.currentThread().getName() + " End. :" + (endTime - startTime) / 1000000 + " ms"); + LOGGER.info(Thread.currentThread().getName() + " End. :" + (endTime - startTime) / 1000000 + " ms"); } /** @@ -202,7 +202,7 @@ public Document addGrobidTEIToTEICorpus(String teiCorpus, String grobidTei) thro fillPubDate(doc, teiCorpusDoc); fillAuthors(doc, teiCorpusDoc); } catch (XPathExpressionException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } resultTei = addNewElementToTEI(teiCorpusDoc, grobidTeiElement); @@ -285,7 +285,7 @@ public void fillPubDate(Document doc, Document teiCorpusDoc) throws XPathExpress } } catch (XPathExpressionException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } diff --git a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/teibuild/TeiCorpusBuilderProcess.java b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/teibuild/TeiCorpusBuilderProcess.java index b575e19..75225b8 100644 --- a/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/teibuild/TeiCorpusBuilderProcess.java +++ b/anhalytics-harvest/src/main/java/fr/inria/anhalytics/harvest/teibuild/TeiCorpusBuilderProcess.java @@ -19,7 +19,7 @@ */ public class TeiCorpusBuilderProcess { - private static final Logger logger = LoggerFactory.getLogger(TeiCorpusBuilderProcess.class); + private static final Logger LOGGER = LoggerFactory.getLogger(TeiCorpusBuilderProcess.class); private MongoFileManager mm; @@ -46,7 +46,7 @@ public void transformMetadata() { while (mm.hasMore()) { BiblioObject biblioObject = mm.nextBiblioObject(); if (!HarvestProperties.isReset() && biblioObject.getIsProcessedByPub2TEI()) { - logger.info("\t\t Already transformed, Skipping... " + biblioObject.getRepositoryDocId()); + LOGGER.info("\t\t Already transformed, Skipping... " + biblioObject.getRepositoryDocId()); continue; } biblioObject.setMetadata(mm.getMetadata(biblioObject)); @@ -56,7 +56,7 @@ public void transformMetadata() { } executor.shutdown(); - logger.info("Jobs done, shutting down thread pool. The executor will wait 1 minutes before forcing off. "); + LOGGER.info("Jobs done, shutting down thread pool. The executor will wait 1 minute before forcing the shutdown."); try { if (!executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES)) { executor.shutdownNow(); @@ -64,7 +64,7 @@ public void transformMetadata() { } catch (InterruptedException e) { executor.shutdownNow(); } - logger.info("Finished all threads"); + LOGGER.info("Finished all threads"); } /** @@ -90,12 +90,12 @@ public void addGrobidFulltextToTEICorpus() { while (mm.hasMore()) { BiblioObject biblioObject = mm.nextBiblioObject(); if (!HarvestProperties.isReset() && biblioObject.getIsFulltextAppended()) { - logger.info("\t\t Fulltext already appended, Skipping... " + biblioObject.getRepositoryDocId()); + LOGGER.info("\t\t Fulltext already appended, Skipping... " + biblioObject.getRepositoryDocId()); continue; } //grobid tei and tei corpus with metadata initialisation should be available. 
if (!biblioObject.getIsProcessedByPub2TEI()) { - logger.info("\t\t Metadata TEI not found, first consider creating TEI from metadata, Skipping... " + biblioObject.getRepositoryDocId()); + LOGGER.info("\t\t Metadata TEI not found, first consider creating TEI from metadata, Skipping... " + biblioObject.getRepositoryDocId()); continue; } Runnable worker = new TeiBuilderWorker(biblioObject, Steps.APPEND_FULLTEXT); @@ -105,7 +105,7 @@ public void addGrobidFulltextToTEICorpus() { executor.shutdown(); - logger.info("Jobs done, shutting down thread pool. "); + LOGGER.info("Jobs done, shutting down thread pool. "); try { if (!executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES)) { executor.shutdownNow(); @@ -113,9 +113,9 @@ public void addGrobidFulltextToTEICorpus() { } catch (InterruptedException e) { executor.shutdownNow(); } - logger.info("Finished all threads"); + LOGGER.info("Finished all threads"); } catch (UnreachableGrobidServiceException ugse) { - logger.error(ugse.getMessage()); + LOGGER.error(ugse.getMessage()); } } } diff --git a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/DocumentIndexer.java b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/DocumentIndexer.java index 887a0e6..4f69481 100644 --- a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/DocumentIndexer.java +++ b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/DocumentIndexer.java @@ -26,7 +26,7 @@ */ public class DocumentIndexer extends Indexer { - private static final Logger logger = LoggerFactory.getLogger(DocumentIndexer.class); + private static final Logger LOGGER = LoggerFactory.getLogger(DocumentIndexer.class); IndexingPreprocess indexingPreprocess; // only annotations under these paths will be indexed for the moment @@ -63,11 +63,11 @@ public int indexTeiCorpus() { while (mm.hasMore()) { BiblioObject biblioObject = mm.nextBiblioObject(); // if (!biblioObject.getIsWithFulltext()) { -// logger.info("\t\t No fulltext available, Skipping..."); +// LOGGER.info("\t\t No fulltext available, Skipping..."); // continue; // } if (!IndexProperties.isReset() && biblioObject.getIsIndexed()) { - logger.info("\t\t Already indexed, Skipping..."); + LOGGER.info("\t\t Already indexed, Skipping..."); continue; } String jsonStr = null; @@ -90,17 +90,17 @@ public int indexTeiCorpus() { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } bulkRequest = client.prepareBulk(); //bulkRequest.setRefresh(true); bulkRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE); - logger.info("\n Bulk number : " + nb / bulkSize); + LOGGER.info("\n Bulk number : " + nb / bulkSize); } biblioObject.setIsIndexed(Boolean.TRUE); mm.updateBiblioObjectStatus(biblioObject, null, false); } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } } @@ -108,10 +108,10 @@ public int indexTeiCorpus() { // last bulk if (nb % bulkSize != 0) { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); - logger.info("\n One Last Bulk."); + LOGGER.info("\n One Last Bulk."); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } } } else { @@ -138,7 +138,7 @@ public int indexKeytermAnnotations() { continue; 
annotation.setJson(annotation.getJson().replaceAll("_id", "id")); if (!IndexProperties.isReset() && annotation.isIsIndexed()) { - logger.info("\t\t Already indexed annotations for " + biblioObject.getAnhalyticsId() + ", Skipping..."); + LOGGER.info("\t\t Already indexed annotations for " + biblioObject.getAnhalyticsId() + ", Skipping..."); continue; } try { @@ -153,15 +153,15 @@ public int indexKeytermAnnotations() { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } bulkRequest = client.prepareBulk(); //bulkRequest.setRefresh(true); bulkRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE); - logger.info("\n Bulk number : " + nb / bulkSize); + LOGGER.info("\n Bulk number : " + nb / bulkSize); } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } } @@ -169,10 +169,10 @@ public int indexKeytermAnnotations() { if (nb % bulkSize != 0) { // last bulk BulkResponse bulkResponse = bulkRequest.execute().actionGet(); - logger.info("\n One Last Bulk."); + LOGGER.info("\n One Last Bulk."); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } } return nb; @@ -197,7 +197,7 @@ public int indexNerdAnnotations() { annotation.setJson(annotation.getJson().replaceAll("_id", "id")); if (!IndexProperties.isReset() && annotation.isIsIndexed()) { - logger.info("\t\t Already indexed annotations for " + biblioObject.getAnhalyticsId() + ", Skipping..."); + LOGGER.info("\t\t Already indexed annotations for " + biblioObject.getAnhalyticsId() + ", Skipping..."); continue; } @@ -245,16 +245,16 @@ public int indexNerdAnnotations() { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } bulkRequest = client.prepareBulk(); //bulkRequest.setRefresh(true); bulkRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE); - logger.info("\n Bulk number : " + nb / bulkSize); + LOGGER.info("\n Bulk number : " + nb / bulkSize); } } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } } @@ -262,10 +262,10 @@ public int indexNerdAnnotations() { // last bulk if (nb % bulkSize != 0) { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); - logger.info("\n One Last Bulk."); + LOGGER.info("\n One Last Bulk."); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } } return nb; @@ -291,7 +291,7 @@ public int indexQuantitiesAnnotations() { annotation.setJson(annotation.getJson().replaceAll("_id", "id")); if (!IndexProperties.isReset() && annotation.isIsIndexed()) { - logger.info("\t\t Already indexed annotations for " + biblioObject.getAnhalyticsId() + ", Skipping..."); + LOGGER.info("\t\t Already indexed annotations for " + biblioObject.getAnhalyticsId() + ", Skipping..."); continue; } try { @@ -305,15 +305,15 @@ public int indexQuantitiesAnnotations() { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { // process 
failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } bulkRequest = client.prepareBulk(); //bulkRequest.setRefresh(true); bulkRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE); - logger.info("\n Bulk number : " + nb / bulkSize); + LOGGER.info("\n Bulk number : " + nb / bulkSize); } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } } @@ -321,10 +321,10 @@ // last bulk if (nb % bulkSize != 0) { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); - logger.info("\n One Last Bulk."); + LOGGER.info("\n One Last Bulk."); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } } @@ -337,7 +337,7 @@ public int indexQuantitiesAnnotations() { */ private List validDocIDs(String anhalyticsId, ObjectMapper mapper) { List results = new ArrayList(); - logger.info("validDocIDs: " + anhalyticsId); + LOGGER.info("validDocIDs: " + anhalyticsId); //String request[] = toBeIndexed.toArray(new String[0]); //String query = "{\"query\": { \"bool\": { \"must\": { \"term\": {\"_id\": \"" + anhalyticsId + "\"}}}}}"; @@ -368,7 +368,7 @@ private List validDocIDs(String anhalyticsId, ObjectMapper mapper) { } } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } return results; } diff --git a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/Indexer.java b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/Indexer.java index 3e53b75..3d15f61 100644 --- a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/Indexer.java +++ b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/Indexer.java @@ -27,7 +27,7 @@ */ abstract class Indexer { - private static final Logger logger = LoggerFactory.getLogger(Indexer.class); + private static final Logger LOGGER = LoggerFactory.getLogger(Indexer.class); protected MongoFileManager mm; @@ -80,8 +80,8 @@ public void setUpIndex(String indexName) { // create new index and load the appropriate mapping createIndex(indexName); } catch (Exception e) { - logger.error("Sep-up of ElasticSearch failed for index " + indexName + ".", e); - e.printStackTrace(); + LOGGER.error("Set-up of ElasticSearch failed for index " + indexName + ".", e); + LOGGER.error("Error: ", e); } } @@ -96,8 +96,8 @@ public void setUpIndex(String indexName) { // create new index and load the appropriate mapping createQuantitiesIndex(); } catch (Exception e) { - logger.error("Sep-up of ElasticSearch failed for index " + "quantities" + ".", e); - e.printStackTrace(); + LOGGER.error("Set-up of ElasticSearch failed for index " + "quantities" + ".", e); + LOGGER.error("Error: ", e); } }*/ @@ -124,9 +124,9 @@ public void setUpIndex(String indexName) { if (!createResponse.isAcknowledged()) { throw new IndexingServiceException("Failed to create index <" + "quantities" + ">"); } - logger.info("Index {} created", "quantities"); + LOGGER.info("Index {} created", "quantities"); } else { - logger.info("Index {} already exists", "quantities"); + LOGGER.info("Index {} already exists", "quantities"); } val = true; return val; @@ -141,13 +141,13 @@ private boolean deleteIndex(String indexName) { DeleteIndexResponse deleteResponse = this.client.admin().indices().delete(new DeleteIndexRequest(indexName)).actionGet(); if (deleteResponse.isAcknowledged()) { -
logger.info("Index {} deleted", indexName); + LOGGER.info("Index {} deleted", indexName); val = true; } else { - logger.error("Could not delete index " + indexName); + LOGGER.error("Could not delete index " + indexName); } } catch (IndexNotFoundException e) { - logger.info("Index " + indexName + " not found."); + LOGGER.info("Index " + indexName + " not found."); } return val; @@ -188,9 +188,9 @@ public boolean createIndex(String indexName) { if (!createResponse.isAcknowledged()) { throw new IndexingServiceException("Failed to create index <" + indexName + ">"); } - logger.info("Index {} created", indexName); + LOGGER.info("Index {} created", indexName); } else { - logger.info("Index {} already exists", indexName); + LOGGER.info("Index {} already exists", indexName); } val = true; return val; diff --git a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/IndexingPreprocess.java b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/IndexingPreprocess.java index 5a6cdf2..6f89921 100644 --- a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/IndexingPreprocess.java +++ b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/IndexingPreprocess.java @@ -35,7 +35,7 @@ */ public class IndexingPreprocess { - private static final Logger logger = LoggerFactory.getLogger(IndexingPreprocess.class); + private static final Logger LOGGER = LoggerFactory.getLogger(IndexingPreprocess.class); // this is the list of elements for which the text nodes should be expanded with an additional json // node capturing the nesting xml:lang attribute name/value pair @@ -74,7 +74,7 @@ public String process(String jsonStr, String repositoryDocId, String anhalyticsI JsonNode tei = jsonRoot.findPath("$TEI"); //check if fulltext is there.. if (tei.isNull()) { - logger.info(repositoryDocId + ": element is null -> " + tei.toString()); + LOGGER.info(repositoryDocId + ": element is null -> " + tei.toString()); return null; } if ((teiRoot != null) && (!teiRoot.isMissingNode())) { @@ -799,12 +799,12 @@ public JsonNode getStandoffQuantities(ObjectMapper mapper, String anhalyticsId, Iterator iter = jsonLocalAnnotation.elements(); while (iter.hasNext()) { JsonNode piece = (JsonNode) iter.next(); -//logger.info(piece.toString()); +//LOGGER.info(piece.toString()); JsonNode typeNode = piece.findPath("type"); if ((typeNode == null) || (typeNode.isMissingNode())) continue; String type = typeNode.textValue(); -//logger.info("type is " + type + " / " + piece.toString()); +//LOGGER.info("type is " + type + " / " + piece.toString()); if (type.equals("value")) { JsonNode quantity = piece.findPath("quantity"); @@ -825,10 +825,10 @@ public JsonNode getStandoffQuantities(ObjectMapper mapper, String anhalyticsId, JsonNode newNode = mapper.createObjectNode(); ((ObjectNode) newNode).put(valueTypeMeasure.replace(" ", "_"), val); ((ArrayNode) annotNode).add(newNode); -//logger.info("type is " + type + " / " + annotNode.toString()); +//LOGGER.info("type is " + type + " / " + annotNode.toString()); } else if (type.equals("interval")) { -//logger.info("type is " + type + " / " + piece.toString()); +//LOGGER.info("type is " + type + " / " + piece.toString()); JsonNode quantityMost = piece.findPath("quantityMost"); JsonNode quantityLeast = piece.findPath("quantityLeast"); String valueTypeMeasure = null; @@ -872,7 +872,7 @@ public JsonNode getStandoffQuantities(ObjectMapper mapper, String anhalyticsId, ((ObjectNode) newNode).put(valueTypeMeasure.replace(" ", "_")+"_range", range); ((ArrayNode) annotNode).add(newNode); 
-//logger.info("type is " + type + " / " + annotNode.toString()); +//LOGGER.info("type is " + type + " / " + annotNode.toString()); } } else if (type.equals("listc")) { @@ -940,7 +940,7 @@ private boolean filterDocuments(JsonNode jsonRoot) { ok = false; } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } } @@ -967,7 +967,7 @@ private boolean filterDocuments(JsonNode jsonRoot) { ok = false; } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } } } diff --git a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/KnowledgeBaseIndexer.java b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/KnowledgeBaseIndexer.java index 505a445..f343ada 100644 --- a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/KnowledgeBaseIndexer.java +++ b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/KnowledgeBaseIndexer.java @@ -59,7 +59,7 @@ */ public class KnowledgeBaseIndexer extends Indexer { - private static final Logger logger = LoggerFactory.getLogger(KnowledgeBaseIndexer.class); + private static final Logger LOGGER = LoggerFactory.getLogger(KnowledgeBaseIndexer.class); private static final AbstractDAOFactory adf = AbstractDAOFactory.getFactory(AbstractDAOFactory.DAO_FACTORY); private static final AbstractBiblioDAOFactory biblioadf = AbstractBiblioDAOFactory.getFactory(AbstractBiblioDAOFactory.DAO_FACTORY); @@ -143,21 +143,21 @@ public int indexAuthors() throws SQLException { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } bulkRequest = client.prepareBulk(); //bulkRequest.setRefresh(true); bulkRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE); - logger.info("\n Bulk number : " + nb / bulkSize); + LOGGER.info("\n Bulk number : " + nb / bulkSize); } } // last bulk if (nb % bulkSize != 0) { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); - logger.info("\n One Last Bulk."); + LOGGER.info("\n One Last Bulk."); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } } } else { @@ -282,7 +282,7 @@ public int indexPublications() throws SQLException { } } catch (Exception e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } documentDocument.put("annotations", result); //HAL domains @@ -296,21 +296,21 @@ public int indexPublications() throws SQLException { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } bulkRequest = client.prepareBulk(); //bulkRequest.setRefresh(true); bulkRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE); - logger.info("\n Bulk number : " + nb / bulkSize); + LOGGER.info("\n Bulk number : " + nb / bulkSize); } } // last bulk if (nb % bulkSize != 0) { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); - logger.info("\n One Last Bulk."); + LOGGER.info("\n One Last Bulk."); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } } } else { @@ -378,21 +378,21 @@ 
public int indexOrganisations() throws SQLException { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } bulkRequest = client.prepareBulk(); //bulkRequest.setRefresh(true); bulkRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE); - logger.info("\n Bulk number : " + nb / bulkSize); + LOGGER.info("\n Bulk number : " + nb / bulkSize); } } // last bulk if (nb % bulkSize != 0) { BulkResponse bulkResponse = bulkRequest.execute().actionGet(); - logger.info("\n One Last Bulk."); + LOGGER.info("\n One Last Bulk."); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item - logger.error(bulkResponse.buildFailureMessage()); + LOGGER.error(bulkResponse.buildFailureMessage()); } } } else { diff --git a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/main/Main.java b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/main/Main.java index 0274463..f3071ef 100644 --- a/anhalytics-index/src/main/java/fr/inria/anhalytics/index/main/Main.java +++ b/anhalytics-index/src/main/java/fr/inria/anhalytics/index/main/Main.java @@ -22,7 +22,7 @@ */ public class Main { - private static final Logger logger = LoggerFactory.getLogger(Main.class); + private static final Logger LOGGER = LoggerFactory.getLogger(Main.class); private static List availableCommands = Arrays.asList("setup", "indexAll", "indexTEI", @@ -35,7 +35,7 @@ public static void main(String[] args) throws UnknownHostException { try { IndexProperties.init("anhalytics.properties"); } catch (Exception e) { - logger.error("Error: ", e); + LOGGER.error("Error: ", e); return; } if (IndexProperties.getFromDate() != null || IndexProperties.getUntilDate() != null) { @@ -125,116 +125,116 @@ private void processCommand() throws UnknownHostException { try { int nbDoc1 = esm.indexTeiCorpus(); - logger.info("Total: " + nbDoc1 + " tei documents indexed."); + LOGGER.info("Total: " + nbDoc1 + " tei documents indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getTeisIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getTeisIndexName() + " will be created."); + LOGGER.error(IndexProperties.getTeisIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getTeisIndexName() + " will be created."); esm.createIndex(IndexProperties.getTeisIndexName()); int nbDoc1 = esm.indexTeiCorpus(); - logger.info("Total: " + nbDoc1 + " teis documents indexed."); + LOGGER.info("Total: " + nbDoc1 + " TEI documents indexed."); } try { int nbNerdAnnot = esm.indexNerdAnnotations(); - logger.info("Total: " + nbNerdAnnot + " NERD annotations indexed."); + LOGGER.info("Total: " + nbNerdAnnot + " NERD annotations indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getNerdAnnotsIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getNerdAnnotsIndexName() + " will be created."); + LOGGER.error(IndexProperties.getNerdAnnotsIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getNerdAnnotsIndexName() + " will be created."); esm.createIndex(IndexProperties.getNerdAnnotsIndexName()); int nbNerdAnnot = esm.indexNerdAnnotations(); - logger.info("Total: " + nbNerdAnnot + " NERD annotations indexed."); + LOGGER.info("Total: " +
nbNerdAnnot + " NERD annotations indexed."); } try { int nbKeytermAnnot = esm.indexKeytermAnnotations(); - logger.info("Total: " + nbKeytermAnnot + " Keyterm annotations indexed."); + LOGGER.info("Total: " + nbKeytermAnnot + " Keyterm annotations indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getKeytermAnnotsIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getKeytermAnnotsIndexName() + " will be created."); + LOGGER.error(IndexProperties.getKeytermAnnotsIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getKeytermAnnotsIndexName() + " will be created."); esm.createIndex(IndexProperties.getKeytermAnnotsIndexName()); int nbKeytermAnnot = esm.indexKeytermAnnotations(); - logger.info("Total: " + nbKeytermAnnot + " Keyterm annotations indexed."); + LOGGER.info("Total: " + nbKeytermAnnot + " Keyterm annotations indexed."); } try { int nbQuantitiesAnnot = esm.indexQuantitiesAnnotations(); - logger.info("Total: " + nbQuantitiesAnnot + " grobid-quantities annotations indexed."); + LOGGER.info("Total: " + nbQuantitiesAnnot + " grobid-quantities annotations indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getQuantitiesAnnotsIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getQuantitiesAnnotsIndexName() + " will be created."); + LOGGER.error(IndexProperties.getQuantitiesAnnotsIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getQuantitiesAnnotsIndexName() + " will be created."); esm.createIndex(IndexProperties.getQuantitiesAnnotsIndexName()); int nbQuantitiesAnnot = esm.indexQuantitiesAnnotations(); - logger.info("Total: " + nbQuantitiesAnnot + " grobid-quantities annotations indexed."); + LOGGER.info("Total: " + nbQuantitiesAnnot + " grobid-quantities annotations indexed."); } } else if (process.equals("indexTEI")) { try { int nbDoc1 = esm.indexTeiCorpus(); - logger.info("Total: " + nbDoc1 + " metadata documents indexed."); + LOGGER.info("Total: " + nbDoc1 + " metadata documents indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getTeisIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getTeisIndexName() + " will be created."); + LOGGER.error(IndexProperties.getTeisIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getTeisIndexName() + " will be created."); esm.createIndex(IndexProperties.getTeisIndexName()); int nbDoc1 = esm.indexTeiCorpus(); - logger.info("Total: " + nbDoc1 + " TEI documents indexed."); + LOGGER.info("Total: " + nbDoc1 + " TEI documents indexed."); } } else if (process.equals("indexAnnotations")) { try { int nbNerdAnnot = esm.indexNerdAnnotations(); - logger.info("Total: " + nbNerdAnnot + " NERD annotations indexed."); + LOGGER.info("Total: " + nbNerdAnnot + " NERD annotations indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getNerdAnnotsIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getNerdAnnotsIndexName() + " will be created."); + LOGGER.error(IndexProperties.getNerdAnnotsIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getNerdAnnotsIndexName() + " will be created."); esm.createIndex(IndexProperties.getNerdAnnotsIndexName()); int nbNerdAnnot = esm.indexNerdAnnotations(); - logger.info("Total: " + nbNerdAnnot + " NERD 
annotations indexed."); + LOGGER.info("Total: " + nbNerdAnnot + " NERD annotations indexed."); } try { int nbKeytermAnnot = esm.indexKeytermAnnotations(); - logger.info("Total: " + nbKeytermAnnot + " Keyterm annotations indexed."); + LOGGER.info("Total: " + nbKeytermAnnot + " Keyterm annotations indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getKeytermAnnotsIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getKeytermAnnotsIndexName() + " will be created."); + LOGGER.error(IndexProperties.getKeytermAnnotsIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getKeytermAnnotsIndexName() + " will be created."); esm.createIndex(IndexProperties.getKeytermAnnotsIndexName()); int nbKeytermAnnot = esm.indexKeytermAnnotations(); - logger.info("Total: " + nbKeytermAnnot + " Keyterm annotations indexed."); + LOGGER.info("Total: " + nbKeytermAnnot + " Keyterm annotations indexed."); } try { int nbQuantitiesAnnot = esm.indexQuantitiesAnnotations(); - logger.info("Total: " + nbQuantitiesAnnot + " grobid-quantities annotations indexed."); + LOGGER.info("Total: " + nbQuantitiesAnnot + " grobid-quantities annotations indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getQuantitiesAnnotsIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getQuantitiesAnnotsIndexName() + " will be created."); + LOGGER.error(IndexProperties.getQuantitiesAnnotsIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getQuantitiesAnnotsIndexName() + " will be created."); esm.createIndex(IndexProperties.getQuantitiesAnnotsIndexName()); int nbQuantitiesAnnot = esm.indexQuantitiesAnnotations(); - logger.info("Total: " + nbQuantitiesAnnot + " grobid-quantities annotations indexed."); + LOGGER.info("Total: " + nbQuantitiesAnnot + " grobid-quantities annotations indexed."); } } else if (process.equals("indexKB")) { KnowledgeBaseIndexer mi = new KnowledgeBaseIndexer(); // this is the KB in fact ! 
try { int nbAuthtors = mi.indexAuthors(); - logger.info("Total: " + nbAuthtors + " authors indexed."); + LOGGER.info("Total: " + nbAuthtors + " authors indexed."); int nbPubs = mi.indexPublications(); - logger.info("Total: " + nbPubs + " publications indexed."); + LOGGER.info("Total: " + nbPubs + " publications indexed."); int nbOrgs = mi.indexOrganisations(); - logger.info("Total: " + nbOrgs + " organisations indexed."); + LOGGER.info("Total: " + nbOrgs + " organisations indexed."); } catch (IndexNotCreatedException e) { - logger.error(IndexProperties.getKbIndexName() + " not found, setup the index."); - logger.info("The index " + IndexProperties.getKbIndexName() + " will be created."); + LOGGER.error(IndexProperties.getKbIndexName() + " not found, setup the index."); + LOGGER.info("The index " + IndexProperties.getKbIndexName() + " will be created."); esm.createIndex(IndexProperties.getKbIndexName()); try { int nbAuthtors = mi.indexAuthors(); - logger.info("Total: " + nbAuthtors + " authors indexed."); + LOGGER.info("Total: " + nbAuthtors + " authors indexed."); int nbPubs = mi.indexPublications(); - logger.info("Total: " + nbPubs + " publications indexed."); + LOGGER.info("Total: " + nbPubs + " publications indexed."); int nbOrgs = mi.indexOrganisations(); - logger.info("Total: " + nbOrgs + " organisations indexed."); + LOGGER.info("Total: " + nbOrgs + " organisations indexed."); } catch (SQLException sqle) { - sqle.printStackTrace(); + LOGGER.error("Error: ", sqle); } } catch (SQLException sqle) { - sqle.printStackTrace(); + LOGGER.error("Error: ", sqle); } mi.close(); } @@ -242,7 +242,7 @@ private void processCommand() throws UnknownHostException { try { int nbDocs = esm.indexIstexQuantites(); } catch (Exception sqle) { - sqle.printStackTrace(); + LOGGER.error("Error: ", sqle); } } else if (process.equals("setupQuantitiesIndex")) { @@ -251,7 +251,7 @@ private void processCommand() throws UnknownHostException { esm.close(); } catch (ServiceException se) { - logger.error("Error: ", se); + LOGGER.error("Error: ", se); } return; diff --git a/anhalytics-kb/src/main/java/fr/inria/anhalytics/kb/datamine/KnowledgeBaseFeeder.java b/anhalytics-kb/src/main/java/fr/inria/anhalytics/kb/datamine/KnowledgeBaseFeeder.java index 448264f..5255b94 100644 --- a/anhalytics-kb/src/main/java/fr/inria/anhalytics/kb/datamine/KnowledgeBaseFeeder.java +++ b/anhalytics-kb/src/main/java/fr/inria/anhalytics/kb/datamine/KnowledgeBaseFeeder.java @@ -43,7 +43,7 @@ */ public class KnowledgeBaseFeeder { - private static final Logger logger = LoggerFactory.getLogger(KnowledgeBaseFeeder.class); + private static final Logger LOGGER = LoggerFactory.getLogger(KnowledgeBaseFeeder.class); private static final AbstractDAOFactory adf = AbstractDAOFactory.getFactory(AbstractDAOFactory.DAO_FACTORY); @@ -75,7 +75,7 @@ public void initKnowledgeBase() { while (mm.hasMore()) { BiblioObject biblioObject = mm.nextBiblioObject(); if (!KbProperties.isReset() && biblioObject.getIsMined()) { - logger.info("\t\t Already mined, Skipping..."); + LOGGER.info("\t\t Already mined, Skipping..."); continue; } adf.openTransaction(); @@ -90,7 +90,7 @@ public void initKnowledgeBase() { docBuilder = docFactory.newDocumentBuilder(); teiDoc = docBuilder.parse(teiStream); } catch (Exception e) { - logger.error("Error when parsing TEI stream. ", e); + LOGGER.error("Error when parsing TEI stream. 
", e); } teiStream.close(); @@ -115,7 +115,7 @@ public void initKnowledgeBase() { NodeList authors = teiHeader.getElementsByTagName("author"); Element monogr = (Element) xPath.compile(TeiPaths.MonogrElement).evaluate(teiDoc, XPathConstants.NODE); NodeList ids = (NodeList) xPath.compile(TeiPaths.IdnoElement).evaluate(teiDoc, XPathConstants.NODESET); - logger.info("Extracting :" + biblioObject.getRepositoryDocId()); + LOGGER.info("Extracting :" + biblioObject.getRepositoryDocId()); if (authors.getLength() > 30) { throw new NumberOfCoAuthorsExceededException("Number of authors exceed 30 co-authors for this publication."); } @@ -137,13 +137,13 @@ public void initKnowledgeBase() { processPersons(authors, "author", pub, teiDoc, authorsFromfulltextTeiHeader); processPersons(editors, "editor", pub, teiDoc, authorsFromfulltextTeiHeader); - logger.info("#################################################################"); + LOGGER.info("#################################################################"); } catch(NumberOfCoAuthorsExceededException e) { - logger.warn("Skipping publication, number of coauthors are exceeding 30", e); + LOGGER.warn("Skipping publication, number of coauthors are exceeding 30", e); adf.rollback(); teiDoc = null; } catch (Exception xpe) { - logger.error("Error during transaction. Rollback records", xpe); + LOGGER.error("Error during transaction. Rollback records", xpe); adf.rollback(); teiDoc = null; } @@ -158,7 +158,7 @@ public void initKnowledgeBase() { } DAOFactory.closeConnection(); - logger.info("DONE."); + LOGGER.info("DONE."); } private static void processIdentifiers(NodeList ids, fr.inria.anhalytics.commons.entities.Document doc, String halId) throws SQLException { @@ -227,7 +227,7 @@ private static void processMonogr(Element monogr, Publication pub) throws SQLExc try { pub.setDate_printed(Utilities.parseStringDate(date)); } catch (ParseException ex) { - ex.printStackTrace(); + LOGGER.error("Error: ", ex); } } } else if (imprintChildElt.getNodeName().equals("biblScope")) { @@ -651,11 +651,11 @@ public void processCitations() throws SQLException { while (mm.hasMore()) { BiblioObject biblioObject = mm.nextBiblioObject(); if (!KbProperties.isReset() && biblioObject.getIsMined()) { - logger.info("\t\t Already mined, Skipping..."); + LOGGER.info("\t\t Already mined, Skipping..."); continue; } if (!dd.isCitationsMined(biblioObject.getAnhalyticsId())) { - logger.info("Extracting :" + biblioObject.getRepositoryDocId()); + LOGGER.info("Extracting :" + biblioObject.getRepositoryDocId()); abdf.openTransaction(); try { InputStream teiStream = new ByteArrayInputStream(mm.getTEICorpus(biblioObject).getBytes()); @@ -675,7 +675,7 @@ public void processCitations() throws SQLException { } } } catch (Exception xpe) { - xpe.printStackTrace(); + LOGGER.error("Error: ", xpe); abdf.rollback(); } abdf.endTransaction(); @@ -823,7 +823,7 @@ private void processBiblStruct(Element reference, fr.inria.anhalytics.commons.en try { pub.setDate_printed(Utilities.parseStringDate(date)); } catch (ParseException ex) { - ex.printStackTrace(); + LOGGER.error("Error: ", ex); } } else if (imprintChildElt.getNodeName().equals("biblScope")) { String unit = imprintChildElt.getAttribute("unit"); @@ -923,7 +923,7 @@ private Document getDocument(InputStream in) throws IOException, ParserConfigura try { doc = docBuilder.parse(in); } catch (SAXException e) { - e.printStackTrace(); + LOGGER.error("Error: ", e); } return doc; diff --git a/anhalytics-kb/src/main/java/fr/inria/anhalytics/kb/main/Main.java 
b/anhalytics-kb/src/main/java/fr/inria/anhalytics/kb/main/Main.java index aad9bcb..27c1ec0 100644 --- a/anhalytics-kb/src/main/java/fr/inria/anhalytics/kb/main/Main.java +++ b/anhalytics-kb/src/main/java/fr/inria/anhalytics/kb/main/Main.java @@ -17,7 +17,7 @@ */ public class Main { - private static final Logger logger = LoggerFactory.getLogger(Main.class); + private static final Logger LOGGER = LoggerFactory.getLogger(Main.class); private static List availableCommands = new ArrayList() { { @@ -31,7 +31,7 @@ public static void main(String[] args) throws UnknownHostException, SQLException try { KbProperties.init("anhalytics.properties"); } catch (Exception exp) { - logger.error(exp.getMessage()); + LOGGER.error(exp.getMessage()); return; } @@ -57,7 +57,7 @@ private void processCommand() throws UnknownHostException, SQLException { kbf.processCitations(); } } catch (ServiceException se) { - logger.error("Error: ", se); + LOGGER.error("Error: ", se); } return; }
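The renames above all converge on the same SLF4J conventions. A minimal standalone sketch of that pattern follows, assuming only SLF4J on the classpath; the class and method names are illustrative, not part of the repository:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingConventions {

    // The logger is a static final field, hence the constant-style name LOGGER.
    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingConventions.class);

    public void process(String docId) {
        try {
            // {} placeholders are substituted only when INFO is enabled,
            // avoiding the eager string concatenation used in many of the
            // messages above.
            LOGGER.info("Processing {}", docId);
        } catch (Exception e) {
            // Passing the exception as the last argument records the full
            // stack trace through the configured backend; e.printStackTrace()
            // writes to stderr and bypasses log routing, levels, and formatting.
            LOGGER.error("Processing failed for {}", docId, e);
        }
    }
}

This is why replacing printStackTrace() with LOGGER.error(..., e) is more than cosmetic: stack traces become part of the application log stream and can be filtered, formatted, and shipped like any other record.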
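One inconsistency the patch leaves in place: the shutdown message in TeiCorpusBuilderProcess promises a bounded wait ("will wait 1 minute before forcing off"), but the surrounding code calls executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES), which in practice never times out, so shutdownNow() is only reached on interruption. A sketch of a shutdown sequence that would actually match such a message, using the standard java.util.concurrent API; the one-minute bound is illustrative:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

public final class BoundedShutdown {

    public static void shutdownAndAwait(ExecutorService executor) {
        executor.shutdown(); // stop accepting new tasks; queued work may finish
        try {
            // Bounded wait: force-cancel whatever is still running afterwards.
            if (!executor.awaitTermination(1, TimeUnit.MINUTES)) {
                executor.shutdownNow();
            }
        } catch (InterruptedException e) {
            executor.shutdownNow();
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }
}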