// the pre-2010 documents configuration (loaded from pre2010ConfDir in initialize())
private DocumentsModel oldConfig = null;
/**
 * Builds the batch importer from command-line arguments.
 *
 * @param args raw command-line arguments, forwarded to the batch superclass
 */
public Importer(final String[] args) {
    super(args);
}
@Override
protected void initialize() throws Exception {
- String _tmp = getProps().getProperty("max.concurrent.threads");
+ String _tmp = this.getProps().getProperty("max.concurrent.threads");
try {
- maxConcurrentThreads = Integer.parseInt(_tmp);
- } catch (Exception e) {
+ this.maxConcurrentThreads = Integer.parseInt(_tmp);
+ } catch (final Exception e) {
logger.error("max.concurrent.threads=" + _tmp + " : nombre invalide");
}
_tmp = null;
- _tmp = getProps().getProperty("pre2010.sur-chiffres.documents-def");
+ _tmp = this.getProps().getProperty("pre2010.sur-chiffres.documents-def");
_tmp = _tmp.replaceAll("%basedir%", BatchRunner.getCanonicalBaseDir());
- pre2010ConfDir = new File(_tmp);
- sectionName = getProps().getProperty("section.pre2010.name");
-
- if (!StringUtils.isEmpty(getProps().getProperty("archive.locator"))) {
- String archiveLocatorClassName = getProps().getProperty("archive.locator");
- Class clazz = Class.forName(archiveLocatorClassName);
- archiveLocator = (ArchiveLocator)clazz.newInstance();
- } else if (!StringUtils.isEmpty(getProps().getProperty("xar.storage"))) {
- String xarStoragePath = getProps().getProperty("xar.storage");
- archiveLocator = new BasicArchiveLocator(xarStoragePath);
- }else {
+ this.pre2010ConfDir = new File(_tmp);
+ this.sectionName = this.getProps().getProperty("section.pre2010.name");
+
+ if (!StringUtils.isEmpty(this.getProps().getProperty("archive.locator"))) {
+ final String archiveLocatorClassName = this.getProps().getProperty("archive.locator");
+ final Class clazz = Class.forName(archiveLocatorClassName);
+ this.archiveLocator = (ArchiveLocator) clazz.newInstance();
+ } else if (!StringUtils.isEmpty(this.getProps().getProperty("xar.storage"))) {
+ final String xarStoragePath = this.getProps().getProperty("xar.storage");
+ this.archiveLocator = new BasicArchiveLocator(xarStoragePath);
+ } else {
throw new Exception("le paramètre 'archive.locator' ou 'xar.storage' doit être renseigné");
}
- archiveLocator.initialize();
-
+ this.archiveLocator.initialize();
+
_tmp = null;
- _tmp = getProps().getProperty("temp.directory");
- if (_tmp == null)
+ _tmp = this.getProps().getProperty("temp.directory");
+ if (_tmp == null) {
_tmp = System.getProperty("java.io.tmpdir");
- rootTempDirectory = new File(_tmp);
+ }
+ this.rootTempDirectory = new File(_tmp);
_tmp = null;
- _tmp = getProps().getProperty("rules.directory");
+ _tmp = this.getProps().getProperty("rules.directory");
_tmp = _tmp.replaceAll("%basedir%", BatchRunner.getCanonicalBaseDir());
- rulesDirectory = new File(_tmp);
- LocalLoader ll = new LocalLoader();
- oldConfig = ll.loadConfigFromFiles(pre2010ConfDir.getAbsolutePath());
- if ("true".equals(getProps().getProperty("drop.log.files.after.import")))
- dropLogFilesAfterImport = true;
+ this.rulesDirectory = new File(_tmp);
+ final LocalLoader ll = new LocalLoader();
+ this.oldConfig = ll.loadConfigFromFiles(this.pre2010ConfDir.getAbsolutePath());
+ if ("true".equals(this.getProps().getProperty("drop.log.files.after.import"))) {
+ this.dropLogFilesAfterImport = true;
+ }
}
- public static boolean doesOpennedTaskExistsForArchive(String archiveName, String exercice) throws SQLException {
+ public static boolean doesOpennedTaskExistsForArchive(final String archiveName, final String exercice) throws SQLException {
boolean ret = false;
Connection con = null;
try {
con = PoolManager.getInstance().getConnection();
- PreparedStatement ps = con.prepareStatement("SELECT IMPORT_ID FROM IMPORT_JOBS WHERE ARCHIVE_NAME=? AND EXERCICE=? AND IMPORT_STATUS IN (?,?)");
+ final PreparedStatement ps = con.prepareStatement("SELECT IMPORT_ID FROM IMPORT_JOBS WHERE ARCHIVE_NAME=? AND EXERCICE=? AND IMPORT_STATUS IN (?,?)");
int i = 1;
ps.setString(i++, archiveName);
ps.setString(i++, exercice);
ps.setInt(i++, IMPORT_JOB_STATUS_ASKED);
ps.setInt(i++, IMPORT_JOB_STATUS_RUNNING);
- ResultSet rs = ps.executeQuery();
+ final ResultSet rs = ps.executeQuery();
ret = rs.next();
rs.close();
ps.close();
- } catch (SQLException ex) {
+ } catch (final SQLException ex) {
logger.error("doesOpennedTaskExistsForArchive(String)", ex);
throw ex;
} finally {
- if (con != null)
+ if (con != null) {
PoolManager.getInstance().releaseConnection(con);
+ }
}
return ret;
}
- public static void cancelTasksForArchive(String archiveName) throws SQLException {
+ public static void cancelTasksForArchive(final String archiveName) throws SQLException {
Connection con = null;
try {
con = PoolManager.getInstance().getConnection();
- PreparedStatement ps = con.prepareStatement("UPDATE IMPORT_JOBS SET IMPORT_STATUS=? WHERE ARCHIVE_NAME=? AND IMPORT_STATUS IN (?,?)");
+ final PreparedStatement ps = con.prepareStatement("UPDATE IMPORT_JOBS SET IMPORT_STATUS=? WHERE ARCHIVE_NAME=? AND IMPORT_STATUS IN (?,?)");
ps.setInt(1, IMPORT_JOB_STATUS_CANCELED);
ps.setString(2, archiveName);
ps.setInt(3, IMPORT_JOB_STATUS_ASKED);
ps.setInt(4, IMPORT_JOB_STATUS_RUNNING);
ps.executeUpdate();
ps.close();
- } catch (SQLException ex) {
+ } catch (final SQLException ex) {
logger.error("cancelTasksForArchive(String)", ex);
throw ex;
} finally {
- if (con != null)
+ if (con != null) {
PoolManager.getInstance().releaseConnection(con);
+ }
}
}
// NOTE(review): this block is an unapplied diff hunk ('-' = old line, '+' = new
// line); only changed lines plus nearby context are visible here, so some
// interior lines of the method are probably elided.
- public static void createImportJob(String archiveName, String exercice, String importScheme, String importUser) throws SQLException {
+ public static void createImportJob(final String archiveName, final String exercice, final String importScheme, final String importUser) throws SQLException {
Connection con = null;
try {
con = PoolManager.getInstance().getConnection();
if (con.getMetaData().getURL().contains(":mysql:")) {
- PreparedStatement ps = con.prepareStatement("INSERT INTO IMPORT_JOBS (ARCHIVE_NAME, EXERCICE, IMPORT_SCHEME, USER_LOGIN, IMPORT_STATUS,DATE_MESSAGE, DATE_RAPPEL, DATE_CONFIRM) VALUES (?,?,?,?,?,NULL,NULL,NULL)");
+ final PreparedStatement ps = con.prepareStatement("INSERT INTO IMPORT_JOBS (ARCHIVE_NAME, EXERCICE, IMPORT_SCHEME, USER_LOGIN, IMPORT_STATUS,DATE_MESSAGE, DATE_RAPPEL, DATE_CONFIRM) VALUES (?,?,?,?,?,NULL,NULL,NULL)");
int i = 1;
ps.setString(i++, archiveName);
ps.setString(i++, exercice);
// NOTE(review): as shown, only 2 of 5 placeholders are bound and
// executeUpdate() is never called before ps.close() — the missing lines are
// most likely elided diff context; verify against the complete file.
ps.close();
} else if (con.getMetaData().getURL().contains(":oracle:")) {
long id = 0;
- ResultSet rs = con.createStatement().executeQuery("SELECT SQ_IMPORT_JOBS.NEXTVAL FROM DUAL");
- if (rs.next())
+ final ResultSet rs = con.createStatement().executeQuery("SELECT SQ_IMPORT_JOBS.NEXTVAL FROM DUAL");
+ if (rs.next()) {
id = rs.getLong(1);
- else
+ } else {
id = 1;
- PreparedStatement ps = con.prepareStatement("INSERT INTO IMPORT_JOBS (IMPORT_ID, ARCHIVE_NAME, EXERCICE, IMPORT_SCHEME, USER_LOGIN, IMPORT_STATUS) VALUES (?,?,?,?,?,?)");
+ }
+ final PreparedStatement ps = con.prepareStatement("INSERT INTO IMPORT_JOBS (IMPORT_ID, ARCHIVE_NAME, EXERCICE, IMPORT_SCHEME, USER_LOGIN, IMPORT_STATUS) VALUES (?,?,?,?,?,?)");
int i = 1;
ps.setLong(i++, id);
ps.setString(i++, archiveName);
// NOTE(review): same concern here — 4 of 6 placeholders appear unbound before
// executeUpdate(); also 'rs' and its Statement are never closed.
ps.executeUpdate();
ps.close();
}
- } catch (SQLException ex) {
+ } catch (final SQLException ex) {
// NOTE(review): log tag says (String,String,String) but the method takes four
// parameters.
logger.error("createImportJob(String,String,String)", ex);
throw ex;
} finally {
- if (con != null)
+ if (con != null) {
PoolManager.getInstance().releaseConnection(con);
+ }
}
}
// NOTE(review): unapplied diff hunk from the middle of the job-dispatch method
// (presumably doProcess()); its opening lines are not visible in this chunk.
// super.beforeExecute(t, r);
// }
// };
- ExecutorService pool = Executors.newFixedThreadPool(maxConcurrentThreads);
- if (checkNotRunning()) {
+ final ExecutorService pool = Executors.newFixedThreadPool(this.maxConcurrentThreads);
+ if (this.checkNotRunning()) {
Connection con = null;
try {
con = PoolManager.getInstance().getConnection();
- PreparedStatement ps = con.prepareStatement("select IMPORT_ID, ARCHIVE_NAME, EXERCICE, USER_LOGIN, IMPORT_SCHEME FROM IMPORT_JOBS WHERE IMPORT_STATUS=? ORDER BY DATE_DEMANDE ASC");
+ final PreparedStatement ps = con.prepareStatement("select IMPORT_ID, ARCHIVE_NAME, EXERCICE, USER_LOGIN, IMPORT_SCHEME FROM IMPORT_JOBS WHERE IMPORT_STATUS=? ORDER BY DATE_DEMANDE ASC");
ps.setInt(1, IMPORT_JOB_STATUS_ASKED);
- ResultSet rs = ps.executeQuery();
+ final ResultSet rs = ps.executeQuery();
int count = 0;
// submits at most 3 jobs per worker thread in one run
- while (rs.next() && count < (maxConcurrentThreads * 3)) {
+ while (rs.next() && count < (this.maxConcurrentThreads * 3)) {
count++;
int i = 1;
- long importId = rs.getLong(i++);
- String archiveName = rs.getString(i++);
- String exercice = rs.getString(i++);
- String user = rs.getString(i++);
- String scheme = rs.getString(i++);
- ImporterImpl ii = new ImporterImpl(importId, archiveName, exercice, user, scheme);
+ final long importId = rs.getLong(i++);
+ final String archiveName = rs.getString(i++);
+ final String exercice = rs.getString(i++);
+ final String user = rs.getString(i++);
+ final String scheme = rs.getString(i++);
+ final ImporterImpl ii = new ImporterImpl(importId, archiveName, exercice, user, scheme);
logger.debug("submitting importId=" + importId + " for " + archiveName);
pool.submit(ii);
}
// NOTE(review): 'ps' is never closed (only 'rs'), and neither is closed if
// executeQuery() throws.
rs.close();
pool.shutdown();
// NOTE(review): 100 days is effectively "wait forever"; an
// InterruptedException here lands in the generic catch below without
// re-interrupting the thread.
pool.awaitTermination(100, TimeUnit.DAYS);
- oldConfig.prepareForUnload();
- oldConfig = null;
- } catch (Exception ex) {
+ this.oldConfig.prepareForUnload();
+ this.oldConfig = null;
+ } catch (final Exception ex) {
logger.error("doProcess()", ex);
} finally {
- if (con != null)
+ if (con != null) {
PoolManager.getInstance().releaseConnection(con);
- clearLock();
+ }
+ this.clearLock();
}
// } else {
// clearLock();
// Per-job state, populated from the IMPORT_JOBS row that triggered this run.
Traitement traitement;
long importId;
String archiveName;
String user;
String scheme;

/**
 * Captures one import job read from the IMPORT_JOBS table.
 *
 * @param importId    primary key of the IMPORT_JOBS row
 * @param archiveName name of the archive to import
 * @param exercice    kept for call compatibility; no longer stored (the
 *                    'exercice' field was removed in this change)
 * @param user        login of the user who requested the import
 * @param scheme      name of the rules file to apply
 */
public ImporterImpl(final long importId, final String archiveName, final String exercice, final String user, final String scheme) {
    super();
    this.traitement = new Traitement(Importer.class, BATCH_NAME + "<" + archiveName + ">");
    this.importId = importId;
    this.archiveName = archiveName;
    this.user = user;
    this.scheme = scheme;
}
// NOTE(review): unapplied diff hunk from the middle of ImporterImpl's worker
// method (likely run()); its opening lines are not visible in this chunk.
con = PoolManager.getInstance().getConnection();
PreparedStatement ps = con.prepareStatement("UPDATE IMPORT_JOBS SET IMPORT_STATUS=? WHERE IMPORT_ID=?");
ps.setInt(1, IMPORT_JOB_STATUS_RUNNING);
- ps.setLong(2, importId);
+ ps.setLong(2, this.importId);
ps.executeUpdate();
ps.close();
// give the connection back for the duration of the import
PoolManager.getInstance().releaseConnection(con);
con = null;
-
+
// we always take the first volume
- String archiveFirstVolume = FilenameUtils.getBaseName(archiveName) + "_1." + FilenameUtils.getExtension(archiveName);
- File archiveFile = archiveLocator.getArchiveLocation(archiveFirstVolume);
-
+ final String archiveFirstVolume = FilenameUtils.getBaseName(this.archiveName) + "_1." + FilenameUtils.getExtension(this.archiveName);
+ File archiveFile = Importer.this.archiveLocator.getArchiveLocation(archiveFirstVolume);
+
// ugly special case for the old EDMN archives
if (!archiveFile.exists()) {
- archiveFile = archiveLocator.getArchiveLocation(archiveName);
-
+ archiveFile = Importer.this.archiveLocator.getArchiveLocation(this.archiveName);
+
if (!archiveFile.exists()) {
- String archiveFolder = archiveLocator.getFolder().getAbsolutePath();
-
- logger.warn("Fichier non trouvé : ni " + archiveFirstVolume + " ni " + archiveName + " n'ont été trouvés dans " + archiveFolder);
- traitement.addMessage(Traitement.SEVERITY_ERROR, "Fichier non trouvé : ni " + archiveFirstVolume + " ni " + archiveName + " n'ont été trouvés dans " + archiveFolder);
+ final String archiveFolder = Importer.this.archiveLocator.getFolder().getAbsolutePath();
+
+ logger.warn("Fichier non trouvé : ni " + archiveFirstVolume + " ni " + this.archiveName + " n'ont été trouvés dans " + archiveFolder);
+ this.traitement.addMessage(Traitement.SEVERITY_ERROR, "Fichier non trouvé : ni " + archiveFirstVolume + " ni " + this.archiveName + " n'ont été trouvés dans " + archiveFolder);
return;
}
}
-
- DocumentsModel dm = BatchRunner.getInstance().getDocuments();
- ImportServiceProvider isp = new ImportServiceBatchProvider(null, null, null);
- RulesParser rp = new RulesParser(FactoryProvider.getSaxParserFactory());
- File rulesFile = new File(rulesDirectory, scheme);
+
+ final DocumentsModel dm = BatchRunner.getInstance().getDocuments();
+ final ImportServiceProvider isp = new ImportServiceBatchProvider(null, null, null);
+ final RulesParser rp = new RulesParser(FactoryProvider.getSaxParserFactory());
+ final File rulesFile = new File(Importer.this.rulesDirectory, this.scheme);
if (!rulesFile.exists()) {
- logger.error("Le fichier de règle " + scheme + " n'existe pas dans " + rulesDirectory);
- throw new FileNotFoundException("Le fichier de règle " + scheme + " n'existe pas dans " + rulesDirectory);
+ logger.error("Le fichier de règle " + this.scheme + " n'existe pas dans " + Importer.this.rulesDirectory);
+ throw new FileNotFoundException("Le fichier de règle " + this.scheme + " n'existe pas dans " + Importer.this.rulesDirectory);
}
rp.parse(rulesFile);
final RulesModel rules = (RulesModel) rp.getMarshallable();
- ArchiveImporter ai = new ArchiveImporter(dm, archiveFile, isp, getApplicationConfiguration(), rules) {
+ final ArchiveImporter ai = new ArchiveImporter(dm, archiveFile, isp, Importer.this.getApplicationConfiguration(), rules) {
// here we patch the documents configuration, if needed
@Override
- public void preImport(SectionModel section, Document archiveManifeste) {
- String exercice = archiveManifeste.getRootElement().getAttributeValue("exercice");
- String idColl = archiveManifeste.getRootElement().getAttributeValue("cgIdCol");
- if (exercice == null || idColl == null)
+ public void preImport(final SectionModel section, final Document archiveManifeste) {
+ final String exercice = archiveManifeste.getRootElement().getAttributeValue("exercice");
+ final String idColl = archiveManifeste.getRootElement().getAttributeValue("cgIdCol");
+ if (exercice == null || idColl == null) {
return;
+ }
logger.debug("preImport: -exercice=" + exercice + " -idColl=" + idColl);
if (idColl.length() > 7) {
// almost certainly SPL data
// PRE: ugly workaround (the full import comes from a web request)
logger.debug("RulesModel : " + rules.getId());
- if (Integer.parseInt(exercice) < 2011 && !"import-full.rul.xml".equals(scheme)) {
+ if (Integer.parseInt(exercice) < 2011 && !"import-full.rul.xml".equals(ImporterImpl.this.scheme)) {
// pre-2010
- if (section.getName().equals(sectionName)) {
+ if (section.getName().equals(Importer.this.sectionName)) {
// then we must switch to the old documents
// configuration
logger.debug("oldConfig set");
- setDocumentsModel(oldConfig);
+ this.setDocumentsModel(Importer.this.oldConfig);
}
}
}
}
};
- ai.setUser(new LocalUser(user));
- File tempDir = new File(rootTempDirectory, FilenameUtils.getBaseName(archiveName));
+ ai.setUser(new LocalUser(this.user));
+ final File tempDir = new File(Importer.this.rootTempDirectory, FilenameUtils.getBaseName(this.archiveName));
tempDir.mkdirs();
ai.setLocalTempDir(tempDir);
- MDC.put(MultiThreadAppender.MDC_THREAD_KEY, FilenameUtils.getBaseName(archiveName));
+ MDC.put(MultiThreadAppender.MDC_THREAD_KEY, FilenameUtils.getBaseName(this.archiveName));
// switch on per-thread log splitting
logger.fatal(MultiThreadAppender.START_SPLIT_MSG);
- traitement.addMessage(Traitement.SEVERITY_INFO, "Début de l'import");
- Errors errors = ai.doImport();
+ this.traitement.addMessage(Traitement.SEVERITY_INFO, "Début de l'import");
+ final Errors errors = ai.doImport();
FileUtils.forceDelete(tempDir);
// close the per-thread log split
logger.fatal(MultiThreadAppender.END_SPLIT_MSG);
// take a connection again for the rest of the work
- if (con == null)
+ if (con == null) {
con = PoolManager.getInstance().getConnection();
+ }
if (errors.containsWarning()) {
- StringBuilder sb = new StringBuilder();
- for (Errors.Error error : errors.getErrors()) {
- if (error.getSeverity() >= Errors.SEVERITY_WARNING)
+ final StringBuilder sb = new StringBuilder();
+ for (final Errors.Error error : errors.getErrors()) {
+ if (error.getSeverity() >= Errors.SEVERITY_WARNING) {
sb.append(error.getMessage()).append("\n");
+ }
}
- if (errors.containsError())
- traitement.addMessage(Traitement.SEVERITY_ERROR, sb.toString());
- else
- traitement.addMessage(Traitement.SEVERITY_WARN, sb.toString());
- if (errors.containsError())
- traitement.setResume("Import échoué");
- else
- traitement.setResume("Avertissements pendant l'import");
- traitement.setEnd(System.currentTimeMillis());
+ if (errors.containsError()) {
+ this.traitement.addMessage(Traitement.SEVERITY_ERROR, sb.toString());
+ } else {
+ this.traitement.addMessage(Traitement.SEVERITY_WARN, sb.toString());
+ }
+ if (errors.containsError()) {
+ this.traitement.setResume("Import échoué");
+ } else {
+ this.traitement.setResume("Avertissements pendant l'import");
+ }
+ this.traitement.setEnd(System.currentTimeMillis());
ps = con.prepareStatement("UPDATE IMPORT_JOBS SET IMPORT_STATUS=? WHERE IMPORT_ID=?");
- if (errors.containsError())
+ if (errors.containsError()) {
ps.setInt(1, IMPORT_JOB_STATUS_ERROR);
- else
+ } else {
ps.setInt(1, IMPORT_JOB_STATUS_TERMINATED);
- ps.setLong(2, importId);
+ }
+ ps.setLong(2, this.importId);
ps.executeUpdate();
ps.close();
} else {
ps = con.prepareStatement("UPDATE IMPORT_JOBS SET IMPORT_STATUS=? WHERE IMPORT_ID=?");
ps.setInt(1, IMPORT_JOB_STATUS_TERMINATED);
- ps.setLong(2, importId);
+ ps.setLong(2, this.importId);
ps.executeUpdate();
ps.close();
- traitement.setEnd(System.currentTimeMillis());
- traitement.setResume("Import terminé");
+ this.traitement.setEnd(System.currentTimeMillis());
+ this.traitement.setResume("Import terminé");
}
- File logFile = MultiThreadAppender.getFile(MDC.get(MultiThreadAppender.MDC_THREAD_KEY));
+ final File logFile = MultiThreadAppender.getFile(MDC.get(MultiThreadAppender.MDC_THREAD_KEY));
if (logFile != null) {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- GZIPOutputStream gz = new GZIPOutputStream(baos);
- FileInputStream fis = new FileInputStream(logFile);
- byte[] buff = new byte[2048];
+ final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ final GZIPOutputStream gz = new GZIPOutputStream(baos);
+ final FileInputStream fis = new FileInputStream(logFile);
+ final byte[] buff = new byte[2048];
int read = 0;
// NOTE(review): as shown, fis.close()/gz.close() sit INSIDE the read loop,
// which would fail on the second iteration — either a misplaced brace or
// elided diff context; confirm against the complete file. The streams are
// also not closed if read/write throws.
while ((read = fis.read(buff)) > 0) {
gz.write(buff, 0, read);
fis.close();
gz.flush();
gz.close();
- traitement.setBinaryData(baos.toByteArray());
- if (dropLogFilesAfterImport)
+ this.traitement.setBinaryData(baos.toByteArray());
+ if (Importer.this.dropLogFilesAfterImport) {
FileUtils.forceDelete(logFile);
+ }
}
MDC.clear();
- } catch (Exception ex) {
- Message msg = traitement.addMessage(Traitement.SEVERITY_ERROR, ex.getMessage());
+ } catch (final Exception ex) {
+ final Message msg = this.traitement.addMessage(Traitement.SEVERITY_ERROR, ex.getMessage());
if (con != null) {
try {
- PreparedStatement ps = con.prepareStatement("UPDATE IMPORT_JOBS SET IMPORT_STATUS=? WHERE IMPORT_ID=?");
+ final PreparedStatement ps = con.prepareStatement("UPDATE IMPORT_JOBS SET IMPORT_STATUS=? WHERE IMPORT_ID=?");
ps.setInt(1, IMPORT_JOB_STATUS_ERROR);
- ps.setLong(2, importId);
+ ps.setLong(2, this.importId);
ps.executeUpdate();
ps.close();
- } catch (SQLException sqlEx) {
+ } catch (final SQLException sqlEx) {
// NOTE(review): this logs the outer 'ex' instead of 'sqlEx' (sqlEx is
// silently dropped), and 'msg' above is never used — both look unintentional.
logger.error("setting status to error", ex);
}
}
} finally {
- if (con != null)
+ if (con != null) {
PoolManager.getInstance().releaseConnection(con);
+ }
}
}
}
private final static class LocalUser implements XemeliosUser {
- private String userId;
+ private final String userId;
- public LocalUser(String userId) {
+ public LocalUser(final String userId) {
super();
this.userId = userId;
}
@Override
public String getId() {
- return userId;
+ return this.userId;
}
@Override
public String getDisplayName() {
- return userId;
+ return this.userId;
}
@Override
- public boolean hasRole(String role) {
+ public boolean hasRole(final String role) {
return true;
}
@Override
- public boolean hasDocument(String document) {
+ public boolean hasDocument(final String document) {
return true;
}
@Override
- public boolean hasCollectivite(String collectivite, DocumentModel dm) {
+ public boolean hasCollectivite(final String collectivite, final DocumentModel dm) {
return true;
}
}
super();
}
- public DocumentsModel loadConfigFromFiles(String repertoires) throws SAXException, ParserConfigurationException, IOException {
+ public DocumentsModel loadConfigFromFiles(final String repertoires) throws SAXException, ParserConfigurationException, IOException {
return __loadConfigFromFiles(repertoires);
}
}