ExportPersonaldokumente.groovy hinzugefügt
This commit is contained in:
277
_Vorlagen/Groovy-Skripte/ExportPersonaldokumente.groovy
Normal file
277
_Vorlagen/Groovy-Skripte/ExportPersonaldokumente.groovy
Normal file
@@ -0,0 +1,277 @@
|
||||
package dvelop.nanunana
|
||||
|
||||
import javax.net.ssl.HttpsURLConnection
|
||||
|
||||
import org.bouncycastle.crypto.digests.RIPEMD256Digest
|
||||
import org.bouncycastle.crypto.io.DigestOutputStream
|
||||
|
||||
import com.dvelop.d3.server.Document
|
||||
import com.dvelop.d3.server.core.D3Interface
|
||||
|
||||
import groovy.json.JsonSlurper
|
||||
//update tabExportPersonaldokumente set done =null, err_code = null, err_msg = null, returncodePdfCreation = null where orgExtension = 'TIF'
|
||||
/*
|
||||
create table tabExportPersonaldokumente (
|
||||
doku_id nvarchar(10),
|
||||
done datetime,
|
||||
err_code int,
|
||||
err_msg nvarchar(250),
|
||||
filehash nvarchar(250),
|
||||
orgExtension nvarchar(10),
|
||||
returncodePdfCreation int
|
||||
);
|
||||
insert into tabExportPersonaldokumente (doku_id, orgExtension) select p.doku_id, p.datei_erw from firmen_spezifisch f join phys_datei p on f.doku_id = p.doku_id where kue_dokuart = 'DPERS' and dok_dat_feld_1 in ('10009');
|
||||
create index idxTabExportPersonaldokumente on tabExportPersonaldokumente(doku_id);
|
||||
create table tabExportMapping(
|
||||
typ nvarchar(50),
|
||||
inhalt nvarchar(50),
|
||||
postfix nvarchar(50)
|
||||
);
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Abmahnung/Notiz', 'Abmahnung', 'ab');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Abmahnung/Notiz', 'Notiz', 'not');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Arbeitsrechtliche Verfahren', 'Schriftverkehr Anwalt', 'sv anw');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Arbeitsrechtliche Verfahren', 'Schriftverkehr Gericht', 'sv ger');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Arbeitsunfähigkeit', 'Reha/Kur', 'reha');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Arbeitsunfähigkeit', 'Sonstiges', 'sons au');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Arbeitsunfähigkeit', 'Unfallanzeige', 'unfall');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Arbeitsunfähigkeit', 'Wiedereingliederung', 'we');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Asienreise', 'Schreiben E&V', 'asien');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Ausbildung/EQ', 'Ausbildung', 'ausb');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Ausbildung/EQ', 'EQ', 'eq');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Beendigung', 'Austrittsunterlagen', 'austritt');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Beendigung', 'Beendigungsschreiben', 'kdg');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Beurteilung/MA-Gespräch', 'Beurteilungen', 'beurt');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Beurteilung/MA-Gespräch', 'MA-Gespräch', 'ma ge');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Bewerbung', 'Bewerbung', 'bew');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Bewerbung', 'nachgereichte Unterlagen', 'nach U');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Corona', 'Kurzarbeit', 'ka co');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Corona', 'Quarantäne', 'qua co');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Corona', 'Sonderzahlung', 'sz co');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Corona', 'Sonstiges', 'sons co');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Dienstwagen', 'KFZ-Überlassung', 'kfz');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Elternzeit/Mutterschutz', 'Beschäftigungsverbot', 'bv');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Elternzeit/Mutterschutz', 'Elternzeit', 'etz');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Elternzeit/Mutterschutz', 'Mutterschutz', 'muschu');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Forderungen', 'Insolvenz', 'insol');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Forderungen', 'Pfändung', 'pfänd');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Forderungen', 'Schuldanerkenntnis', 'schuld');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Forderungen', 'Überzahlung', 'überzahl');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Inventar', 'Inventar', 'inven');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Prämie/Ziele', 'Prämie', 'prämie');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Prämie/Ziele', 'Ziele', 'ziele');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Sonderleistungen', 'Darlehen', 'darl');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Sonderleistungen', 'Dauerreisekostenvorschuss', 'reiseko');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Sonderleistungen', 'Jubiläum', 'jubi');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Sonderleistungen', 'Pensionskasse (HPK)/Direktversicherung', 'pens');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Sonderleistungen', 'Umzugskosten/Mietkosten', 'umzug');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Sonstiges', 'Sonstiges', 'so');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertrag', 'Filialwechsel', 'fw');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertrag', 'Funktionszulage/Positionsveränderung', 'funkt');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertrag', 'Gehaltserhöhung', 'gehalt');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertrag', 'Kurzarbeit', 'ka');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertrag', 'Lohnbescheid CZ', 'lohn cz');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertrag', 'Stundenveränderung', 'stunden');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertrag', 'Vertrag', 'av');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertrag', 'Vertragsverlängerung', 'verl');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertragsergänzungen', 'Einarbeitung', 'eina');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertragsergänzungen', 'Empfangsbestätigungen', 'empfang');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertragsergänzungen', 'Fahrtkostenzuschuss', 'fkz');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertragsergänzungen', 'Homeoffice', 'ho');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertragsergänzungen', 'KiTa-Zuschuss', 'kita');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertragsergänzungen', 'Mankoabrede', 'manko');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vertragsergänzungen', 'Nebentätigkeit', 'neben');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Vollmacht', 'Vollmacht', 'voll');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Weiterbildung', 'extern', 'ext');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Weiterbildung', 'intern', 'int');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Zeugnis', 'IHK/Schule', 'ihk zeug');
|
||||
insert into tabExportMapping(typ, inhalt, postfix) values ('Zeugnis', 'NN/Thevs', 'nn zeug');
|
||||
*/
|
||||
|
||||
// Static configuration plus mutable run-state (cursor and counters) for the
// personnel-document export job.
class Config {
    // d3 document-field (DDF) indices used to read metadata off a Document
    // via doc.field[...].
    class DDF {
        static final def PERS_NR = 1      // personnel number
        static final def DOK_DATUM = 50   // document date (may be null, see export path build)
        static final def DOK_TYP = 7      // document type ("typ" in tabExportMapping)
        static final def DOK_INHALT = 8   // document content ("inhalt" in tabExportMapping)
    }
    // Archive document type this export targets (see the commented SQL above).
    static final def DOC_TYPE = "DPERS"
    // Pagination cursor: highest doku_id processed so far; advanced by the main loop.
    static def LAST_ID = ""
    // Target directory; TIF files go to a TIF subfolder (created by the main script).
    static final def EXPORT_PATH = "g:\\Export"

    // d.velop REST API endpoint settings — intentionally blank here,
    // presumably filled in per environment before running. TODO confirm.
    class API {
        static final def BASE_URI = ""
        static final def REPOSITORY_ID = ""
        static final def API_KEY = ""
    }

    // Progress counters shown in the status bar.
    class Counter {
        static def i = 0    // documents attempted
        static def ok = 0   // exported and hash-verified (or hash-mismatch recorded)
        static def err = 0  // failed with an exception
        static def max = 0  // total pending rows at start
        // NOTE(review): convert_ok / convert_error are displayed in the status
        // bar but never incremented anywhere in this script — dead counters,
        // possibly left over from a PDF-conversion step (cf. returncodePdfCreation).
        static def convert_ok = 0
        static def convert_error = 0
    }

    // Manual personnel-number translation table (old id -> new id), applied
    // before building the export file name. Note '58096' and '60408' both map
    // to '93812'.
    static PERS_NR_MAP = [
        '33648' : '44523'
        , '10092' : '48124'
        , '27456' : '47475'
        , '38009' : '57613'
        , '25428' : '42959'
        , '14180' : '47479'
        , '39907' : '87316'
        , '28110' : '79438'
        , '28173' : '43116'
        , '38831' : '43271'
        , '47901' : '61719'
        , '57499' : '87318'
        , '60703' : '70438'
        , '58096' : '93812'
        , '60408' : '93812'
        , '72840' : '94291'
        , '76168' : '90006'
    ]
}
|
||||
|
||||
// Downloads document blobs from the d.velop DMS REST API, with a cached
// identity-provider session and RIPEMD-256 hashing of the written file.
class Downloader {

    // Session token from the identity provider and the epoch-millis timestamp
    // of when it was obtained (session assumed valid for one hour).
    static def authSessionId, expire

    /**
     * Authenticates against the identity provider using the static API key
     * and caches the returned AuthSessionId plus the acquisition time.
     *
     * @throws Exception when the endpoint does not answer with HTTP 200
     */
    static void login() {
        HttpsURLConnection request = new URL(Config.API.BASE_URI + "/identityprovider/login").openConnection() as HttpsURLConnection
        request.requestMethod = "GET"
        request.setRequestProperty("Origin", Config.API.BASE_URI)
        request.setRequestProperty("Accept", "application/json")
        request.setRequestProperty("Authorization", "Bearer ${Config.API.API_KEY}")

        if (request.responseCode == 200) {
            def json = request.inputStream.withCloseable { stream ->
                new JsonSlurper().parse(stream as InputStream)
            }

            authSessionId = json.AuthSessionId
            expire = new Date().getTime()
        } else {
            throw new Exception("Login fehlgeschlagen -> $request.responseCode")
        }
    }

    /**
     * True once the cached session is older than one hour.
     *
     * BUGFIX: the comparison was inverted ({@code <} instead of {@code >}),
     * which reported "expired" while the session was still fresh — forcing a
     * re-login for every single download — and "valid" once it actually was
     * stale, so an hour-old token would have been reused.
     */
    static boolean isExpired() {
        new Date().getTime() > (expire + 3600000)
    }

    /**
     * Downloads the main blob of the current version of document {@code docId}
     * into {@code filePath} and returns the RIPEMD-256 hash of the file.
     *
     * @param docId    d3 document id
     * @param filePath absolute target path; must not exist yet
     * @return hash string in the form "RIPEMD256:&lt;base64&gt;"
     * @throws Exception when the target file already exists or the request
     *         does not return HTTP 200
     */
    static String downloadDocument(String docId, String filePath) {
        if (authSessionId == null || isExpired()) {
            login()
        }
        def out = new File(filePath)
        if (out.exists()) {
            throw new Exception("Datei bereits vorhanden: $filePath")
        }

        String baseRequest = Config.API.BASE_URI + "/dms/r/${Config.API.REPOSITORY_ID}/o2/${docId}/v/current/b/main/c"

        HttpsURLConnection request = new URL(baseRequest).openConnection() as HttpsURLConnection
        request.requestMethod = "GET"
        request.setRequestProperty("Origin", Config.API.BASE_URI)
        request.setRequestProperty("Accept", "application/json")
        request.setRequestProperty("Authorization", "Bearer $authSessionId")

        if (request.responseCode == 200) {
            request.inputStream.withCloseable { stream ->
                // Stream directly to disk; the previous stream.getBytes()
                // buffered the whole blob in memory first.
                out << stream
            }

            return getRIPEMD256(out)
        } else {
            throw new Exception("Fehler beim Herunterladen von $baseRequest")
        }
    }

    // Computes "RIPEMD256:<base64 digest>" over the file contents. The format
    // matches the file_hash column read from files_datentraeger by the caller.
    static private def getRIPEMD256(File file) {
        // Keep an explicit reference to the digest stream. The old code
        // assigned the result of withStream { ... }, which only happened to be
        // the stream itself because `os << is` returns `os` — too fragile.
        def digestStream = new DigestOutputStream(new RIPEMD256Digest())
        digestStream.withStream { os ->
            file.withInputStream { is -> os << is }
        }
        "RIPEMD256:" + digestStream.getDigest().encodeBase64()
    }
}
|
||||
|
||||
/**
 * Fetches the next batch (at most 100 rows) of pending export candidates:
 * documents after the Config.LAST_ID cursor that have no error code yet,
 * ordered by doku_id so the cursor can advance monotonically.
 *
 * @param d3 d3 scripting interface providing SQL access
 * @return rows with a doku_id column, or an empty result when done
 */
def getDocsToExport(D3Interface d3) {
    d3.sql.executeAndGet(
        "SELECT top 100 doku_id FROM d3pa.dbo.tabExportPersonaldokumente WHERE doku_id > ? and err_code is null order by doku_id",
        [Config.LAST_ID]
    )
}
|
||||
|
||||
/**
 * Resolves the file-name postfix for a document by looking up its type and
 * content in the tabExportMapping table. Exactly one mapping row must match.
 *
 * @param d3  d3 scripting interface providing SQL access
 * @param doc document whose DOK_TYP / DOK_INHALT fields drive the lookup
 * @return the mapped postfix string
 * @throws Exception when no (or more than one) mapping row matches
 */
def getPostfix(D3Interface d3, Document doc) {
    def typ = doc.field[Config.DDF.DOK_TYP]
    def inhalt = doc.field[Config.DDF.DOK_INHALT]

    // With no content field, match mapping rows whose inhalt is NULL;
    // otherwise compare null-safely via isnull() on both sides.
    def rows = (inhalt == null)
        ? d3.sql.executeAndGet("SELECT postfix FROM d3pa.dbo.tabExportMapping WHERE typ = ? and inhalt is null", [typ])
        : d3.sql.executeAndGet("SELECT postfix FROM d3pa.dbo.tabExportMapping WHERE typ = ? and isnull(inhalt, 'NULL') = isnull(?, 'NULL')", [typ, inhalt])

    if (rows?.size() != 1) {
        throw new Exception("Postfix für ${typ}, ${inhalt} konnte nicht ermittelt werden!")
    }
    rows.first().postfix
}
|
||||
|
||||
// --- Main export loop -------------------------------------------------------
// Walks tabExportPersonaldokumente in doku_id order (batches of 100), downloads
// each document via the REST API, verifies the file hash against the d3
// database, and records the outcome back into the work table.

d3 = (D3Interface) getProperty("d3")

// TIF files are exported into their own subfolder.
new File("${Config.EXPORT_PATH}\\TIF").mkdirs()

// Total number of pending rows, for progress display only.
Config.Counter.max = d3.sql.executeAndGet("SELECT count(*) anzahl FROM d3pa.dbo.tabExportPersonaldokumente WHERE err_code is null").first().anzahl

def docs = getDocsToExport(d3)

while (docs?.size() > 0) {
    docs.each {
        // NOTE(review): convert_ok / convert_error are shown here but never
        // incremented anywhere in this script.
        d3.log.statusBar("${Config.Counter.i++}/${Config.Counter.max} - ok=${Config.Counter.ok}, err=${Config.Counter.err}, convert_ok=${Config.Counter.convert_ok}, convert_error=${Config.Counter.convert_error}")
        try {
            // Advance the batch cursor before doing any work, so a failing row
            // is not re-fetched forever (it also gets err_code set below).
            Config.LAST_ID = it.doku_id
            def doc = d3.archive.getDocument(it.doku_id)

            // Mapping-table lookup; throws when no unique mapping exists.
            def postfix = getPostfix(d3, doc)

            // Translate legacy personnel numbers where a mapping is defined.
            def persNr = doc.field[Config.DDF.PERS_NR]
            if ( Config.PERS_NR_MAP.containsKey(persNr) ) {
                d3.log.info("PersNr $persNr wird uebersetzt in ${Config.PERS_NR_MAP[persNr]}")
                persNr = Config.PERS_NR_MAP[persNr]
            } else {
                d3.log.info("PersNr $persNr wird nicht uebersetzt")
            }

            // File name: <persNr>_<dd_MM_yyyy>_<inhalt>_<postfix>_<id>.<ext>;
            // falls back to the archive creation date when DOK_DATUM is empty.
            def exportPath = "${persNr}_${(doc.field[Config.DDF.DOK_DATUM] ?: new Date(doc.created.getTime())).format('dd_MM_yyyy')}_${doc.field[Config.DDF.DOK_INHALT]}_${postfix}_${doc.id}.${doc.fileExtension}"
            // Strip characters that are illegal in Windows file names
            // (done before the directory prefix is added).
            exportPath = exportPath.replaceAll(/[\\/?<>:*]/, "-")
            if (doc.fileExtension?.toUpperCase() == "TIF"){
                exportPath = "${Config.EXPORT_PATH}\\TIF\\$exportPath"
            } else {
                exportPath = "${Config.EXPORT_PATH}\\$exportPath"
            }
            def hash = Downloader.downloadDocument(doc.id, exportPath)

            // Reference hash stored by d3 for the current file version.
            def d3hash = d3.sql.executeAndGet("select isnull(file_hash, md5) file_hash from d3pa.dbo.files_datentraeger where doku_id= ? and index_version = ? ", [it.doku_id, doc.fileIdCurrentVersion]).file_hash.first()
            d3.log.info([it.doku_id, doc.fileIdCurrentVersion])

            // err_code semantics: 0 = exported and verified, 1 = exception,
            // 2 = hash mismatch (file kept on disk either way).
            // NOTE(review): these updates set a `dateiname` column that is not
            // part of the commented CREATE TABLE at the top of this file —
            // presumably added later; confirm the table actually has it.
            if ( hash == d3hash ) {
                d3.sql.execute("update d3pa.dbo.tabExportPersonaldokumente set err_code = 0, err_msg = null, done = getdate(), filehash = ?, dateiname = ? where doku_id = ?", [hash, exportPath, it.doku_id])
            } else {
                d3.log.info( "fileHashd3=${d3hash}")
                d3.log.info( "hashberechnet=$hash")
                d3.sql.execute("update d3pa.dbo.tabExportPersonaldokumente set err_code = 2, err_msg = 'Der berechnete hash stimmt nicht überein!', done = getdate(), filehash = ?, dateiname = ? where doku_id = ?", [hash, exportPath, it.doku_id])
            }

            Config.Counter.ok++
        } catch (Exception e) {
            // Record the failure (truncated to the err_msg column width) and
            // continue with the next document.
            d3.log.error(e.message)
            d3.sql.execute("update d3pa.dbo.tabExportPersonaldokumente set err_code = 1, err_msg = ?, done = getdate(), filehash = null where doku_id = ?", [e.message.take(250), it.doku_id])
            Config.Counter.err++
        }
    }
    docs = getDocsToExport(d3)
}
d3.log.statusBar("Export abgeschlossen: ${Config.Counter.i}/${Config.Counter.max} - ok=${Config.Counter.ok}, err=${Config.Counter.err}, convert_ok=${Config.Counter.convert_ok}, convert_error=${Config.Counter.convert_error}")
return null
|
||||
|
||||
Reference in New Issue
Block a user