Skripte hinzugefügt

This commit is contained in:
2025-11-14 17:11:43 +01:00
parent 5489edbbd4
commit f8a3306af5
8 changed files with 675 additions and 0 deletions

BIN
DMG Dental/.DS_Store vendored Normal file

Binary file not shown.

BIN
Hermes/.DS_Store vendored

Binary file not shown.

BIN
Kleeberg/.DS_Store vendored Normal file

Binary file not shown.

BIN
Optima/.DS_Store vendored

Binary file not shown.

BIN
_Vorlagen/.DS_Store vendored Normal file

Binary file not shown.

View File

@@ -0,0 +1,235 @@
package dds.tools.scripts
import javax.swing.JOptionPane
import com.dvelop.d3.server.core.D3Interface
import groovy.sql.GroovyRowResult
// Technical d.3 user on whose behalf the attribute updates are persisted (see doc.updateAttributes below).
def adminUser = "d3_service"
// NOTE(review): declared but never referenced in the visible part of this script —
// presumably intended to suppress server hooks during updates; confirm before removing.
boolean noHooks = true
/**
 * Global configuration for the migration run: execution mode, SQL row limit,
 * log directory and the list of field-copy rules.
 */
class Config{
    // 0: only write the generated SQL statements to the info log files
    // 1: iterate over all documents, but perform no updates (dry run)
    // 2: actually update the configured documents
    static def MODE = 2
    static TOP_CLAUSE = "TOP 100"
    static def LOG_PATH = "C:\\TEMP"
    static List<Rule> rules = []
    // overall success/failure counters across all rules
    static ok = 0
    static errors = 0

    /**
     * Builds the rule set and makes sure the log directory exists.
     * Throws when the log directory cannot be created.
     */
    static def init(D3Interface d3) {
        d3.log.info("Regelwerk wird erstellt")
        // Rule template: new Rule(d3, "DOCTYPE", [new FieldTupel(sourceFieldNo, targetFieldNo,
        //     /* true = copy only the integer part; null when the conversion fails (default false) */)])
        rules << new Rule(d3, "DEINV", [new FieldTupel(83, 1), new FieldTupel(17, 62)])
        rules << new Rule(d3, "DIERE", [new FieldTupel(55, 1), new FieldTupel(70, 2)])
        rules << new Rule(d3, "AA002", [new FieldTupel(6, 60), new FieldTupel(1, 61)])
        d3.log.info("Es wurden ${rules.size()} Regeln angelegt.")
        File logDir = new File(LOG_PATH)
        if (!(logDir.exists() || logDir.mkdirs())) {
            throw new Exception("Das Logverzeichnis '$LOG_PATH' konnte nicht angelegt werden")
        }
    }
}
/**
 * One migration rule for a single d.3 document type.
 *
 * A rule holds the source->target field mappings (FieldTupel), builds the paging
 * SELECT over firmen_spezifisch once, and maintains its own info/error log files
 * plus per-rule ok/error counters.
 */
class Rule{
    def docType, ok, errors
    List<FieldTupel> mapping
    private String selectQuery
    private File logFileInfo
    private File logFileError
    private D3Interface d3

    /** Convenience constructor for a rule with exactly one field mapping. */
    Rule(D3Interface d3, String docType, FieldTupel fieldTupel) {
        this.d3 = d3
        this.docType = docType
        this.mapping = [fieldTupel]
        init()
    }

    /** Constructor for a rule with several field mappings. */
    Rule(D3Interface d3, String docType, List<FieldTupel> fieldTupelArray){
        this.d3 = d3
        this.docType = docType
        this.mapping = fieldTupelArray
        init()
    }

    /**
     * Resets the counters, opens the per-doctype log files and builds the SELECT
     * statement (keyset pagination via "doku_id > ?"). The statement is written to
     * the info log so MODE 0 runs leave the SQL behind for inspection.
     */
    def init() {
        ok = 0
        errors = 0
        logFileInfo = new File("${Config.LOG_PATH}\\${docType}_info.txt")
        logFileError = new File("${Config.LOG_PATH}\\${docType}_error.txt")
        selectQuery = """SELECT ${Config.TOP_CLAUSE} f.doku_id\r\n, ${mapping.collect{ FieldTupel ft -> ft.getSelectClause() }.join('\r\n, ') }
FROM firmen_spezifisch f
WHERE f.kue_dokuart = '${docType}'
AND f.doku_id > ?
AND (${mapping.collect{ FieldTupel ft -> ft.getWhereClause() }.join('\r\nOR ') })
ORDER BY f.doku_id"""
        logFileInfo << "${selectQuery}\r\n"
    }

    /**
     * Fetches the next batch of documents with doku_id greater than lastId.
     *
     * @param lastId doku_id of the last processed document ("0" for the first batch)
     * @return the selected rows (doku_id plus old/new field values)
     */
    List<GroovyRowResult> selectDocuments(D3Interface d3, def lastId) {
        def params = [lastId]
        logInfo("Selektiere weitere Dokumente mit params=$params")
        d3.sql.executeAndGet(selectQuery, params)
    }

    /** Logs to the d.3 log and appends a timestamped line to the info log file. */
    void logInfo(msg) {
        d3.log.info(msg)
        // Fixed in review: timestamp pattern was "yyyy-dd-MM hh:mm:ss" — day/month
        // transposed and a 12-hour clock without am/pm; use the unambiguous ISO-like form.
        logFileInfo << """${new Date().format("yyyy-MM-dd HH:mm:ss")};${(Config.MODE != 2) ? 'SIMULATION!;' : ''}$msg\r\n"""
    }

    /** Logs to the d.3 error log and appends a timestamped line to the error log file. */
    void logError(def msg) {
        d3.log.error(msg)
        // Same timestamp fix as in logInfo().
        logFileError << """${new Date().format("yyyy-MM-dd HH:mm:ss")};$msg\r\n"""
    }

    /** @return human-readable summary of the rule (document type plus all mappings). */
    String toString() {
        def mappingString = mapping.collect {
            it.toString()
        }.join(", ")
        "dokumentart: $docType, Mapping: $mappingString"
    }
}
/**
 * Maps one source DB field position to one target DB field position and renders
 * the SQL fragments used to select and compare the two.
 *
 * Allowed targets: positions 1-49 (string fields) and 60-69 (multi-value fields);
 * multi-value positions (60-69) are rejected as sources.
 */
class FieldTupel{
    def oldDbField, newDbField, toInteger

    FieldTupel(def oldDbField, def newDbField, def toInteger = false) {
        this.oldDbField = oldDbField
        this.newDbField = newDbField
        this.toInteger = toInteger
        boolean stringTarget = newDbField >= 1 && newDbField <= 49
        boolean multiTarget = newDbField >= 60 && newDbField <= 69
        if (stringTarget) {
            println("Zielfeld $newDbField ist STRING")
        } else if (multiTarget) {
            println("Zielfeld $newDbField ist MULTI")
        } else {
            throw new Exception("DB Position $newDbField ist nicht erlaubt als Ziel!")
        }
        if (oldDbField >= 60 && oldDbField <= 69) {
            throw new Exception("DB Position $oldDbField ist nicht erlaubt als Quelle!")
        }
    }

    /** SQL expression rendering field {@code dbPos} as nvarchar (dates and decimals via TRY_CONVERT). */
    private getConvertStr( def dbPos ) {
        boolean dateField = dbPos >= 50 && dbPos <= 59
        boolean decimalField = dbPos >= 70 && dbPos <= 79
        if (dateField) {
            return "TRY_CONVERT(nvarchar, f.dok_dat_feld_${dbPos}, 104)"
        }
        if (decimalField) {
            return "TRY_CONVERT(nvarchar, f.dok_dat_feld_${dbPos})"
        }
        // plain string field; optionally keep only the integer part
        return (toInteger
            ? "CONVERT(nvarchar, TRY_CONVERT(int, f.dok_dat_feld_${dbPos}))"
            : "CONVERT(nvarchar, f.dok_dat_feld_${dbPos})")
    }

    /** SQL predicate selecting rows whose source value still needs to be copied to the target. */
    def getWhereClause() {
        boolean multiTarget = newDbField >= 60 && newDbField <= 69
        if (!multiTarget) {
            return "(f.dok_dat_feld_${oldDbField} IS NOT NULL AND (f.dok_dat_feld_${newDbField} IS NULL OR ${getConvertStr(oldDbField)} <> f.dok_dat_feld_${newDbField} ))".toString()
        }
        return "(f.dok_dat_feld_${oldDbField} IS NOT NULL AND NOT EXISTS(SELECT top 1 * FROM firm_spez_mult_val m WHERE f.doku_id = m.doku_id and m.field_no = ${newDbField} AND value_char = ${getConvertStr(oldDbField)} ))".toString()
    }

    /** SQL select-list fragment exposing both target and source value for logging. */
    def getSelectClause() {
        def targetExpr = (newDbField < 60 || newDbField > 69) ? getConvertStr(newDbField) : "'Werte von 60ger Feldern koennen nicht gelogt werden!'"
        """${ targetExpr } as dok_dat_feld_${newDbField}, ${getConvertStr(oldDbField)} as dok_dat_feld_${oldDbField}"""
    }

    /** @return compact description of this mapping. */
    String toString(){
        "[oldDbPos=$oldDbField, newDbPos=$newDbField, toInteger=$toInteger]"
    }
}
/**
 * Writes the progress of rule {@code c} plus the overall counters to the d.3 status bar.
 *
 * @param d3      d.3 server interface
 * @param c       zero-based index of the rule currently being processed
 * @param running true while the script is still working; false for the final summary
 */
def printStatus(D3Interface d3, Integer c, Boolean running = true){
    // Fixed in review: the per-rule error counter was shown under a duplicated "Ok:"
    // label ("Fehler: Ok: ${...errors}"), plus typos "SIMULATON" and "Gesamkt".
    d3.log.statusBar("${ (Config.MODE != 2) ? '(SIMULATION)' : '' }Verarbeite Regel ${c + 1}/${Config.rules.size()}: Ok: ${Config.rules[c].ok}, Fehler: ${Config.rules[c].errors} | Gesamt Ok: ${Config.ok}, Fehler: ${Config.errors} ${running ? '...' : '-Script beendet'}")
}
// --- main script -------------------------------------------------------------
// NOTE(review): these two counters are never incremented below — Config.ok /
// Config.errors are used instead; presumably leftovers from an earlier version.
def errors = 0
def updated = 0
// The d.3 scripting runtime injects the server interface as property "d3".
D3Interface d3 = getProperty("d3")
try {
d3.log.info("Script ${this.getClass().toString()} gestartet")
d3.log.statusBar("Script ${this.getClass().toString()} gestartet")
Config.init( d3 )
d3.log.info("Es wurden ${Config.rules.size()} Regeln hinterlegt.")
// MODE 0: the SQL statements were already written to the info logs by Rule.init();
// just inform the user and stop.
if (Config.MODE == 0) {
d3.log.statusBar("Dialog ist geoeffnet!")
JOptionPane.showMessageDialog(null, "Es wurden die SQL-Statements generiert und in die Info-Log Dateien geschrieben.\nMode=0")
d3.log.statusBar("Script beendet.")
return null
}
// Manual confirmation before any document is touched.
if (JOptionPane.OK_OPTION != JOptionPane.showConfirmDialog(null, "Es wurden ${Config.rules.size()} Regeln hinterlegt. Soll das Script ausgefuehrt werden?")) {
d3.log.error("Script durch Benutzer abgebrochen!")
d3.log.statusBar("Script durch Benutzer abgebrochen!")
return 0
}
if (Config.rules.size() > 0) {
Config.rules.eachWithIndex { Rule rule, def index ->
rule.logInfo("START *****************************************************")
rule.logInfo(rule.toString())
// Keyset pagination: each batch selects doku_id > last id of the previous batch,
// starting at "0".
def rows = rule.selectDocuments(d3, "0")
while (rows != null && rows.size() > 0) {
rule.logInfo("Anzahl ermittelter Dokumente: ${rows.size()}")
rows.each{ r ->
try {
def doc = d3.archive.getDocument(r.doku_id)
rule.mapping.each { m ->
def newValue = r["dok_dat_feld_${m.oldDbField}"]
d3.log.info("""setField ${m.newDbField} to ${newValue}""")
if ( m.newDbField < 60 || m.newDbField > 69 ) {
// Single-value target field: plain assignment.
doc.field[ m.newDbField ] = r["dok_dat_feld_${m.oldDbField}"]
} else {
// Multi-value (60er) target: write slot 1 and clear every further slot up to
// the configured maximum (config value CUR_60ER_FIELD_NR).
doc.field[ m.newDbField ][1] = r["dok_dat_feld_${m.oldDbField}"]
for ( def j = 2; j <= d3.config.value("CUR_60ER_FIELD_NR").toInteger(); j++ ) {
doc.field[ m.newDbField ][j] = null
}
}
}
// Only MODE 2 persists the change; MODE 1 is a dry run.
if (Config.MODE == 2) {
doc.updateAttributes(adminUser, true)
}
Config.ok++
rule.ok++
rule.logInfo("Dokument aktualisiert $r")
} catch (Exception e) {
// Per-document failures are logged and counted; the run continues.
d3.log.error("$r -> ${e.message}")
rule.logError("${r.doku_id};${e.message}")
rule.errors++
Config.errors++
}
printStatus(d3, index)
}
// Next batch: continue after the highest doku_id of the current batch.
rows = rule.selectDocuments(d3, rows.doku_id.last())
}
rule.logInfo("Keine (weiteren) Dokumente ermittelt")
rule.logInfo("Zusammenfassung: Ok: ${rule.ok}, Fehler: ${rule.errors}")
rule.logInfo("ENDE *****************************************************")
}
}
printStatus(d3, Config.rules.size() - 1, false)
} catch (Exception e) {
e.printStackTrace()
d3.log.statusBar("Bei der Verarbeitung ist ein Fehler aufgetreten. Bitte kontrollieren Sie das d3 log. - Dialog geöffnet")
JOptionPane.showMessageDialog(null, "Bei der Verarbeitung ist ein Fehler aufgetreten.\r\nBitte kontrollieren Sie das d3 log.")
d3.log.statusBar("Bei der Verarbeitung ist ein Fehler aufgetreten. Bitte kontrollieren Sie das d3 log.")
}
d3.log.info("Script ${this.getClass().toString()} beendet")
//d3.log.statusBar("Script ${this.getClass().toString()} beendet")
return 0

View File

@@ -0,0 +1,258 @@
import groovy.json.JsonBuilder
import groovyx.net.http.FromServer
import groovyx.net.http.HttpBuilder
import groovyx.net.http.ContentTypes
import java.text.SimpleDateFormat
import java.time.YearMonth
/**
 * Static configuration and shared state for the upload sample:
 * credentials, target system, log file handling and the shared HTTP client.
 */
class Configuration
{
// API key used as Bearer token for the identityprovider login (fill in before running)
public static String API_KEY = "{yourApiKey}"
// Base Url of d.3 system, to target API endpoints
public static String baseUrl = "https://{yourUrl}.de"
// repository of destination, is used to target right repository for searching documents
public static String repositoryId = "{yourRepostoryId}"
// directory and file the log() function writes to (one file per script run)
public static String logDirPath = "./log/"
public static File logFile = new File(logDirPath + System.currentTimeMillis() + "_log.csv")
// common path prefix of all DMS endpoints for the configured repository
public static String dmsUrl = "/dms/r/" + repositoryId
// shared HTTP client, created once at script start
public static HttpBuilder httpBuilder = null
// session token and expiry timestamp as returned by login()
public static String authSessionId = ""
public static String sessionExpire = ""
// NOTE(review): never assigned or read in the visible code — candidate for removal
public static File csvLog = null
}
// Required so the JDK HTTP stack allows setting the restricted Origin header below.
System.setProperty( "sun.net.http.allowRestrictedHeaders", "true")
// Create the shared HttpBuilder against the configured base URL.
log("Create httpBuilder")
Configuration.httpBuilder = HttpBuilder.configure {
request.uri = Configuration.baseUrl
request.headers['Accept'] = 'application/json'
request.headers['Origin'] = Configuration.baseUrl
}
log("httpBuilder created")
// Script flow: authenticate, then upload one sample document.
login()
uploadDocument()
/**
 * Uploads a freshly generated temp file as a binary chunk to the DMS blob endpoint.
 * On success the Location header of the response is handed to
 * saveNewUploadedDocument(), which attaches the metadata. Re-authenticates first
 * when the current session is missing or expired.
 */
void uploadDocument(){
    if(isLoginExpired()) {
        login()
    }
    String requestUUID = UUID.randomUUID().toString()

    // Make sure ./Tmp/ exists, then (re)write the dummy payload file.
    File tmpDir = new File("./Tmp/")
    if(!(tmpDir.exists() && tmpDir.isDirectory())) {
        tmpDir.mkdirs()
    }
    File payload = new File("./Tmp/mydoc.hc")
    payload.createNewFile()
    payload.text = "New mydoc: " + System.currentTimeMillis()

    Configuration.httpBuilder.post {
        request.uri.path = Configuration.dmsUrl + "/blob/chunk/"
        request.headers['Authorization'] = 'Bearer ' + Configuration.authSessionId
        request.headers['Accept'] = 'application/hal+json'
        request.headers['x-dv-request-id'] = requestUUID
        request.contentType = ContentTypes.BINARY[0]
        request.body = payload.bytes
        response.exception { e ->
            log("RequestUUID: ${requestUUID} - Upload exception: ${e.message}")
        }
        response.failure { f ->
            log("RequestUUID: ${requestUUID} - Upload failed: ${f.message}")
            // On authorization errors acquire a fresh session.
            // NOTE: the upload itself is not retried here.
            boolean authProblem = f.message.toString().equals("Unauthorized") || f.message.toString().equals("Forbidden")
            if(authProblem) {
                login()
            }
        }
        response.success { s, bytes ->
            // The blob location is returned in the Location response header.
            String locationUrl = FromServer.Header.find(s.getHeaders(), "Location").parsed
            if(locationUrl != null && !locationUrl.equals("")) {
                log("RequestUUID: ${requestUUID} - Upload of binary successful -> locationUrl: ${locationUrl} ")
                // Now assign the metadata to the uploaded blob.
                saveNewUploadedDocument(locationUrl)
            }
        }
    }
}
/**
 * Attaches source metadata (filename, category, properties) to a previously
 * uploaded blob via the /o2m mapping endpoint, turning it into a DMS document.
 * Re-authenticates first when the current session is missing or expired.
 *
 * @param locationUrl blob location returned by the chunk upload (Location header)
 */
void saveNewUploadedDocument(String locationUrl){
    // check if login is valid
    if(isLoginExpired()) {
        login()
    }
    String requestUUID = UUID.randomUUID().toString()
    Map bodyMap = new HashMap<>()
    bodyMap.put("filename", "mydoc.hc")
    bodyMap.put("sourceCategory", "DTEST")
    bodyMap.put("sourceId", Configuration.dmsUrl + "/source")
    bodyMap.put("contentLocationUri", locationUrl)
    // BUG FIX: the original reused ONE map and called put("key", ...) twice, so the
    // second property silently overwrote the first and only {key=2, value=myDocValue2}
    // was sent. Each property must be its own {key, value} entry in the list.
    List propertiesList = new ArrayList()
    Map firstProperty = new HashMap<>()
    firstProperty.put("key", "1")
    firstProperty.put("value", "myDocValue")
    propertiesList.add(firstProperty)
    Map secondProperty = new HashMap<>()
    secondProperty.put("key", "2")
    secondProperty.put("value", "myDocValue2")
    propertiesList.add(secondProperty)
    Map propertiesMap = new HashMap()
    propertiesMap.put("properties", propertiesList)
    bodyMap.put("sourceProperties", propertiesMap)
    JsonBuilder jsonBuilder = new JsonBuilder()
    jsonBuilder.content = bodyMap
    Configuration.httpBuilder.post {
        request.uri.path = Configuration.dmsUrl + "/o2m"
        request.headers['Authorization'] = 'Bearer ' + Configuration.authSessionId
        request.headers['Accept'] = 'application/hal+json'
        request.headers['x-dv-request-id'] = requestUUID
        request.contentType = ContentTypes.JSON[0]
        request.body = jsonBuilder.toPrettyString()
        // Log the raw response body of the mapping call.
        response.parser(ContentTypes.JSON[0]) {config, resp ->
            String responseText = resp.inputStream.getText()
            log("RequestUUID: ${requestUUID} - ResponseText: ${responseText}")
        }
        response.exception { e ->
            log("RequestUUID: ${requestUUID} - Save uploaded file exception: ${e.message}")
        }
        response.failure { f ->
            log("RequestUUID: ${requestUUID} - Save uploaded file failed: ${f.message}")
            // On authorization errors acquire a fresh session (the request itself is not retried here).
            if(f.message.toString().equals("Unauthorized") || f.message.toString().equals("Forbidden") ) {
                login()
            }
        }
        response.success { s ->
            log("RequestUUID: ${requestUUID} - Save uploaded file successful")
            // The new document id is embedded in the Location header: .../o2m/{docId}?...
            String locationHeader = FromServer.Header.find(s.getHeaders(), "Location").parsed
            if(locationHeader != null && !locationHeader.equals("")) {
                String[] locationParts = locationHeader.split("/o2m/")
                if(locationParts.size() == 2) {
                    String[] secondParts = locationParts[1].split("\\?")
                    String docId = secondParts[0]
                    log("RequestUUID: ${requestUUID} - Save uploaded file successful: DocId - " + docId)
                }
            }
        }
    }
}
/**
 * Authenticates against the identity provider using the configured API key and
 * stores authSessionId / expire in Configuration. Falls back to upper-case JSON
 * field names because some backends capitalize them.
 */
void login() {
    String requestUUID = UUID.randomUUID().toString()
    Configuration.httpBuilder.get {
        request.uri.path = '/identityprovider/login'
        request.headers['Authorization'] = 'Bearer ' + Configuration.API_KEY
        request.headers['x-dv-request-id'] = requestUUID
        request.contentType = ContentTypes.URLENC
        response.exception { e ->
            log("RequestUUID: ${requestUUID} - Login exception")
        }
        response.failure { f ->
            log("RequestUUID: ${requestUUID} - Login failed: ${f.message}")
        }
        response.success { s, json ->
            log("RequestUUID: ${requestUUID} - Login success")
            Configuration.authSessionId = json.getAt("authSessionId")
            Configuration.sessionExpire = json.getAt("expire")
            // treat null, "" and the literal string "null" as missing
            def missing = { v -> v == null || v.equals("") || v.equals("null") }
            if(missing(Configuration.authSessionId)) {
                log("AuthSessionId not given with first letter small, try upper case")
                Configuration.authSessionId = json.getAt("AuthSessionId")
            }
            if(missing(Configuration.sessionExpire)) {
                log("Expire not given with first letter small, try upper case")
                Configuration.sessionExpire = json.getAt("Expire")
            }
        }
    }
}
/**
 * Checks whether a new login is required.
 *
 * @return true when no authSessionId is held, no expire timestamp is known,
 *         or the stored expire timestamp lies in the past
 */
boolean isLoginExpired() {
    boolean result = false
    if(Configuration.authSessionId == null || Configuration.authSessionId.equals("")) {
        result = true
    }
    if(Configuration.sessionExpire == null || Configuration.sessionExpire.equals("")) {
        result = true
    } else {
        long nowTimestamp = System.currentTimeMillis()
        // The expire value ends in a literal 'Z' (see the pattern), i.e. it is an
        // ISO-8601 UTC timestamp. BUG FIX: SimpleDateFormat treats 'Z' in quotes as a
        // literal and parsed the value in the JVM's local zone, shifting the expiry by
        // the local UTC offset; force UTC so the comparison is correct everywhere.
        SimpleDateFormat inputFormat = new SimpleDateFormat("yyyy-MM-dd'T'H:m:s.S'Z'")
        inputFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
        Date expireDate = inputFormat.parse(Configuration.sessionExpire)
        long expireTimestamp = expireDate.time
        if(nowTimestamp >= expireTimestamp) {
            result = true
        }
    }
    return result
}
/**
 * Prints a timestamped message to stdout and appends it to the log file,
 * creating the log directory and file on first use.
 *
 * @param message text to log
 */
void log(String message) {
    String stamp = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date(System.currentTimeMillis()))
    String line = stamp + " : ${message}"
    println(line)
    if(!Configuration.logFile.exists()) {
        // lazily create the log directory before the first write
        File logDir = new File(Configuration.logDirPath)
        boolean dirReady = logDir.exists() && logDir.isDirectory()
        if(!dirReady) {
            logDir.mkdirs()
        }
        Configuration.logFile.createNewFile()
    }
    Configuration.logFile.append(line + "\n")
}

View File

@@ -0,0 +1,182 @@
// d.3 hook script (JPL): migrates a single-value attribute from one DB field to
// another for every document of one document type; in "move" mode the source
// field is cleared afterwards. Progress is appended to a log file.

// Make the server persist attribute changes performed through the API.
SAVE_SRV_CHANGES_ATTRIBUTES = "1"
// Safety prompt — abort unless the operator explicitly confirms.
msg query "Wollen Sie wirklich die Migration 'Attribut Einfachfeld' durchführen?!" confirm_global
if !confirm_global
return
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
//---------------------- General configuration section ------------------------------
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
vars log_folder = "D\:\\Migration" // parent directory of the log file
vars log_file = "change_field.log" // name of the log file
vars doc_type = "DPRVZ" // short name (Kuerzel) of the document type
vars old_field = "20" // source field number (the original comment said multi-value field,
                      // but attribute_update_single below writes single values — TODO confirm)
vars new_field = "31" // target field number
vars mode = "move" // mode: move/copy ("move" clears the source field after copying)
// CAUTION: the SQL statement below may need additional adaptation
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
//----------------- End of general configuration section ----------------------------
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
// Build the full path of the log file.
vars logpath = log_folder ## "\\" ## log_file
vars retval = check_folders_and_files()
if(retval != 0)
{
return 1
}
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
//---------------------- SQL configuration section ----------------------------------
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
// Select every document of the configured type whose source field is filled.
// :old_field / :+doc_type are JPL variable substitutions — presumably :+ adds
// quoting while the plain colon form substitutes text; verify against the d.3 docs.
vars value_help, value_value
DBMS DECLARE value_cursor CURSOR FOR \
select doku_id, dok_dat_feld_:old_field from firmen_spezifisch \
where kue_dokuart = :+doc_type \
and dok_dat_feld_:old_field is not null
DBMS WITH CURSOR value_cursor ALIAS value_help, value_value
DBMS WITH CURSOR value_cursor EXECUTE
// Iterate over the result set; @dmrowcount reflects the rows of the last fetch.
while (@dmrowcount > 0)
{
call change_doc_type(value_help, value_value)
DBMS WITH CURSOR value_cursor CONTINUE
}
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
//----------------- End of SQL configuration section --------------------------------
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
call write_log("Ende Kopieraktion")
msg emsg "Skipt wurde beendet"
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
//---------------------- Function section --------------------------------------------
//-----------------------------------------------------------------------------------
//-----------------------------------------------------------------------------------
// Writes `value` into field new_field of document `doc_id` via attribute_update_single.
// In "move" mode the source field old_field is cleared afterwards. Every outcome
// (success/failure of copy and delete) is appended to the log file.
proc change_doc_type(doc_id, value)
{
msg d_msg "Bearbeitete " ## :+doc_id
SERVER_API_NO_HOOKS = 1 // suppress server-side hook scripts for this API call
vars retval_check = api_function( "attribute_update_single", new_field, value, 1, doc_id, "Master")
SERVER_API_NO_HOOKS = 0 // re-enable hooks
vars text
if (retval_check == 0)
{
text = doc_id ## " - " ## new_field ## " - erfolgreich kopiert"
call write_log(text)
if(mode == "move")
{
SERVER_API_NO_HOOKS = 1 // suppress hooks again for the delete
// Clearing the source field = writing an empty value into position 1.
// NOTE(review): `vars retval_check` is declared a second time here — confirm the
// JPL runtime tolerates re-declaration within one proc.
vars retval_check = api_function( "attribute_update_single", old_field, "", 1, doc_id, "Master")
SERVER_API_NO_HOOKS = 0 // re-enable hooks
if (retval_check == 0)
{
text = doc_id ## " - " ## old_field ## " - erfolgreich gelöscht"
call write_log(text)
}
else
{
text = doc_id ## " - " ## old_field ## " - Löschen fehlgeschlagen!"
call write_log(text)
}
}
}
else
{
text = doc_id ## " - " ## new_field ## " - Kopieren fehlgeschlagen!"
call write_log(text)
}
}
// Validates the logging prerequisites: the log folder exists (or is created) and
// the log file does not exist yet; then writes the start marker to the log.
// Returns 1 on any problem, otherwise no explicit return value.
proc check_folders_and_files()
{
// Verify / create the folder of the log file.
retval = check_folders()
if (retval == 1)
{
msg emsg "Angegebener Ordner für Logausgabe existiert nicht und konnte nicht angelegt werden -> Abbruch"
return 1
}
// Refuse to run when the log file already exists (1 = file exists).
retval = api_function ("file_exist", logpath)
if (retval == 1)
{
msg emsg "Logdatei existiert bereits -> Abbruch"
return 1
}
call write_log("Start Kopieraktion")
// NOTE(review): this check still sees the file_exist result from above — write_log
// uses its own local `retval` — so it only fires when file_exist returned an error
// code. Presumably it was meant to validate the log write itself; TODO confirm.
if (retval != 0 && retval != 1)
{
msg emsg "Logdatei konnte nicht erstellt werden -> Abbruch"
return 1
}
}
// Makes sure the configured log folder exists, creating it when missing.
// Returns 1 when creation fails; otherwise no explicit return value.
proc check_folders()
{
// Check whether the working directory already exists.
vars retval
retval = api_function( "directory_exist", log_folder)
if (retval == 0) // 0 = folder does not exist -> try to create it
{
retval = api_function ("directory_create", log_folder)
if (retval != 0) // creation failed -> abort
{
return 1
}
}
}
// Appends `text`, prefixed with a "dd.mm.yyyy hh:mm:ss" timestamp, to the log file.
proc write_log(text)
{
vars tag_uhrzeit
// api_single_info carries the formatted result of get_current_datetime.
call api_function("get_current_datetime", "%0d.%0m.%4y %0h\:%0M\:%0s")
tag_uhrzeit = api_single_info
// Timestamp + the given text make up one log line.
text = tag_uhrzeit ## " - " ## text
vars retval
retval = api_function ("file_add_line", logpath, text, "APPEND")
}