blob: 1812528d4cd6204e5fd616ecb73969887a094465 [file] [log] [blame]
/**
*
* Copyright (c) 2011, 2016 - Loetz GmbH&Co.KG (69115 Heidelberg, Germany)
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License 2.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Christophe Loetz (Loetz GmbH&Co.KG) - initial implementation
*
*
* This copyright notice shows up in the generated Java code
*
*/
package org.eclipse.osbp.xtext.datainterchange.jvmmodel
import com.vaadin.shared.ui.label.ContentMode
import com.vaadin.ui.Button
import com.vaadin.ui.Button.ClickEvent
import com.vaadin.ui.Button.ClickListener
import com.vaadin.ui.Component
import com.vaadin.ui.HorizontalLayout
import com.vaadin.ui.Label
import com.vaadin.ui.NativeButton
import com.vaadin.ui.Panel
import com.vaadin.ui.ProgressBar
import com.vaadin.ui.UI
import java.io.BufferedInputStream
import java.io.BufferedOutputStream
import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.IOException
import java.io.InputStream
import java.io.PrintWriter
import java.io.StringReader
import java.io.StringWriter
import java.net.MalformedURLException
import java.net.URI
import java.net.URISyntaxException
import java.net.URL
import java.nio.file.FileAlreadyExistsException
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import java.nio.file.StandardOpenOption
import java.text.MessageFormat
import java.util.ArrayList
import java.util.Arrays
import java.util.Date
import java.util.HashMap
import java.util.List
import java.util.MissingResourceException
import java.util.Properties
import java.util.UUID
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
import javax.inject.Inject
import javax.persistence.EntityManager
import javax.persistence.EntityTransaction
import javax.persistence.TypedQuery
import javax.persistence.criteria.CriteriaBuilder
import javax.persistence.criteria.CriteriaQuery
import javax.persistence.criteria.JoinType
import javax.persistence.criteria.Root
import javax.validation.ConstraintViolation
import javax.validation.ConstraintViolationException
import javax.xml.parsers.DocumentBuilderFactory
import javax.xml.transform.OutputKeys
import javax.xml.transform.Transformer
import javax.xml.transform.TransformerConfigurationException
import javax.xml.transform.TransformerFactory
import javax.xml.transform.dom.DOMSource
import javax.xml.transform.stream.StreamResult
import javax.xml.transform.stream.StreamSource
import org.apache.commons.lang.StringEscapeUtils
import org.apache.log4j.lf5.util.StreamUtils
import org.eclipse.e4.core.di.extensions.EventUtils
import org.eclipse.e4.core.services.events.IEventBroker
import org.eclipse.emf.common.util.EList
import org.eclipse.emf.ecore.EObject
import org.eclipse.emf.ecore.resource.Resource
import org.eclipse.emf.ecore.util.EcoreUtil
import org.eclipse.osbp.blob.service.BlobService
import org.eclipse.osbp.blob.service.BlobTypingAPI
import org.eclipse.osbp.datainterchange.api.DataInterchangeException
import org.eclipse.osbp.datainterchange.api.IDataInterchange
import org.eclipse.osbp.dsl.entity.xtext.extensions.EntityTypesBuilder
import org.eclipse.osbp.dsl.entity.xtext.extensions.ModelExtensions
import org.eclipse.osbp.dsl.semantic.common.types.LAnnotationTarget
import org.eclipse.osbp.dsl.semantic.common.types.LAttribute
import org.eclipse.osbp.dsl.semantic.common.types.LDataType
import org.eclipse.osbp.dsl.semantic.common.types.LFeature
import org.eclipse.osbp.dsl.semantic.common.types.LReference
import org.eclipse.osbp.dsl.semantic.entity.LEntity
import org.eclipse.osbp.dsl.semantic.entity.LEntityAttribute
import org.eclipse.osbp.dsl.semantic.entity.LEntityFeature
import org.eclipse.osbp.dsl.semantic.entity.LEntityReference
import org.eclipse.osbp.eventbroker.EventBrokerMsg
import org.eclipse.osbp.preferences.ProductConfiguration
import org.eclipse.osbp.runtime.common.event.EventDispatcherEvent
import org.eclipse.osbp.runtime.common.event.EventDispatcherEvent.EventDispatcherCommand
import org.eclipse.osbp.ui.api.datamart.IDataMart.EType
import org.eclipse.osbp.utils.entityhelper.DataType
import org.eclipse.osbp.xtext.addons.EObjectHelper
import org.eclipse.osbp.xtext.basic.generator.BasicDslGeneratorUtils
import org.eclipse.osbp.xtext.datainterchange.DataInterchange
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBean
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBlobMapping
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeEntityExpression
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFile
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileCSV
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileEDI
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileXML
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFormat
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeLookup
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeMapping
import org.eclipse.osbp.xtext.datainterchange.DataInterchangePackage
import org.eclipse.osbp.xtext.datainterchange.DataInterchangePredefinedExpression
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeValueMapping
import org.eclipse.osbp.xtext.datainterchange.EntityManagerMode
import org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable
import org.eclipse.osbp.xtext.entitymock.common.IEntityImportInitializationListener
import org.eclipse.osbp.xtext.i18n.DSLOutputConfigurationProvider
import org.eclipse.osbp.xtext.i18n.I18NModelGenerator
import org.eclipse.persistence.config.PersistenceUnitProperties
import org.eclipse.persistence.config.PessimisticLock
import org.eclipse.persistence.config.QueryHints
import org.eclipse.xtext.generator.IFileSystemAccess
import org.eclipse.xtext.generator.IOutputConfigurationProvider
import org.eclipse.xtext.naming.IQualifiedNameProvider
import org.eclipse.xtext.naming.QualifiedName
import org.eclipse.xtext.xbase.compiler.GeneratorConfig
import org.eclipse.xtext.xbase.compiler.ImportManager
import org.eclipse.xtext.xbase.jvmmodel.IJvmDeclaredTypeAcceptor
import org.osgi.framework.Bundle
import org.osgi.framework.BundleContext
import org.osgi.framework.FrameworkUtil
import org.osgi.service.event.Event
import org.osgi.service.event.EventHandler
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import org.w3c.dom.Document
import org.w3c.dom.Element
import org.xml.sax.SAXException
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeExportHide
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeExportFilter
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeGroup
import org.eclipse.xtext.common.types.JvmAnnotationReference
import org.eclipse.xtext.common.types.JvmGenericType
import org.eclipse.osbp.runtime.common.annotations.CreateBy
import org.eclipse.xtext.common.types.JvmAnnotationType
import org.eclipse.osbp.runtime.common.annotations.Dirty
import org.eclipse.osbp.runtime.common.annotations.UpdateAt
import org.eclipse.osbp.runtime.common.annotations.UpdateBy
import org.eclipse.osbp.runtime.common.annotations.CreateAt
/**
 * Simple value holder used while collecting Smooks &lt;param&gt; entries:
 * {@code value} becomes the parameter's text content and {@code modifiers}
 * holds additional XML attributes (attribute name -> value) to set on the
 * generated &lt;param&gt; element.
 */
class ParameterValue {
// extra XML attributes for the generated <param> element (name -> value)
var public HashMap<String,String> modifiers = <String,String>newHashMap()
// text content of the generated <param> element
var public String value
}
/**
 * Generator for the DataInterchange DSL: produces the Smooks import/export
 * configuration XML files for every modeled DataInterchange and registers
 * i18n keys via the inherited I18NModelGenerator machinery.
 */
class DataDSLModelGenerator extends I18NModelGenerator {
@Inject extension IQualifiedNameProvider
@Inject extension ModelExtensions
@Inject extension EntityTypesBuilder
@Inject extension IOutputConfigurationProvider outputConfig
@Inject extension BasicDslGeneratorUtils
@Inject
@Extension
private DataType dtType;
// i18n caption keys registered as translatables in doGenerate()
val static protected CAPTION__REPFIX_I18NKEY_EXPORT = "export"
val static protected CAPTION__REPFIX_I18NKEY_IMPORT = "import"
// package name of the last DataInterchangePackage handled by generatePckgName()
var public static String pckgName = null
// shared XML infrastructure used to build and serialize the Smooks documents
val DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
val transformerFactory = TransformerFactory.newInstance()
// configured lazily in doGenerate() (indentation, encoding, media type)
var Transformer transformer = null
var db = dbf.newDocumentBuilder()
var domImpl = db.DOMImplementation
/**
 * Derives an i18n key from a display name: every non-word character is
 * replaced by '_' and the result is lower-cased; when a package name is
 * given, the key is prefixed with "&lt;package&gt;.".
 *
 * @param name the raw display name, may be null
 * @param packageName optional qualified name used as key prefix, may be null
 * @return the generated key, or null when name is null
 */
def String generateKey(String name, QualifiedName packageName) {
	var pattern = "(\\W)"
	if (name !== null) {
		var newName = name.replaceAll(pattern ,"_").toLowerCase
		// (a leftover System.out.println debug statement was removed here)
		if (packageName !== null) {
			return packageName.toString.concat(".").concat(newName)
		}
		else {
			return newName
		}
	}
	return null
}
/**
 * Remembers the DSL package name in the static {@link #pckgName} field.
 * The acceptor parameter is unused here; it keeps the signature uniform
 * with the other generate* infrastructure methods.
 */
def void generatePckgName(DataInterchangePackage pkg, IJvmDeclaredTypeAcceptor acceptor) {
pckgName = pkg.name
}
/**
 * Main generation entry point: registers the translatable captions, sets up
 * the pretty-printing XML transformer, then emits per package the default
 * path configuration and per DataInterchange the Smooks import and export
 * configuration stubs, before delegating to the superclass i18n generation.
 */
override doGenerate(Resource input, IFileSystemAccess fsa) {
addTranslatables('''«CAPTION__REPFIX_I18NKEY_EXPORT»,«CAPTION__REPFIX_I18NKEY_IMPORT»''')
// indentation for pretty XML output
transformerFactory.setAttribute("indent-number", 4);
transformer = transformerFactory.newTransformer()
transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
transformer.setOutputProperty(OutputKeys.INDENT, "yes")
transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8")
transformer.setOutputProperty(OutputKeys.STANDALONE, "yes")
transformer.setOutputProperty(OutputKeys.MEDIA_TYPE, "text/xml")
// one <group>Config.xml with default file URLs per package group
EcoreUtil.getAllContents(EObjectHelper.getSemanticElement(input), false).filter(typeof(DataInterchangePackage)).forEach[
fsa.generatePathConfig(it)
]
EcoreUtil.getAllContents(EObjectHelper.getSemanticElement(input), false).filter(typeof(DataInterchange)).forEach[
// create all smooks config files
fsa.generateImportConfigStub(it)
fsa.generateExportConfigStub(it)
]
super.doGenerate(input, fsa)
}
/**
 * Creates, once per data interchange group, a default configuration file
 * «group»Config.xml below «user.home»/.osbee mapping every interchange's
 * import/export key to its modeled default file URL. Existing files are
 * left untouched so user edits survive regeneration.
 *
 * Fixes over the previous version: setWritable is now invoked after the
 * file exists (calling it on a non-existing file is a no-op, so newly
 * created files never became world-writable), and the output stream is
 * closed even when storeToXML fails.
 */
def void generatePathConfig(IFileSystemAccess fsa, DataInterchangePackage dataInterchangePkg) {
	var dir = new File('''«System.getProperty("user.home")»/.osbee''')
	if(!dir.exists) {
		dir.mkdir
	}
	dataInterchangePkg.groups.forEach[
		var file = new File('''«System.getProperty("user.home")»/.osbee/«it.name»Config.xml''');
		if(!file.exists) {
			file.createNewFile
			// collect the modeled default URLs for both directions
			val properties = new Properties();
			it.datInts.forEach[
				properties.put('''«it.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()»'''.toString, it.getUrl)
				properties.put('''«it.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()»'''.toString, it.getUrl)
			]
			var fileOutput = new FileOutputStream(file);
			try {
				properties.storeToXML(fileOutput, "dataInterchange file URLs")
			} finally {
				// always release the stream, even on write failure
				fileOutput.close
			}
		}
		// must happen after creation: setWritable on a non-existing file has no effect
		file.setWritable(true, false);
	]
}
/**
 * Returns the modeled default file URL of the interchange's file endpoint,
 * or null for endpoint kinds without a URL.
 */
def getUrl(DataInterchange di) {
	// type-guarded switch binds the endpoint once; no explicit casts needed
	switch endpoint : di.fileEndpoint {
		DataInterchangeFileXML:
			return endpoint.fileURL
		DataInterchangeFileCSV:
			return endpoint.fileURL
		DataInterchangeFileEDI:
			return endpoint.fileURL
	}
}
/**
 * Builds the Smooks export configuration for one DataInterchange and writes
 * it to «name»-export.xml. The document gets a global-parameters block
 * (SAX stream filter), the cartridge namespaces needed for the endpoint
 * kind, and - for CSV/XML endpoints - a FreeMarker export template produced
 * by generateExportConfig(). EDI export is not supported (empty case).
 */
def void generateExportConfigStub(IFileSystemAccess fsa, DataInterchange dataInterchange) {
var body = ""
dbf.namespaceAware = true
// root element of the Smooks configuration
var document = domImpl.createDocument("http://www.milyn.org/xsd/smooks-1.1.xsd", "smooks-resource-list", null)
// <resource-config selector="global-parameters"><param name="stream.filter.type">SAX</param>
var config = document.createElement("resource-config")
var selector = document.createAttribute("selector")
selector.textContent = "global-parameters"
config.attributeNode = selector
var pEl = document.createElement("param")
var name = document.createAttribute("name")
name.textContent = "stream.filter.type"
pEl.attributeNode = name
var value = document.createTextNode("SAX")
pEl.appendChild(value)
config.appendChild(pEl)
document.documentElement.appendChild(config)
// cartridge namespace declarations added to the root element below
var cartridges = <String,String>newHashMap()
cartridges.put("xmlns:jb", "http://www.milyn.org/xsd/smooks/javabean-1.4.xsd")
switch (dataInterchange.fileEndpoint) {
DataInterchangeFileCSV: {
var delimiter = ""
var quote = ""
cartridges.put("xmlns:ftl", "http://www.milyn.org/xsd/smooks/freemarker-1.1.xsd")
var csv = dataInterchange.fileEndpoint as DataInterchangeFileCSV
// modeled delimiter/quote may contain HTML entities (e.g. &quot;)
if(csv.delimiter !== null) {
delimiter = StringEscapeUtils.unescapeHtml(csv.delimiter)
}
if(csv.quoteCharacter !== null) {
quote = StringEscapeUtils.unescapeHtml(csv.quoteCharacter)
}
dataInterchange.generateExportConfig(document, dataInterchange.fileEndpoint, delimiter, quote)
}
DataInterchangeFileXML: {
cartridges.put("xmlns:ftl", "http://www.milyn.org/xsd/smooks/freemarker-1.1.xsd")
dataInterchange.generateExportConfig(document, dataInterchange.fileEndpoint, null, null)
}
DataInterchangeFileEDI: {
}
}
for(cdg:cartridges.keySet) {
document.documentElement.setAttributeNS("http://www.w3.org/2000/xmlns/", cdg, cartridges.get(cdg))
}
// serialize the DOM through the pre-configured pretty-printing transformer
var source = new DOMSource(document)
var res = new DataResult()
transformer.transform(source, res)
body = res.result
fsa.generateFile('''«dataInterchange.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
}
/**
 * Builds the Smooks import configuration for one DataInterchange and writes
 * it to «name»-import.xml: cartridge namespaces, global parameters
 * (stream filter type, visitor sorting, input type and active input file),
 * the endpoint-specific reader element and the bean/locator bindings
 * produced by generateImportConfig().
 */
def void generateImportConfigStub(IFileSystemAccess fsa, DataInterchange dataInterchange) {
var body = ""
dbf.namespaceAware = true
// NOTE: these locals shadow the class-level db/domImpl fields of the same name
var db = dbf.newDocumentBuilder()
var domImpl = db.DOMImplementation
var document = domImpl.createDocument("http://www.milyn.org/xsd/smooks-1.1.xsd", "smooks-resource-list", null)
// entity features collected while generating the bean bindings (used by the CSV reader)
var fieldList = <LEntityFeature>newArrayList()
var cartridges = <String,String>newHashMap()
var parameters = <String,ParameterValue>newHashMap()
// for the meaning of cartridges see: http://www.smooks.org/mediawiki/index.php?title=V1.5:Smooks_v1.5_User_Guidecartridges
cartridges.put("xmlns:jb", "http://www.milyn.org/xsd/smooks/javabean-1.2.xsd")
cartridges.put("xmlns:dao", "http://www.milyn.org/xsd/smooks/persistence-1.2.xsd")
var pval = new ParameterValue
pval.value = "SAX"
parameters.put("stream.filter.type", pval)
// the smooks.visitor.sort=false removes NPE when using dao:lookup according to:
// http://milyn.996300.n3.nabble.com/jira-Created-MILYN-265-Add-support-for-Producer-Consumer-based-sorting-of-Visitor-logic-td3387.html
pval = new ParameterValue
pval.value = "false"
parameters.put("smooks.visitors.sort", pval)
switch (dataInterchange.fileEndpoint) {
DataInterchangeFileXML: {
// set input type and active filename
pval = new ParameterValue
pval.value = "input.xml"
parameters.put("inputType", pval)
pval = new ParameterValue
pval.value = (dataInterchange.fileEndpoint as DataInterchangeFileXML).fileURL
pval.modifiers.put("type", "input.type.actived")
parameters.put("input.xml", pval)
dataInterchange.generateImportConfig(fieldList, document, dataInterchange.fileEndpoint)
(dataInterchange.fileEndpoint as DataInterchangeFileXML).input(dataInterchange, document)
}
DataInterchangeFileCSV: {
cartridges.put("xmlns:csv", "http://www.milyn.org/xsd/smooks/csv-1.2.xsd")
// set input type and active filename
pval = new ParameterValue
pval.value = "input.csv"
parameters.put("inputType", pval)
pval = new ParameterValue
pval.value = (dataInterchange.fileEndpoint as DataInterchangeFileCSV).fileURL
pval.modifiers.put("type", "input.type.actived")
parameters.put("input.csv", pval)
dataInterchange.generateImportConfig(fieldList, document, dataInterchange.fileEndpoint)
(dataInterchange.fileEndpoint as DataInterchangeFileCSV).input(dataInterchange, fieldList, document)
}
DataInterchangeFileEDI: {
cartridges.put("xmlns:edi", "http://www.milyn.org/xsd/smooks/edi-1.2.xsd")
// set input type and active filename
pval = new ParameterValue
pval.value = "input.edi"
parameters.put("inputType", pval)
pval = new ParameterValue
pval.value = (dataInterchange.fileEndpoint as DataInterchangeFileEDI).fileURL
pval.modifiers.put("type", "input.type.actived")
parameters.put("input.edi", pval)
dataInterchange.generateImportConfig(fieldList, document, dataInterchange.fileEndpoint)
(dataInterchange.fileEndpoint as DataInterchangeFileEDI).input(dataInterchange, document)
}
}
// declare all collected cartridge namespaces on the root element
for(cdg:cartridges.keySet) {
document.documentElement.setAttributeNS("http://www.w3.org/2000/xmlns/", cdg, cartridges.get(cdg))
}
// materialize the collected parameters as <params><param name="..." ...>value</param></params>
var params = document.createElement("params")
for(para:parameters.keySet) {
var pEl = document.createElement("param");
var name = document.createAttribute("name")
name.textContent = para
pEl.attributeNode = name
var value = document.createTextNode(parameters.get(para).value)
pEl.appendChild(value)
// additional attributes carried by the ParameterValue's modifiers map
for(mod:parameters.get(para).modifiers.keySet) {
var pAt = document.createAttribute(mod)
pAt.textContent = parameters.get(para).modifiers.get(mod)
pEl.attributeNode = pAt
}
params.appendChild(pEl)
}
document.documentElement.appendChild(params)
// serialize the DOM through the pre-configured pretty-printing transformer
var source = new DOMSource(document)
var res = new DataResult()
transformer.transform(source, res)
body = res.result
fsa.generateFile('''«dataInterchange.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
}
/**
 * XML input needs no dedicated Smooks reader element (the default XML
 * reader is used), so this endpoint-specific hook is intentionally empty.
 */
def input(DataInterchangeFileXML xml, DataInterchange interchange, Document doc) {
}
/**
 * Adds the Smooks CSV reader element ("csv:reader") to the configuration.
 * The "fields" attribute lists the CSV column names derived from the
 * collected entity features: attributes are included either unconditionally
 * (no attribute mappings modeled) or only when a mapping names them;
 * references are included only when a lookup refers to them. Separator,
 * quote, indent and skipLines come from the modeled CSV endpoint.
 */
def input(DataInterchangeFileCSV csv, DataInterchange interchange, List<LEntityFeature> fieldList, Document doc) {
var reader = doc.createElement("csv:reader")
var fields = doc.createAttribute("fields")
var fldList = <String>newArrayList
var mappingInterchanges = interchange.path.filter[it.hasAttributeMapping]
var lookupInterchanges = interchange.path.filter[it.hasReferenceLookup]
for(f:fieldList) {
if(f instanceof LEntityAttribute){
if(mappingInterchanges.length > 0){
// mappings exist: include only attributes named by some mapping's data column
for(mi:mappingInterchanges){
for(mapping:mi.mappings){
if(!fldList.contains(mapping.data) && f.name.equals(mapping.data)){
fldList.add(f.toName)
}
}
}
}else{
fldList.add(f.toName)
}
}else if(f instanceof LEntityReference && lookupInterchanges.length > 0){
//add reference only if they are specified by lookup
for(li:lookupInterchanges){
for(lup:li.lookup){
if(!fldList.contains(lup.dataMap) && f.name.equals(lup.dataMap)){
fldList.add(f.toName)
}
}
}
}
}
fields.textContent = fldList.join(",")
reader.attributeNode = fields
if(csv.delimiter !== null) {
var sep = doc.createAttribute("separator")
sep.textContent = csv.delimiter
reader.attributeNode = sep
}
if(csv.quoteCharacter !== null) {
// the modeled quote may be XML-escaped (e.g. &quot;)
var quote = doc.createAttribute("quote")
quote.textContent = StringEscapeUtils.unescapeXml(csv.quoteCharacter)
reader.attributeNode = quote;
}
var indent = doc.createAttribute("indent")
indent.textContent = csv.indent.booleanValue.toString
reader.attributeNode = indent
var skip = doc.createAttribute("skipLines")
skip.textContent = csv.skipLines.toString
reader.attributeNode = skip
doc.documentElement.appendChild(reader)
}
/**
 * Adds the Smooks EDI reader element ("edi:reader") to the configuration,
 * carrying the modeled mapping model and the validate flag.
 */
def input(DataInterchangeFileEDI edi, DataInterchange interchange, Document doc) {
	val ediReader = doc.createElement("edi:reader")
	val mappingModelAttr = doc.createAttribute("mappingModel")
	mappingModelAttr.textContent = edi.mappingModel
	ediReader.attributeNode = mappingModelAttr
	val validateAttr = doc.createAttribute("validate")
	validateAttr.textContent = edi.validate.booleanValue.toString
	ediReader.attributeNode = validateAttr
	doc.documentElement.appendChild(ediReader)
}
/**
 * Wraps the given FreeMarker template string into a Smooks
 * "ftl:freemarker"/"ftl:template" element applied on the whole document.
 * Optional encoding and locale settings from the endpoint are prepended as
 * FreeMarker directives; carriage returns are stripped so the generated
 * file is platform-independent.
 */
def createFreemarker(Document doc, String templateString, DataInterchangeFile endPoint) {
var freemarker = doc.createElement("ftl:freemarker")
var apply = doc.createAttribute("applyOnElement")
apply.textContent = "#document"
freemarker.attributeNode = apply
doc.documentElement.appendChild(freemarker)
var template = doc.createElement("ftl:template")
// CDATA keeps the FreeMarker markup from being XML-escaped
var tplName = doc.createCDATASection('''«IF endPoint.encoding !== null»<#ftl encoding='«endPoint.encoding»'>«ENDIF»«IF endPoint.locale !== null»<#setting locale="«endPoint.locale»">«ENDIF»'''+templateString.replaceAll("\r",""));
template.appendChild(tplName)
freemarker.appendChild(template)
}
/**
 * Creates a Smooks java-binding bean element ("jb:bean") as a direct child
 * of the document root and returns it.
 *
 * @param beanIdName value for the beanId attribute
 * @param className  fully qualified class name; "[]" is appended for lists
 * @param elementMap optional createOnElement selector, skipped when null
 * @param isList     whether the bean represents an array binding
 */
def Element createBean(Document doc, String beanIdName, String className, String elementMap, boolean isList) {
	val beanElement = doc.createElement("jb:bean")
	doc.documentElement.appendChild(beanElement)
	val beanIdAttr = doc.createAttribute("beanId")
	beanIdAttr.textContent = beanIdName
	beanElement.attributeNode = beanIdAttr
	val classAttr = doc.createAttribute("class")
	classAttr.textContent = if(isList) className + "[]" else className
	beanElement.attributeNode = classAttr
	// only bind creation to an element when a mapping selector was supplied
	if(elementMap !== null) {
		val createOnAttr = doc.createAttribute("createOnElement")
		createOnAttr.textContent = elementMap
		beanElement.attributeNode = createOnAttr
	}
	return beanElement
}
/**
 * Creates (or reuses) a "jb:value" child of the given bean element for one
 * property binding. An existing jb:value for the same property name is
 * updated in place; otherwise a new element is appended. The optional
 * decoder name is set as the "decoder" attribute, and DATE-typed properties
 * additionally get default="null" so empty input does not fail decoding.
 */
def Element createProperty(Document doc, Element parent, String propertyName, String decoderName, EType type) {
var Element value = null
// try to find a value element, or create a new one
var node = parent.firstChild
// walk siblings until a jb:value without a property attribute, or with a matching one, is found
while(node !== null && (!node.nodeName.equals("jb:value") || (node.attributes.getNamedItem("property") !== null && !node.attributes.getNamedItem("property").nodeValue.equals(propertyName)))) {
node = node.nextSibling
}
if(node === null) {
value = doc.createElement("jb:value")
parent.appendChild(value)
} else {
value = node as Element
}
var property = doc.createAttribute("property")
property.textContent = propertyName
value.attributeNode = property
if(decoderName !== null) {
var decoder = doc.createAttribute("decoder")
decoder.textContent = decoderName
value.attributeNode = decoder
}
if (type == EType.DATE) {
// empty date cells must decode to null instead of raising an error
var defaultValue = doc.createAttribute("default")
defaultValue.textContent = "null"
value.attributeNode = defaultValue
}
return value
}
/**
 * Adds (or updates) the "data" attribute of the jb:value element belonging
 * to the given property, pointing it at "«createOnElement»/«dataName»"
 * (prefixed with '@' when the data comes from an XML attribute).
 */
def addMapping(Document doc, Element parent, String propertyName, String dataName, boolean byAttribute) {
var elementMap = ""
var attr = parent.attributes.getNamedItem("createOnElement")
if (attr !== null) {
elementMap = attr.textContent
}
var Element value = null
// try to find a value element, or create a new one
var node = parent.firstChild
// NOTE(review): unlike createProperty(), this scan also stops at the first child
// without attributes (node.hasAttributes) - presumably to reuse freshly created
// bare jb:value elements; confirm this asymmetry is intentional
while(node !== null && node.hasAttributes && (!node.nodeName.equals("jb:value") || (node.attributes.getNamedItem("property") !== null && !node.attributes.getNamedItem("property").nodeValue.equals(propertyName)))) {
node = node.nextSibling
}
if(node === null) {
value = doc.createElement("jb:value")
parent.appendChild(value)
} else {
value = node as Element
}
var data = doc.createAttribute("data")
data.textContent = '''«elementMap»/«IF byAttribute»@«ENDIF»«dataName»'''
value.attributeNode = data
}
/**
 * Appends a "jb:decodeParam" child carrying the given name attribute and
 * the value as text content; returns the created element.
 */
def Element createDecodeParam(Document doc, Element parent, String paramName, String paramValue) {
	val decodeParam = doc.createElement("jb:decodeParam")
	parent.appendChild(decodeParam)
	val nameAttr = doc.createAttribute("name")
	nameAttr.textContent = paramName
	decodeParam.attributeNode = nameAttr
	decodeParam.appendChild(doc.createTextNode(paramValue))
	return decodeParam
}
/**
 * Appends a "jb:wiring" child referencing another bean via beanIdRef;
 * property and setterMethod attributes are optional (skipped when null).
 * Returns the created element.
 */
def Element createWiring(Document doc, Element parent, String beanIdRefName, String propertyName, String setterName) {
	val wiring = doc.createElement("jb:wiring")
	parent.appendChild(wiring)
	val beanIdRefAttr = doc.createAttribute("beanIdRef")
	beanIdRefAttr.textContent = beanIdRefName
	wiring.attributeNode = beanIdRefAttr
	if(propertyName !== null) {
		val propertyAttr = doc.createAttribute("property")
		propertyAttr.textContent = propertyName
		wiring.attributeNode = propertyAttr
	}
	if(setterName !== null) {
		val setterAttr = doc.createAttribute("setterMethod")
		setterAttr.textContent = setterName
		wiring.attributeNode = setterAttr
	}
	return wiring
}
/**
 * Appends a "jb:expression" child that binds the given property to the
 * expression text; returns the created element.
 */
def Element createExpression(Document doc, Element parent, String propertyName, String propertyValue) {
	val expressionElement = doc.createElement("jb:expression")
	parent.appendChild(expressionElement)
	val propertyAttr = doc.createAttribute("property")
	propertyAttr.textContent = propertyName
	expressionElement.attributeNode = propertyAttr
	expressionElement.appendChild(doc.createTextNode(propertyValue))
	return expressionElement
}
/**
 * Creates a Smooks persistence locator element ("dao:locator") used to look
 * up existing entities; it is appended to the given parent, or to the
 * document root when parent is null. Returns the created element.
 *
 * @param allowNoResult        when false, onNoResult="EXCEPTION" is emitted
 * @param allowNonuniqueResult when false, uniqueResult="true" is emitted
 */
def Element createDaoLocator(Document doc, Element parent, String beanIdName, String elementMap, boolean allowNoResult, boolean allowNonuniqueResult) {
	val locatorElement = doc.createElement("dao:locator")
	if (parent === null) {
		doc.documentElement.appendChild(locatorElement)
	} else {
		parent.appendChild(locatorElement)
	}
	val beanIdAttr = doc.createAttribute("beanId")
	beanIdAttr.textContent = beanIdName
	locatorElement.attributeNode = beanIdAttr
	// only attach a lookup selector when a mapping was supplied
	if(elementMap !== null) {
		val lookupAttr = doc.createAttribute("lookupOnElement")
		lookupAttr.textContent = elementMap
		locatorElement.attributeNode = lookupAttr
	}
	if(!allowNoResult) {
		val onNoResultAttr = doc.createAttribute("onNoResult")
		onNoResultAttr.textContent = "EXCEPTION"
		locatorElement.attributeNode = onNoResultAttr
	}
	val uniqueResultAttr = doc.createAttribute("uniqueResult")
	uniqueResultAttr.textContent = Boolean.toString(!allowNonuniqueResult)
	locatorElement.attributeNode = uniqueResultAttr
	return locatorElement
}
/**
 * Appends a "dao:query" child holding the query string as text content;
 * returns the created element.
 */
def Element createDaoQuery(Document doc, Element parent, String query) {
	val queryElement = doc.createElement("dao:query")
	parent.appendChild(queryElement)
	queryElement.appendChild(doc.createTextNode(query))
	return queryElement
}
/**
 * Adds one "dao:value" parameter to the locator's "dao:params" container
 * (creating the container on first use). The value carries the parameter
 * name, a decoder attribute derived from the given type name ("int" is
 * normalized to "Integer") and - when a data mapping is given - a "data"
 * selector of the form "«elementMap»/«dataMap»" (with '@' for attributes).
 * Returns the dao:params container element.
 */
def Element createDaoParam(Document doc, Element parent, String paramName, String paramValue, String elementMap, String dataMap, boolean byAttribute) {
var Element daoParams = null
// reuse an existing dao:params child if the locator already has one
var node = parent.firstChild
while(node !== null && !node.nodeName.equals("dao:params")) {
node = node.nextSibling
}
if(node === null) {
daoParams = doc.createElement("dao:params")
parent.appendChild(daoParams)
} else {
daoParams = node as Element
}
var daoValue = doc.createElement("dao:value")
daoParams.appendChild(daoValue)
var param = doc.createAttribute("name")
param.textContent = paramName
daoValue.attributeNode = param
var decoder = doc.createAttribute("decoder")
// Smooks decoders are named after wrapper types, so map "int" to "Integer"
var paramContent = paramValue
if(paramValue.equalsIgnoreCase("int")){
paramContent = "Integer"
}
decoder.textContent = paramContent
daoValue.attributeNode = decoder
// mapping given?
if(dataMap !== null) {
var data = doc.createAttribute("data")
data.textContent = '''«elementMap»/«IF byAttribute»@«ENDIF»«dataMap»'''
daoValue.attributeNode = data
}
return daoParams
}
/**
 * Generates the export side of a Smooks configuration: walks all modeled
 * beans (paths) of the interchange, collects the exportable entity features
 * and - depending on the endpoint - builds either a CSV or an XML FreeMarker
 * export template which is attached to the given Smooks document.
 *
 * For XML endpoints a nested FTL document is built where each path becomes
 * a bean element; substitutionMap records the FTL list variables
 * ("listN" -> "vector as Entity") and vectorMap records, per referenced
 * child entity, the collection expression it is iterated from.
 */
def generateExportConfig(DataInterchange dataInterchange, Document doc, DataInterchangeFile endPoint, String delimiter, String quote) {
var substitutionMap = <String,String>newHashMap
var vectorMap = <String,String>newHashMap
var substitutionCount = 0
var fieldList = <LEntityFeature>newArrayList
var String rootEntityName = null
var Document ftlDocument = null
var Element bean = null
var Element oldParent = null
var vector = "vector"
var depth = 0
var LEntity previousEntity = null
// iterate through all path (bean) listed under `path` keyword
for(path : dataInterchange.path) {
// are there any mappings?
var mappingFound = hasAttributeMapping(path)
// the first non-marker path determines the root entity of the export
if (rootEntityName === null && !path.isMarkLatestImport) {
rootEntityName = (path.entity as LAnnotationTarget).toName.toString
}
if (dataInterchange.fileEndpoint instanceof DataInterchangeFileXML) {
var currentKey = '''list«substitutionCount»'''
if(ftlDocument === null) {
// root element of the FTL template document: modeled vector name or "vector"
ftlDocument = domImpl.createDocument(null, '''«IF dataInterchange.vectorName !== null»«dataInterchange.vectorName»«ELSE»vector«ENDIF»''', null)
}
// iterate the parent's collection expression when this entity was wired as a child
vector = vectorMap.get(path.entity.toName) ?: vector
substitutionMap.put(currentKey, '''«vector» as «path.entity.toName»''')
if(previousEntity !== null && !previousEntity.hasRelation(path.entity)){
// sibling of the previous entity: attach to the previous parent level
bean = createXmlBean(ftlDocument, oldParent, path.entity, path.format, currentKey, path, dataInterchange)
}else{
// child of the previous entity: nest one level deeper
oldParent = bean
bean = createXmlBean(ftlDocument, bean, path.entity, path.format, currentKey, path, dataInterchange)
depth = depth + 1
}
substitutionCount = substitutionCount + 1
previousEntity = path.entity
}
for (f : path.entity.allFeatures) {
// NOTE(review): attributes are only collected when an export filter with
// hidden properties exists on the path - paths without a filter export no
// attributes here; confirm this is the intended semantics
if(f instanceof LAttribute && !f.toMany &&
path.getExportFilter !== null &&
path.getExportFilter.hiddenproperties !== null &&
!path.getExportFilter.hiddenproperties.empty &&
!path.getExportFilter.hiddenproperties.toList.containsFeature(f)) {
if (!isInternal(f) && // don't export version
!path.isMarkLatestImport &&
((!(f as LEntityAttribute).id && !(f as LEntityAttribute).uuid) || (path.lookupKeys.empty && dataInterchange.mode != EntityManagerMode.PERSIST)) &&
(!mappingFound || path.mappings.isMapped(f))) {
fieldList.add(f as LEntityAttribute)
}
}
if(f instanceof LReference && !f.toMany && path.lookupKeys.empty) {
fieldList.add(f as LEntityReference)
}
// if a feature is a 1-* reference, find if the referenced entity is also under this path.
if(f instanceof LReference && f.toMany && !path.isMarkLatestImport) {
var iter = dataInterchange.path.iterator
var next = iter.next
// find current entity
while (iter.hasNext && next.entity.toName != path.entity.toName) {
next = iter.next
}
while (iter.hasNext) {
// move to next entity
next = iter.next
if (next !== null && next.entity.toName == (f.type as LEntity).toName) {
// remember the collection expression the child entity is iterated from
vectorMap.put(next.entity.toName, '''«path.entity.toName».«f.name»''')
}
}
}
}
}
if(endPoint instanceof DataInterchangeFileCSV) {
createFreemarker(doc, createCsvTemplate(rootEntityName, fieldList, delimiter, quote, dataInterchange.path), endPoint)
}
if(endPoint instanceof DataInterchangeFileXML) {
createFreemarker(doc, createXmlTemplate(ftlDocument, substitutionMap, dataInterchange.path), endPoint)
}
}
/**
 * Returns true when the root entity owns a reference feature whose target
 * type is the given entity.
 */
def boolean hasRelation(LEntity root, LEntity entity) {
	return root.features.exists[it instanceof LReference && (it as LReference).type.equals(entity)]
}
def generateImportConfig(DataInterchange dataInterchange, List<LEntityFeature> fieldList, Document doc, DataInterchangeFile endPoint) {
var isFirst = true
var hasMarker = false
var autoMapping = ""
var byAttribute = false
if(endPoint instanceof DataInterchangeFileCSV) {
autoMapping = "/csv-set"
} else if(endPoint instanceof DataInterchangeFileXML) {
autoMapping = '''«IF dataInterchange.vectorName !== null»«dataInterchange.vectorName»«ELSE»vector«ENDIF»'''
byAttribute = endPoint.byAttribute
}
var depth = 0
var oldMapping = ""
for(path : dataInterchange.path) {
var map = ""
if(path.elementMap === null) {
map = autoMapping
} else {
map = path.elementMap
}
if(path.isMarkLatestImport) {
hasMarker = true
}
var Element bean = null
if(!path.isMarkLatestImport && !hasMarker && isFirst) {
var rootBean = createBean(doc, path.entity.toName+"List", path.entity.fullyQualifiedName.toString, map, true)
createWiring(doc, rootBean, path.entity.toName, null, null)
isFirst = false
}
if(!path.isMarkLatestImport && endPoint instanceof DataInterchangeFileCSV) {
autoMapping = autoMapping + "/csv-record"
}
if(endPoint instanceof DataInterchangeFileXML) {
if(depth < 1) {
autoMapping = '''«autoMapping»/«IF path.nodeName !== null»«path.nodeName»«ELSE»«path.entity.toName»«ENDIF»'''
oldMapping = autoMapping
depth++
}else{
autoMapping = '''«oldMapping»/«IF path.nodeName !== null»«path.nodeName»«ELSE»«path.entity.toName»«ENDIF»'''
}
}
if(path.elementMap === null) {
map = autoMapping
} else {
map = path.elementMap
}
bean = createBean(doc, path.entity.toName, path.entity.fullyQualifiedName.toString, map, false)
// if merge or remove - create a locator for id and version
if(dataInterchange.mode != EntityManagerMode.PERSIST) {
if(!path.lookupKeys.empty) {
createExpression(doc, bean, path.entity.idAttributeName, "?"+path.entity.toName+"Merger."+path.entity.idAttributeName)
if(path.entity.versionAttribute !== null) {
createExpression(doc, bean, path.entity.versionAttributeName, "?"+path.entity.toName+"Merger."+path.entity.versionAttributeName)
}
var locator = createDaoLocator(doc, null, path.entity.toName+"Merger", map, true, false)
createDaoQuery(doc, locator, path.queryKeys)
var pCnt = 0
for(key:path.lookupKeys) {
createDaoParam(doc, locator, '''param«pCnt++»''', key.property.decoder, map, path.mappings.mapElement(key.property.name), byAttribute)
}
}
else if(path.entity.versionAttribute !== null) {
createExpression(doc, bean, path.entity.versionAttributeName, "?"+path.entity.toName+"Merger."+path.entity.versionAttributeName)
var locator = createDaoLocator(doc, null, path.entity.toName+"Merger", map, true, false)
createDaoQuery(doc, locator, path.queryVersion)
createDaoParam(doc, locator, '''param''', path.entity.primaryKeyAttribute.decoder, map, path.entity.idAttributeName, byAttribute)
}
}
// are there any mappings?
var mappingFound = hasAttributeMapping(path)
for (f : path.entity.allFeatures) {
switch f {
LAttribute: {
if (!f.toMany) {
// enable mapping for this field
// if not disposed and not latest marker and not id except no lookup keys given and mode is not persist
if (!isInternal(f)
&&
((!(f as LEntityAttribute).id && !(f as LEntityAttribute).uuid)
||
(path.lookupKeys.empty && dataInterchange.mode != EntityManagerMode.PERSIST))
&&
(!path.isMarkLatestImport || !path.latestProperty.toName.equals(f.toName))
) {
// add to the level's field list
if (!path.isMarkLatestImport) {
fieldList.add(f as LEntityAttribute)
}
var expressionFound = false
// scan expressions for this field
for (expr : path.expression) {
switch(expr) {
DataInterchangeEntityExpression: {
// is there an entity expression for this attribute ?
if (f.toName.equals((expr as DataInterchangeEntityExpression).targetProperty.toName)) {
createExpression(doc, bean, f.toName, (expr as DataInterchangeEntityExpression).entity.toName+"."+(expr as DataInterchangeEntityExpression).property.toName)
expressionFound = true
}
}
DataInterchangePredefinedExpression: {
// is there an predefined expression modeled for this attribute ?
if (f.toName.equals((expr as DataInterchangePredefinedExpression).targetProperty.toName)) {
if("UUID".equals((expr as DataInterchangePredefinedExpression).bean.literal)) {
createExpression(doc, bean, f.toName, "PUUID."+(expr as DataInterchangePredefinedExpression).beanType.getName)
} else {
createExpression(doc, bean, f.toName, "PTIME."+(expr as DataInterchangePredefinedExpression).bean.getName+(expr as DataInterchangePredefinedExpression).beanType.getName)
}
expressionFound = true
}
}
}
}
// scan formats for this field
var formatFound = false
for (format : path.format) {
// is there a format modeled for this attribute ?
if (f.toName.equals(format.targetProperty.toName)) {
var value = createProperty(doc, bean, f.toName, f.decoder, dtType.getBasicType(f as LEntityAttribute))
if(format.format !== null) {
createDecodeParam(doc, value, "format", format.format)
if (endPoint.locale !== null) {
createDecodeParam(doc, value, "locale-language", endPoint.locale.split("_").get(0))
if(endPoint.locale.split("_").size > 1) {
createDecodeParam(doc, value, "locale-country", endPoint.locale.split("_").get(1))
}
}
}
formatFound = true
}
}
// default for mapping purposes
if (!expressionFound && !formatFound) {
// create no property for unmapped marker entities
if (!(endPoint instanceof DataInterchangeFileCSV) || !path.isMarkLatestImport) {
// create no property if mapping is used and this attribute is unmapped
if(!mappingFound || path.mappings.isMapped(f)) {
var etype = dtType.getBasicType(f as LEntityAttribute)
// add enum decoder
if(etype == EType.LENUM){
var value = createProperty(doc, bean, f.toName, "Enum", etype)
createDecodeParam(doc, value, "enumType", f.type.toQualifiedName.toString)
var enumsliterals = f.type.eContents;
for(literal : enumsliterals){
val enumname = literal.fullyQualifiedName.lastSegment
// extra fileds
createDecodeParam(doc, value, enumsliterals.indexOf(literal).toString, enumname)
// mandatory fields
createDecodeParam(doc, value, enumname, enumname)
}
}
else{
// add format decoder
var value = createProperty(doc, bean, f.toName, f.decoder, etype)
if (etype == EType.DATE) {
createDecodeParam(doc, value, "format", "yyyy-MM-dd'T'HH:mm:ss")
} else if((etype == EType.DOUBLE || etype == EType.FLOAT) && endPoint.locale !== null) {
createDecodeParam(doc, value, "format", "#.######")
if (endPoint.locale !== null) {
createDecodeParam(doc, value, "locale-language", endPoint.locale.split("_").get(0))
if(endPoint.locale.split("_").size > 1) {
createDecodeParam(doc, value, "locale-country", endPoint.locale.split("_").get(1))
}
}
}
}
}
}
}
// if mapping given
path.hasBlobMapping = false
for(mapping : path.mappings) {
if(f.name.equals(mapping.property.name)) {
addMapping(doc, bean, f.toName, mapping.data, byAttribute)
if(mapping instanceof DataInterchangeBlobMapping) {
path.hasBlobMapping = true
}
}
}
// default mapping for xml
if (!mappingFound && !path.isMarkLatestImport && endPoint instanceof DataInterchangeFileXML) {
addMapping(doc, bean, f.toName, f.toName, byAttribute)
}
}
}
}
// check relations for modeled lookup
LReference: {
if (f.toMany) {
// one to many
// mapped automatically
var iter = dataInterchange.path.iterator
var next = iter.next
// find this entity
while (iter.hasNext && !next.entity.toName.equals(path.entity.toName)) {
next = iter.next
}
while (iter.hasNext) {
// move to next entity
next = iter.next
if (next !== null && next.entity.toName.equals((f.type as LEntity).toName)) {
createWiring(doc, bean, (f.type as LEntity).toName, null, f.toAdder(f.name).simpleName)
}
}
} else {
// many to one
if(path.lookup.isEmpty) {
fieldList.add(f as LEntityReference)
var field = (f as LEntityReference).type.primaryKeyAttribute
var etype = dtType.getBasicType(field)
for(mapping : path.mappings) {
// might be buggy for xml
if(f.name.equals(mapping.property.name)) {
createProperty(doc, bean, f.toName, field.decoder, etype)
addMapping(doc, bean, f.toName, mapping.data, byAttribute)
}
}
} else {
// generate possible lookups for this many to one relationship
for (lookup : path.lookup) {
// entity and property must match
if (lookup.targetProperty.toName.equals((f as LReference).name)) {
if(endPoint instanceof DataInterchangeFileCSV){
// the field will need to be included for header
fieldList.add(f as LEntityReference)
}
createWiring(doc, bean, (f.type as LEntity).toName, (f as LReference).name, null)
var locator = createDaoLocator(doc, null, (f.type as LEntity).toName, lookup.elementMap, lookup.allowNoResult, lookup.allowNonuniqueResult)
createDaoQuery(doc, locator, (f.type as LEntity).query(lookup))
createDaoParam(doc, locator, "param", lookup.queryProperty.type.name.toFirstUpper, lookup.elementMap, lookup.dataMap, byAttribute)
}
}
}
}
}
}
}
//for CSV file, if no mapping were found, simply map all attributes
if (!mappingFound && !path.isMarkLatestImport && endPoint instanceof DataInterchangeFileCSV) {
for(fld : fieldList) {
if(fld instanceof LEntityAttribute){
addMapping(doc, bean, fld.toName, fld.toName, byAttribute)
}
}
}
}
}
def boolean hasAnnotation(LFeature feature, Class<?> annotationType) {
	// True when the attribute carries an annotation whose fully qualified
	// type name matches the given annotation class.
	// NOTE(review): assumes feature is an LEntityAttribute - the cast fails otherwise.
	return (feature as LEntityAttribute).annotationInfo.annotations.exists[ anno |
		annotationType.name.equals(anno.annotation.annotationType.fullyQualifiedName.toString)
	]
}
def boolean isInternal(LFeature f) {
	// Framework-managed fields that must never take part in data interchange:
	// the dispose flag, the JPA version column, and the audit-trail annotations.
	// "disposed" is tested first, before any cast to LEntityAttribute.
	if ("disposed".equals(f.toName)) {
		return true
	}
	return (f as LEntityAttribute).version
		|| f.hasAnnotation(Dirty)
		|| f.hasAnnotation(UpdateAt)
		|| f.hasAnnotation(UpdateBy)
		|| f.hasAnnotation(CreateAt)
		|| f.hasAnnotation(CreateBy)
}
protected def boolean hasAttributeMapping(DataInterchangeBean path) {
	// Does at least one modeled mapping target an attribute of the path's entity?
	return path.mappings.exists[ mapping |
		path.entity.allAttributes.exists[name.equals(mapping.property.name)]
	]
}
protected def boolean hasReferenceLookup(DataInterchangeBean path) {
	// Does at least one modeled lookup address a reference of the path's entity
	// via its data map name?
	return path.lookup.exists[ lu |
		path.entity.allReferences.exists[name.equals(lu.dataMap)]
	]
}
protected def boolean hasReferenceExport(DataInterchangeBean path) {
	// Does at least one export-expose refer to a reference of the path's entity?
	return path.exportExposes.exists[ expose |
		path.entity.allReferences.exists[name.equals(expose.refEntity.name)]
	]
}
def boolean isMapped(EList<DataInterchangeMapping> list, LFeature attribute) {
	// Only entity attributes can be mapped; any other feature kind is never mapped.
	if (!(attribute instanceof LEntityAttribute)) {
		return false
	}
	val attributeName = (attribute as LEntityAttribute).name
	return list.exists[property.name.equals(attributeName)]
}
def String mapElement(EList<DataInterchangeMapping> mappings, String propertyName) {
	// Prefer the data element of the first matching value mapping;
	// fall back to the property name itself when none is modeled.
	val mapped = (mappings.findFirst[
		property.name.equals(propertyName) && it instanceof DataInterchangeValueMapping
	] as DataInterchangeValueMapping)?.data
	return mapped ?: propertyName
}
def Element createXmlBean(Document doc, Element parent, LEntity entity, EList<DataInterchangeFormat> formats, String currentKey, DataInterchangeBean path, DataInterchange dataInterchange) {
	// Creates the XML template fragment for one entity level of an export:
	// an opening processing instruction (rewritten later into a <#list> directive),
	// the entity element with one child per exportable attribute / exposed
	// reference chain, and a closing processing instruction.
	// are there any mappings?
	var mappingFound = hasAttributeMapping(path)
	// fix: create the element exactly once - the previous code built a second,
	// identical element when a parent was present and discarded the first one
	var bean = doc.createElement('''«IF path.nodeName !== null»«path.nodeName»«ELSE»«entity.toName»«ENDIF»''')
	var pi = doc.createProcessingInstruction(currentKey, "")
	if(parent === null) {
		doc.documentElement.appendChild(pi)
		doc.documentElement.appendChild(bean)
	} else {
		parent.appendChild(pi)
		parent.appendChild(bean)
	}
	for(p:entity.allAttributes) {
		// skip framework internals; skip id/uuid unless no lookup keys are modeled
		// and the mode is not PERSIST; skip unmapped attributes when mappings exist
		if(!isInternal(p) && // don't export version
			((!p.id && !p.uuid) || (path.lookupKeys.empty && dataInterchange.mode != EntityManagerMode.PERSIST)) &&
			(!mappingFound || path.mappings.isMapped(p))) {
			// pick the last modeled format for this attribute, if any
			var format = null as DataInterchangeFormat
			for (step : formats) {
				// is there a format modeled for this attribute ?
				if ((p as LEntityFeature).toName.equals(step.targetProperty.toName)) {
					if (step.format !== null) {
						format = step
					}
				}
			}
			var property = doc.createElement(p.toName)
			property.textContent = encodeFreemarker(entity.toName, p, format, "", true)
			bean.appendChild(property)
		}
	}
	// exposed reference chains: walk the sub-expose links down to the final property
	for(e : path.exportExposes){
		var expose = e
		var segments = <String>newArrayList()
		segments.add(expose.refEntity.name)
		// NOTE(review): assumes every expose chain terminates with a non-null
		// refProperty - verify this is guaranteed by the grammar/validator
		while(expose.refProperty === null){
			expose = expose.subExpose
			segments.add(expose.refEntity.name)
		}
		var property = doc.createElement(expose.refProperty.toName)
		var pathToProperty = segments.join(".")
		property.textContent = encodeFreemarker('''«entity.toName».«pathToProperty»''', expose.refProperty, null as DataInterchangeFormat, "", true);
		bean.appendChild(property)
	}
	// closing processing instruction (rewritten later into the </#list> directive)
	var pa = doc.createProcessingInstruction(currentKey, "")
	if(parent === null) {
		doc.documentElement.appendChild(pa)
	} else {
		parent.appendChild(pa)
	}
	return bean
}
def String decoder(LEntityFeature f) {
	// Convenience overload; assumes f is an LEntityAttribute (the cast fails otherwise).
	getPrimitiveDataTypeName(f as LEntityAttribute)
}
def String decoder(LEntityAttribute f) {
	// Decoder name is the attribute's primitive/wrapper type name.
	getPrimitiveDataTypeName(f)
}
def String getPrimitiveDataTypeName(LEntityAttribute attribute) {
	// Resolve the simple Java type name used by the interchange decoders:
	// dates are always "Date", modeled data types use their JVM simple name,
	// everything else falls back to the model type's name; "int" is widened
	// to its wrapper and the result is capitalized.
	val basic = dtType.getBasicType(attribute)
	var String name
	if (basic == EType.DATE) {
		name = "Date"
	// instanceof is null-safe, so no separate null check is required
	} else if (attribute.type instanceof LDataType && (attribute.type as LDataType).jvmTypeReference !== null) {
		name = (attribute.type as LDataType).jvmTypeReference.simpleName
	} else {
		name = attribute.type.name
	}
	if ("int".equals(name)) {
		name = "Integer"
	}
	return name.toFirstUpper
}
// <ftl:freemarker applyOnElement="org.eclipse.osbp.foodmart.entities.Mregion">
// <ftl:template>
// <!--
// <#list vector as Mregion>
// <Mregion>
// <sales_city>${Mregion.sales_city}</sales_city>
// <#list Mregion.stores as Mstore>
// <store_type>${Mstore.store_type}</store_type>
// </#list>
// </Mregion>
// </#list>
// -->
// </ftl:template>
// </ftl:freemarker>
def String createXmlTemplate(Document doc, HashMap<String,String> substitutionMap, EList<DataInterchangeBean> path) {
	// Serialize the assembled DOM into a Freemarker template string:
	// 1. transform the DOM to text without the XML declaration,
	// 2. strip the statements of properties hidden by an export filter,
	// 3. drop blank lines,
	// 4. rewrite the processing-instruction placeholders into <#list>/<\#list>
	//    directives and unescape the remaining markup.
	var source = new DOMSource(doc)
	var res = new DataResult()
	transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
	transformer.transform(source, res)
	var output = res.result
	// processing the template string for filtered information
	for(f: path){ // for exclusion
		if(f.getExportFilter !== null && f.getExportFilter.hiddenproperties !== null && !f.getExportFilter.hiddenproperties.empty){
			for(hiddenprop : f.getExportFilter.hiddenproperties){
				var attrname = hiddenprop.property.name
				if(output.contains(attrname)){
					// cut from the property's opening tag up to and including its closing tag
					// NOTE(review): substring() throws if the expected "<name>${(Entity" pattern
					// is not present in the serialized output - verify filter/entity consistency
					var targetstatement = output.substring(output.indexOf("<"+attrname+">${("+f.entity.name),output.indexOf("</"+attrname+">") + ("</"+attrname+">").length)
					output = output.replace(targetstatement, "")
				}
			}
		}
	}
	// cleaning whitespaces
	var result = ""
	for(f : output.split("\r")){
		if(!f.trim.isEmpty){
			result = result.concat(f)
		}
	}
	// processing instructions are embedded in question marks - they must be replaced
	// the first occurrence of each key becomes the opening #list directive ...
	for(k:substitutionMap.keySet) {
		result = result.replaceFirst(k, '''#list «substitutionMap.get(k)»''')
	}
	// ... the remaining (now first) occurrence becomes the closing /#list directive
	for(l:substitutionMap.keySet) {
		result = result.replaceFirst(l, '''/#list''')
	}
	return result.replace("&lt;","<").replace("&gt;",">").replace("<?","<").replace("?>",">")//.replace("</#","\n</#")
}
def String createCsvTemplate(String rootEntityName, List<LEntityFeature> fieldList, String delimiter, String quote, EList<DataInterchangeBean> paths) {
	// Builds a Freemarker CSV template: a header row with the field names,
	// followed by a <#list> loop that emits one delimited row per instance
	// of the root entity found in "vector".
	var tmpList = <String>newArrayList()
	var fldList = <String>newArrayList
	for(field:fieldList) {
		// value expression (with optional format/quoting) plus the header column name
		tmpList.add(encodeFreemarker(rootEntityName, field, paths, quote, false))
		fldList.add(field.toName)
	}
	var body = '''
«fldList.join(delimiter)»
<#list vector as «rootEntityName»>
«tmpList.join(delimiter)»
</#list>'''
	return body
}
def String encodeFreemarker(String entityName, LEntityFeature field, EList<DataInterchangeBean> paths, String quote, boolean encodeHtml) {
	// Looks up a modeled format for this field on the bean of its owning
	// entity, then delegates to the format-aware encoder. When several
	// formats match, the last one in model order wins.
	val owner = field.eContainer as LEntity
	var DataInterchangeFormat chosen = null
	for (bean : paths) {
		if (bean.entity.equals(owner)) {
			for (candidate : bean.format) {
				if (field.toName.equals(candidate.targetProperty.toName) && candidate.format !== null) {
					chosen = candidate
				}
			}
		}
	}
	return encodeFreemarker(entityName, field, chosen, quote, encodeHtml)
}
def String encodeFreemarker(String entityName, LEntityFeature field, DataInterchangeFormat format, String quote, boolean encodeHtml) {
	// Renders the Freemarker value expression for one field. All expressions
	// use the (...)!  default operator so missing values render as empty text.
	if(field instanceof LEntityAttribute) {
		var etype = dtType.getBasicType(field)
		if (etype == EType.BOOLEAN) {
			// ?c renders booleans as true/false
			return '''${(«entityName».«field.toName»?c)!}'''
		}
		else if (format !== null) {
			// a modeled format string overrides all type-based defaults
			return '''${(«entityName».«field.toName»?string["«format.format»"])!}'''
		}
		else if (etype == EType.DATE) {
			// ISO local date-time, millisecond precision, no zone suffix
			return '''${(«entityName».«field.toName»?datetime?iso_local_ms_nz)!}'''
		}
		else if (etype == EType.STRING) {
			// strings are quoted (CSV) and optionally HTML-escaped (XML export)
			return '''«quote»${(«entityName».«field.toName»«IF encodeHtml»?html«ENDIF»)!}«quote»'''
		}
		else {
			return '''${(«entityName».«field.toName»)!}'''
		}
	} else {
		// references are exported via the primary key attribute of the referenced entity
		var etype = dtType.getBasicType((field as LEntityReference).type.primaryKeyAttribute)
		if (etype == EType.STRING) {
			return '''«quote»${(«entityName».«field.toName».«(field as LEntityReference).type.primaryKeyAttribute.name»)!}«quote»'''
		} else {
			return '''${(«entityName».«field.toName».«(field as LEntityReference).type.primaryKeyAttribute.name»)!}'''
		}
	}
}
/* create a multistage left joined query to climb up along the one to many relations until import marker */
def String query(LEntity entity, DataInterchangeLookup lookup) {
	// Builds a JPQL query of the form
	//   from <Entity> x0 [left join x0.ref x1 ...] where x0.prop = :param [and ...]
	// optionally constrained by latest-import marker flags reached through
	// the lookup's marker path (one alias per hop).
	var aliasCnt = 0
	var select = '''«entity.toName» x«aliasCnt»'''
	var joinList = <String>newArrayList
	var whereList = <String>newArrayList
	// base predicate: match the lookup property against the single query parameter
	var qstr = '''x«aliasCnt».«lookup.queryProperty.toName» = :param'''
	whereList.add(qstr)
	if (lookup.markerPath !== null) {
		for(markerEntity:lookup.markerPath.path) {
			aliasCnt = aliasCnt + 1
			if (markerEntity.markLatest) {
				// restrict to rows flagged as belonging to the latest import
				qstr = '''x«aliasCnt».«markerEntity.markerProperty.toName» = 1'''
				whereList.add(qstr)
			}
			// join via the many-to-one reference pointing at the marker entity
			for(ff:entity.features) {
				if (ff instanceof LReference && !ff.toMany) {
					if (markerEntity.markerEntity.toName.equals((ff.type as LEntity).toName)) {
						qstr = '''x«aliasCnt-1».«ff.toName» x«aliasCnt»'''
						joinList.add(qstr)
					}
				}
			}
		}
	}
	return '''from «select»«IF joinList.size>0» left join «ENDIF»«joinList.join(" left join ")» where «whereList.join(" and ")»'''
}
def String queryKeys(DataInterchangeBean bean) {
	// JPQL locator query matching all modeled lookup keys:
	//   from <Entity> x where x.k0 = :param0 and x.k1 = :param1 ...
	// Parameter names param0, param1, ... match the order in which
	// createDaoParam generates the corresponding locator parameters.
	var pCnt = 0
	var select = '''«bean.entity.toName» x'''
	var whereList = <String>newArrayList
	for(key:bean.lookupKeys) {
		var qstr = '''x.«key.property.toName» = :param«pCnt++»'''
		whereList.add(qstr)
	}
	return '''from «select» where «whereList.join(" and ")»'''
}
def String queryVersion(DataInterchangeBean bean) {
	// JPQL locator query selecting the instance whose id matches :param,
	// used to fetch the version attribute before a merge/remove.
	return '''from «bean.entity.toName» x where x.«bean.entity.idAttributeName» = :param'''
}
def boolean containsFeature(List<DataInterchangeExportHide> list, LEntityFeature f) {
	// True when the feature appears among the export-hide entries.
	return list.exists[property.equals(f)]
}
override createAppendable(EObject context, ImportManager importManager, GeneratorConfig config) {
	// required to initialize the needed builder to avoid deprecated methods
	builder = context.eResource
	// ---------
	// register every type the generated code refers to with the import manager
	// fix: removed the duplicate IOException and Paths entries from the list
	addImportFor(importManager, _typeReferenceBuilder
		, FrameworkUtil
		, IDataInterchange
		, StreamSource
		, URL
		, URI
		, InputStream
		, StreamUtils
		, MalformedURLException
		, FileAlreadyExistsException
		, URISyntaxException
		, EntityManager
		, EntityTransaction
		, Logger
		, LoggerFactory
		, Label
		, Button
		, NativeButton
		, ClickListener
		, ClickEvent
		, ContentMode
		, PersistenceUnitProperties
		, HorizontalLayout
		, ProgressBar
		, IEventBroker
		, WorkerThreadRunnable
		, HashMap
		, Executors
		, TimeUnit
		, QueryHints
		, PessimisticLock
		, MissingResourceException
		, MessageFormat
		, EventHandler
		, Event
		, EventUtils
		, EventBrokerMsg
		, Bundle
		, BundleContext
		, List
		, Arrays
		, StringWriter
		, StringReader
		, OutputKeys
		, StreamResult
		, CriteriaBuilder
		, CriteriaQuery
		, Root
		, TypedQuery
		, JoinType
		, Paths
		, Path
		, Files
		, StandardOpenOption
		, BufferedOutputStream
		, BufferedInputStream
		, UI
		, Pair
		, IEntityImportInitializationListener
		, ConstraintViolationException
		, ConstraintViolation
		, EventDispatcherEvent
		, EventDispatcherCommand
		, DataInterchangeException
		, TransformerConfigurationException
		, SAXException
		, IOException
		, UUID
		, File
		, FileInputStream
		, Properties
		, ProductConfiguration
		, PrintWriter
		, BlobService
		, BlobTypingAPI
		, Component
		, Panel
		, Date
	)
	super.createAppendable(context, importManager, config)
}
}