eclipselink-2.5.1.orig/0000775000000000000000000000000012221374104011644 5ustar eclipselink-2.5.1.orig/commonj/0000775000000000000000000000000012216174400013307 5ustar eclipselink-2.5.1.orig/commonj/sdo/0000775000000000000000000000000012216174400014074 5ustar eclipselink-2.5.1.orig/commonj/sdo/impl/0000775000000000000000000000000012216174400015035 5ustar eclipselink-2.5.1.orig/commonj/sdo/impl/HelperProviderImpl.java0000664000000000000000000000673412216173134021471 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package commonj.sdo.impl; import org.eclipse.persistence.sdo.SDOResolvable; import org.eclipse.persistence.sdo.helper.SDOCopyHelper; import org.eclipse.persistence.sdo.helper.SDODataFactory; import org.eclipse.persistence.sdo.helper.SDODataHelper; import org.eclipse.persistence.sdo.helper.SDOEqualityHelper; import org.eclipse.persistence.sdo.helper.SDOTypeHelper; import org.eclipse.persistence.sdo.helper.SDOXMLHelper; import org.eclipse.persistence.sdo.helper.SDOXSDHelper; import org.eclipse.persistence.sdo.helper.delegates.SDODataFactoryDelegator; import org.eclipse.persistence.sdo.helper.delegates.SDOTypeHelperDelegator; import org.eclipse.persistence.sdo.helper.delegates.SDOXMLHelperDelegator; import org.eclipse.persistence.sdo.helper.delegates.SDOXSDHelperDelegator; import commonj.sdo.helper.CopyHelper; import commonj.sdo.helper.DataFactory; import commonj.sdo.helper.DataHelper; import commonj.sdo.helper.EqualityHelper; import commonj.sdo.helper.TypeHelper; import commonj.sdo.helper.XMLHelper; import commonj.sdo.helper.XSDHelper; public class HelperProviderImpl extends HelperProvider { private static final SDOXMLHelper xmlHelper = new SDOXMLHelperDelegator(); private static final SDOTypeHelper typeHelper = new SDOTypeHelperDelegator(); private static final SDOXSDHelper xsdHelper = new SDOXSDHelperDelegator(); private static final SDODataFactory dataFactory = new SDODataFactoryDelegator(); private static final SDODataHelper dataHelper = new SDODataHelper(); private static final SDOCopyHelper copyHelper = new SDOCopyHelper(); private static final SDOEqualityHelper equalityHelper = new SDOEqualityHelper(); public HelperProviderImpl() { super(); } public CopyHelper copyHelper() { return copyHelper; } public DataFactory dataFactory() { return dataFactory; } public DataHelper dataHelper() { return dataHelper; 
} public EqualityHelper equalityHelper() { return equalityHelper; } public TypeHelper typeHelper() { return typeHelper; } public XMLHelper xmlHelper() { return xmlHelper; } public XSDHelper xsdHelper() { return xsdHelper; } /** * This class handles resolving objects from a deserialized stream for * Reading */ public ExternalizableDelegator.Resolvable resolvable() { return new SDOResolvable(HelperProvider.getDefaultContext()); } /** * This class handles custom serialization of target objects for * Writing */ public ExternalizableDelegator.Resolvable resolvable(Object target) { return new SDOResolvable(target, HelperProvider.getDefaultContext()); } } eclipselink-2.5.1.orig/OSGI-INF/0000755000000000000000000000000012216174372013026 5ustar eclipselink-2.5.1.orig/OSGI-INF/l10n/0000755000000000000000000000000012216174372013600 5ustar eclipselink-2.5.1.orig/OSGI-INF/l10n/bundle-src.properties0000664000000000000000000000021412216173626017754 0ustar #Source Bundle Localization #Wed Sep 18 01:45:09 EDT 2013 bundleVendor=Eclipse.org - EclipseLink Project bundleName=EclipseLink Core Source eclipselink-2.5.1.orig/license.html0000644000000000000000000003467312216173132014171 0ustar Eclipse License

License

The Eclipse Foundation makes available all content in this plug-in ("Content"). Unless otherwise indicated below, the Content is provided to you under the terms and conditions of the Eclipse Public License Version 1.0 ("EPL") and Eclipse Distribution License Version 1.0 (“EDL”). For purposes of the EPL, "Program" will mean the Content.

If you did not receive this Content directly from the Eclipse Foundation, the Content is being redistributed by another party ("Redistributor") and different terms and conditions may apply to your use of any object code in the Content. Check the Redistributor’s license that was provided with the Content. If no such license exists, contact the Redistributor. Unless otherwise indicated below, the terms and conditions of the EPL and EDL still apply to any source code in the Content and such source code may be obtained at http://www.eclipse.org.

Eclipse Public License - v 1.0

THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.

1. DEFINITIONS

"Contribution" means:

a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
b) in the case of each subsequent Contributor:

i) changes to the Program, and

ii) additions to the Program;

where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.

"Contributor" means any person or entity that distributes the Program.

"Licensed Patents " mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.

"Program" means the Contributions distributed in accordance with this Agreement.

"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.

2. GRANT OF RIGHTS

a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.

b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.

c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.

d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.

3. REQUIREMENTS

A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:

a) it complies with the terms and conditions of this Agreement; and

b) its license agreement:

i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;

ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;

iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and

iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.

When the Program is made available in source code form:

a) it must be made available under this Agreement; and

b) a copy of this Agreement must be included with each copy of the Program.

Contributors may not remove or alter any copyright notices contained within the Program.

Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.

4. COMMERCIAL DISTRIBUTION

Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.

For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.

5. NO WARRANTY

EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.

6. DISCLAIMER OF LIABILITY

EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

7. GENERAL

If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.

If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.

All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.

Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.

This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.

 

Eclipse Distribution License Version 1.0

Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors.

All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

eclipselink-2.5.1.orig/readme.html0000644000000000000000000000301412216173134013767 0ustar EclipseLink 2.5 Readme

EclipseLink 2.5 Read Me

April 30, 2013

The EclipseLink project's goal is to provide a complete persistence framework that is both comprehensive and universal. It will run in any Java environment and read and write objects to virtually any type of data source, including relational databases, XML, JSON, or EIS systems. EclipseLink will focus on providing leading edge support, including advanced feature extensions, for the dominant persistence standards for each target data source; Java Persistence API (JPA) for relational databases, Java API for XML Binding (JAXB) for XML, Java Connector Architecture (JCA) for EIS and other types of legacy systems, and Service Data Objects (SDO).

For tips on getting started with EclipseLink, please see the following resources:

eclipselink-2.5.1.orig/META-INF/0000755000000000000000000000000012216174372013013 5ustar eclipselink-2.5.1.orig/META-INF/services/0000755000000000000000000000000012216174376014642 5ustar eclipselink-2.5.1.orig/META-INF/services/javax.persistence.spi.PersistenceProvider0000664000000000000000000000006012216173132024774 0ustar org.eclipse.persistence.jpa.PersistenceProvider eclipselink-2.5.1.orig/META-INF/services/javax.annotation.processing.Processor0000644000000000000000000000010512216173132024161 0ustar org.eclipse.persistence.internal.jpa.modelgen.CanonicalModelProcessoreclipselink-2.5.1.orig/META-INF/MANIFEST.MF0000644000000000000000000000066612216174370014453 0ustar Manifest-Version: 1.0 Ant-Version: Apache Ant 1.7.1 Created-By: 23.6-b04 (Oracle Corporation) Specification-Title: Eclipse Persistence Services Source Specification-Vendor: Eclipse.org - EclipseLink Project Specification-Version: 2.5.1 Implementation-Title: org.eclipse.persistence Implementation-Vendor: Eclipse.org - EclipseLink Project Implementation-Version: 2.5.1.v20130918-f2b9fc5 Release-Designation: EclipseLink 2.5.1 eclipselink-2.5.1.orig/about.html0000644000000000000000000002723412216173056013661 0ustar About EclipseLink 2.5

EclipseLink 2.5

Eclipse Kepler simultaneous release

Target Date - June 2013

About

The EclipseLink project's goal is to provide a complete persistence framework that is both comprehensive and universal. It will run in any Java environment and read and write objects to virtually any type of data source, including relational databases, and XML. EclipseLink will focus on providing leading edge support, including advanced feature extensions, for the dominant persistence standards for each target data source; Java Persistence API (JPA) for relational databases, Java API for XML Binding (JAXB) for XML, Service Data Objects (SDO), and Database Web services (DBWS).

For tips on getting started with EclipseLink, please see the following resources:

License

The Eclipse Foundation makes available all content in this plug-in ("Content"). Unless otherwise indicated below, the Content is provided to you under the terms and conditions of the Eclipse Public License Version 1.0 ("EPL") and Eclipse Distribution License Version 1.0 (“EDL”). A copy of the EPL is available at http://www.eclipse.org/legal/epl-v10.html and a copy of the EDL is available at http://www.eclipse.org/org/documents/edl-v10.php. For purposes of the EPL, "Program" will mean the Content.

If you did not receive this Content directly from the Eclipse Foundation, the Content is being redistributed by another party ("Redistributor") and different terms and conditions may apply to your use of any object code in the Content. Check the Redistributor’s license that was provided with the Content. If no such license exists, contact the Redistributor. Unless otherwise indicated below, the terms and conditions of the EPL and EDL still apply to any source code in the Content and such source code may be obtained at http://www.eclipse.org.

Third Party Content

The Content includes items that have been sourced from third parties as set out below. If you did not receive this Content directly from the Eclipse Foundation, the following is provided for informational purposes only, and you should look to the Redistributor’s license for terms and conditions of use.


ASM v3.3.1

The EclipseLink Project includes ASM for the purpose of byte code weaving. The AMS library is re-packaged within the source of the project (org.persistence.eclipse.internal.libraries.asm.*) to avoid version collisions with other usage of ASM.

The source code is available within the project's subversion repository. The binaries are distributed within the eclipselink.jar and in the org.eclipse.persistence.asm_*.jar bundle.

http://asm.objectweb.org/license.html

Copyright (c) 2000-2005 INRIA, France Telecom, All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

  1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
  2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
  3. Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THEIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSEARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BELIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, ORCONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OFSUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER INCONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OFTHE POSSIBILITY OF SUCH DAMAGE.

ANTLR v3.2

The ANTLR library (license) is included within EclipseLink Project to enable parsing of the Java Persistence Query language (JP QL). The ANTLR library is re-packaged within the project in the org.eclipse.persistence.internal.libraries.antlr.* packages.

The source is available with the project's subversion repository. The binaries are distributed within the eclipselink.jar and in the org.eclipse.persistence.antlr_3.2.0.v*.jar bundle.

Java Architecture for XML Binding (JAXB) v2.2

The JAXB libraries distributed under CDDL v1.0 are included within the EclipseLink Project to enable the MOXy component's implementation of JAXB.

JAXB Libraries:

Java Persistence (JPA) 2.1

EclipseLink is the Java Persistence (JPA) 2.1 Reference Implementation (JSR 338). The JPA 2.1 specification API is included in EclipseLink under the EPL and EDL licenses.

Java Mail v1.4.0

The Java Mail library (mail.jar) can be downloaded from java.sun.com under CDDL v1.0 and is included within the EclipseLink Project distribution to support Web Services attachment handling in the MOXy component. It is only required when using Java SE 5 (Java Mail is included in Java SE 6).

Java Activation Framework v1.1

The Java Activation Framework (activation.jar) is distributed with the JAXB v2.2 under CDDL v1.0 and is included within the EclipseLink Project distribution to support Web Services attachment handling in the MOXy component. It is only required when using Java SE 5 (The Java Activation Framework is included in Java SE 6).

Streaming API for XML (StAX) v1.0.1

The Streaming API for XML (StAX) library (jsr173_1.0_api.jar) is distributed with the JAXB v2.2 under CDDL v1.0 and is included within the EclipseLink Project distribution as an optional XML processing approach in the MOXy component.

Service Data Objects (SDO) v2.1.1

The Service Data Objects (SDO) API is distributed under a CDDLv1.0 and custom license. It provides the standard API implemented by the EclipseLink Project's SDO component.

Java Connector v1.5

The JCA 1.5 API is distributed under CDDLv1.0 .

This jar is being shipped and required by the Workbench only. When using EclipseLink in a container where JCA integration is required that container will provide the necessary API libraries.

Xerces v2.9.0

Xerces 2.9.0 is available from the Xerces home page. It is distributed under Apache 2.0.

This jar is shipped for the Workbench's use only in the reading and writing of XML configuration files.

Java Servlet 2.4

The Java Servlet 2.4 specification is distributed under the Apache 2.0 license. The source code is part of the Apache Tomcat project

This jar is only required at design time for the DBWS Builder utility.

WSDL4j v1.6.2

WSDL4J 1.6.2 is available for download from the wsdl4j project. It distributed under CPLv1.0 .

This jar is only required at design time for the DBWS Builder utility.

eclipselink-2.5.1.orig/org/0000755000000000000000000000000012216174372012442 5ustar eclipselink-2.5.1.orig/org/eclipse/0000755000000000000000000000000012216174372014066 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/0000755000000000000000000000000012216174372016412 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/0000775000000000000000000000000012216174372020232 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/Association.java0000664000000000000000000000373012216173130023343 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.*; /** *

Purpose: Generic association object. * This can be used to map hashtable/map containers where the key and value primitives or independent objects. * * @author James Sutherland * @since TOPLink/Java 3.0 */ public class Association implements Map.Entry { protected Object key; protected Object value; /** * Default constructor. */ public Association() { super(); } /** * PUBLIC: * Create an association. */ public Association(Object key, Object value) { this.key = key; this.value = value; } /** * PUBLIC: * Return the key. */ public Object getKey() { return key; } /** * PUBLIC: * Return the value. */ public Object getValue() { return value; } /** * PUBLIC: * Set the key. */ public void setKey(Object key) { this.key = key; } /** * PUBLIC: * Set the value. */ public Object setValue(Object value) { Object oldValue = this.value; this.value = value; return oldValue; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/VariableOneToOneMapping.java0000664000000000000000000012452412216173130025544 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.indirection.ValueHolder; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.mappings.foundation.AbstractDirectMapping; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.mappings.querykeys.*; /** *

<b>Purpose</b>: Variable one-to-one mappings are used to represent a pointer reference
 * between a java object and an implementer of an interface. This mapping is usually represented by a single pointer
 * (stored in an instance variable) between the source and target objects. In the relational
 * database tables, these mappings are normally implemented using a foreign key and a type code
 * (the type field) that identifies which implementer class the foreign key refers to.
 *
 * @author Sati
 * @since TOPLink/Java 2.0
 */
public class VariableOneToOneMapping extends ObjectReferenceMapping implements RelationalMapping {
    /** Field holding the type indicator that identifies the concrete implementer class of the reference. */
    protected DatabaseField typeField;

    /** Maps source foreign-key DatabaseFields to the target's abstract query-key names. */
    protected Map sourceToTargetQueryKeyNames;

    /** Bidirectional translation: implementer Class -> type indicator value AND indicator value -> Class. */
    protected Map typeIndicatorTranslation;

    /**
     * Parallel table to typeIndicatorTranslation keyed by class NAME instead of Class,
     * used prior to initialization to avoid classpath dependencies on the Mapping Workbench.
     */
    protected Map typeIndicatorNameTranslation;

    /**
     * PUBLIC:
     * Default constructor.
     */
    public VariableOneToOneMapping() {
        this.selectionQuery = new ReadObjectQuery();
        this.sourceToTargetQueryKeyNames = new HashMap(2);
        this.typeIndicatorTranslation = new HashMap(5);
        this.typeIndicatorNameTranslation = new HashMap(5);
        this.foreignKeyFields = NonSynchronizedVector.newInstance(1);
        // Right now only foreign-key relationships are supported; flipped to true
        // when a foreign query key name is added.
        this.isForeignKeyRelationship = false;
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isRelationalMapping() {
        return true;
    }

    /**
     * PUBLIC:
     * Add a type indicator conversion to this mapping.
     * Registers the pair in both directions (class -> indicator and indicator -> class)
     * so the one translation map serves reads and writes.
     */
    public void addClassIndicator(Class implementer, Object typeIndicator) {
        if (typeIndicator == null) {
            // Null indicators are stored under a sentinel so they remain usable map keys.
            typeIndicator = Helper.NULL_VALUE;
        }
        getTypeIndicatorTranslation().put(implementer, typeIndicator);
        getTypeIndicatorTranslation().put(typeIndicator, implementer);
    }

    /**
     * INTERNAL:
     * Add an indicator by class name (not Class).
     * For use by the Mapping Workbench to avoid classpath dependencies.
     */
    public void addClassNameIndicator(String className, Object typeIndicator) {
        if (typeIndicator == null) {
            // Same sentinel convention as addClassIndicator.
            typeIndicator = Helper.NULL_VALUE;
        }
        getTypeIndicatorNameTranslation().put(className, typeIndicator);
    }

    /**
     * PUBLIC:
     * A foreign key from the source table and an abstract query key from the interface descriptor
     * are added to the mapping. This method is used if there are multiple foreign keys.
     */
    public void addForeignQueryKeyName(DatabaseField sourceForeignKeyField, String targetQueryKeyName) {
        getSourceToTargetQueryKeyNames().put(sourceForeignKeyField, targetQueryKeyName);
        getForeignKeyFields().addElement(sourceForeignKeyField);
        this.setIsForeignKeyRelationship(true);
    }

    /**
     * PUBLIC:
     * A foreign key from the source table and an abstract query key from the interface descriptor
     * are added to the mapping. This method is used if there are multiple foreign keys.
     */
    public void addForeignQueryKeyName(String sourceForeignKeyFieldName, String targetQueryKeyName) {
        addForeignQueryKeyName(new DatabaseField(sourceForeignKeyFieldName), targetQueryKeyName);
    }

    /**
     * PUBLIC:
     * Define the target foreign key relationship in the Variable 1-1 mapping.
     * This method is used for composite target foreign key relationships,
     * that is the target object's table has multiple foreign key fields to
     * the source object's primary key fields.
     * Both the target foreign key query name and the source primary key field name
     * must be specified.
     * The distinction between a foreign key and target foreign key is that the variable 1-1
     * mapping will not populate the target foreign key value when written (because it is in the target table).
     * Normally 1-1's are through foreign keys but in bi-directional 1-1's
     * the back reference will be a target foreign key.
     * In obscure composite legacy data models a 1-1 may consist of a foreign key part and
     * a target foreign key part, in which case both methods will be called with the correct parts.
*/ public void addTargetForeignQueryKeyName(String targetForeignQueryKeyName, String sourcePrimaryKeyFieldName) { DatabaseField sourceField = new DatabaseField(sourcePrimaryKeyFieldName); getSourceToTargetQueryKeyNames().put(sourceField, targetForeignQueryKeyName); } /** * INTERNAL: * Possible for future development, not currently supported. * * Retrieve the value through using batch reading. * This executes a single query to read the target for all of the objects and stores the * result of the batch query in the original query to allow the other objects to share the results. */ @Override protected Object batchedValueFromRow(AbstractRecord row, ObjectLevelReadQuery query, CacheKey parentCacheKey) { throw QueryException.batchReadingNotSupported(this, query); } /** * INTERNAL: * This methods clones all the fields and ensures that each collection refers to * the same clones. */ @Override public Object clone() { VariableOneToOneMapping clone = (VariableOneToOneMapping)super.clone(); Map setOfKeys = new HashMap(getSourceToTargetQueryKeyNames().size()); Map sourceToTarget = new HashMap(getSourceToTargetQueryKeyNames().size()); Vector foreignKeys = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(getForeignKeyFields().size()); if (getTypeField() != null) { clone.setTypeField(this.getTypeField().clone()); } for (Iterator enumtr = getSourceToTargetQueryKeyNames().keySet().iterator(); enumtr.hasNext();) { // Clone the SourceKeyFields DatabaseField field = (DatabaseField)enumtr.next(); DatabaseField clonedField = field.clone(); setOfKeys.put(field, clonedField); // on the next line I'm cloning the query key names sourceToTarget.put(clonedField, getSourceToTargetQueryKeyNames().get(field)); } for (Enumeration enumtr = getForeignKeyFields().elements(); enumtr.hasMoreElements();) { DatabaseField field = (DatabaseField)enumtr.nextElement(); foreignKeys.addElement(setOfKeys.get(field)); } clone.setSourceToTargetQueryKeyFields(sourceToTarget); 
clone.setForeignKeyFields(foreignKeys); clone.setTypeIndicatorTranslation(new HashMap(this.getTypeIndicatorTranslation())); return clone; } /** * INTERNAL: * Return all the fields populated by this mapping. */ @Override protected Vector collectFields() { DatabaseField type = getTypeField(); //Get a shallow copy of the Vector if (type != null) { Vector sourceFields = (Vector)getForeignKeyFields().clone(); sourceFields.addElement(type); return sourceFields; } else { return getForeignKeyFields(); } } /** * INTERNAL: * Compare the references of the two objects are the same, not the objects themselves. * Used for independent relationships. * This is used for testing and validation purposes. * * Must get separate fields for the objects because we may be adding a different class to the * attribute because of the interface */ @Override protected boolean compareObjectsWithoutPrivateOwned(Object firstObject, Object secondObject, AbstractSession session) { Object firstPrivateObject = getRealAttributeValueFromObject(firstObject, session); Object secondPrivateObject = getRealAttributeValueFromObject(secondObject, session); if ((firstPrivateObject == null) && (secondPrivateObject == null)) { return true; } if ((firstPrivateObject == null) || (secondPrivateObject == null)) { return false; } if (firstPrivateObject.getClass() != secondPrivateObject.getClass()) { return false; } Iterator targetKeys = getSourceToTargetQueryKeyNames().values().iterator(); ClassDescriptor descriptor = session.getDescriptor(firstPrivateObject.getClass()); ClassDescriptor descriptor2 = session.getDescriptor(secondPrivateObject.getClass()); while (targetKeys.hasNext()) { String queryKey = (String)targetKeys.next(); DatabaseField field = descriptor.getObjectBuilder().getFieldForQueryKeyName(queryKey); Object firstObjectField = descriptor.getObjectBuilder().extractValueFromObjectForField(firstPrivateObject, field, session); DatabaseField field2 = 
descriptor2.getObjectBuilder().getFieldForQueryKeyName(queryKey); Object secondObjectField = descriptor2.getObjectBuilder().extractValueFromObjectForField(secondPrivateObject, field2, session); if (!((firstObjectField == null) && (secondObjectField == null))) { if ((firstObjectField == null) || (secondObjectField == null)) { return false; } if (!firstObjectField.equals(secondObjectField)) { return false; } } } return true; } /** * INTERNAL: * Return the class indicator associations for XML. * List of class-name/value associations. */ public Vector getClassIndicatorAssociations() { Vector associations = new Vector(); Iterator classesEnum = getTypeIndicatorNameTranslation().keySet().iterator(); Iterator valuesEnum = getTypeIndicatorNameTranslation().values().iterator(); while (classesEnum.hasNext()) { Object className = classesEnum.next(); // If the project was built in runtime is a class, MW is a string. if (className instanceof Class) { className = ((Class)className).getName(); } Object value = valuesEnum.next(); associations.addElement(new TypedAssociation(className, value)); } return associations; } /** * INTERNAL: * Return a descriptor for the target of this mapping * For normal ObjectReferenceMappings, we return the reference descriptor. For * a VariableOneToOneMapping, the reference descriptor is often a descriptor for an * interface and does not contain adequate information. As a result, we look up * the descriptor for the specific class we are looking for * Bug 2612571 */ @Override public ClassDescriptor getDescriptorForTarget(Object targetObject, AbstractSession session) { return session.getDescriptor(targetObject); } /** * INTERNAL: * Return the classification for the field contained in the mapping. * This is used to convert the row value to a consistent java value. 
*/
    @Override
    public Class getFieldClassification(DatabaseField fieldToClassify) {
        // The type indicator field classifies to its own configured type.
        if ((getTypeField() != null) && (fieldToClassify.equals(getTypeField()))) {
            return getTypeField().getType();
        }
        String queryKey = (String)getSourceToTargetQueryKeyNames().get(fieldToClassify);
        if (queryKey == null) {
            return null;
        }
        // Search any of the implementor descriptors for a mapping for the query-key;
        // the first child is assumed representative of the shared key's type.
        Iterator iterator = getReferenceDescriptor().getInterfacePolicy().getChildDescriptors().iterator();
        if (iterator.hasNext()) {
            ClassDescriptor firstChild = (ClassDescriptor)iterator.next();
            DatabaseMapping mapping = firstChild.getObjectBuilder().getMappingForAttributeName(queryKey);
            if ((mapping != null) && (mapping.isDirectToFieldMapping())) {
                return ((AbstractDirectMapping)mapping).getAttributeClassification();
            }
            QueryKey targetQueryKey = firstChild.getQueryKeyNamed(queryKey);
            if ((targetQueryKey != null) && (targetQueryKey.isDirectQueryKey())) {
                return firstChild.getObjectBuilder().getFieldClassification(((DirectQueryKey)targetQueryKey).getField());
            }
        }
        return null;
    }

    /**
     * PUBLIC:
     * Return the foreign key field names associated with the mapping.
     * These are only the source fields that are writable.
     */
    public Vector getForeignKeyFieldNames() {
        Vector fieldNames = new Vector(getForeignKeyFields().size());
        for (Enumeration fieldsEnum = getForeignKeyFields().elements(); fieldsEnum.hasMoreElements();) {
            fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName());
        }
        return fieldNames;
    }

    /**
     * INTERNAL:
     * Return the implementor Class registered for the specified type indicator value.
     */
    protected Object getImplementorForType(Object type, AbstractSession session) {
        if (type == null) {
            // Null indicators are stored under the NULL_VALUE sentinel.
            return getTypeIndicatorTranslation().get(Helper.NULL_VALUE);
        }
        // Must ensure the type is the same, i.e. Integer != BigDecimal.
        try {
            type = session.getDatasourcePlatform().convertObject(type, getTypeField().getType());
        } catch (ConversionException e) {
            throw ConversionException.couldNotBeConverted(this, getDescriptor(), e);
        }
        return getTypeIndicatorTranslation().get(type);
    }

    /**
     * PUBLIC:
     * Return a collection of the field to query key associations.
     * Iterates the entry set (rather than keySet() and values() in parallel, whose
     * relative order the Map contract does not guarantee) so each field is paired
     * with its own query-key name.
     */
    public Vector getSourceToTargetQueryKeyFieldAssociations() {
        Vector associations = new Vector(getSourceToTargetQueryKeyNames().size());
        Iterator entries = getSourceToTargetQueryKeyNames().entrySet().iterator();
        while (entries.hasNext()) {
            Map.Entry entry = (Map.Entry)entries.next();
            Object fieldValue = ((DatabaseField)entry.getKey()).getQualifiedName();
            associations.addElement(new Association(fieldValue, entry.getValue()));
        }
        return associations;
    }

    /**
     * INTERNAL:
     * Returns the source keys to target keys fields association.
     */
    public Map getSourceToTargetQueryKeyNames() {
        return sourceToTargetQueryKeyNames;
    }

    public DatabaseField getTypeField() {
        return typeField;
    }

    /**
     * PUBLIC:
     * This method returns the name of the typeField of the mapping.
     * The type field is used to store the type of object the relationship is referencing.
     */
    public String getTypeFieldName() {
        if (getTypeField() == null) {
            return null;
        }
        return getTypeField().getQualifiedName();
    }

    /**
     * INTERNAL:
     * Return the type indicator value registered for the specified implementor class.
     * The NULL_VALUE sentinel is translated back to null.
     */
    protected Object getTypeForImplementor(Class implementor) {
        Object type = getTypeIndicatorTranslation().get(implementor);
        if (type == Helper.NULL_VALUE) {
            type = null;
        }
        return type;
    }

    /**
     * INTERNAL:
     * Return the type indicators.
*/
    public Map getTypeIndicatorTranslation() {
        return typeIndicatorTranslation;
    }

    /**
     * INTERNAL:
     * Return the typeIndicatorName translation.
     * Used by the Mapping Workbench to avoid classpath dependencies.
     * Lazily populated from typeIndicatorTranslation; iterates the entry set
     * (rather than keySet() and values() in parallel, whose relative order the
     * Map contract does not guarantee) so each class name is paired with its
     * own indicator value. Only Class keys are copied — the reverse
     * (indicator -> Class) entries are skipped.
     */
    public Map getTypeIndicatorNameTranslation() {
        if (typeIndicatorNameTranslation.isEmpty() && !typeIndicatorTranslation.isEmpty()) {
            Iterator entries = typeIndicatorTranslation.entrySet().iterator();
            while (entries.hasNext()) {
                Map.Entry entry = (Map.Entry)entries.next();
                if (entry.getKey() instanceof Class) {
                    typeIndicatorNameTranslation.put(((Class)entry.getKey()).getName(), entry.getValue());
                }
            }
        }
        return typeIndicatorNameTranslation;
    }

    /**
     * INTERNAL:
     * Convert all the class-name-based settings in this mapping to actual class-based
     * settings. This method is used when converting a project that has been built
     * with class names to a project with classes.
     *
     * @throws ValidationException if a class name cannot be resolved by the given loader.
     */
    @Override
    public void convertClassNamesToClasses(ClassLoader classLoader) {
        super.convertClassNamesToClasses(classLoader);
        Iterator iterator = getTypeIndicatorNameTranslation().entrySet().iterator();
        // Rebuilt from scratch: addClassIndicator repopulates both directions.
        this.typeIndicatorTranslation = new HashMap();
        while (iterator.hasNext()) {
            Map.Entry entry = (Map.Entry)iterator.next();
            String referenceClassName = (String)entry.getKey();
            Object indicator = entry.getValue();
            Class referenceClass = null;
            try {
                if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) {
                    try {
                        referenceClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(referenceClassName, true, classLoader));
                    } catch (PrivilegedActionException exception) {
                        // Unwrap the underlying ClassNotFoundException for the validation error.
                        throw ValidationException.classNotFoundWhileConvertingClassNames(referenceClassName, exception.getException());
                    }
                } else {
                    referenceClass = PrivilegedAccessHelper.getClassForName(referenceClassName, true, classLoader);
                }
            } catch (ClassNotFoundException exception) {
                throw ValidationException.classNotFoundWhileConvertingClassNames(referenceClassName, exception);
            }
            addClassIndicator(referenceClass, indicator);
        }
    }

    /**
     * INTERNAL:
     * Initialize the mapping: resolve foreign key fields against the descriptor,
     * collect the full field set, and build the selection criteria if required.
     */
    @Override
    public void initialize(AbstractSession session) {
        super.initialize(session);
        initializeForeignKeys(session);
        setFields(collectFields());
        if (usesIndirection()) {
            for (DatabaseField field : this.fields) {
                field.setKeepInRow(true);
            }
        }
        if (getTypeField() != null) {
            setTypeField(getDescriptor().buildField(getTypeField()));
        }
        if (shouldInitializeSelectionCriteria()) {
            initializeSelectionCriteria(session);
        }
    }

    /**
     * INTERNAL:
     * The foreign key names and their primary keys are converted to DatabaseField and stored.
     * The map is rebuilt because buildField may return a different (qualified) field instance.
     */
    protected void initializeForeignKeys(AbstractSession session) {
        HashMap newSourceToTargetQueryKeyNames = new HashMap(getSourceToTargetQueryKeyNames().size());
        Iterator iterator = getSourceToTargetQueryKeyNames().entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry entry = (Map.Entry)iterator.next();
            DatabaseField field = getDescriptor().buildField((DatabaseField)entry.getKey());
            newSourceToTargetQueryKeyNames.put(field, entry.getValue());
        }
        this.sourceToTargetQueryKeyNames = newSourceToTargetQueryKeyNames;
    }

    /**
     * INTERNAL:
     * Selection criteria is created with source foreign keys and target keys.
     * This criteria is then used to read target records from the table.
*/
    public void initializeSelectionCriteria(AbstractSession session) {
        Expression selectionCriteria = null;
        Expression expression;

        ExpressionBuilder expBuilder = new ExpressionBuilder();

        Iterator sourceKeysEnum = getSourceToTargetQueryKeyNames().keySet().iterator();

        while (sourceKeysEnum.hasNext()) {
            DatabaseField sourceKey = (DatabaseField)sourceKeysEnum.next();
            String target = (String)this.getSourceToTargetQueryKeyNames().get(sourceKey);
            // Each pair contributes "targetQueryKey = :sourceKeyParameter"; pairs are AND-ed together.
            expression = expBuilder.getParameter(sourceKey).equal(expBuilder.get(target));
            if (selectionCriteria == null) {
                selectionCriteria = expression;
            } else {
                selectionCriteria = expression.and(selectionCriteria);
            }
        }

        setSelectionCriteria(selectionCriteria);
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isVariableOneToOneMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Return the primary key of the target object.
     */
    @Override
    protected Object getPrimaryKeyForObject(Object object, AbstractSession session) {
        return session.getId(object);
    }

    /**
     * INTERNAL:
     * Set the type field classification through searching the indicators hashtable.
     * The first non-Class, non-null indicator value found determines the Java type
     * of the type field (the Class values are the reverse-direction entries).
     */
    @Override
    public void preInitialize(AbstractSession session) throws DescriptorException {
        super.preInitialize(session);

        if (getTypeIndicatorTranslation().isEmpty()) {
            return;
        }

        Class type = null;

        for (Iterator typeValuesEnum = getTypeIndicatorTranslation().values().iterator();
                 typeValuesEnum.hasNext() && (type == null);) {
            Object value = typeValuesEnum.next();
            if ((value != Helper.NULL_VALUE) && (!(value instanceof Class))) {
                type = value.getClass();
            }
        }

        getTypeField().setType(type);
    }

    /**
     * INTERNAL:
     * Rehash any maps based on fields.
     * This is used to clone descriptors for aggregates, which hammer field names.
     */
    @Override
    public void rehashFieldDependancies(AbstractSession session) {
        setSourceToTargetQueryKeyFields(Helper.rehashMap(getSourceToTargetQueryKeyNames()));
    }

    /**
     * PUBLIC:
     * Set the class indicator associations.
     * Accepts both Class keys (projects built at runtime, e.g. 904 projects)
     * and String class-name keys (projects built by the Mapping Workbench).
     */
    public void setClassIndicatorAssociations(Vector classIndicatorAssociations) {
        setTypeIndicatorNameTranslation(new HashMap(classIndicatorAssociations.size() + 1));
        // Times two: the translation map holds an entry in each direction per association.
        setTypeIndicatorTranslation(new HashMap((classIndicatorAssociations.size() * 2) + 1));
        for (Enumeration associationsEnum = classIndicatorAssociations.elements();
                 associationsEnum.hasMoreElements();) {
            Association association = (Association)associationsEnum.nextElement();
            Object classValue = association.getKey();
            if (classValue instanceof Class) {
                // 904 projects will be a class type.
                addClassIndicator((Class)association.getKey(), association.getValue());
            } else {
                addClassNameIndicator((String)association.getKey(), association.getValue());
            }
        }
    }

    /**
     * PUBLIC:
     * Set the foreign key field names associated with the mapping.
     * These are only the source fields that are writable.
     */
    public void setForeignKeyFieldNames(Vector fieldNames) {
        Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
        for (Enumeration fieldNamesEnum = fieldNames.elements(); fieldNamesEnum.hasMoreElements();) {
            fields.addElement(new DatabaseField((String)fieldNamesEnum.nextElement()));
        }

        setForeignKeyFields(fields);
        if (!fields.isEmpty()) {
            setIsForeignKeyRelationship(true);
        }
    }

    /**
     * PUBLIC:
     * A foreign key from the source table and abstract query key from the interface descriptor are added to the
     * mapping. This method is used if foreign key is not composite.
     */
    public void setForeignQueryKeyName(String sourceForeignKeyFieldName, String targetQueryKeyName) {
        addForeignQueryKeyName(sourceForeignKeyFieldName, targetQueryKeyName);
    }

    /**
     * PUBLIC:
     * Set a collection of the source to target query key/field associations.
*/ public void setSourceToTargetQueryKeyFieldAssociations(Vector sourceToTargetQueryKeyFieldAssociations) { setSourceToTargetQueryKeyFields(new HashMap(sourceToTargetQueryKeyFieldAssociations.size() + 1)); for (Enumeration associationsEnum = sourceToTargetQueryKeyFieldAssociations.elements(); associationsEnum.hasMoreElements();) { Association association = (Association)associationsEnum.nextElement(); Object sourceField = new DatabaseField((String)association.getKey()); String targetQueryKey = (String)association.getValue(); getSourceToTargetQueryKeyNames().put(sourceField, targetQueryKey); } } /** * INTERNAL: * Set the source keys to target keys fields association. */ protected void setSourceToTargetQueryKeyFields(Map sourceToTargetQueryKeyNames) { this.sourceToTargetQueryKeyNames = sourceToTargetQueryKeyNames; } /** * INTERNAL: * This method set the typeField of the mapping to the parameter field */ public void setTypeField(DatabaseField typeField) { this.typeField = typeField; } /** * PUBLIC: * This method sets the name of the typeField of the mapping. * The type field is used to store the type of object the relationship is referencing. */ public void setTypeFieldName(String typeFieldName) { setTypeField(new DatabaseField(typeFieldName)); } /** * INTERNAL: * Set the typeIndicatorTranslations hashtable to the new Hashtable translations */ protected void setTypeIndicatorTranslation(Map translations) { this.typeIndicatorTranslation = translations; } /** * INTERNAL: * For avoiding classpath dependencies on the Mapping Workbench */ protected void setTypeIndicatorNameTranslation(Map translations) { this.typeIndicatorNameTranslation = translations; } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. */ @Override public Object valueFromObject(Object object, DatabaseField field, AbstractSession session) { // First check if the value can be obtained from the value holder's row. 
AbstractRecord referenceRow = getIndirectionPolicy().extractReferenceRow(getAttributeValueFromObject(object)); if (referenceRow != null) { Object value = referenceRow.get(field); // Must ensure the classification to get a cache hit. try { value = session.getDatasourcePlatform().convertObject(value, getFieldClassification(field)); } catch (ConversionException e) { throw ConversionException.couldNotBeConverted(this, getDescriptor(), e); } return value; } //2.5.1.6 PWK. added to support batch reading on variable one to ones Object referenceObject = getRealAttributeValueFromObject(object, session); String queryKeyName = (String)getSourceToTargetQueryKeyNames().get(field); ClassDescriptor objectDescriptor = session.getDescriptor(referenceObject.getClass()); DatabaseField targetField = objectDescriptor.getObjectBuilder().getTargetFieldForQueryKeyName(queryKeyName); if (targetField == null) { // Bug 326091 - return the type value if the field passed is the type indicator field if (referenceObject != null && this.typeField != null && field.equals(this.typeField)) { return getTypeForImplementor(referenceObject.getClass()); } else { return null; } } return objectDescriptor.getObjectBuilder().extractValueFromObjectForField(referenceObject, targetField, session); } /** * INTERNAL: * Return the value of the field from the row or a value holder on the query to obtain the object. * Check for batch + aggregation reading. 
*/ @Override public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey cacheKey, AbstractSession executionSession, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException { if (this.descriptor.getCachePolicy().isProtectedIsolation()) { if (this.isCacheable && isTargetProtected && cacheKey != null) { //cachekey will be null when isolating to uow //used cached collection Object result = null; Object cached = cacheKey.getObject(); if (cached != null) { if (wasCacheUsed != null){ wasCacheUsed[0] = Boolean.TRUE; } return this.getAttributeValueFromObject(cached); } } else if (!this.isCacheable && !isTargetProtected && cacheKey != null) { return this.indirectionPolicy.buildIndirectObject(new ValueHolder(null)); } } if (row.hasSopObject()) { return getAttributeValueFromObject(row.getSopObject()); } // If any field in the foreign key is null then it means there are no referenced objects for (DatabaseField field : getFields()) { if (row.get(field) == null) { return getIndirectionPolicy().nullValueFromRow(); } } if (getTypeField() != null) { // If the query used batched reading, return a special value holder, // or retrieve the object from the query property. 
if (sourceQuery.isObjectLevelReadQuery() && (((ObjectLevelReadQuery)sourceQuery).isAttributeBatchRead(this.descriptor, getAttributeName()) || (sourceQuery.isReadAllQuery() && shouldUseBatchReading()))) { return batchedValueFromRow(row, ((ObjectLevelReadQuery)sourceQuery), cacheKey); } //If the field is empty we cannot load the object because we do not know what class it will be if (row.get(getTypeField()) == null) { return getIndirectionPolicy().nullValueFromRow(); } Class implementerClass = (Class)getImplementorForType(row.get(getTypeField()), executionSession); ReadObjectQuery query = (ReadObjectQuery)getSelectionQuery().clone(); query.setReferenceClass(implementerClass); query.setSelectionCriteria(getSelectionCriteria()); query.setDescriptor(null);// Must set to null so the right descriptor is used if (sourceQuery.isObjectLevelReadQuery() && (sourceQuery.shouldCascadeAllParts() || (sourceQuery.shouldCascadePrivateParts() && isPrivateOwned()) || (sourceQuery.shouldCascadeByMapping() && this.cascadeRefresh)) ) { query.setShouldRefreshIdentityMapResult(sourceQuery.shouldRefreshIdentityMapResult()); query.setCascadePolicy(sourceQuery.getCascadePolicy()); query.setShouldMaintainCache(sourceQuery.shouldMaintainCache()); // For flashback. if (((ObjectLevelReadQuery)sourceQuery).hasAsOfClause()) { query.setAsOfClause(((ObjectLevelReadQuery)sourceQuery).getAsOfClause()); } //CR #4365 - used to prevent infinit recursion on refresh object cascade all query.setQueryId(sourceQuery.getQueryId()); } return getIndirectionPolicy().valueFromQuery(query, row, executionSession); } else { return super.valueFromRow(row, joinManager, sourceQuery, cacheKey, executionSession, isTargetProtected, wasCacheUsed); } } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. 
*/ protected void writeFromNullObjectIntoRow(AbstractRecord record) { if (isReadOnly()) { return; } if (isForeignKeyRelationship()) { Enumeration foreignKeys = getForeignKeyFields().elements(); while (foreignKeys.hasMoreElements()) { record.put((DatabaseField)foreignKeys.nextElement(), null); // EL Bug 319759 - if a field is null, then the update call cache should not be used record.setNullValueInFields(true); } } if (getTypeField() != null) { record.put(getTypeField(), null); record.setNullValueInFields(true); } } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. * If the mapping id target foreign key, you must only write the type into the roe, the rest will be updated * when the object itself is written */ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord record, AbstractSession session, WriteType writeType) { if (isReadOnly()) { return; } Object referenceObject = getRealAttributeValueFromObject(object, session); if (referenceObject == null) { writeFromNullObjectIntoRow(record); } else { if (isForeignKeyRelationship()) { Enumeration sourceFields = getForeignKeyFields().elements(); ClassDescriptor descriptor = session.getDescriptor(referenceObject.getClass()); while (sourceFields.hasMoreElements()) { DatabaseField sourceKey = (DatabaseField)sourceFields.nextElement(); String targetQueryKey = (String)getSourceToTargetQueryKeyNames().get(sourceKey); DatabaseField targetKeyField = descriptor.getObjectBuilder().getFieldForQueryKeyName(targetQueryKey); if (targetKeyField == null) { throw DescriptorException.variableOneToOneMappingIsNotDefinedProperly(this, descriptor, targetQueryKey); } Object referenceValue = descriptor.getObjectBuilder().extractValueFromObjectForField(referenceObject, targetKeyField, session); // EL Bug 319759 - if a field is null, then the update call cache should not be used if (referenceValue == null) { record.setNullValueInFields(true); } record.put(sourceKey, referenceValue); 
} } if (getTypeField() != null) { record.put(getTypeField(), getTypeForImplementor(referenceObject.getClass())); } } } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. * If the mapping id target foreign key, you must only write the type into the roe, the rest will be updated * when the object itself is written */ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord record, AbstractSession session, WriteType writeType) { if (isReadOnly()) { return; } ObjectChangeSet changeSet = (ObjectChangeSet)((ObjectReferenceChangeRecord)changeRecord).getNewValue(); if (changeSet == null) { writeFromNullObjectIntoRow(record); } else { Object referenceObject = changeSet.getUnitOfWorkClone(); if (isForeignKeyRelationship()) { Enumeration sourceFields = getForeignKeyFields().elements(); ClassDescriptor descriptor = session.getDescriptor(referenceObject.getClass()); while (sourceFields.hasMoreElements()) { DatabaseField sourceKey = (DatabaseField)sourceFields.nextElement(); String targetQueryKey = (String)getSourceToTargetQueryKeyNames().get(sourceKey); DatabaseField targetKeyField = descriptor.getObjectBuilder().getFieldForQueryKeyName(targetQueryKey); if (targetKeyField == null) { throw DescriptorException.variableOneToOneMappingIsNotDefinedProperly(this, descriptor, targetQueryKey); } Object referenceValue = descriptor.getObjectBuilder().extractValueFromObjectForField(referenceObject, targetKeyField, session); // EL Bug 319759 - if a field is null, then the update call cache should not be used if (referenceValue == null) { record.setNullValueInFields(true); } record.put(sourceKey, referenceValue); } } if (getTypeField() != null) { record.put(getTypeField(), getTypeForImplementor(referenceObject.getClass())); } } } /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. * The foreign keys must be set to null to avoid constraints. 
*/
    @Override
    public void writeFromObjectIntoRowForShallowInsert(Object object, AbstractRecord record, AbstractSession session) {
        writeFromNullObjectIntoRow(record);
    }

    /**
     * INTERNAL:
     * This row is built for update after shallow insert which happens in case of bidirectional inserts.
     * It contains the foreign keys with non null values that were set to null for shallow insert.
     * If mapping overrides writeFromObjectIntoRowForShallowInsert method it must override this one, too.
     */
    public void writeFromObjectIntoRowForUpdateAfterShallowInsert(Object object, AbstractRecord row, AbstractSession session, DatabaseTable table) {
        // Only write when this mapping's fields belong to the table being updated.
        if (!getFields().get(0).getTable().equals(table)) {
            return;
        }
        writeFromObjectIntoRow(object, row, session, WriteType.UPDATE);
    }

    /**
     * INTERNAL:
     * This row is built for shallow insert which happens in case of bidirectional inserts.
     * The foreign keys must be set to null to avoid constraints.
     */
    @Override
    public void writeFromObjectIntoRowForShallowInsertWithChangeRecord(ChangeRecord changeRecord, AbstractRecord record, AbstractSession session) {
        writeFromNullObjectIntoRow(record);
    }

    /**
     * INTERNAL:
     * Get a value from the object and set that in the respective field of the row,
     * for use in a WHERE clause (delete uses the object itself, otherwise the backup clone).
     */
    @Override
    public void writeFromObjectIntoRowForWhereClause(ObjectLevelModifyQuery query, AbstractRecord record) {
        if (isReadOnly()) {
            return;
        }
        Object object;
        if (query.isDeleteObjectQuery()) {
            object = query.getObject();
        } else {
            // For updates, compare against the unmodified backup clone's values.
            object = query.getBackupClone();
        }
        Object referenceObject = getRealAttributeValueFromObject(object, query.getSession());
        if (referenceObject == null) {
            writeFromNullObjectIntoRow(record);
        } else {
            if (isForeignKeyRelationship()) {
                Enumeration sourceFields = getForeignKeyFields().elements();
                ClassDescriptor descriptor = query.getSession().getDescriptor(referenceObject.getClass());
                while (sourceFields.hasMoreElements()) {
                    DatabaseField sourceKey = (DatabaseField)sourceFields.nextElement();
                    String targetQueryKey = (String)getSourceToTargetQueryKeyNames().get(sourceKey);
                    DatabaseField targetKeyField = descriptor.getObjectBuilder().getFieldForQueryKeyName(targetQueryKey);
                    if (targetKeyField == null) {
                        throw DescriptorException.variableOneToOneMappingIsNotDefinedProperly(this, descriptor, targetQueryKey);
                    }
                    Object referenceValue = descriptor.getObjectBuilder().extractValueFromObjectForField(referenceObject, targetKeyField, query.getSession());
                    if (referenceValue == null) {
                        // EL Bug 319759 - if a field is null, then the update call cache should not be used
                        record.setNullValueInFields(true);
                    }
                    record.put(sourceKey, referenceValue);
                }
            }
            if (getTypeField() != null) {
                Object typeForImplementor = getTypeForImplementor(referenceObject.getClass());
                record.put(getTypeField(), typeForImplementor);
            }
        }
    }

    /**
     * INTERNAL:
     * Write fields needed for insert into the template with null values.
*/ @Override public void writeInsertFieldsIntoRow(AbstractRecord record, AbstractSession session) { writeFromNullObjectIntoRow(record); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/converters/0000775000000000000000000000000012216174372022424 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/converters/ClassInstanceConverter.java0000664000000000000000000001103112216173130027674 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.converters; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.mappings.foundation.AbstractDirectMapping; import org.eclipse.persistence.sessions.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import org.eclipse.persistence.internal.sessions.AbstractSession; /** * Purpose: Allows a class name to be converted to and from a new instance of the class. 
* * When using a ClassInstanceConverter, the database will store the Class name and the java object * model will contain an instance of that class initialized with its default constructor * * @author James Sutherland * @since OracleAS TopLink 10g (10.0.3) */ public class ClassInstanceConverter implements Converter { protected DatabaseMapping mapping; /** * PUBLIC: * Default constructor. */ public ClassInstanceConverter() { } /** * INTERNAL: * Convert the class name to a class, then create an instance of the class. */ public Object convertDataValueToObjectValue(Object fieldValue, Session session) { Object attributeValue = null; if (fieldValue != null) { Class attributeClass = (Class)((AbstractSession)session).getDatasourcePlatform().convertObject(fieldValue, ClassConstants.CLASS); try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { attributeValue = AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(attributeClass)); } catch (PrivilegedActionException exception) { throw ConversionException.couldNotBeConverted(fieldValue, attributeClass, exception.getException()); } } else { attributeValue = PrivilegedAccessHelper.newInstanceFromClass(attributeClass); } } catch (Exception exception) { throw ConversionException.couldNotBeConverted(fieldValue, attributeClass, exception); } } return attributeValue; } /** * INTERNAL: * Convert to the field class. */ public Object convertObjectValueToDataValue(Object attributeValue, Session session) { if (attributeValue == null) { return null; } return attributeValue.getClass().getName(); } /** * INTERNAL: * Set the mapping. */ public void initialize(DatabaseMapping mapping, Session session) { this.mapping = mapping; // CR#... Mapping must also have the field classification. if (getMapping().isDirectToFieldMapping()) { AbstractDirectMapping directMapping = (AbstractDirectMapping)getMapping(); // Allow user to specify field type to override computed value. (i.e. 
blob, nchar) if (directMapping.getFieldClassification() == null) { directMapping.setFieldClassification(ClassConstants.STRING); } } } /** * INTERNAL: * Return the mapping. */ protected DatabaseMapping getMapping() { return mapping; } /** * INTERNAL: * If the converter converts the value to a non-atomic value, i.e. * a value that can have its' parts changed without being replaced, * then it must return false, serialization can be non-atomic. */ public boolean isMutable() { return false; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/converters/EnumTypeConverter.java0000664000000000000000000001516012216173130026717 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * * 30/05/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing ******************************************************************************/ package org.eclipse.persistence.mappings.converters; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.EnumSet; import java.util.Iterator; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; /** * Purpose: Object type converter is used to match a fixed number of * database data values to a Java enum object value. It can be used when the * values on the database and in the Java differ. To create an object type * converter, simply specify the set of conversion value pairs. A default value * and one-way conversion are also supported for legacy data situations. * * @author Guy Pelletier * @since Toplink 10.1.4RI */ public class EnumTypeConverter extends ObjectTypeConverter { private Class m_enumClass; private String m_enumClassName; private boolean m_useOrdinalValues; /** * PUBLIC: * Creating an enum converter this way will create the conversion values * for you using ordinal or name values. */ public EnumTypeConverter(DatabaseMapping mapping, Class enumClass, boolean useOrdinalValues) { super(mapping); m_enumClass = enumClass; m_enumClassName = enumClass.getName(); m_useOrdinalValues = useOrdinalValues; initializeConversions(m_enumClass); } /** * PUBLIC: * Creating an enum converter this way will create the conversion values * for you using ordinal or name values. 
*/ public EnumTypeConverter(DatabaseMapping mapping, String enumClassName, boolean useOrdinalValues) { this(mapping, enumClassName); m_useOrdinalValues = useOrdinalValues; } /** * PUBLIC: * Creating an enum converter this way expects that you will provide * the conversion values separately. */ public EnumTypeConverter(DatabaseMapping mapping, String enumClassName) { super(mapping); m_enumClassName = enumClassName; } protected void initializeConversions(Class enumClass) { // Initialize conversion if not already set by Converter if (getFieldToAttributeValues().isEmpty()) { EnumSet theEnums = EnumSet.allOf(enumClass); Iterator i = theEnums.iterator(); while (i.hasNext()) { Enum theEnum = i.next(); if (m_useOrdinalValues) { addConversionValue(theEnum.ordinal(), theEnum.name()); } else { addConversionValue(theEnum.name(), theEnum.name()); } } } } public Class getEnumClass() { return m_enumClass; } public String getEnumClassName() { return m_enumClassName; } /** * INTERNAL: * Convert all the class-name-based settings in this converter to actual * class-based settings. This method is used when converting a project * that has been built with class names to a project with classes. 
* @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader) { super.convertClassNamesToClasses(classLoader); // convert if enumClass is null or if different classLoader if (m_enumClass == null || (m_enumClass != null && !m_enumClass.getClassLoader().equals(classLoader))) { try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { m_enumClass = (Class)AccessController.doPrivileged( new PrivilegedClassForName(m_enumClassName, true, classLoader)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames( m_enumClassName, exception.getException()); } } else { m_enumClass = PrivilegedAccessHelper.getClassForName(m_enumClassName, true, classLoader); } } catch (ClassNotFoundException exception){ throw ValidationException.classNotFoundWhileConvertingClassNames(m_enumClassName, exception); } } initializeConversions(m_enumClass); } /** * INTERNAL: * Returns the corresponding attribute value for the specified field value. * Wraps the super method to return an Enum type from the string conversion. */ public Object convertDataValueToObjectValue(Object fieldValue, Session session) { Object obj = super.convertDataValueToObjectValue(fieldValue, session); if (fieldValue == null || obj == null) { return obj; } else { return Enum.valueOf(m_enumClass, (String) obj); } } /** * INTERNAL: * Convert Enum object to the data value. Internal enums are stored as * strings (names) so this method wraps the super method in that if * breaks down the enum to a string name before converting it. 
*/ public Object convertObjectValueToDataValue(Object attributeValue, Session session) { if (attributeValue == null) { return super.convertObjectValueToDataValue(null, session); } else { return super.convertObjectValueToDataValue(((Enum)attributeValue).name(), session); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/converters/ObjectTypeConverter.java0000664000000000000000000004773312216173130027234 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * * 30/05/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing ******************************************************************************/ package org.eclipse.persistence.mappings.converters; import java.lang.reflect.Constructor; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.*; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.descriptors.TypeMapping; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.mappings.foundation.AbstractDirectMapping; import org.eclipse.persistence.sessions.*; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.security.PrivilegedGetConstructorFor; import 
org.eclipse.persistence.internal.security.PrivilegedInvokeConstructor; import org.eclipse.persistence.internal.sessions.AbstractSession; /** * Purpose: Object type converter is used to match a fixed number of database data values * to Java object value. It can be used when the values on the database and in the Java differ. * To create an object type converter, simply specify the set of conversion value pairs. * A default value and one-way conversion are also supported for legacy data situations. * * @author James Sutherland * @since Toplink 10 */ public class ObjectTypeConverter implements Converter { // String type names and values set from JPA processing. protected String converterName; protected Class dataType; protected String dataTypeName; protected Class objectType; protected String objectTypeName; protected Map conversionValueStrings; protected Map addToAttributeOnlyConversionValueStrings; protected DatabaseMapping mapping; protected transient Map fieldToAttributeValues; protected Map attributeToFieldValues; protected transient Object defaultAttributeValue; protected String defaultAttributeValueString; protected transient Class fieldClassification; protected transient String fieldClassificationName; /** * PUBLIC: * Default constructor. */ public ObjectTypeConverter() { this.attributeToFieldValues = new HashMap(10); this.fieldToAttributeValues = new HashMap(10); this.conversionValueStrings = new HashMap(10); this.addToAttributeOnlyConversionValueStrings = new HashMap(10); } /** * PUBLIC: * Default constructor. */ public ObjectTypeConverter(DatabaseMapping mapping) { this(); this.mapping = mapping; } /** * PUBLIC: * A type conversion value is a two-way mapping from the database to the object. * The database value will be substituted for the object value when read, * and the object value will be substituted for database value when written. * Note that each field/attribute value must have one and only one attribute/field value to maintain a two-way mapping. 
*/ public void addConversionValue(Object fieldValue, Object attributeValue) { if (fieldValue == null) { fieldValue = Helper.NULL_VALUE; } if (attributeValue == null) { attributeValue = Helper.NULL_VALUE; } getFieldToAttributeValues().put(fieldValue, attributeValue); getAttributeToFieldValues().put(attributeValue, fieldValue); } /** * INTERNAL: * Set from JPA processing where we deal with strings only to avoid * class loader conflicts. */ public void addConversionValueStrings(String dataValue, String objectValue) { this.conversionValueStrings.put(dataValue, objectValue); } /** * PUBLIC: * An attribute only conversion value is a one-way mapping from the database to the object. * This can be used if multiple database values are desired to be mapped to the same object value. * Note that when written only the default value will be used for the attribute, not this value. */ public void addToAttributeOnlyConversionValue(Object fieldValue, Object attributeValue) { if (fieldValue == null) { fieldValue = Helper.NULL_VALUE; } if (attributeValue == null) { attributeValue = Helper.NULL_VALUE; } getFieldToAttributeValues().put(fieldValue, attributeValue); } /** * INTERNAL: * Set from JPA processing where we deal with strings only to avoid * class loader conflicts. */ public void addToAttributeOnlyConversionValueStrings(String dataValue, String objectValue) { this.addToAttributeOnlyConversionValueStrings.put(dataValue, objectValue); } /** * INTERNAL: * Get the attribute to field mapping. */ public Map getAttributeToFieldValues() { return attributeToFieldValues; } /** * INTERNAL: * Convert all the class-name-based settings in this converter to actual * class-based settings. This method is used when converting a project * that has been built with class names to a project with classes. 
* @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader){ if (dataTypeName != null) { dataType = loadClass(dataTypeName, classLoader); } if (objectTypeName != null) { objectType = loadClass(objectTypeName, classLoader); } if (objectType != null && dataType != null) { // Process the data to object mappings. The object and data values // should be primitive wrapper types so we can initialize the // conversion values now. for (String dataValue : conversionValueStrings.keySet()) { String objectValue = conversionValueStrings.get(dataValue); addConversionValue(initObject(dataType, dataValue, true), initObject(objectType, objectValue, false)); } for (String dataValue : addToAttributeOnlyConversionValueStrings.keySet()) { String objectValue = addToAttributeOnlyConversionValueStrings.get(dataValue); addToAttributeOnlyConversionValue(initObject(dataType, dataValue, true), initObject(objectType, objectValue, false)); } if (defaultAttributeValueString != null) { setDefaultAttributeValue(initObject(objectType, defaultAttributeValueString, false)); } } } /** * Load the given class name with the given loader. */ protected Class loadClass(String className, ClassLoader classLoader) { try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { return (Class) AccessController.doPrivileged(new PrivilegedClassForName(className, true, classLoader)); } catch (PrivilegedActionException e) { throw ValidationException.classNotFoundWhileConvertingClassNames(className, e.getException()); } } else { return org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(className, true, classLoader); } } catch (Exception exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(className, exception); } } /** * INTERNAL: * Returns the corresponding attribute value for the specified field value. 
*/ public Object convertDataValueToObjectValue(Object fieldValue, Session session) { Object attributeValue = null; if (fieldValue == null) { attributeValue = getFieldToAttributeValues().get(Helper.NULL_VALUE); } else { try { fieldValue = ((AbstractSession)session).getDatasourcePlatform().getConversionManager().convertObject(fieldValue, getFieldClassification()); } catch (ConversionException e) { throw ConversionException.couldNotBeConverted(mapping, mapping.getDescriptor(), e); } attributeValue = getFieldToAttributeValues().get(fieldValue); if (attributeValue == null) { if (getDefaultAttributeValue() != null) { attributeValue = getDefaultAttributeValue(); } else { // CR#3779 throw DescriptorException.noFieldValueConversionToAttributeValueProvided(fieldValue, getMapping().getField(), getMapping()); } } } return attributeValue; } /** * PUBLIC: * The default value can be used if the database can possibly store additional values then those that * have been mapped. Any value retreived from the database that is not mapped will be substitued for the default value. */ public Object getDefaultAttributeValue() { return defaultAttributeValue; } /** * INTERNAL: * Return the mapping. */ protected DatabaseMapping getMapping() { return mapping; } /** * INTERNAL: * Set the mapping. */ protected void setMapping(DatabaseMapping mapping) { this.mapping = mapping; } /** * INTERNAL: * Set from JPA processing where we deal with strings only to avoid * class loader conflicts. */ public void setObjectTypeName(String objectTypeName) { this.objectTypeName = objectTypeName; } /** * INTERNAL: * Get the type of the field value to allow conversion from the database. 
*/ public Class getFieldClassification() { return fieldClassification; } public String getFieldClassificationName() { if ((fieldClassificationName == null) && (fieldClassification != null)) { fieldClassificationName = fieldClassification.getName(); } return fieldClassificationName; } /** * INTERNAL: * Return the classifiction for the field contained in the mapping. * This is used to convert the row value to a consistent java value. * By default this is null which means unknown. */ public Class getFieldClassification(DatabaseField fieldToClassify) { return getFieldClassification(); } /** * INTERNAL: * Return a collection of the field to attribute value associations. */ public Vector getFieldToAttributeValueAssociations() { Vector associations = new Vector(getFieldToAttributeValues().size()); Iterator fieldValueEnum = getFieldToAttributeValues().keySet().iterator(); Iterator attributeValueEnum = getFieldToAttributeValues().values().iterator(); while (fieldValueEnum.hasNext()) { Object fieldValue = fieldValueEnum.next(); if (fieldValue == Helper.NULL_VALUE) { fieldValue = null; } Object attributeValue = attributeValueEnum.next(); if (attributeValue == Helper.NULL_VALUE) { attributeValue = null; } associations.addElement(new TypeMapping(fieldValue, attributeValue)); } return associations; } /** * INTERNAL: * Get the field to attribute mapping. */ public Map getFieldToAttributeValues() { if (fieldToAttributeValues == null) { fieldToAttributeValues = new HashMap(10); } return fieldToAttributeValues; } /** * INTERNAL: * Convert to the data value. 
*/ public Object convertObjectValueToDataValue(Object attributeValue, Session session) { Object fieldValue; if (attributeValue == null) { fieldValue = getAttributeToFieldValues().get(Helper.NULL_VALUE); } else { fieldValue = getAttributeToFieldValues().get(attributeValue); if (fieldValue == null) { throw DescriptorException.noAttributeValueConversionToFieldValueProvided(attributeValue, getMapping()); } } return fieldValue; } /** * PUBLIC: * This is a very specific protocol which maps fieldValues "T" and "F" * to true and false respectively. */ public void mapBooleans() { addConversionValue("F", Boolean.FALSE); addConversionValue("T", Boolean.TRUE); } /** * PUBLIC: * This is a very specific protocol which maps fieldValues "F" and "M" * to "Female" and "Male" respectively. */ public void mapGenders() { addConversionValue("F", "Female"); addConversionValue("M", "Male"); } /** * PUBLIC: * This is a very specific protocol which maps fieldValues "Y" and "N" * to "Yes" and "No" respectively. */ public void mapResponses() { addConversionValue("Y", "Yes"); addConversionValue("N", "No"); } /** * INTERNAL: * Set the field classification through searching the fields map. */ public void initializeFieldClassification(Session session) throws DescriptorException { if (getFieldToAttributeValues().isEmpty()) { return; } Class type = null; Iterator fieldValuesEnum = getFieldToAttributeValues().keySet().iterator(); while (fieldValuesEnum.hasNext() && (type == null)) { Object value = fieldValuesEnum.next(); if (value != Helper.NULL_VALUE) { type = value.getClass(); } } setFieldClassification(type); // CR#... Mapping must also have the field classification. if (getMapping().isDirectToFieldMapping()) { AbstractDirectMapping directMapping = (AbstractDirectMapping)getMapping(); // Allow user to specify field type to override computed value. (i.e. 
blob, nchar) if (directMapping.getFieldClassification() == null) { directMapping.setFieldClassification(type); } } } /** * INTERNAL: * Used to initialize string based conversion values set from JPA processing. */ private Object initObject(Class type, String value, boolean isData) { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) { try { Constructor constructor = (Constructor) AccessController.doPrivileged(new PrivilegedGetConstructorFor(type, new Class[] {String.class}, false)); return AccessController.doPrivileged(new PrivilegedInvokeConstructor(constructor, new Object[] {value})); } catch (PrivilegedActionException exception) { throwInitObjectException(exception, type, value, isData); } } else { try { Constructor constructor = PrivilegedAccessHelper.getConstructorFor(type, new Class[] {String.class}, false); return PrivilegedAccessHelper.invokeConstructor(constructor, new Object[] {value}); } catch (Exception exception) { throwInitObjectException(exception, type, value, isData); } } return null; // keep compiler happy, will never hit. } /** * INTERNAL: * Set the mapping. */ public void initialize(DatabaseMapping mapping, Session session) { this.mapping = mapping; initializeFieldClassification(session); } /** * INTERNAL: * Set the attribute to field mapping. */ public void setAttributeToFieldValues(Map attributeToFieldValues) { this.attributeToFieldValues = attributeToFieldValues; } /** * INTERNAL: * Set from JPA processing where we deal with strings only to avoid * class loader conflicts. */ public void setConverterName(String converterName) { this.converterName = converterName; } /** * INTERNAL: * Set from JPA processing where we deal with strings only to avoid * class loader conflicts. */ public void setDataTypeName(String dataTypeName) { this.dataTypeName = dataTypeName; } /** * PUBLIC: * The default value can be used if the database can possibly store additional values then those that * have been mapped. 
Any value retreived from the database that is not mapped will be substitued for the default value. */ public void setDefaultAttributeValue(Object defaultAttributeValue) { this.defaultAttributeValue = defaultAttributeValue; } /** * INTERNAL: * Set from JPA processing where we deal with strings only to avoid * class loader conflicts. */ public void setDefaultAttributeValueString(String defaultAttributeValueString) { this.defaultAttributeValueString = defaultAttributeValueString; } /** * INTERNAL: * Set the type of the field value to allow conversion from the database. */ public void setFieldClassification(Class fieldClassification) { this.fieldClassification = fieldClassification; } public void setFieldClassificationName(String fieldClassificationName) { this.fieldClassificationName = fieldClassificationName; } /** * INTERNAL: * Set a collection of the field to attribute value associations. */ public void setFieldToAttributeValueAssociations(Vector fieldToAttributeValueAssociations) { setFieldToAttributeValues(new HashMap(fieldToAttributeValueAssociations.size() + 1)); setAttributeToFieldValues(new HashMap(fieldToAttributeValueAssociations.size() + 1)); for (Enumeration associationsEnum = fieldToAttributeValueAssociations.elements(); associationsEnum.hasMoreElements();) { Association association = (Association)associationsEnum.nextElement(); addConversionValue(association.getKey(), association.getValue()); } } /** * INTERNAL: * Set the field to attribute mapping. */ public void setFieldToAttributeValues(Map fieldToAttributeValues) { this.fieldToAttributeValues = fieldToAttributeValues; } /** * INTERNAL: * If the converter converts the value to a non-atomic value, i.e. * a value that can have its' parts changed without being replaced, * then it must return false, serialization can be non-atomic. 
*/ public boolean isMutable() { return false; } /** * INTERNAL: */ protected void throwInitObjectException(Exception exception, Class type, String value, boolean isData) { if (isData) { throw ValidationException.errorInstantiatingConversionValueData(converterName, value, type, exception); } else { throw ValidationException.errorInstantiatingConversionValueObject(converterName, value, type, exception); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/converters/ConverterClass.java0000664000000000000000000002104112216173130026211 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * 10/09/2012-2.5 Guy Pelletier * - 374688: JPA 2.1 Converter support * 10/25/2012-2.5 Guy Pelletier * - 374688: JPA 2.1 Converter support * 10/30/2012-2.5 Guy Pelletier * - 374688: JPA 2.1 Converter support * 11/28/2012-2.5 Guy Pelletier * - 374688: JPA 2.1 Converter support * 06/03/2013-2.5.1 Guy Pelletier * - 402380: 3 jpa21/advanced tests failed on server with * "java.lang.NoClassDefFoundError: org/eclipse/persistence/testing/models/jpa21/advanced/enums/Gender" * 07/16/2013-2.5.1 Guy Pelletier * - 412384: Applying Converter for parameterized basic-type for joda-time's DateTime does not work ******************************************************************************/ package org.eclipse.persistence.mappings.converters; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.internal.localization.ExceptionLocalization; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.DirectCollectionMapping; import org.eclipse.persistence.mappings.DirectMapMapping; import org.eclipse.persistence.mappings.DirectToFieldMapping; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.sessions.Session; import javax.persistence.AttributeConverter; import javax.persistence.PersistenceException; /** * A JPA attribute converter class wrapped with an EclipseLink converter. This * class is placed directly on mappings. 
* * @author Guy Pelletier * @since Eclipselink 2.5 */ public class ConverterClass implements Converter { protected boolean isForMapKey; protected boolean disableConversion; protected Class fieldClassification; protected String fieldClassificationName; protected String attributeConverterClassName; protected AttributeConverter attributeConverter; /** * INTERNAL: * This method will be called when creating a converter for an embedded * mapping attribute. The isForMapKey information will need to be known * for proper initialization. */ public ConverterClass(String attributeConverterClassName, boolean isForMapKey, String fieldClassificationName, boolean disableConversion) { this.isForMapKey = isForMapKey; this.disableConversion = disableConversion; this.fieldClassificationName = fieldClassificationName; this.attributeConverterClassName = attributeConverterClassName; } /** * INTERNAL: * Convert all the class-name-based settings in this converter to actual * class-based settings. This method is used when converting a project * that has been built with class names to a project with classes. 
*/ public void convertClassNamesToClasses(ClassLoader classLoader){ Class attributeConverterClass = null; try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) { try { attributeConverterClass = (Class) AccessController.doPrivileged(new PrivilegedClassForName(attributeConverterClassName, true, classLoader)); attributeConverter = AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(attributeConverterClass)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(attributeConverterClassName, exception.getException()); } } else { attributeConverterClass = PrivilegedAccessHelper.getClassForName(attributeConverterClassName, true, classLoader); attributeConverter = (AttributeConverter) PrivilegedAccessHelper.newInstanceFromClass(attributeConverterClass); } } catch (ClassNotFoundException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(attributeConverterClassName, exception); } catch (IllegalAccessException exception) { throw ValidationException.errorInstantiatingClass(attributeConverterClass, exception); } catch (InstantiationException exception) { throw ValidationException.errorInstantiatingClass(attributeConverterClass, exception); } try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { fieldClassification = (Class) AccessController.doPrivileged(new PrivilegedClassForName(fieldClassificationName, true, classLoader)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(fieldClassificationName, exception.getException()); } } else { fieldClassification = PrivilegedAccessHelper.getClassForName(fieldClassificationName, true, classLoader); } } catch (ClassNotFoundException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(attributeConverterClassName, exception); } } /** * INTERNAL: */ @Override public Object convertDataValueToObjectValue(Object dataValue, Session 
session) { try { return attributeConverter.convertToEntityAttribute(dataValue); } catch (RuntimeException re) { throw new PersistenceException(ExceptionLocalization.buildMessage("wrap_convert_exception", new Object[]{"convertToEntityAttribute", attributeConverterClassName, dataValue}), re); } } /** * INTERNAL: */ @Override public Object convertObjectValueToDataValue(Object objectValue, Session session) { try { return attributeConverter.convertToDatabaseColumn(objectValue); } catch (RuntimeException re) { throw new PersistenceException(ExceptionLocalization.buildMessage("wrap_convert_exception", new Object[]{"convertToDatabaseColumn", attributeConverterClassName, objectValue}), re); } } /** * INTERNAL: */ @Override public void initialize(DatabaseMapping mapping, Session session) { // Ensure the mapping has the correct field classification set. if (mapping.isDirectToFieldMapping()) { DirectToFieldMapping m = (DirectToFieldMapping) mapping; if (disableConversion) { m.setConverter(null); } else { m.setConverter(this); m.setFieldClassification(fieldClassification); m.setFieldClassificationClassName(fieldClassificationName); } } else if (mapping.isDirectMapMapping() && isForMapKey) { DirectMapMapping m = (DirectMapMapping) mapping; if (disableConversion) { m.setKeyConverter(null); } else { m.setKeyConverter(this); m.setDirectKeyFieldClassification(fieldClassification); m.setDirectKeyFieldClassificationName(fieldClassificationName); } } else if (mapping.isDirectCollectionMapping()) { DirectCollectionMapping m = (DirectCollectionMapping) mapping; if (disableConversion) { m.setValueConverter(null); } else { m.setValueConverter(this); m.setDirectFieldClassification(fieldClassification); m.setDirectFieldClassificationName(fieldClassificationName); } } else { // TODO: what else could it be??? 
} } /** * INTERNAL: */ @Override public boolean isMutable() { return false; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/converters/TypeConversionConverter.java0000664000000000000000000002227612216173130030146 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 06/03/2013-2.5.1 Guy Pelletier * - 402380: 3 jpa21/advanced tests failed on server with * "java.lang.NoClassDefFoundError: org/eclipse/persistence/testing/models/jpa21/advanced/enums/Gender" ******************************************************************************/ package org.eclipse.persistence.mappings.converters; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.mappings.foundation.AbstractDirectMapping; import org.eclipse.persistence.sessions.*; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.sessions.AbstractSession; /** * Purpose: Type conversion converters are used to explicitly map a database type to a * Java type. 
* * @author James Sutherland * @since OracleAS TopLink 10g (10.0.3) */ public class TypeConversionConverter implements Converter { protected DatabaseMapping mapping; /** Field type */ protected Class dataClass; protected String dataClassName; /** Object type */ protected Class objectClass; protected String objectClassName; /** * PUBLIC: * Default constructor. */ public TypeConversionConverter() { } /** * PUBLIC: * Default constructor. */ public TypeConversionConverter(DatabaseMapping mapping) { this.mapping = mapping; } /** * INTERNAL: * Convert all the class-name-based settings in this converter to actual class-based * settings. This method is used when converting a project that has been built * with class names to a project with classes. * This method is implemented by subclasses as necessary. * @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader){ Class dataClass = null; Class objectClass = null; try{ if (dataClassName != null){ if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { dataClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(dataClassName, true, classLoader)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(dataClassName, exception.getException()); } } else { dataClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(dataClassName, true, classLoader); } setDataClass(dataClass); } } catch (ClassNotFoundException exc){ throw ValidationException.classNotFoundWhileConvertingClassNames(dataClassName, exc); } try { if (objectClassName != null){ if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { objectClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(objectClassName, true, classLoader)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(objectClassName, exception.getException()); } } else { objectClass = 
org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(objectClassName, true, classLoader); } setObjectClass(objectClass); } } catch (ClassNotFoundException exc){ throw ValidationException.classNotFoundWhileConvertingClassNames(objectClassName, exc); } }; /** * INTERNAL: * The field value must first be converted to the field type, then the attribute type. */ public Object convertDataValueToObjectValue(Object fieldValue, Session session) { Object attributeValue = fieldValue; if (attributeValue != null) { try { attributeValue = ((AbstractSession)session).getDatasourcePlatform().convertObject(attributeValue, getDataClass()); } catch (ConversionException e) { throw ConversionException.couldNotBeConverted(mapping, mapping.getDescriptor(), e); } try { attributeValue = ((AbstractSession)session).getDatasourcePlatform().convertObject(attributeValue, getObjectClass()); } catch (ConversionException e) { throw ConversionException.couldNotBeConverted(mapping, mapping.getDescriptor(), e); } } return attributeValue; } /** * PUBLIC: * Returns the class type of the object value. */ public Class getObjectClass() { return objectClass; } /** * INTERNAL: * Return the name of the object type for the MW usage. */ public String getObjectClassName() { if ((objectClassName == null) && (objectClass != null)) { objectClassName = objectClass.getName(); } return objectClassName; } /** * PUBLIC: * Returns the class type of the data value. */ public Class getDataClass() { return dataClass; } /** * INTERNAL: * Return the name of the data type for the MW usage. */ public String getDataClassName() { if ((dataClassName == null) && (dataClass != null)) { dataClassName = dataClass.getName(); } return dataClassName; } /** * PUBLIC: * Set the class type of the data value. */ public void setDataClass(Class dataClass) { this.dataClass = dataClass; } /** * INTERNAL: * Set the name of the data type for the MW usage. 
*/ public void setDataClassName(String dataClassName) { this.dataClassName = dataClassName; } /** * PUBLIC: * Set the class type of the object value. */ public void setObjectClass(Class objectClass) { this.objectClass = objectClass; } /** * INTERNAL: * Set the name of the object type for the MW usage. */ public void setObjectClassName(String objectClassName) { this.objectClassName = objectClassName; } /** * INTERNAL: * Convert to the field class. */ public Object convertObjectValueToDataValue(Object attributeValue, Session session) { try { return ((AbstractSession)session).getDatasourcePlatform().convertObject(attributeValue, getDataClass()); } catch (ConversionException e) { throw ConversionException.couldNotBeConverted(mapping, mapping.getDescriptor(), e); } } /** * INTERNAL: * Set the mapping. */ public void initialize(DatabaseMapping mapping, Session session) { this.mapping = mapping; // CR#... Mapping must also have the field classification. if (getMapping().isDirectToFieldMapping()) { AbstractDirectMapping directMapping = (AbstractDirectMapping)getMapping(); // Allow user to specify field type to override computed value. (i.e. blob, nchar) if (directMapping.getFieldClassification() == null) { directMapping.setFieldClassification(getDataClass()); } // Set the object class from the attribute, if null. if (getObjectClass() == null) { setObjectClass(directMapping.getAttributeClassification()); } } else if (getMapping().isDirectCollectionMapping()) { ((DirectCollectionMapping) getMapping()).setAttributeClassification(getObjectClass()); } } /** * INTERNAL: * Return the mapping. */ protected DatabaseMapping getMapping() { return mapping; } /** * INTERNAL: * If the converter converts the value to a non-atomic value, i.e. * a value that can have its' parts changed without being replaced, * then it must return false, serialization can be non-atomic. 
*/ public boolean isMutable() { return false; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/converters/Converter.java0000664000000000000000000000554312216173130025234 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.converters; import java.io.Serializable; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.sessions.Session; /** *

Purpose: Conversion interface to allow conversion between object and data types. * This can be used in any mapping to convert between the object and data types without requiring code * placed in the object model. * TopLink provides several common converters, but the application can also define it own. * * @see DirectToFieldMapping#setConverter(Converter) * @see DirectCollectionMapping#setConverter(Converter) * @see ObjectTypeConverter * @see TypeConversionConverter * * @author James Sutherland * @since OracleAS TopLink 10g (10.0.3) */ public interface Converter extends CoreConverter, Serializable { /** * PUBLIC: * Convert the object's representation of the value to the databases' data representation. * For example this could convert between a Calendar Java type and the sql.Time datatype. */ @Override Object convertObjectValueToDataValue(Object objectValue, Session session); /** * PUBLIC: * Convert the databases' data representation of the value to the object's representation. * For example this could convert between an sql.Time datatype and the Java Calendar type. */ @Override Object convertDataValueToObjectValue(Object dataValue, Session session); /** * PUBLIC: * If the converter converts the value to a mutable value, i.e. * a value that can have its' parts changed without being replaced, * then it must return true. If the value is not mutable, cannot be changed without * replacing the whole value then false must be returned. * This is used within the UnitOfWork to determine how to clone. */ public boolean isMutable(); /** * PUBLIC: * Allow for any initialization. */ @Override void initialize(DatabaseMapping mapping, Session session); } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/converters/SerializedObjectConverter.java0000664000000000000000000001150112216173130030366 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.converters; import java.io.*; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.mappings.foundation.AbstractDirectMapping; import org.eclipse.persistence.sessions.*; import org.eclipse.persistence.internal.sessions.AbstractSession; /** *

Purpose: The serialized object converter can be used to store an arbitrary object or set of objects into a database blob field. * It uses the Java serializer so the target must be serializable. * * @author James Sutherland * @since OracleAS TopLink 10g (10.0.3) */ public class SerializedObjectConverter implements Converter { protected DatabaseMapping mapping; /** * PUBLIC: * Default constructor. */ public SerializedObjectConverter() { } /** * PUBLIC: * Default constructor. */ public SerializedObjectConverter(DatabaseMapping mapping) { this.mapping = mapping; } /** * INTERNAL: * The fieldValue will be a byte array. Create a ByteArrayInputStream * on the fieldValue. Create an ObjectInputStream on the ByteArrayInputStream * to read in the objects. */ public Object convertDataValueToObjectValue(Object fieldValue, Session session) throws DescriptorException { if (fieldValue == null) { return null; } byte[] bytes; try { bytes = (byte[])((AbstractSession)session).getDatasourcePlatform().convertObject(fieldValue, ClassConstants.APBYTE); } catch (ConversionException e) { throw ConversionException.couldNotBeConverted(mapping, mapping.getDescriptor(), e); } if ((bytes == null) || (bytes.length == 0)) { return null; } ByteArrayInputStream byteIn = new ByteArrayInputStream(bytes); Object object = null; try { // BUG# 2813583 CustomObjectInputStream objectIn = new CustomObjectInputStream(byteIn, session); object = objectIn.readObject(); } catch (Exception exception) { throw DescriptorException.notDeserializable(getMapping(), exception); } return object; } /** * INTERNAL: * Convert the object to a byte array through serialize. 
*/ public Object convertObjectValueToDataValue(Object attributeValue, Session session) { if (attributeValue == null) { return null; } ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); try { ObjectOutputStream objectOut = new ObjectOutputStream(byteOut); objectOut.writeObject(attributeValue); objectOut.flush(); } catch (IOException exception) { throw DescriptorException.notSerializable(getMapping(), exception); } return byteOut.toByteArray(); } /** * INTERNAL: * Set the mapping. */ public void initialize(DatabaseMapping mapping, Session session) { this.mapping = mapping; // CR#... Mapping must also have the field classification. if (getMapping().isDirectToFieldMapping()) { AbstractDirectMapping directMapping = (AbstractDirectMapping)getMapping(); // Allow user to specify field type to override computed value. (i.e. blob, nchar) if (directMapping.getFieldClassification() == null) { directMapping.setFieldClassification(ClassConstants.APBYTE); } } } /** * INTERNAL: * Return the mapping. */ protected DatabaseMapping getMapping() { return mapping; } /** * INTERNAL: * If the converter converts the value to a non-atomic value, i.e. * a value that can have its' parts changed without being replaced, * then it must return false, serialization can be non-atomic. */ public boolean isMutable() { return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/xdb/0000775000000000000000000000000012216174372021007 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/xdb/DirectToXMLTypeMapping.java0000664000000000000000000001614512216173130026124 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 11/10/2011-2.4 Guy Pelletier * - 357474: Address primaryKey option from tenant discriminator column ******************************************************************************/ package org.eclipse.persistence.mappings.xdb; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.internal.helper.ClassConstants; import org.eclipse.persistence.internal.platform.database.XMLTypePlaceholder; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.mappings.DirectToFieldMapping; import org.eclipse.persistence.platform.xml.XMLComparer; import org.eclipse.persistence.platform.xml.XMLPlatformFactory; import org.eclipse.persistence.platform.xml.XMLTransformer; import org.eclipse.persistence.platform.xml.XMLParser; import org.eclipse.persistence.sessions.Session; import org.w3c.dom.Document; import org.w3c.dom.Node; /** * Purpose: Mapping used to map from a DOM (org.w3c.Document) or XML String into * an Oracle XMLType field, in Oracle 9i XDB. * * @since Toplink 10.1.3 */ public class DirectToXMLTypeMapping extends DirectToFieldMapping { /** * Indicates if we should initialize the whole DOM on a read. * This is only used if the user is mapping from an Oracle Document implementation. */ protected boolean shouldReadWholeDocument = false; /** * Used to convert the DOM to a String. */ private XMLTransformer xmlTransformer; /** * Used to determine if the XML document has been modified. */ private XMLComparer xmlComparer; /** * Used to convert the String to a DOM */ private XMLParser xmlParser; /** * INTERNAL: * Default to mutable if mapped as a DOM. 
*/ public void preInitialize(AbstractSession session) throws DescriptorException { if (this.attributeClassification == null) { this.attributeClassification = getAttributeAccessor().getAttributeClass(); } if ((this.isMutable == null) && (this.attributeClassification != ClassConstants.STRING)) { setIsMutable(true); } super.preInitialize(session); } /** * INTERNAL: * The mapping is initialized with the given session. This mapping is fully initialized * after this. */ @Override public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); setFieldClassification(XMLTypePlaceholder.class); } public DirectToXMLTypeMapping() { super(); this.xmlTransformer = XMLPlatformFactory.getInstance().getXMLPlatform().newXMLTransformer(); this.xmlTransformer.setFormattedOutput(false); this.xmlParser = XMLPlatformFactory.getInstance().getXMLPlatform().newXMLParser(); this.xmlComparer = new XMLComparer(); } /** * PUBLIC: * @param boolean - determines if the Oracle XDB DOM should be fully initialized * on a read. */ public void setShouldReadWholeDocument(boolean readWholeDocument) { this.shouldReadWholeDocument = readWholeDocument; } /** * PUBLIC: * @return boolean - returns true if currently initializing DOMs on reads. */ public boolean shouldReadWholeDocument() { return shouldReadWholeDocument; } /** * INTERNAL: * Get the attribute value for the given field value. If we're mapping to a * Document, we need to check if we should return the Oracle DOM or build a * new one. 
*/ @Override public Object getObjectValue(Object fieldValue, Session session) throws ConversionException { Object attributeValue = fieldValue; try { if (attributeValue != null) { if (this.attributeClassification != ClassConstants.STRING) { String xml = (String)attributeValue; java.io.StringReader reader = new java.io.StringReader(xml); return this.xmlParser.parse(reader); } } } catch (Exception ex) { throw ConversionException.couldNotBeConverted(fieldValue, this.attributeClassification, ex); } if ((attributeValue == null) && (this.nullValue != null)) {// Translate default null value return this.nullValue; } // Allow for user defined conversion to the object value. if (this.converter != null) { attributeValue = this.converter.convertDataValueToObjectValue(attributeValue, session); } return attributeValue; } @Override public boolean isDirectToXMLTypeMapping() { return true; } /** * INTERNAL: * Clone the DOM Document if required. */ @Override protected Object buildCloneValue(Object attributeValue, AbstractSession session) { Object newAttributeValue = attributeValue; if (isMutable() && attributeValue != null) { if ((getAttributeClassification() == ClassConstants.DOCUMENT) || (getAttributeClassification() == ClassConstants.NODE)) { Document doc = (Document)attributeValue; newAttributeValue = doc.cloneNode(true); } } return newAttributeValue; } /** * INTERNAL: * Compare the attribute values. * Compare Nodes if mapped as a DOM. */ @Override protected boolean compareObjectValues(Object firstValue, Object secondValue, AbstractSession session) { // PERF: Check identity before conversion. 
if (firstValue == secondValue) { return true; } if ((firstValue == null) && (secondValue == null)) { return true; } if ((firstValue == null) || (secondValue == null)) { return false; } if (getAttributeClassification() == ClassConstants.STRING) { return firstValue.equals(secondValue); } else { Object one = getFieldValue(firstValue, session); Object two = getFieldValue(secondValue, session); if (one instanceof Node && two instanceof Node) { return this.xmlComparer.isNodeEqual((Node)one, (Node)two); } return one.equals(two); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/ManyToOneMapping.java0000664000000000000000000000250612216173130024254 0ustar /******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * James Sutherland - initial API and implementation ******************************************************************************/ package org.eclipse.persistence.mappings; /** *

Purpose: Define the relationship to be a ManyToOne. * This is mainly functionally the same as OneToOneMapping. * * @author James Sutherland * @since EclipseLink 2.1 */ public class ManyToOneMapping extends OneToOneMapping { /** * PUBLIC: * Default constructor. */ public ManyToOneMapping() { super(); } /** * INTERNAL: * Related mapping should implement this method to return true. */ @Override public boolean isManyToOneMapping() { return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/DatabaseMapping.java0000664000000000000000000022325612216173130024116 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 11/10/2011-2.4 Guy Pelletier * - 357474: Address primaryKey option from tenant discriminator column * 30/05/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing * 06/03/2013-2.5.1 Guy Pelletier * - 402380: 3 jpa21/advanced tests failed on server with * "java.lang.NoClassDefFoundError: org/eclipse/persistence/testing/models/jpa21/advanced/enums/Gender" ******************************************************************************/ package org.eclipse.persistence.mappings; import java.beans.PropertyChangeListener; import java.io.*; import java.util.*; import java.security.AccessController; import java.security.PrivilegedActionException; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import org.eclipse.persistence.core.mappings.CoreMapping; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.internal.databaseaccess.DatabaseAccessor; import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.expressions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.indirection.*; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.sessions.remote.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.mappings.converters.ConverterClass; import 
org.eclipse.persistence.mappings.converters.ObjectTypeConverter; import org.eclipse.persistence.mappings.converters.TypeConversionConverter; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.sessions.CopyGroup; import org.eclipse.persistence.sessions.Project; /** *

Purpose: Defines how an attribute of an object maps to and from the database * *

Responsibilities:

* * @author Sati * @since TOPLink/Java 1.0 */ public abstract class DatabaseMapping extends CoreMapping implements Cloneable, Serializable { public enum WriteType { INSERT, UPDATE, UNDEFINED } /** Used to reduce memory for mappings with no fields. */ protected static final Vector NO_FIELDS = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(0); /** Used to share integer instance to reduce memory. */ protected static final Integer NO_WEIGHT = Integer.valueOf(Integer.MAX_VALUE); protected static final Integer WEIGHT_DIRECT = Integer.valueOf(1); protected static final Integer WEIGHT_TRANSFORM = Integer.valueOf(100); protected static final Integer WEIGHT_AGGREGATE = Integer.valueOf(200); protected static final Integer WEIGHT_TO_ONE = Integer.valueOf(400); /** ClassDescriptor to which this mapping belongs to */ protected ClassDescriptor descriptor; /** Wrapper to store the reference objects. */ protected AttributeAccessor attributeAccessor; /** Makes this mapping read only. No write are performed on it. Default is false */ protected boolean isReadOnly; /** Specifies whether this mapping is optional (i.e. field may be null). Used for DDL generation. */ protected boolean isOptional; /** Specifies whether this mapping is lazy, this means not included in the default fetch group. */ protected Boolean isLazy; /** Fields associated with the mappings are cached */ protected Vector fields; /** It is needed only in remote initialization and mapping is in parent descriptor */ protected boolean isRemotelyInitialized; /** This is a TopLink defined attribute that allows us to sort the mappings */ protected Integer weight = NO_WEIGHT; /** Allow user defined properties. */ protected Map properties; /** Allow the user to defined un-converted properties which will be initialized at runtime. 
*/ protected Map> unconvertedProperties; /** * Used by the CMP3Policy to see if this mapping should be used in * processing pk classes for find methods */ protected boolean derivesId; /** * */ protected boolean isJPAId = false; /** * A mapsId value. */ protected String mapsIdValue; /** * The id mapping this mapping derives. Used by the CMP3Policy to see if * this mapping should be used in processing pk classes for find methods. */ protected DatabaseMapping derivedIdMapping; /** * PERF: Used as a quick check to see if this mapping is a primary key mapping, * set by the object builder during initialization. */ protected boolean isPrimaryKeyMapping = false; /** * PERF: Cache the mappings attribute name. */ protected String attributeName; /** * Records if this mapping is being used as a MapKeyMapping. This is important for recording main mappings */ protected boolean isMapKeyMapping = false; //used by the object build/merge code to control building/merging into the //shared cache. protected boolean isCacheable = true; /** * Irrelevant (and not set) unless descriptor has SerializedObjectPolicy (SOP). * If descriptor has SOP, then ObjectLevelReadQuery (with shouldUseSerializedObjectPolicy flag set to true) * reads in row that contain both field/value pairs and sopObject. * This flag indicates whether the data for this mapping is contained in the row's sopObject or in fields/values. * Boolean.TRUE - sopObject (in sopObject) * Boolean.FALSE - fields/values (out sopObject); * null - both sopObject and fields/values (both in and out sopObject). * While writing to the data base the mapping will be used for writing into sopObject unless this flag is set to Boolean.FALSE; */ protected Boolean isInSopObject; /** * PUBLIC: * Default constructor. 
*/ public DatabaseMapping() { this.isOptional = true; this.isReadOnly = false; this.attributeAccessor = new InstanceVariableAttributeAccessor(); } /** * PUBLIC: * Add an unconverted property (to be initialiazed at runtime) */ public void addUnconvertedProperty(String propertyName, String propertyValue, String propertyType) { List valuePair = new ArrayList(2); valuePair.add(propertyValue); valuePair.add(propertyType); getUnconvertedProperties().put(propertyName, valuePair); } /** * INTERNAL: * Clone the attribute from the clone and assign it to the backup. */ public abstract void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork); /** * INTERNAL: * Require for cloning, the part must be cloned. */ public Object buildBackupCloneForPartObject(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { throw DescriptorException.invalidMappingOperation(this, "buildBackupCloneForPartObject"); } /** * INTERNAL: * Clone the attribute from the original and assign it to the clone. */ public abstract void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession); /** * INTERNAL: * A combination of readFromRowIntoObject and buildClone. *

* buildClone assumes the attribute value exists on the original and can * simply be copied. *

* readFromRowIntoObject assumes that one is building an original. *

* Both of the above assumptions are false in this method, and actually * attempts to do both at the same time. *

* Extract value from the row and set the attribute to this value in the * working copy clone. * In order to bypass the shared cache when in transaction a UnitOfWork must * be able to populate working copies directly from the row. */ public abstract void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession); /** * INTERNAL: * Builds a shallow original object. Only direct attributes and primary * keys are populated. In this way the minimum original required for * instantiating a working copy clone can be built without placing it in * the shared cache (no concern over cycles). */ public void buildShallowOriginalFromRow(AbstractRecord databaseRow, Object original, JoinedAttributeManager joinManager, ObjectBuildingQuery query, AbstractSession executionSession) { return; } /** * INTERNAL: * Require for cloning, the part must be cloned. */ public Object buildCloneForPartObject(Object attributeValue, Object original, CacheKey cacheKey, Object clone, AbstractSession cloningSession, Integer refreshCascade, boolean isExisting, boolean isFromSharedCache) { throw DescriptorException.invalidMappingOperation(this, "buildCloneForPartObject"); } /** * INTERNAL: * Performs a first level clone of the attribute. This generally means on the container will be cloned. */ public Object buildContainerClone(Object attributeValue, AbstractSession cloningSession){ return attributeValue; } /** * INTERNAL: * Copy of the attribute of the object. * This is NOT used for unit of work but for templatizing an object. */ public void buildCopy(Object copy, Object original, CopyGroup group) { } /** * INTERNAL: * In case Query By Example is used, this method builds and returns an expression that * corresponds to a single attribue and it's value. 
     */
    public Expression buildExpression(Object queryObject, QueryByExamplePolicy policy, Expression expressionBuilder, Map processedObjects, AbstractSession session) {
        return null;
    }

    /**
     * INTERNAL:
     * Used to allow object level comparisons.
     * Not supported for this mapping type by default.
     */
    public Expression buildObjectJoinExpression(Expression base, Object value, AbstractSession session) {
        throw QueryException.unsupportedMappingForObjectComparison(this, base);
    }

    /**
     * INTERNAL:
     * Used to allow object level comparisons.
     * Not supported for this mapping type by default.
     */
    public Expression buildObjectJoinExpression(Expression base, Expression argument, AbstractSession session) {
        throw QueryException.unsupportedMappingForObjectComparison(this, base);
    }

    /**
     * INTERNAL:
     * Cascade perform remove through mappings that require the cascade.
     */
    abstract public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects);

    /**
     * INTERNAL:
     * Cascade removal of orphaned private owned objects from the UnitOfWorkChangeSet.
     */
    public void cascadePerformRemovePrivateOwnedObjectFromChangeSetIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        // no-op by default
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade.
     */
    abstract public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects);

    /**
     * INTERNAL:
     * Cascade discover and persist new objects during commit.
     */
    public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) {
        // Do nothing by default, (direct and xml mappings do not require anything).
    }

    /**
     * INTERNAL:
     * Used by AttributeLevelChangeTracking to update a changeRecord with calculated changes
     * as opposed to detected changes. If an attribute can not be change tracked its
     * changes can be detected through this process.
     */
    public void calculateDeferredChanges(ChangeRecord changeRecord, AbstractSession session){
        // NOTE(review): the operation name in this message has a typo
        // ("calculatedDeferredChanges"); kept byte-identical to preserve the
        // existing error text.
        throw DescriptorException.invalidMappingOperation(this, "calculatedDeferredChanges");
    }

    /**
     * INTERNAL:
     * Clones itself.
     */
    @Override
    public Object clone() {
        // Bug 3037701 - clone the AttributeAccessor
        DatabaseMapping mapping = null;
        try {
            mapping = (DatabaseMapping)super.clone();
        } catch (CloneNotSupportedException e) {
            // Should not happen as this class is Cloneable via Object.clone().
            throw new InternalError();
        }
        mapping.setAttributeAccessor((AttributeAccessor)attributeAccessor.clone());
        return mapping;
    }

    /**
     * INTERNAL:
     * Helper method to clone vector of fields (used in aggregate initialization cloning).
     */
    protected Vector cloneFields(Vector fields) {
        Vector clonedFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance();
        for (Enumeration fieldsEnum = fields.elements(); fieldsEnum.hasMoreElements();) {
            clonedFields.addElement(((DatabaseField)fieldsEnum.nextElement()).clone());
        }
        return clonedFields;
    }

    /**
     * This method must be overwritten in the subclasses to return a vector of all the
     * fields this mapping represents.
     */
    protected Vector collectFields() {
        return NO_FIELDS;
    }

    /**
     * INTERNAL:
     * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings;
     * the FK field values will be used to re-issue the query when cloning the shared cache entity.
     */
    public void collectQueryParameters(Set record){
        //no-op for mappings that do not support PROTECTED cache isolation
    }

    /**
     * INTERNAL:
     * Compare the clone against its backup and return a change record
     * describing any difference (null semantics defined by subclasses).
     */
    abstract public ChangeRecord compareForChange(Object clone, Object backup, ObjectChangeSet owner, AbstractSession session);

    /**
     * INTERNAL:
     * Compare the attributes belonging to this mapping for the objects.
*/ public abstract boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session); /** * INTERNAL: * Convert all the class-name-based settings in this mapping to actual class-based * settings * This method is implemented by subclasses as necessary. * @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader) { if (hasUnconvertedProperties()) { for (String propertyName : getUnconvertedProperties().keySet()) { List valuePair = getUnconvertedProperties().get(propertyName); String value = valuePair.get(0); String valueTypeName = valuePair.get(1); Class valueType = String.class; // Have to initialize the valueType now try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) { try { valueType = (Class) AccessController.doPrivileged(new PrivilegedClassForName(valueTypeName, true, classLoader)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(valueTypeName, exception.getException()); } } else { valueType = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(valueTypeName, true, classLoader); } } catch (Exception exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(valueTypeName, exception); } // Add the converted property. If the value type is the same // as the source (value) type, no conversion is made. getProperties().put(propertyName, ConversionManager.getDefaultManager().convertObject(value, valueType)); } } } /** * Convenience method to ensure converters have an opportunity to convert * any class names to classes during project setup. 
     */
    protected void convertConverterClassNamesToClasses(Converter converter, ClassLoader classLoader) {
        if (converter != null) {
            if (converter instanceof TypeConversionConverter) {
                ((TypeConversionConverter)converter).convertClassNamesToClasses(classLoader);
            } else if (converter instanceof ObjectTypeConverter) {
                // To avoid 1.5 dependencies with the EnumTypeConverter check
                // against ObjectTypeConverter.
                ((ObjectTypeConverter) converter).convertClassNamesToClasses(classLoader);
            } else if (converter instanceof ConverterClass) {
                ((ConverterClass) converter).convertClassNamesToClasses(classLoader);
            }
        }
    }

    /**
     * INTERNAL:
     * Build the unit of work value holder.
     * Not supported at this level; indirection-capable mappings override.
     * @param buildDirectlyFromRow indicates that we are building the clone directly
     * from a row as opposed to building the original from the row, putting it in
     * the shared cache, and then cloning the original.
     */
    public DatabaseValueHolder createCloneValueHolder(ValueHolderInterface attributeValue, Object original, Object clone, AbstractRecord row, AbstractSession cloningSession, boolean buildDirectlyFromRow) {
        // NOTE(review): the reported operation name ("createUnitOfWorkValueHolder")
        // differs from this method's name; kept as-is to preserve the message.
        throw DescriptorException.invalidMappingOperation(this, "createUnitOfWorkValueHolder");
    }

    /**
     * ADVANCED:
     * Returns true if the mapping references a JPA ID attribute for the CMP3Policy and JPA ID classes.
     */
    public boolean derivesId() {
        return derivesId;
    }

    /**
     * INTERNAL:
     * This method is called to update collection tables prior to commit.
     * No-op by default.
     */
    public void earlyPreDelete(DeleteObjectQuery query, Object object) {
    }

    /**
     * INTERNAL:
     * Extract the nested attribute expressions that apply to this mapping.
     * This is used for partial objects and joining.
* @param rootExpressionsAllowed true if newRoot itself can be one of the * expressions returned */ protected List extractNestedExpressions(List expressions, ExpressionBuilder newRoot, boolean rootExpressionsAllowed) { List nestedExpressions = new ArrayList(expressions.size()); /* * need to work on all expressions with at least 2 nestings off the base expression builder, excluding * aggregateObjectMapping expressions from the count (only ForeignReferenceMapping expressions count). For those * expressions, If the expression closest to to the Builder is for this mapping, that expression is rebuilt using * newRoot and added to the nestedExpressions list. */ for (Expression next : expressions) { // The expressionBuilder can be one of the locked expressions in // the ForUpdateOfClause. if (!next.isQueryKeyExpression()) { continue; } QueryKeyExpression expression = (QueryKeyExpression)next; ObjectExpression base = expression; boolean afterBase = false; boolean done = false; ObjectExpression prevExpression = base; while (!base.getBaseExpression().isExpressionBuilder()&& !done) { base = (ObjectExpression)base.getBaseExpression(); while (!base.isExpressionBuilder() && (base.getMapping() != null && base.getMapping().isAggregateObjectMapping())) { base = (ObjectExpression)base.getBaseExpression(); } if (base.isExpressionBuilder()){ done = true; //use the one closest to the expression builder that wasn't an aggregate base = prevExpression; } else { prevExpression = base; afterBase = true; } } if (afterBase && base.getName().equals(getAttributeName())) { nestedExpressions.add(expression.rebuildOn(base, newRoot)); } else if (rootExpressionsAllowed && expression.getBaseExpression().isExpressionBuilder() && expression.getName().equals(getAttributeName())) { nestedExpressions.add(newRoot); } } return nestedExpressions; } /** * INTERNAL: * If there is root expression in the list then indicates whether it shouldUseOuterJoin, * otherwise return false. 
*/ protected boolean hasRootExpressionThatShouldUseOuterJoin(List expressions) { for (Iterator expressionsEnum = expressions.iterator(); expressionsEnum.hasNext();) { Expression next = (Expression)expressionsEnum.next(); // The expressionBuilder can be one of the locked expressions in // the ForUpdateOfClause. if (!next.isQueryKeyExpression()) { continue; } QueryKeyExpression expression = (QueryKeyExpression)next; if (expression.getBaseExpression().isExpressionBuilder() && expression.getName().equals(getAttributeName())) { return expression.shouldUseOuterJoin(); } } return false; } /** * INTERNAL: * Used to store un-converted properties, which are subsequenctly converted * at runtime (through the convertClassNamesToClasses method. */ public boolean hasUnconvertedProperties() { return unconvertedProperties != null; } /** * INTERNAL: * An object has been serialized from the server to the client. * Replace the transient attributes of the remote value holders * with client-side objects. */ public abstract void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session); /** * INTERNAL: * At this point, we realize we don't have indirection; * so we need to replace the reference object(s) with * the corresponding object(s) from the remote session. * The default is to do nothing. */ public void fixRealObjectReferences(Object object, Map objectInformation, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { // do nothing } /** * ADVANCED: * Return the attributeAccessor. * The attribute accessor is responsible for setting and retrieving the attribute value * from the object for this mapping. 
     */
    @Override
    public AttributeAccessor getAttributeAccessor() {
        return attributeAccessor;
    }

    /**
     * PUBLIC:
     * The classification type for the attribute this mapping represents.
     * null at this level; typed mappings override.
     */
    @Override
    public Class getAttributeClassification() {
        return null;
    }

    /**
     * PUBLIC:
     * Return the name of the attribute set in the mapping.
     */
    @Override
    public String getAttributeName() {
        // The attribute name on the attributeAccessor will always override any attribute already set
        // Use the attributeAccessor attribute over the current attribute name
        if (attributeName == null) {
            attributeName = getAttributeAccessor().getAttributeName();
        }
        return attributeName;
    }

    /**
     * INTERNAL:
     * Return the value of an attribute which this mapping represents for an object.
     */
    @Override
    public Object getAttributeValueFromObject(Object object) throws DescriptorException {
        try {
            // PERF: direct-access.
            return this.attributeAccessor.getAttributeValueFromObject(object);
        } catch (DescriptorException exception) {
            // Tag the exception with this mapping for diagnostics, then rethrow.
            exception.setMapping(this);
            throw exception;
        }
    }

    /**
     * INTERNAL:
     * Return the mapping's containerPolicy.
     * Not supported at this level; container mappings override.
     */
    @Override
    public ContainerPolicy getContainerPolicy() {
        throw DescriptorException.invalidMappingOperation(this, "getContainerPolicy");
    }

    /**
     * ADVANCED:
     * Return the mapping from which this mapping derives its id value, if any.
     */
    public DatabaseMapping getDerivedIdMapping() {
        return derivedIdMapping;
    }

    /**
     * INTERNAL:
     * Return the descriptor to which this mapping belongs.
     */
    public ClassDescriptor getDescriptor() {
        return descriptor;
    }

    /**
     * INTERNAL:
     * Return the field associated with this mapping if there is exactly one.
     * This is required for object relational mapping to print them, but because
     * they are defined in an Enterprise context they cannot be cast to.
     * Mappings that have a field include direct mappings and object relational mappings.
     */
    @Override
    public DatabaseField getField() {
        return null;
    }

    /**
     * INTERNAL:
     * Return the classification for the field contained in the mapping.
     * This is used to convert the row value to a consistent java value.
     * By default this is unknown.
     */
    public Class getFieldClassification(DatabaseField fieldToClassify) {
        return null;
    }

    /**
     * INTERNAL:
     * Returns the set of fields that should be selected to build this mapping's value(s).
     * This is used by expressions to determine which fields to include in the select clause for non-object expressions.
     */
    public Vector getSelectFields() {
        return getFields();
    }

    /**
     * INTERNAL:
     * Returns the table(s) that should be selected to build this mapping's value(s).
     * This is used by expressions to determine which tables to include in the from clause for non-object expressions.
     * Empty by default.
     */
    public Vector getSelectTables() {
        return new NonSynchronizedVector(0);
    }

    /**
     * INTERNAL:
     * Returns a vector of all the fields this mapping represents.
     */
    @Override
    public Vector getFields() {
        return this.fields;
    }

    /**
     * INTERNAL:
     * Return the list of fields that should be used if this mapping is used in an order by.
     * null means this mapping does not need to normalize its fields (it is a field).
     */
    public List getOrderByNormalizedExpressions(Expression base) {
        return null;
    }

    /**
     * PUBLIC:
     * This method is invoked reflectively on the reference object to return the value of the
     * attribute in the object. This method returns the name of the getMethodName or null if not using method access.
     */
    public String getGetMethodName() {
        if (!getAttributeAccessor().isMethodAttributeAccessor()) {
            return null;
        }
        return ((MethodAttributeAccessor)getAttributeAccessor()).getGetMethodName();
    }

    /**
     * ADVANCED:
     * Return whether a mapped-by id value has been set.
     */
    public boolean hasMapsIdValue() {
        return mapsIdValue != null;
    }

    /**
     * ADVANCED:
     * Return the mapped-by id value.
     */
    public String getMapsIdValue() {
        return mapsIdValue;
    }

    /**
     * INTERNAL:
     * Return the object on the client corresponding to the specified object.
     * The default is to simply return the object itself, without worrying about
     * maintaining object identity.
     */
    public Object getObjectCorrespondingTo(Object object, DistributedSession session, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query) {
        return object;
    }

    /**
     * INTERNAL:
     * Used as a temporary store for custom SDK usage.
     */
    public Map getProperties() {
        if (properties == null) {//Lazy initialize to conserve space and allocation time.
            properties = new HashMap(5);
        }
        return properties;
    }

    /**
     * ADVANCED:
     * Allow user defined properties.
     */
    public Object getProperty(Object property) {
        if (properties == null) {
            // Avoid triggering the lazy initialization performed by getProperties().
            return null;
        }

        return getProperties().get(property);
    }

    /**
     * INTERNAL:
     * Return the value of an attribute unwrapping value holders if required.
     */
    public Object getRealAttributeValueFromObject(Object object, AbstractSession session) throws DescriptorException {
        return getRealAttributeValueFromAttribute(getAttributeValueFromObject(object), object, session);
    }

    /**
     * INTERNAL:
     * Return the value of an attribute unwrapping value holders if required.
     * No unwrapping is needed at this level.
     */
    public Object getRealAttributeValueFromAttribute(Object attributeValue, Object object, AbstractSession session) throws DescriptorException {
        return attributeValue;
    }

    /**
     * INTERNAL:
     * Trigger the instantiation of the attribute if lazy.
     */
    public void instantiateAttribute(Object object, AbstractSession session) {
        // Not lazy by default.
    }

    /**
     * INTERNAL:
     * Return whether the specified object is instantiated.
     */
    public boolean isAttributeValueFromObjectInstantiated(Object object) {
        return true;
    }

    /**
     * INTERNAL:
     * Return the value of an attribute, unwrapping value holders if necessary.
     * If the value is null, build a new container.
     * Not supported at this level; collection mappings override.
     */
    public Object getRealCollectionAttributeValueFromObject(Object object, AbstractSession session) throws DescriptorException {
        throw DescriptorException.invalidMappingOperation(this, "getRealCollectionAttributeValueFromObject");
    }

    /**
     * PUBLIC:
     * Return the referenceDescriptor. This is a descriptor which is associated with
     * the reference class.
*/ @Override public ClassDescriptor getReferenceDescriptor() { return null; } /** * INTERNAL: * Return the relationshipPartner mapping for this bi-directional mapping. If the relationshipPartner is null then * this is a uni-directional mapping. */ public DatabaseMapping getRelationshipPartner() { return null; } /** * PUBLIC: * This method is invoked reflectively on the reference object to set the value of the * attribute in the object. This method returns the name of the setMethodName or null if not using method access. */ public String getSetMethodName() { if (!getAttributeAccessor().isMethodAttributeAccessor()) { return null; } return ((MethodAttributeAccessor)getAttributeAccessor()).getSetMethodName(); } /** * INTERNAL: * Used to store un-converted properties, which are subsequenctly converted * at runtime (through the convertClassNamesToClasses method. */ public Map> getUnconvertedProperties() { if (unconvertedProperties == null) { unconvertedProperties = new HashMap>(5); } return unconvertedProperties; } /** * INTERNAL: * extract and return the appropriate value from the * specified remote value holder */ public Object getValueFromRemoteValueHolder(RemoteValueHolder remoteValueHolder) { throw DescriptorException.invalidMappingOperation(this, "getValueFromRemoteValueHolder"); } /** * INTERNAL: * Return the weight of the mapping, used to sort mappings to ensure that * DirectToField Mappings get merged first */ public Integer getWeight() { return this.weight; } /** * INTERNAL: * The returns if the mapping has any constraint dependencies, such as foreign keys and join tables. */ public boolean hasConstraintDependency() { return false; } /** * PUBLIC: * Return if method access is used. */ public boolean isUsingMethodAccess() { return getAttributeAccessor().isMethodAttributeAccessor(); } /** * INTERNAL: * Return if the mapping has any ownership or other dependency over its target object(s). 
     */
    public boolean hasDependency() {
        return false;
    }

    /**
     * INTERNAL:
     * Returns whether the mapping has any inverse constraint dependencies, such as foreign keys and join tables.
     */
    public boolean hasInverseConstraintDependency() {
        return false;
    }

    /**
     * INTERNAL:
     * Allow for initialization of properties and validation.
     * No-op by default.
     */
    public void initialize(AbstractSession session) throws DescriptorException {
        ;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isAggregateCollectionMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isAggregateMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isAggregateObjectMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isCollectionMapping() {
        return false;
    }

    /**
     * INTERNAL:
     */
    public boolean isDatabaseMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isDirectCollectionMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isDirectMapMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isDirectToFieldMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isElementCollectionMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isForeignReferenceMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Return whether this mapping should be traversed when we are locking.
     */
    public boolean isLockableMapping(){
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isManyToManyMapping() {
        return false;
    }

    /**
     * @return the isMapKeyMapping
     */
    public boolean isMapKeyMapping() {
        return isMapKeyMapping;
    }

    /**
     * INTERNAL:
     */
    public boolean isMultitenantPrimaryKeyMapping() {
        return false;
    }

    /**
     * @param isMapKeyMapping the isMapKeyMapping to set
     */
    public void setIsMapKeyMapping(boolean isMapKeyMapping) {
        this.isMapKeyMapping = isMapKeyMapping;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isNestedTableMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isObjectReferenceMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isOneToManyMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isOneToOneMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    public boolean isManyToOneMapping() {
        return false;
    }

    /**
     * Return whether the value of this mapping is optional (that is, can be
     * null). This is a hint and is used when generating DDL.
     */
    public boolean isOptional() {
        return isOptional;
    }

    /**
     * Returns true if this mapping is owned by the parent descriptor. This is generally based on mapping type.
     */
    public boolean isOwned(){
        return false;
    }

    /**
     * INTERNAL:
     * Flags that this mapping is part of a JPA id mapping. It should be
     * temporary though, as the CMP3Policy should be able to figure things
     * out on its own. The problem being that the JPA mapped superclass
     * descriptors are not initialized and do not have a CMP3Policy set by
     * default.
     */
    public boolean isJPAId() {
        return isJPAId;
    }

    /**
     * Return if this mapping is lazy.
     * Lazy has different meaning for different mappings.
     * For basic/direct mappings, this can be used to exclude it from the descriptor's
     * default fetch group. This means that queries will not include the field(s) required
     * by this mapping by default.
     * This can only be used if the descriptor has a FetchGroupManager and class implements
     * the FetchGroupTracker interface (or is weaved).
     *
     * For relationship mappings this should normally be the same value as indirection,
     * however for eager relationships this can be used with indirection to allow
     * indirection locking and change tracking, but still always force instantiation.
     */
    public boolean isLazy() {
        if (isLazy == null) {
            // False by default for mappings without indirection.
            isLazy = Boolean.FALSE;
        }
        return isLazy;
    }

    /**
     * INTERNAL:
     * Flags that this mapping is part of a JPA id mapping. It should be
     * temporary though, as the CMP3Policy should be able to figure things
     * out on its own. The problem being that the JPA mapped superclass
     * descriptors are not initialized and do not have a CMP3Policy set by
     * default.
     */
    public void setIsJPAId() {
        this.isJPAId = true;
    }

    /**
     * Set if this mapping is lazy.
     * This can be used for any mapping type to exclude it from the descriptor's
     * default fetch group. This means that queries will not include the field(s) required
     * by this mapping by default.
     * This can only be used if the descriptor has a FetchGroupManager and class implements
     * the FetchGroupTracker interface (or is weaved).
     * This is not the same as indirection on relationships (lazy relationships),
     * as it defers the loading of the source object fields, not the relationship.
     */
    public void setIsLazy(boolean isLazy) {
        this.isLazy = isLazy;
    }

    /**
     * INTERNAL:
     * All EIS mappings should implement this method to return true.
     */
    public boolean isEISMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * All relational mappings should implement this method to return true.
     */
    public boolean isRelationalMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * All XML mappings should implement this method to return true.
     */
    public boolean isXMLMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    @Override
    public boolean isAbstractDirectMapping() {
        return false;
    }

    /**
     * INTERNAL:
     */
    public boolean isAbstractColumnMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    @Override
    public boolean isAbstractCompositeDirectCollectionMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    @Override
    public boolean isAbstractCompositeObjectMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Related mapping should implement this method to return true.
     */
    @Override
    public boolean isAbstractCompositeCollectionMapping() {
        return false;
    }

    /**
     * INTERNAL:
     * Return if this mapping supports joining.
     */
    public boolean isJoiningSupported() {
        return false;
    }

    /**
     * INTERNAL:
     * Return if this mapping requires its attribute value to be cloned.
     */
    public boolean isCloningRequired() {
        return true;
    }

    /**
     * INTERNAL:
     * Set by the Object builder during initialization; returns true if this mapping
     * is used as a primary key mapping.
     */
    public boolean isPrimaryKeyMapping() {
        return this.isPrimaryKeyMapping;
    }

    /**
     * INTERNAL:
     * Returns true if the mapping should be added to the UnitOfWork's list of private owned
     * objects for private owned orphan removal.
     */
    public boolean isCandidateForPrivateOwnedRemoval() {
        return isPrivateOwned();
    }

    /**
     * INTERNAL:
     * Used when determining if a mapping supports cascaded version optimistic
     * locking.
     */
    public boolean isCascadedLockingSupported() {
        return false;
    }

    /**
     * INTERNAL:
     * Return if this mapping supports change tracking.
     */
    public boolean isChangeTrackingSupported(Project project) {
        return false;
    }

    /**
     * INTERNAL:
     * Return if the mapping has ownership over its target object(s).
     */
    public boolean isPrivateOwned() {
        return false;
    }

    /**
     * Used to signal that this mapping references a protected/isolated entity and requires
     * special merge/object building behaviour.
* */ public boolean isCacheable() { return this.isCacheable; } /** * Used to signal that this mapping references a protected/isolated entity and requires * special merge/object building behaviour. */ public void setIsCacheable(boolean cacheable) { if (!cacheable) { throw ValidationException.operationNotSupported("setIsCacheable"); } } /** * INTERNAL: * Returns true if mapping is read only else false. */ @Override public boolean isReadOnly() { return isReadOnly; } /** * INTERNAL: * Related mapping should implement this method to return true. */ @Override public boolean isReferenceMapping() { return false; } protected boolean isRemotelyInitialized() { return isRemotelyInitialized; } /** * INTERNAL: * Related mapping should implement this method to return true. */ public boolean isStructureMapping() { return false; } /** * INTERNAL: * Related mapping should implement this method to return true. */ @Override public boolean isTransformationMapping() { return false; } /** * INTERNAL: */ public boolean isUnidirectionalOneToManyMapping() { return false; } /** * INTERNAL: * Related mapping should implement this method to return true. */ public boolean isVariableOneToOneMapping() { return false; } /** * INTERNAL: * Related mapping should implement this method to return true. */ public boolean isDirectToXMLTypeMapping() { return false; } /** * INTERNAL: * Some mappings support no attribute (transformation and multitenant primary key). */ @Override public boolean isWriteOnly() { return false; } /** * INTERNAL: * Iterate on the appropriate attribute value. */ public abstract void iterate(DescriptorIterator iterator); /** * INTERNAL: * Iterate on the attribute value. * The value holder has already been processed. */ public void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue) { throw DescriptorException.invalidMappingOperation(this, "iterateOnRealAttributeValue"); } /** * Force instantiation of the load group. 
     */
    public void load(final Object object, AttributeItem item, final AbstractSession session, final boolean fromFetchGroup) {
        // Do nothing by default.
    }

    /**
     * Force instantiation of all indirections.
     */
    public void loadAll(Object object, AbstractSession session, IdentityHashSet loaded) {
        // Do nothing by default.
    }

    /**
     * INTERNAL:
     * Merge changes from the source to the target object.
     */
    public abstract void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession);

    /**
     * INTERNAL:
     * Merge changes from the source to the target object.
     */
    public abstract void mergeIntoObject(Object target, boolean isTargetUninitialized, Object source, MergeManager mergeManager, AbstractSession targetSession);

    /**
     * INTERNAL:
     * Perform the commit event.
     * This is used in the uow to delay data modifications.
     * Not supported at this level; mappings that queue events override.
     */
    public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException {
        throw DescriptorException.invalidDataModificationEvent(this);
    }

    /**
     * INTERNAL:
     * A subclass should implement this method if it wants different behavior.
     * Recurse thru the parts to delete the reference objects after the actual object is deleted.
     */
    public void postDelete(DeleteObjectQuery query) throws DatabaseException {
        return;
    }

    /**
     * INTERNAL:
     * Allow for initialization of properties and validation that have dependencies
     * on the descriptor being initialized.
     */
    public void postInitialize(AbstractSession session) throws DescriptorException {
        // Nothing by default.
    }

    /**
     * INTERNAL:
     * A subclass should implement this method if it wants different behavior.
     * Recurse thru the parts to insert the reference objects after the actual object is inserted.
     */
    public void postInsert(WriteObjectQuery query) throws DatabaseException {
        return;
    }

    /**
     * INTERNAL:
     * A subclass should implement this method if it wants different behavior.
     * Recurse thru the parts to update the reference objects after the actual object is updated.
     */
    public void postUpdate(WriteObjectQuery query) throws DatabaseException {
        return;
    }

    /**
     * INTERNAL:
     * A subclass should implement this method if it wants different behavior.
     * Recurse thru the parts to delete the reference objects before the actual object is deleted.
     */
    public void preDelete(DeleteObjectQuery query) throws DatabaseException {
        return;
    }

    /**
     * INTERNAL:
     * Allow for initialization of properties and validation.
     */
    public void preInitialize(AbstractSession session) throws DescriptorException {
        try {
            getAttributeAccessor().initializeAttributes(getDescriptor().getJavaClass());
        } catch (DescriptorException exception) {
            // Tag the exception with this mapping and route it through the
            // integrity checker rather than failing immediately.
            exception.setMapping(this);
            session.getIntegrityChecker().handleError(exception);
        }
    }

    /**
     * INTERNAL:
     * A subclass should implement this method if it wants different behavior.
     * Recurse thru the parts to insert the reference objects before the actual object is inserted.
     */
    public void preInsert(WriteObjectQuery query) throws DatabaseException {
        return;
    }

    /**
     * INTERNAL:
     * A subclass that supports cascade version optimistic locking should
     * implement this method to properly prepare the locking policy for their
     * mapping type.
     */
    public void prepareCascadeLockingPolicy() {
        return;
    }

    /**
     * INTERNAL:
     * A subclass should implement this method if it wants different behavior.
     * Recurse thru the parts to update the reference objects before the actual object is updated.
     */
    public void preUpdate(WriteObjectQuery query) throws DatabaseException {
        return;
    }

    /**
     * INTERNAL:
     * Extract value from the row and set the attribute to this value in the object.
     * Return the value, as it will have been converted to the appropriate type for
     * the object.
*/ public Object readFromRowIntoObject(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object targetObject, CacheKey parentCacheKey, ObjectBuildingQuery sourceQuery, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException { Object attributeValue = valueFromRow(databaseRow, joinManager, sourceQuery, parentCacheKey, executionSession, isTargetProtected, null); setAttributeValueInObject(targetObject, attributeValue); return attributeValue; } /** * INTERNAL: * Extract values directly from the result-set. * PERF: This is used for optimized object building directly from the result-set. */ public Object readFromResultSetIntoObject(ResultSet resultSet, Object targetObject, ObjectBuildingQuery query, AbstractSession session, DatabaseAccessor accessor, ResultSetMetaData metaData, int columnNumber, DatabasePlatform platform) throws SQLException { Object attributeValue = valueFromResultSet(resultSet, query, session, accessor, metaData, columnNumber, platform); setAttributeValueInObject(targetObject, attributeValue); return attributeValue; } /** * PUBLIC: * To make mapping read only. * Read-only mappings can be used if two attributes map to the same field. * Read-only mappings cannot be used for the primary key or other required fields. */ public void readOnly() { setIsReadOnly(true); } /** * PUBLIC: * The mapping can be dynamically made either readOnly or readWriteOnly. This makes mapping go back to * default mode. */ public void readWrite() { setIsReadOnly(false); } /** * INTERNAL: * Rehash any hashtables based on fields. * This is used to clone descriptors for aggregates, which hammer field names, * it is probably better not to hammer the field name and this should be refactored. */ public void rehashFieldDependancies(AbstractSession session) { // Should be overwritten by any mapping with fields. } /** * INTERNAL: * Once descriptors are serialized to the remote session. All its mappings and reference descriptors are traversed. 
Usually * mappings are initialized and serialized reference descriptors are replaced with local descriptors if they already exist on the * remote session. */ public void remoteInitialization(DistributedSession session) { // Remote mappings are initialized here again because while serializing only the uninitialized data is passed // as the initialized data is not serializable. if (!isRemotelyInitialized()) { getAttributeAccessor().initializeAttributes(getDescriptor().getJavaClass()); remotelyInitialized(); } } /** * Set the mapping to be initialized for the remote session. */ protected void remotelyInitialized() { isRemotelyInitialized = true; } /** * INTERNAL: * replace the value holders in the specified reference object(s) */ public Map replaceValueHoldersIn(Object object, RemoteSessionController controller) { // by default, do nothing return null; } /** * ADVANCED: * Set the attributeAccessor. * The attribute accessor is responsible for setting and retrieving the attribute value * from the object for this mapping. * This can be set to an implementor of AttributeAccessor if the attribute * requires advanced conversion of the mapping value, or a real attribute does not exist. */ @Override public void setAttributeAccessor(AttributeAccessor attributeAccessor) { String attributeName = getAttributeName(); this.attributeAccessor = attributeAccessor; if (attributeAccessor.getAttributeName() == null) { attributeAccessor.setAttributeName(attributeName); } this.attributeName = null; } /** * PUBLIC: * Sets the name of the attribute in the mapping. */ @Override public void setAttributeName(String attributeName) { getAttributeAccessor().setAttributeName(attributeName); // Clear the mapping attribute name until a getAttributeName() call copies the accessor attributeName this.attributeName = null; } /** * INTERNAL: * Set the value of the attribute mapped by this mapping. 
*/ @Override public void setAttributeValueInObject(Object object, Object value) throws DescriptorException { // PERF: Direct variable access. try { this.attributeAccessor.setAttributeValueInObject(object, value); } catch (DescriptorException exception) { exception.setMapping(this); throw exception; } } /** * INTERNAL: * Set the value of the attribute mapped by this mapping, * placing it inside a value holder if necessary. */ public void setRealAttributeValueInObject(Object object, Object value) throws DescriptorException { try { this.setAttributeValueInObject(object, value); } catch (DescriptorException exception) { exception.setMapping(this); throw exception; } } /** * INTERNAL: * Set the descriptor to which this mapping belongs */ @Override public void setDescriptor(ClassDescriptor descriptor) { this.descriptor = descriptor; } /** * INTERNAL: * Set the mapping's field collection. */ @Override protected void setFields(Vector fields) { this.fields = fields; } /** * PUBLIC: * This method is invoked reflectively on the reference object to return the value of the * attribute in the object. This method sets the name of the getMethodName. */ public void setGetMethodName(String methodName) { if (methodName == null) { return; } // This is done because setting the attribute name by default creates an InstanceVariableAttributeAccessor if (getAttributeAccessor() instanceof InstanceVariableAttributeAccessor) { String attributeName = this.attributeAccessor.getAttributeName(); setAttributeAccessor(new MethodAttributeAccessor()); getAttributeAccessor().setAttributeName(attributeName); } ((MethodAttributeAccessor)getAttributeAccessor()).setGetMethodName(methodName); } /** * Used to specify whether the value of this mapping may be null. * This is used when generating DDL. */ public void setIsOptional(boolean isOptional) { this.isOptional = isOptional; } /** * INTERNAL: * Set by the Object builder during initialization returns true if this mapping * is used as a primary key mapping. 
*/ public void setIsPrimaryKeyMapping(boolean isPrimaryKeyMapping) { this.isPrimaryKeyMapping = isPrimaryKeyMapping; } /** * PUBLIC: * Set this mapping to be read only. * Read-only mappings can be used if two attributes map to the same field. * Read-only mappings cannot be used for the primary key or other required fields. */ public void setIsReadOnly(boolean aBoolean) { isReadOnly = aBoolean; } /** * ADVANCED: * Set the maps id value */ public void setMapsIdValue(String mapsIdValue) { this.mapsIdValue = mapsIdValue; } /** * INTERNAL: * Allow user defined properties. */ public void setProperties(Map properties) { this.properties = properties; } /** * ADVANCED: * Allow user defined properties. */ public void setProperty(Object property, Object value) { getProperties().put(property, value); } /** * PUBLIC: * Set the methodName used to set the value for the mapping's attribute into the object. */ public void setSetMethodName(String methodName) { if (methodName == null) { return; } // This is done because setting the attribute name by default creates an InstanceVariableAttributeAccessor if (!getAttributeAccessor().isMethodAttributeAccessor()) { String attributeName = this.attributeAccessor.getAttributeName(); setAttributeAccessor(new MethodAttributeAccessor()); getAttributeAccessor().setAttributeName(attributeName); } ((MethodAttributeAccessor)getAttributeAccessor()).setSetMethodName(methodName); } /** * ADVANCED: * Set the weight of the mapping, used to sort mappings * DirectToField Mappings have a default weight of 1 while all other Mappings have a * default weight of MAXINT. Ordering of Mappings can be achieved by setting the weight of * a particular mapping to a value within the above mentioned limits. By ordering mappings * the user can control what order relationships are processed by TopLink. 
*/ // CR 4097 public void setWeight(Integer newWeight) { this.weight = newWeight; } /** * ADVANCED: * This method is used to add an object to a collection once the changeSet is applied. * The referenceKey parameter should only be used for direct Maps. */ public void simpleAddToCollectionChangeRecord(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) throws DescriptorException { throw DescriptorException.invalidMappingOperation(this, "simpleAddToCollectionChangeRecord"); } /** * ADVANCED: * This method is used to remove an object from a collection once the changeSet is applied. * The referenceKey parameter should only be used for direct Maps. */ public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) throws DescriptorException { throw DescriptorException.invalidMappingOperation(this, "simpleRemoveFromCollectionChangeRecord"); } /** * INTERNAL: * Print the mapping attribute name, this is used in error messages. */ public String toString() { return getClass().getName() + "[" + getAttributeName() + "]"; } /** * INTERNAL: * Allow for subclasses to perform validation. */ public void validateAfterInitialization(AbstractSession session) throws DescriptorException { } /** * INTERNAL: * Allow for subclasses to perform validation. */ public void validateBeforeInitialization(AbstractSession session) throws DescriptorException { } /** * INTERNAL: * A subclass should extract the value from the object for the field, if it does not map the field then * it should return null. * Return the Value from the object. */ @Override public Object valueFromObject(Object anObject, DatabaseField field, AbstractSession session) { return null; } /** * INTERNAL: * A subclass should implement this method if it wants different behavior. * Returns the value for the mapping from the database row. 
*/ public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery query, boolean isTargetProtected) throws DatabaseException { return valueFromRow(row, joinManager, query, null, query.getExecutionSession(), isTargetProtected, null); } /** * INTERNAL: * A subclass should implement this method if it wants different behavior. * Returns the value for the mapping from the database row. * The execution session is the session the query was executed on, * and its platform should be used for data conversion. */ public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery query, CacheKey cacheKey, AbstractSession session, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException { return null; } /** * INTERNAL: * Indicates whether the mapping is in SerializedObjectPolicy's sopObject. */ public boolean isInSopObject() { return this.isInSopObject == null || this.isInSopObject; } /** * INTERNAL: * Indicates whether the mapping is in SerializedObjectPolicy's sopObject and not out of it. */ public boolean isInOnlySopObject() { return this.isInSopObject != null && this.isInSopObject; } /** * INTERNAL: * Indicates whether the mapping is out of SerializedObjectPolicy's sopObject. */ public boolean isOutSopObject() { return this.isInSopObject == null || !this.isInSopObject; } /** * INTERNAL: * Indicates whether the mapping is out of SerializedObjectPolicy's sopObject and not in it. */ public boolean isOutOnlySopObject() { return this.isInSopObject != null && !this.isInSopObject; } /** * INTERNAL: * Indicates whether the mapping is both in and out of SerializedObjectPolicy's sopObject. */ public boolean isInAndOutSopObject() { return this.isInSopObject == null; } /** * INTERNAL: * Set the mapping is in SerializedObjectPolicy's sopObject. 
*/ public void setIsInSopObject() { this.isInSopObject = Boolean.TRUE; } /** * INTERNAL: * Set the mapping is out of SerializedObjectPolicy's sopObject. */ public void setIsOutSopObject() { this.isInSopObject = Boolean.FALSE; } /** * INTERNAL: * Set the mapping is both in and out of SerializedObjectPolicy's sopObject */ public void setIsInAndOutSopObject() { this.isInSopObject = null; } /** * INTERNAL: * Indicates whether the mapping (or at least one of its nested mappings, at any nested depth) * references an entity. * To return true the mapping (or nested mapping) should be ForeignReferenceMapping with non-null and non-aggregate reference descriptor. */ public boolean hasNestedIdentityReference() { return false; } /** * INTERNAL: * Returns the value for the mapping directly from the result-set. * PERF: Used for optimized object building. */ public Object valueFromResultSet(ResultSet resultSet, ObjectBuildingQuery query, AbstractSession session, DatabaseAccessor accessor, ResultSetMetaData metaData, int columnNumber, DatabasePlatform platform) throws SQLException { throw DescriptorException.invalidMappingOperation(this, "valueFromResultSet"); } /** * INTERNAL: * To verify if the specified object has been deleted or not. */ public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException { return true; } /** * INTERNAL: * A subclass should implement this method if it wants different behavior. * Write the foreign key values from the attribute to the row. */ public void writeFromAttributeIntoRow(Object attribute, AbstractRecord row, AbstractSession session) { // Do nothing by default. } /** * INTERNAL: * A subclass should implement this method if it wants different behavior. * Write the attribute value from the object to the row. */ public void writeFromObjectIntoRow(Object object, AbstractRecord row, AbstractSession session, WriteType writeType) { // Do nothing by default. 
} /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. * If mapping overrides this method it must override writeFromObjectIntoRowForUpdateAfterShallowInsert method, too. */ public void writeFromObjectIntoRowForShallowInsert(Object object, AbstractRecord row, AbstractSession session) { writeFromObjectIntoRow(object, row, session, WriteType.INSERT); } /** * INTERNAL: * This row is built for update after shallow insert which happens in case of bidirectional inserts. * It contains the foreign keys with non null values that were set to null for shallow insert. * If mapping overrides writeFromObjectIntoRowForShallowInsert method it must override this one, too. */ public void writeFromObjectIntoRowForUpdateAfterShallowInsert(Object object, AbstractRecord databaseRow, AbstractSession session, DatabaseTable table) { // Do nothing by default. } /** * INTERNAL: * This row is built for update before shallow delete which happens in case of bidirectional inserts. * It contains the same fields as the row built by writeFromObjectIntoRowForUpdateAfterShallowInsert, but all the values are null. */ public void writeFromObjectIntoRowForUpdateBeforeShallowDelete(Object object, AbstractRecord databaseRow, AbstractSession session, DatabaseTable table) { // Do nothing by default. } /** * INTERNAL: * A subclass should implement this method if it wants different behavior. * Write the attribute value from the object to the row. */ public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord row, AbstractSession session, WriteType writeType) { // Do nothing by default. } /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. 
*/ public void writeFromObjectIntoRowForShallowInsertWithChangeRecord(ChangeRecord changeRecord, AbstractRecord row, AbstractSession session) { writeFromObjectIntoRowWithChangeRecord(changeRecord, row, session, WriteType.INSERT); } /** * INTERNAL: */ public void writeFromObjectIntoRowForUpdate(WriteObjectQuery query, AbstractRecord row) { writeFromObjectIntoRow(query.getObject(), row, query.getSession(), WriteType.UPDATE); } /** * INTERNAL: * A subclass should implement this method if it wants different behavior. * Write the attribute value from the object to the row. */ public void writeFromObjectIntoRowForWhereClause(ObjectLevelModifyQuery query, AbstractRecord row) { Object object; if (query.isDeleteObjectQuery()) { object = query.getObject(); } else { object = query.getBackupClone(); } writeFromObjectIntoRow(object, row, query.getSession(), WriteType.UNDEFINED); } /** * INTERNAL: * Write fields needed for insert into the template for with null values. */ public void writeInsertFieldsIntoRow(AbstractRecord databaseRow, AbstractSession session) { // Do nothing by default. } /** * INTERNAL: * Write fields needed for update into the template for with null values. * By default inserted fields are used. */ public void writeUpdateFieldsIntoRow(AbstractRecord databaseRow, AbstractSession session) { writeInsertFieldsIntoRow(databaseRow, session); } /** * INTERNAL: * Either create a new change record or update the change record with the new value. * This is used by attribute change tracking. */ public void updateChangeRecord(Object clone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) throws DescriptorException { throw DescriptorException.invalidMappingOperation(this, "updateChangeRecord"); } /** * INTERNAL: * Add or removes a new value and its change set to the collection change record based on the event passed in. This is used by * attribute change tracking. 
*/ public void updateCollectionChangeRecord(org.eclipse.persistence.descriptors.changetracking.CollectionChangeEvent event, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) throws DescriptorException { throw DescriptorException.invalidMappingOperation(this, "updateCollectionChangeRecord"); } /** * INTERNAL: * Set the change listener if required. * This is required for collections and aggregates or other change tracked mutable objects. * This is used for resuming or flushing units of work. */ public void setChangeListener(Object clone, PropertyChangeListener listener, UnitOfWorkImpl uow) { // Nothing by default. } /** * ADVANCED: * Used to indicate the mapping references a JPA ID or MapsId attribute * for the CMP3Policy and JPA Id classes (as well as Embeddable Id classes). * This is different from isPrimaryKeyMapping, as an ID mapping is user * specified and can be read only, as long as another writable mapping for * the field exists. */ public void setDerivesId(boolean derivesId) { this.derivesId = derivesId; } /** * ADVANCED: * Used to indicate the mapping references a JPA ID or MapsId attribute * for the CMP3Policy and JPA Id classes (as well as Embeddable Id classes). * This is different from isPrimaryKeyMapping, as an ID mapping is user * specified and can be read only, as long as another writable mapping for * the field exists. */ public void setDerivedIdMapping(DatabaseMapping derivedIdMapping) { this.derivedIdMapping = derivedIdMapping; } /** * INTERNAL: * Directly build a change record without comparison */ public ChangeRecord buildChangeRecord(Object newValue, ObjectChangeSet owner, AbstractSession session) throws DescriptorException { throw DescriptorException.invalidMappingOperation(this, "buildChangeRecord"); } /** * INTERNAL: * Overridden by mappings that require additional processing of the change record after the record has been calculated. 
*/ public void postCalculateChanges(org.eclipse.persistence.sessions.changesets.ChangeRecord changeRecord, UnitOfWorkImpl uow) { } /** * INTERNAL: * Overridden by mappings that require objects to be deleted contribute to change set creation. */ public void postCalculateChangesOnDeleted(Object deletedObject, UnitOfWorkChangeSet uowChangeSet, UnitOfWorkImpl uow) { } /** * INTERNAL: * Overridden by mappings that require objects to be deleted contribute to change set creation. */ public void recordPrivateOwnedRemovals(Object object, UnitOfWorkImpl uow) { } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/TypedAssociation.java0000664000000000000000000000526612216173130024357 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; /** *

Purpose: Generic association object. * This can be used to map hashtable/map containers where the key and value are non-typed primitives. * * @author James Sutherland * @since TOPLink/Java 3.0 */ public class TypedAssociation extends Association { protected Class keyType; protected Class valueType; /** * Default constructor. */ public TypedAssociation() { super(); } /** * PUBLIC: * Create an association. */ public TypedAssociation(Object key, Object value) { super(key, value); if (key != null) { this.keyType = key.getClass(); } this.value = value; if (value != null) { this.valueType = value.getClass(); } } /** * PUBLIC: * Return the class of the key. */ public Class getKeyType() { return keyType; } /** * PUBLIC: * Return the class of the value. */ public Class getValueType() { return valueType; } /** * INTERNAL: * Handler for the descriptor post build event. * Convert the key and values to their appropriate type. */ public void postBuild(org.eclipse.persistence.descriptors.DescriptorEvent event) { setKey(event.getSession().getDatasourceLogin().getDatasourcePlatform().getConversionManager().convertObject(getKey(), getKeyType())); setValue(event.getSession().getDatasourceLogin().getDatasourcePlatform().getConversionManager().convertObject(getValue(), getValueType())); } /** * PUBLIC: * Set the class of the key. */ public void setKeyType(Class keyType) { this.keyType = keyType; } /** * PUBLIC: * Set the class of the value. */ public void setValueType(Class valueType) { this.valueType = valueType; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/DirectMapMapping.java0000664000000000000000000015474112216173130024264 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 
1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 07/19/2011-2.2.1 Guy Pelletier * - 338812: ManyToMany mapping in aggregate object violate integrity constraint on deletion * * 30/05/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.*; import org.eclipse.persistence.descriptors.changetracking.ChangeTracker; import org.eclipse.persistence.descriptors.changetracking.CollectionChangeEvent; import org.eclipse.persistence.descriptors.changetracking.MapChangeEvent; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.Expression; import org.eclipse.persistence.expressions.ExpressionBuilder; import org.eclipse.persistence.indirection.IndirectCollection; import org.eclipse.persistence.indirection.ValueHolder; import org.eclipse.persistence.internal.descriptors.DescriptorIterator; import org.eclipse.persistence.internal.descriptors.ObjectBuilder; import org.eclipse.persistence.internal.descriptors.changetracking.AttributeChangeListener; import org.eclipse.persistence.internal.descriptors.changetracking.ObjectChangeListener; import org.eclipse.persistence.internal.expressions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.indirection.*; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.sessions.DatabaseRecord; import org.eclipse.persistence.mappings.converters.*; import 
org.eclipse.persistence.mappings.foundation.MapComponentMapping; import org.eclipse.persistence.queries.*; /** * Mapping for a collection of key-value pairs. * The key and value must be simple types (String, Number, Date, etc.) * and stored in a single table along with a foreign key to the source object. * A converter can be used on the key and value if the desired object types * do not match the data types. * * @see Converter * @see ObjectTypeConverter * @see TypeConversionConverter * @see SerializedObjectConverter * * @author Steven Vo * @since TopLink 3.5 */ public class DirectMapMapping extends DirectCollectionMapping implements MapComponentMapping { /** * DirectMapCollectionMapping constructor */ public DirectMapMapping() { super(); DataReadQuery query = new DataReadQuery(); this.selectionQuery = query; MappedKeyMapContainerPolicy mapPolicy = new MappedKeyMapContainerPolicy(ClassConstants.Hashtable_Class); mapPolicy.setValueMapping(this); this.containerPolicy = mapPolicy; this.isListOrderFieldSupported = false; } /** * ADVANCED: * Configure the mapping to use a container policy. * This must be a MappedKeyMapContainerPolicy policy. * Set the valueMapping for the policy. */ public void setContainerPolicy(ContainerPolicy containerPolicy) { super.setContainerPolicy(containerPolicy); ((MappedKeyMapContainerPolicy)containerPolicy).setValueMapping(this); } private MappedKeyMapContainerPolicy getMappedKeyMapContainerPolicy(){ return (MappedKeyMapContainerPolicy)containerPolicy; } /** * PUBLIC: * Return the converter on the mapping. * A converter can be used to convert between the key's object value and database value. */ public Converter getKeyConverter() { return getMappedKeyMapContainerPolicy().getKeyConverter(); } /** * PUBLIC: * Set the converter on the mapping. * A converter can be used to convert between the key's object value and database value. 
*/ public void setKeyConverter(Converter keyConverter) { getMappedKeyMapContainerPolicy().setKeyConverter(keyConverter, this); } /** * INTERNAL: * Set the converter class name on the mapping. Initialized in * convertClassNamesToClasses. * A converter can be used to convert between the key's object value and database value. */ public void setKeyConverterClassName(String keyConverterClassName) { getMappedKeyMapContainerPolicy().setKeyConverterClassName(keyConverterClassName, this); } /** * INTERNAL: * Add a new value and its change set to the collection change record. This is used by * attribute change tracking. If a value has changed then issue a remove first with the key * then an add. */ public void addToCollectionChangeRecord(Object newKey, Object newValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) throws DescriptorException { DirectMapChangeRecord collectionChangeRecord = (DirectMapChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName()); if (collectionChangeRecord == null) { collectionChangeRecord = new DirectMapChangeRecord(objectChangeSet); collectionChangeRecord.setAttribute(getAttributeName()); collectionChangeRecord.setMapping(this); objectChangeSet.addChange(collectionChangeRecord); } collectionChangeRecord.addAdditionChange(newKey, newValue); } /** * INTERNAL: * Require for cloning, the part must be cloned. * Ignore the objects, use the attribute value. */ @Override public Object buildCloneForPartObject(Object attributeValue, Object original, CacheKey cacheKey, Object clone, AbstractSession cloningSession, Integer refreshCascade, boolean isExisting, boolean isFromSharedCache) { if (attributeValue == null) { return containerPolicy.containerInstance(1); } Object clonedAttributeValue = containerPolicy.containerInstance(containerPolicy.sizeFor(attributeValue)); // I need to synchronize here to prevent the collection from changing while I am cloning it. 
// This will occur when I am merging into the cache and I am instantiating a UOW valueHolder at the same time // I can not synchronize around the clone, as this will cause deadlocks, so I will need to copy the collection then create the clones // I will use a temporary collection to help speed up the process Object temporaryCollection = null; synchronized (attributeValue) { temporaryCollection = containerPolicy.cloneFor(attributeValue); } for (Object keysIterator = containerPolicy.iteratorFor(temporaryCollection); containerPolicy.hasNext(keysIterator);) { Map.Entry entry = (Map.Entry)containerPolicy.nextEntry(keysIterator, cloningSession); Object cloneKey = containerPolicy.buildCloneForKey(entry.getKey(), clone, cacheKey, null, cloningSession, isExisting, isFromSharedCache); Object cloneValue = buildElementClone(entry.getValue(), clone, cacheKey, refreshCascade, cloningSession, isExisting, isFromSharedCache); containerPolicy.addInto(cloneKey, cloneValue, clonedAttributeValue, cloningSession); } return clonedAttributeValue; } /** * INTERNAL: * Used by AttributeLevelChangeTracking to update a changeRecord with calculated changes * as opposed to detected changes. If an attribute can not be change tracked it's * changes can be detected through this process. */ @Override public void calculateDeferredChanges(ChangeRecord changeRecord, AbstractSession session) { DirectMapChangeRecord collectionRecord = (DirectMapChangeRecord)changeRecord; // TODO: Handle events that fired after collection was replaced. compareCollectionsForChange(collectionRecord.getOriginalCollection(), collectionRecord.getLatestCollection(), collectionRecord, session); } /** * INTERNAL: * Cascade discover and persist new objects during commit. 
*/
@Override
public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) {
    // Only mapped-key maps can hold keys that are themselves mapped objects.
    if (!containerPolicy.isMappedKeyMapPolicy()) {
        return;
    }
    Object mapValue = getAttributeValueFromObject(object);
    if (mapValue == null) {
        return;
    }
    // Hand each entry to the container policy so new map keys get discovered/persisted.
    for (Object entries = containerPolicy.iteratorFor(mapValue); containerPolicy.hasNext(entries);) {
        Object wrappedEntry = containerPolicy.nextEntry(entries, uow);
        containerPolicy.cascadeDiscoverAndPersistUnregisteredNewObjects(wrappedEntry, newObjects, unregisteredExistingObjects, visitedObjects, uow, cascadeErrors);
    }
}

/**
 * INTERNAL:
 * Cascade perform delete through mappings that require the cascade.
 */
@Override
public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
    if (!containerPolicy.isMappedKeyMapPolicy()) {
        return;
    }
    Object mapValue = getAttributeValueFromObject(object);
    if (mapValue == null) {
        return;
    }
    for (Object entries = containerPolicy.iteratorFor(mapValue); containerPolicy.hasNext(entries);) {
        Object wrappedEntry = containerPolicy.nextEntry(entries, uow);
        containerPolicy.cascadePerformRemoveIfRequired(wrappedEntry, uow, visitedObjects);
    }
}

/**
 * INTERNAL:
 * Cascade registerNew for Create through mappings that require the cascade.
 */
@Override
public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
    if (!containerPolicy.isMappedKeyMapPolicy()) {
        return;
    }
    Object mapValue = getAttributeValueFromObject(object);
    if (mapValue == null) {
        return;
    }
    for (Object entries = containerPolicy.iteratorFor(mapValue); containerPolicy.hasNext(entries);) {
        Object wrappedEntry = containerPolicy.nextEntry(entries, uow);
        containerPolicy.cascadeRegisterNewIfRequired(wrappedEntry, uow, visitedObjects);
    }
}

/**
 * INTERNAL:
 * This method is used to calculate the differences between two collections.
*/ @Override public void compareCollectionsForChange(Object oldCollection, Object newCollection, ChangeRecord changeRecord, AbstractSession session) { HashMap originalKeyValues = new HashMap(10); HashMap cloneKeyValues = new HashMap(10); if (oldCollection != null) { Map backUpCollection = (Map)oldCollection; Object backUpIter = containerPolicy.iteratorFor(backUpCollection); while (containerPolicy.hasNext(backUpIter)) {// Make a lookup of the objects Map.Entry entry = (Map.Entry)containerPolicy.nextEntry(backUpIter, session); originalKeyValues.put(entry.getKey(), backUpCollection.get(entry.getKey())); } } Map cloneObjectCollection = (Map)newCollection; Object cloneIter = containerPolicy.iteratorFor(cloneObjectCollection); while (containerPolicy.hasNext(cloneIter)) {//Compare them with the objects from the clone Map.Entry wrappedFirstObject = (Map.Entry)containerPolicy.nextEntry(cloneIter, session); Object firstValue = wrappedFirstObject.getValue(); Object firstKey = wrappedFirstObject.getKey(); Object backupValue = originalKeyValues.get(firstKey); if (!originalKeyValues.containsKey(firstKey)) { cloneKeyValues.put(firstKey, cloneObjectCollection.get(firstKey)); } else if (((backupValue == null) && (firstValue != null)) || (!backupValue.equals(firstValue))) {//the object was not in the backup cloneKeyValues.put(firstKey, cloneObjectCollection.get(firstKey)); } else { originalKeyValues.remove(firstKey); } } ((DirectMapChangeRecord)changeRecord).clearChanges(); ((DirectMapChangeRecord)changeRecord).addAdditionChange(cloneKeyValues); ((DirectMapChangeRecord)changeRecord).addRemoveChange(originalKeyValues); ((DirectMapChangeRecord)changeRecord).setIsDeferred(false); ((DirectMapChangeRecord)changeRecord).setLatestCollection(null); } /** * INTERNAL: * This method compares the changes between two direct collections. Comparisons are made on equality * not identity. 
*/
@Override
public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) {
    Object cloneAttribute = getAttributeValueFromObject(clone);
    if ((cloneAttribute != null) && (!getIndirectionPolicy().objectIsInstantiated(cloneAttribute))) {
        // Uninstantiated indirection: the collection was never read, so nothing changed.
        return null;
    }
    Map cloneObjectCollection = (Map)getRealCollectionAttributeValueFromObject(clone, session);
    // NOTE(review): the original body also allocated two HashMaps
    // (originalKeyValues/cloneKeyValues) that were never read or written —
    // the actual diffing is delegated to compareCollectionsForChange. Removed as dead code.
    Map backUpCollection = null;
    if (!owner.isNew()) {
        Object backUpAttribute = getAttributeValueFromObject(backUp);
        if ((backUpAttribute == null) && (cloneAttribute == null)) {
            return null;
        }
        backUpCollection = (Map)getRealCollectionAttributeValueFromObject(backUp, session);
    }
    DirectMapChangeRecord changeRecord = new DirectMapChangeRecord(owner);
    changeRecord.setAttribute(getAttributeName());
    changeRecord.setMapping(this);
    compareCollectionsForChange(backUpCollection, cloneObjectCollection, changeRecord, session);
    if (changeRecord.hasChanges()) {
        changeRecord.setOriginalCollection(backUpCollection);
        return changeRecord;
    }
    return null;
}

/**
 * INTERNAL:
 * Compare the attributes belonging to this mapping for the objects.
 */
@Override
public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
    Object firstObjectMap = getRealCollectionAttributeValueFromObject(firstObject, session);
    Object secondObjectMap = getRealCollectionAttributeValueFromObject(secondObject, session);
    return getMappedKeyMapContainerPolicy().compareContainers(firstObjectMap, secondObjectMap);
}

/*
 * INTERNAL:
 * Convert all the class-name-based settings in this mapping to actual
 * class-based settings. This method is implemented by subclasses as
 * necessary.
* @param classLoader */ @Override public void convertClassNamesToClasses(ClassLoader classLoader) { super.convertClassNamesToClasses(classLoader); if (getDirectKeyField() != null) { getDirectKeyField().convertClassNamesToClasses(classLoader); } } /** * INTERNAL * Called when a DatabaseMapping is used to map the key in a collection. Returns the key. */ public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){ Object key = dbRow.get(getDirectField()); if (getValueConverter() != null){ key = getValueConverter().convertDataValueToObjectValue(key, session); } return key; } /** * INTERNAL: */ public DatabaseField getDirectKeyField() { return getMappedKeyMapContainerPolicy().getDirectKeyField(null); } /** * INTERNAL: * Initialize and validate the mapping properties. */ @Override public void initialize(AbstractSession session) throws DescriptorException { getMappedKeyMapContainerPolicy().setDescriptorForKeyMapping(this.getDescriptor()); if (getKeyConverter() != null) { getKeyConverter().initialize(this, session); } super.initialize(session); } @Override protected void initializeDeleteQuery(AbstractSession session) { if (!getDeleteQuery().hasSessionName()) { getDeleteQuery().setSessionName(session.getName()); } if (hasCustomDeleteQuery()) { return; } Expression builder = new ExpressionBuilder(); Expression directKeyExp = null; List identityFields = getContainerPolicy().getIdentityFieldsForMapKey(); Iterator i = identityFields.iterator(); while (i.hasNext()){ DatabaseField field = i.next(); Expression fieldExpression = builder.getField(field).equal(builder.getParameter(field)); if (directKeyExp == null){ directKeyExp = fieldExpression; } else { directKeyExp = directKeyExp.and(fieldExpression); } } Expression expression = null; SQLDeleteStatement statement = new SQLDeleteStatement(); // Construct an expression to delete from the relation table. 
for (int index = 0; index < getReferenceKeyFields().size(); index++) { DatabaseField referenceKey = getReferenceKeyFields().get(index); DatabaseField sourceKey = getSourceKeyFields().get(index); Expression subExp1 = builder.getField(referenceKey); Expression subExp2 = builder.getParameter(sourceKey); Expression subExpression = subExp1.equal(subExp2); expression = subExpression.and(expression); } expression = expression.and(directKeyExp); statement.setWhereClause(expression); statement.setTable(getReferenceTable()); getDeleteQuery().setSQLStatement(statement); } /** * Initialize insert query. This query is used to insert the collection of objects into the * reference table. */ @Override protected void initializeInsertQuery(AbstractSession session) { super.initializeInsertQuery(session); getContainerPolicy().addFieldsForMapKey(getInsertQuery().getModifyRow()); } @Override protected void initializeSelectionStatement(AbstractSession session) { if (this.selectionQuery.isReadAllQuery()){ ((ReadAllQuery)this.selectionQuery).addAdditionalField(getDirectField().clone()); } else { SQLSelectStatement statement = (SQLSelectStatement)this.selectionQuery.getSQLStatement(); statement.addTable(getReferenceTable()); statement.addField(getDirectField().clone()); getContainerPolicy().addAdditionalFieldsToQuery(this.selectionQuery, getAdditionalFieldsBaseExpression(this.selectionQuery)); statement.normalize(session, null); } if (this.selectionQuery.isDirectReadQuery()){ ((DirectReadQuery)this.selectionQuery).setResultType(DataReadQuery.MAP); } } /** * INTERNAL: * Iterate on the attribute value. * The value holder has already been processed. * PERF: Avoid iteration if not required. 
*/
@Override
public void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue) {
    super.iterateOnRealAttributeValue(iterator, realAttributeValue);
    // PERF: skip the per-entry walk when there is nothing to do.
    if ((realAttributeValue == null) || iterator.shouldIterateOnPrimitives()) {
        return;
    }
    ContainerPolicy policy = getContainerPolicy();
    Object entries = policy.iteratorFor(realAttributeValue);
    while (policy.hasNext(entries)) {
        Object wrappedEntry = policy.nextEntry(entries, iterator.getSession());
        policy.iterateOnMapKey(iterator, wrappedEntry);
    }
}

/**
 * INTERNAL:
 * Iterate on the specified element.
 */
@Override
public void iterateOnElement(DescriptorIterator iterator, Object element) {
    super.iterateOnElement(iterator, element);
    ContainerPolicy policy = getContainerPolicy();
    Object entries = policy.iteratorFor(element);
    while (policy.hasNext(entries)) {
        Object wrappedEntry = policy.nextEntry(entries, iterator.getSession());
        policy.iterateOnMapKey(iterator, wrappedEntry);
    }
}

/**
 * INTERNAL:
 * Related mapping should implement this method to return true.
 */
@Override
public boolean isDirectMapMapping() {
    return true;
}

/**
 * INTERNAL:
 * Merge changes from the source to the target object.
 * Because this is a collection mapping, values are added to or removed from the
 * collection based on the changeset.
*/ @Override public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { if (this.descriptor.getCachePolicy().isProtectedIsolation()&& !this.isCacheable && !targetSession.isProtectedSession()){ setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null))); return; } Map valueOfTarget = null; AbstractSession session = mergeManager.getSession(); //collect the changes into a vector HashMap addObjects = ((DirectMapChangeRecord)changeRecord).getAddObjects(); HashMap removeObjects = ((DirectMapChangeRecord)changeRecord).getRemoveObjects(); //Check to see if the target has an instantiated collection if ((isAttributeValueInstantiated(target)) && (!changeRecord.getOwner().isNew())) { valueOfTarget = (Map)getRealCollectionAttributeValueFromObject(target, session); } else { //if not create an instance of the map valueOfTarget = (Map)containerPolicy.containerInstance(addObjects.size()); } if (!isAttributeValueInstantiated(target)) { if (mergeManager.shouldMergeChangesIntoDistributedCache()) { return; } Object valueOfSource = getRealCollectionAttributeValueFromObject(source, session); for (Object iterator = containerPolicy.iteratorFor(valueOfSource); containerPolicy.hasNext(iterator);) { Map.Entry entry = (Map.Entry)containerPolicy.nextEntry(iterator, session); containerPolicy.addInto(entry.getKey(), entry.getValue(), valueOfTarget, session); } } else { Object synchronizationTarget = valueOfTarget; // For indirect containers the delegate must be synchronized on, // not the wrapper as the clone synchs on the delegate, see bug#5685287. 
if (valueOfTarget instanceof IndirectCollection) { synchronizationTarget = ((IndirectCollection)valueOfTarget).getDelegateObject(); } synchronized (synchronizationTarget) { // Next iterate over the changes and add them to the container for (Iterator i = removeObjects.keySet().iterator(); i.hasNext();) { Object keyToRemove = i.next(); containerPolicy.removeFrom(keyToRemove, (Object)null, valueOfTarget, session); } for (Iterator i = addObjects.keySet().iterator(); i.hasNext();) { Object keyToAdd = i.next(); Object nextItem = addObjects.get(keyToAdd); if (mergeManager.shouldMergeChangesIntoDistributedCache()) { //bug#4458089 and 4454532- check if collection contains new item before adding during merge into distributed cache if (!containerPolicy.contains(nextItem, valueOfTarget, session)) { containerPolicy.addInto(keyToAdd, nextItem, valueOfTarget, session); } } else { containerPolicy.addInto(keyToAdd, nextItem, valueOfTarget, session); } } } } setRealAttributeValueInObject(target, valueOfTarget); } /** * INTERNAL: * Merge changes from the source to the target object. 
*/ @Override public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) { if (this.descriptor.getCachePolicy().isProtectedIsolation()&& !this.isCacheable && !targetSession.isProtectedSession()){ setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null))); return; } if (isTargetUnInitialized) { // This will happen if the target object was removed from the cache before the commit was attempted if (mergeManager.shouldMergeWorkingCopyIntoOriginal() && (!isAttributeValueInstantiated(source))) { setAttributeValueInObject(target, getIndirectionPolicy().getOriginalIndirectionObject(getAttributeValueFromObject(source), targetSession)); return; } } if (!shouldMergeCascadeReference(mergeManager)) { // This is only going to happen on mergeClone, and we should not attempt to merge the reference return; } if (mergeManager.shouldRefreshRemoteObject() && usesIndirection()) { mergeRemoteValueHolder(target, source, mergeManager); return; } if (mergeManager.isForRefresh()) { if (!isAttributeValueInstantiated(target)) { // This will occur when the clone's value has not been instantiated yet and we do not need // the refresh that attribute return; } } else if (!isAttributeValueInstantiated(source)) { // I am merging from a clone into an original. 
No need to do merge if the attribute was never // modified return; } Map valueOfSource = (Map)getRealCollectionAttributeValueFromObject(source, mergeManager.getSession()); // trigger instantiation of target attribute Object valueOfTarget = getRealCollectionAttributeValueFromObject(target, mergeManager.getSession()); Object newContainer = containerPolicy.containerInstance(containerPolicy.sizeFor(valueOfSource)); boolean fireChangeEvents = false; if ((this.getDescriptor().getObjectChangePolicy().isObjectChangeTrackingPolicy()) && (target instanceof ChangeTracker) && (((ChangeTracker)target)._persistence_getPropertyChangeListener() != null)) { fireChangeEvents = true; //Collections may not be indirect list or may have been replaced with user collection. Object iterator = containerPolicy.iteratorFor(valueOfTarget); while (containerPolicy.hasNext(iterator)) { Map.Entry entry = (Map.Entry)containerPolicy.nextEntry(iterator, mergeManager.getSession()); ((ObjectChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener()).internalPropertyChange(new MapChangeEvent(target, getAttributeName(), valueOfTarget, entry.getKey(), entry.getValue(), CollectionChangeEvent.REMOVE, false));// make the remove change event fire. } if (newContainer instanceof ChangeTracker) { ((ChangeTracker)newContainer)._persistence_setPropertyChangeListener(((ChangeTracker)target)._persistence_getPropertyChangeListener()); } if (valueOfTarget instanceof ChangeTracker) { ((ChangeTracker)valueOfTarget)._persistence_setPropertyChangeListener(null);//remove listener } } valueOfTarget = newContainer; for (Object sourceValuesIterator = containerPolicy.iteratorFor(valueOfSource); containerPolicy.hasNext(sourceValuesIterator);) { Map.Entry entry = (Map.Entry)containerPolicy.nextEntry(sourceValuesIterator, mergeManager.getSession()); if (fireChangeEvents) { //Collections may not be indirect list or may have been replaced with user collection. 
((ObjectChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener()).internalPropertyChange(new MapChangeEvent(target, getAttributeName(), valueOfTarget, entry.getKey(), entry.getValue(), CollectionChangeEvent.ADD, false));// make the add change event fire. } containerPolicy.addInto(entry.getKey(), entry.getValue(), valueOfTarget, mergeManager.getSession()); } if (fireChangeEvents && (getDescriptor().getObjectChangePolicy().isAttributeChangeTrackingPolicy())) { // check that there were changes, if not then remove the record. ObjectChangeSet changeSet = ((AttributeChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener()).getObjectChangeSet(); if (changeSet != null) { DirectMapChangeRecord changeRecord = (DirectMapChangeRecord)changeSet.getChangesForAttributeNamed(getAttributeName()); if (changeRecord != null) { if (!changeRecord.isDeferred()) { if (!changeRecord.hasChanges()) { changeSet.removeChange(getAttributeName()); } } else { // Must reset the latest collection. changeRecord.setLatestCollection(valueOfTarget); } } } } // Must re-set variable to allow for set method to re-morph changes if the collection is not being stored directly. setRealAttributeValueInObject(target, valueOfTarget); } /** * INTERNAL: * Perform the commit event. * This is used in the uow to delay data modifications. * This is mostly dealt with in the superclass. Private Owned deletes require extra functionality */ @Override public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException { super.performDataModificationEvent(event, session); if (event[0] == Delete && containerPolicy.shouldIncludeKeyInDeleteEvent()) { session.deleteObject(event[3]); } } /** * INTERNAL: * Overridden by mappings that require additional processing of the change record after the record has been calculated. 
*/ @Override public void postCalculateChanges(org.eclipse.persistence.sessions.changesets.ChangeRecord changeRecord, UnitOfWorkImpl uow) { // no need for private owned check. This code is only registered for private owned mappings. // targets are added to and/or removed to/from the source. DirectMapChangeRecord mapChangeRecord = (DirectMapChangeRecord)changeRecord; Iterator it = mapChangeRecord.getRemoveObjects().entrySet().iterator(); while(it.hasNext()) { Map.Entry entry = (Map.Entry)it.next(); containerPolicy.postCalculateChanges(entry.getKey(), entry.getValue(), referenceDescriptor, this, uow); } } /** * INTERNAL: * Insert the private owned object. */ @Override public void postInsert(WriteObjectQuery query) throws DatabaseException { Object objects; AbstractRecord databaseRow = new DatabaseRecord(); if (isReadOnly()) { return; } objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); if (this.containerPolicy.isEmpty(objects)) { return; } prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession()); // Extract primary key and value from the source. for (int index = 0; index < getReferenceKeyFields().size(); index++) { DatabaseField referenceKey = getReferenceKeyFields().get(index); DatabaseField sourceKey = getSourceKeyFields().get(index); Object sourceKeyValue = query.getTranslationRow().get(sourceKey); databaseRow.put(referenceKey, sourceKeyValue); } // Extract target field and its value. 
Construct insert statement and execute it Object keyIter = this.containerPolicy.iteratorFor(objects); while (this.containerPolicy.hasNext(keyIter)) { Map.Entry entry = (Map.Entry)this.containerPolicy.nextEntry(keyIter, query.getSession()); Object value = getFieldValue(entry.getValue(), query.getSession()); databaseRow.put(getDirectField(), value); ContainerPolicy.copyMapDataToRow(getContainerPolicy().getKeyMappingDataForWriteQuery(entry, query.getSession()), databaseRow); // In the uow data queries are cached until the end of the commit. if (query.shouldCascadeOnlyDependentParts()) { // Hey I might actually want to use an inner class here... ok array for now. Object[] event = new Object[3]; event[0] = Insert; event[1] = getInsertQuery(); event[2] = databaseRow.clone(); query.getSession().getCommitManager().addDataModificationEvent(this, event); } else { query.getSession().executeQuery(getInsertQuery(), databaseRow); } getContainerPolicy().propogatePostInsert(query, entry); } } /** * INTERNAL: * Update private owned part. 
*/ @Override protected void postUpdateWithChangeSet(WriteObjectQuery writeQuery) throws DatabaseException { ObjectChangeSet changeSet = writeQuery.getObjectChangeSet(); DirectMapChangeRecord changeRecord = (DirectMapChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName()); if (changeRecord == null) { return; } for (int index = 0; index < getReferenceKeyFields().size(); index++) { DatabaseField referenceKey = getReferenceKeyFields().get(index); DatabaseField sourceKey = getSourceKeyFields().get(index); Object sourceKeyValue = writeQuery.getTranslationRow().get(sourceKey); writeQuery.getTranslationRow().put(referenceKey, sourceKeyValue); } for (Iterator iterator = changeRecord.getRemoveObjects().entrySet().iterator(); iterator.hasNext();) { Object entry = iterator.next(); AbstractRecord thisRow = writeQuery.getTranslationRow().clone(); ContainerPolicy.copyMapDataToRow(containerPolicy.getKeyMappingDataForWriteQuery(entry, writeQuery.getSession()), thisRow); // Hey I might actually want to use an inner class here... ok array for now. Object[] event = null; if (containerPolicy.shouldIncludeKeyInDeleteEvent()){ event = new Object[4]; event[3] = containerPolicy.keyFromEntry(entry); } else { event = new Object[3]; } event[0] = Delete; event[1] = getDeleteQuery(); event[2] = thisRow; writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event); } for (Iterator iterator = changeRecord.getAddObjects().entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = (Map.Entry)iterator.next(); AbstractRecord thisRow = writeQuery.getTranslationRow().clone(); Object value = changeRecord.getAddObjects().get(entry.getKey()); value = getFieldValue(value, writeQuery.getSession()); ContainerPolicy.copyMapDataToRow(this.containerPolicy.getKeyMappingDataForWriteQuery(entry, writeQuery.getSession()), thisRow); thisRow.add(getDirectField(), value); // Hey I might actually want to use an inner class here... ok array for now. 
Object[] event = new Object[3]; event[0] = Insert; event[1] = getInsertQuery(); event[2] = thisRow; writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event); } } /** * INTERNAL: * Propagate the preDelete event through the container policy if necessary */ @Override public void preDelete(DeleteObjectQuery query) throws DatabaseException { if (getContainerPolicy().propagatesEventsToCollection()){ Object queryObject = query.getObject(); Object values = getAttributeValueFromObject(queryObject); Object iterator = containerPolicy.iteratorFor(values); while (containerPolicy.hasNext(iterator)){ Object wrappedObject = containerPolicy.nextEntry(iterator, query.getSession()); containerPolicy.propogatePreDelete(query, wrappedObject); } } super.preDelete(query); } /** * INTERNAL: * Rebuild select query. */ @Override protected void initOrRebuildSelectQuery() { this.selectionQuery = containerPolicy.buildSelectionQueryForDirectCollectionMapping(); } /** * INTERNAL: * Overridden by mappings that require additional processing of the change record after the record has been calculated. */ @Override public void recordPrivateOwnedRemovals(Object object, UnitOfWorkImpl uow) { // no need for private owned check. This code is only registered for private owned mappings. // targets are added to and/or removed to/from the source. Iterator it = (Iterator) containerPolicy.iteratorFor(getRealAttributeValueFromObject(object, uow)); while (it.hasNext()) { Object clone = it.next(); containerPolicy.recordPrivateOwnedRemovals(clone, referenceDescriptor, uow); } } /** * INTERNAL: * Remove a value and its change set from the collection change record. This is used by * attribute change tracking. 
*/ protected void removeFromCollectionChangeRecord(Object newKey, Object newValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) throws DescriptorException { DirectMapChangeRecord collectionChangeRecord = (DirectMapChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName()); if (collectionChangeRecord == null) { collectionChangeRecord = new DirectMapChangeRecord(objectChangeSet); collectionChangeRecord.setAttribute(getAttributeName()); collectionChangeRecord.setMapping(this); objectChangeSet.addChange(collectionChangeRecord); } collectionChangeRecord.addRemoveChange(newKey, newValue); } /** * INTERNAL: */ public void setDirectKeyField(DatabaseField keyField) { getMappedKeyMapContainerPolicy().setKeyField(keyField, descriptor); } /** * ADVANCED: * Set the class type of the field value. * This can be used if field value differs from the object value, * has specific typing requirements such as usage of java.sql.Blob or NChar. * This must be called after the field name has been set. */ public void setDirectKeyFieldClassification(Class fieldType) { getDirectKeyField().setType(fieldType); } /** * ADVANCED: * Set the class type name of the field value. * This can be used if field value differs from the object value, * has specific typing requirements such as usage of java.sql.Blob or NChar. * This must be called after the direct key field has been set. */ public void setDirectKeyFieldClassificationName(String fieldTypeName) { getDirectKeyField().setTypeName(fieldTypeName); } /** * PUBLIC: * Set the direct key field name in the reference table. * This is the field that the primitive data value of the Map key is stored in. */ public void setDirectKeyFieldName(String fieldName) { setDirectKeyField(new DatabaseField(fieldName)); } /** * INTERNAL: * Either create a new change record or update the change record with the new value. * This is used by attribute change tracking. 
*/ @Override public void updateChangeRecord(Object clone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) throws DescriptorException { DirectMapChangeRecord collectionChangeRecord = (DirectMapChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName()); if (collectionChangeRecord == null) { collectionChangeRecord = new DirectMapChangeRecord(objectChangeSet); collectionChangeRecord.setAttribute(getAttributeName()); collectionChangeRecord.setMapping(this); objectChangeSet.addChange(collectionChangeRecord); } if (collectionChangeRecord.getOriginalCollection() == null) { collectionChangeRecord.recreateOriginalCollection(oldValue, uow); } collectionChangeRecord.setLatestCollection(newValue); collectionChangeRecord.setIsDeferred(true); objectChangeSet.deferredDetectionRequiredOn(getAttributeName()); } /** * INTERNAL: * Add or removes a new value and its change set to the collection change record based on the event passed in. This is used by * attribute change tracking. */ @Override public void updateCollectionChangeRecord(CollectionChangeEvent event, ObjectChangeSet changeSet, UnitOfWorkImpl uow) { if (event != null ) { //Letting the mapping create and add the ChangeSet to the ChangeRecord rather // than the policy, since the policy doesn't know how to handle DirectCollectionChangeRecord. 
// if ordering is to be supported in the future, check how the method in CollectionMapping is implemented Object key = null; if (event.getClass().equals(ClassConstants.MapChangeEvent_Class)){ key = ((MapChangeEvent)event).getKey(); } if (event.getChangeType() == CollectionChangeEvent.ADD) { addToCollectionChangeRecord(key, event.getNewValue(), changeSet, uow); } else if (event.getChangeType() == CollectionChangeEvent.REMOVE) { removeFromCollectionChangeRecord(key, event.getNewValue(), changeSet, uow); } else { throw ValidationException.wrongCollectionChangeEventType(event.getChangeType()); } } } /** * PUBLIC: * Configure the mapping to use an instance of the specified container class * to hold the target objects. *

The default container class is java.util.Hashtable. *

The container class must implements (directly or indirectly) the Map interface. *

Note: Do not use both useMapClass(Class concreteClass), useTransparentMap(). The last use of one of the two methods will override the previous one. */ public void useMapClass(Class concreteClass) { if (!Helper.classImplementsInterface(concreteClass, ClassConstants.Map_Class)) { throw DescriptorException.illegalContainerClass(concreteClass); } containerPolicy.setContainerClass(concreteClass); } /** * PUBLIC: * Configure the mapping to use an instance of the specified container class * to hold the target objects. *

The container class must implement (directly or indirectly) the Map interface. *

Note: Do not use both useMapClass(Class concreteClass), useTransparentMap(). The last use of one of the two methods will override the previous one. */ public void useTransparentMap() { setIndirectionPolicy(new TransparentIndirectionPolicy()); useMapClass(ClassConstants.IndirectMap_Class); } /** * PUBLIC: * This is a helper method to set the key converter to a TypeConversionConverter. * This ensures that the key value from the database is converted to the correct * Java type. The converter can also be set directly. * Note that setting the converter to another converter will overwrite this setting. */ public void setKeyClass(Class keyClass) { TypeConversionConverter converter = new TypeConversionConverter(this); converter.setObjectClass(keyClass); setKeyConverter(converter); } /** * PUBLIC: * This is a helper method to get the object class from the key converter * if it is a TypeConversionConverter. * This returns null if not using a TypeConversionConverter key converter. */ public Class getKeyClass() { if ((getKeyConverter() == null) || !(getKeyConverter() instanceof TypeConversionConverter)) { return null; } return ((TypeConversionConverter)getKeyConverter()).getObjectClass(); } /** * PUBLIC: * This is a helper method to set the value converter to a TypeConversionConverter. * This ensures that the value from the database is converted to the correct * Java type. The converter can also be set directly. * Note that setting the converter to another converter will overwrite this setting. */ public void setValueClass(Class valueClass) { TypeConversionConverter converter = new TypeConversionConverter(this); converter.setObjectClass(valueClass); setValueConverter(converter); } /** * ADVANCED: * This method is used to have an object add to a collection once the changeSet is applied * The referenceKey parameter should only be used for direct Maps. 
*/ @Override public void simpleAddToCollectionChangeRecord(Object referenceKey, Object objectToAdd, ObjectChangeSet changeSet, AbstractSession session) { DirectMapChangeRecord collectionChangeRecord = (DirectMapChangeRecord)changeSet.getChangesForAttributeNamed(getAttributeName()); if (collectionChangeRecord == null) { collectionChangeRecord = new DirectMapChangeRecord(changeSet); collectionChangeRecord.setAttribute(getAttributeName()); collectionChangeRecord.setMapping(this); collectionChangeRecord.getAddObjects().put(referenceKey, objectToAdd); changeSet.addChange(collectionChangeRecord); } else { if (collectionChangeRecord.getRemoveObjects().containsKey(referenceKey)) { collectionChangeRecord.getRemoveObjects().remove(referenceKey); } else { collectionChangeRecord.getAddObjects().put(referenceKey, objectToAdd); } } } /** * ADVANCED: * This method is used to have an object removed from a collection once the changeSet is applied * The referenceKey parameter should only be used for direct Maps. */ @Override public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object objectToRemove, ObjectChangeSet changeSet, AbstractSession session) { DirectMapChangeRecord collectionChangeRecord = (DirectMapChangeRecord)changeSet.getChangesForAttributeNamed(getAttributeName()); if (collectionChangeRecord == null) { collectionChangeRecord = new DirectMapChangeRecord(changeSet); collectionChangeRecord.setAttribute(getAttributeName()); collectionChangeRecord.setMapping(this); collectionChangeRecord.getRemoveObjects().put(referenceKey, objectToRemove); changeSet.addChange(collectionChangeRecord); } else { if (collectionChangeRecord.getAddObjects().containsKey(referenceKey)) { collectionChangeRecord.getAddObjects().remove(referenceKey); } else { collectionChangeRecord.getRemoveObjects().put(referenceKey, objectToRemove); } } } /** * PUBLIC: * This is a helper method to get the object class from the value converter * if it is a TypeConversionConverter. 
* This returns null if not using a TypeConversionConverter value converter. */ public Class getValueClass() { if (!(getValueConverter() instanceof TypeConversionConverter)) { return null; } return ((TypeConversionConverter)getValueConverter()).getObjectClass(); } /** * INTERNAL: * Prepare and execute the batch query and store the * results for each source object in a map keyed by the * mappings source keys of the source objects. */ @Override protected void executeBatchQuery(DatabaseQuery query, CacheKey parentCacheKey, Map referenceDataByKey, AbstractSession session, AbstractRecord translationRow) { // Execute query and index resulting object sets by key. List rows = (List)session.executeQuery(query, translationRow); MappedKeyMapContainerPolicy mapContainerPolicy = getMappedKeyMapContainerPolicy(); for (AbstractRecord referenceRow : rows) { Object referenceKey = null; if (query.isObjectBuildingQuery()){ referenceKey = mapContainerPolicy.buildKey(referenceRow, (ObjectBuildingQuery)query, parentCacheKey, session, true); } else { referenceKey = mapContainerPolicy.buildKey(referenceRow, null, parentCacheKey, session, true); } Object referenceValue = referenceRow.get(this.directField); Object eachCacheKey = extractKeyFromTargetRow(referenceRow, session); Object container = referenceDataByKey.get(eachCacheKey); if ((container == null) || (container == Helper.NULL_VALUE)) { container = this.containerPolicy.containerInstance(); referenceDataByKey.put(eachCacheKey, container); } // Allow for value conversion. if (this.valueConverter != null) { referenceValue = this.valueConverter.convertDataValueToObjectValue(referenceValue, query.getSession()); } this.containerPolicy.addInto(referenceKey, referenceValue, container, query.getSession()); } } /** * INTERNAL: * Return the value of the field from the row or a value holder on the query to obtain the object. 
*/
@Override
protected Object valueFromRowInternalWithJoin(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey parentCacheKey, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException {
    ContainerPolicy policy = getContainerPolicy();
    Object value = policy.containerInstance();
    ObjectBuilder objectBuilder = getDescriptor().getObjectBuilder();
    // Extract the primary key of the source object, to filter only the joined rows for that object.
    Object sourceKey = objectBuilder.extractPrimaryKeyFromRow(row, executionSession);
    // If the query was using joining, all of the result rows by primary key will have been computed.
    // Restored element type (lost in transit): rows are database records; the raw List
    // would not compile against the typed rows.get(index) assignment below.
    List<AbstractRecord> rows = joinManager.getDataResultsByPrimaryKey().get(sourceKey);
    // If no 1-m rows were fetch joined, then get the value normally,
    // this can occur with pagination where the last row may not be complete.
    if (rows == null) {
        return valueFromRowInternal(row, joinManager, sourceQuery, executionSession);
    }
    // A set of direct values must be maintained to avoid duplicates from multiple 1-m joins.
    Set directValues = new HashSet();
    Converter valueConverter = getValueConverter();
    // For each rows, extract the target row and build the target object and add to the collection.
    int size = rows.size();
    for (int index = 0; index < size; index++) {
        AbstractRecord sourceRow = rows.get(index);
        AbstractRecord targetRow = sourceRow;
        // The field for many objects may be in the row,
        // so build the subpartion of the row through the computed values in the query,
        // this also helps the field indexing match.
        targetRow = trimRowForJoin(targetRow, joinManager, executionSession);
        // Partial object queries must select the primary key of the source and related objects.
        // If the target joined rows in null (outerjoin) means an empty collection.
Object directKey = this.containerPolicy.buildKeyFromJoinedRow(targetRow, joinManager, sourceQuery, parentCacheKey, executionSession, isTargetProtected); if (directKey == null) { // A null direct value means an empty collection returned as nulls from an outerjoin. return getIndirectionPolicy().valueFromRow(value); } // Only build/add the target object once, skip duplicates from multiple 1-m joins. if (!directValues.contains(directKey)) { directValues.add(directKey); Object directValue = targetRow.get(this.directField); // Allow for value conversion. if (valueConverter != null) { directValue = valueConverter.convertDataValueToObjectValue(directValue, executionSession); } policy.addInto(directKey, directValue, value, executionSession); } } return getIndirectionPolicy().valueFromRow(value); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/RelationalMapping.java0000664000000000000000000000200012216173130024462 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; /** * INTERNAL * All mappings which can be added to org.eclipse.persistence.mappings.Descriptor must * implement this interface. 
* * @since OracleAS TopLink 10g (10.0.3) */ public interface RelationalMapping { } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/DirectCollectionMapping.java0000664000000000000000000043650312216173130025641 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 07/19/2011-2.2.1 Guy Pelletier * - 338812: ManyToMany mapping in aggregate object violate integrity constraint on deletion * 04/09/2012-2.4 Guy Pelletier * - 374377: OrderBy with ElementCollection doesn't work * 14/05/2012-2.4 Guy Pelletier * - 376603: Provide for table per tenant support for multitenant applications * 30/05/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing * 08/01/2012-2.5 Chris Delahunt * - 371950: Metadata caching * 06/03/2013-2.5.1 Guy Pelletier * - 402380: 3 jpa21/advanced tests failed on server with * "java.lang.NoClassDefFoundError: org/eclipse/persistence/testing/models/jpa21/advanced/enums/Gender" ******************************************************************************/ package org.eclipse.persistence.mappings; import java.beans.PropertyChangeEvent; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.*; import org.eclipse.persistence.annotations.BatchFetchType; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.TablePerMultitenantPolicy; import 
org.eclipse.persistence.descriptors.changetracking.*; import org.eclipse.persistence.internal.descriptors.changetracking.AttributeChangeListener; import org.eclipse.persistence.internal.descriptors.changetracking.ObjectChangeListener; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.history.*; import org.eclipse.persistence.indirection.IndirectCollection; import org.eclipse.persistence.indirection.IndirectList; import org.eclipse.persistence.indirection.ValueHolder; import org.eclipse.persistence.internal.databaseaccess.DatasourcePlatform; import org.eclipse.persistence.internal.databaseaccess.Platform; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.expressions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.*; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.sessions.remote.*; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.mappings.converters.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.sessions.CopyGroup; import org.eclipse.persistence.sessions.DatabaseRecord; /** *

Purpose: This mapping is used to store a collection of simple types (String, Number, Date, etc.) * into a single table. The table must store the value and a foreign key to the source object. * A converter can be used if the desired object type and the data type do not match. * * @see Converter * @see ObjectTypeConverter * @see TypeConversionConverter * @see SerializedObjectConverter * * @author Sati * @since TOPLink/Java 1.0 * * 09/18/2009-2.0 Michael O'Brien * - 266912: JPA 2.0 Metamodel API (part of the JSR-317 EJB 3.1 Criteria API) * add support for passing BasicMap value type to MapAttributeImpl via new attributeClassification field */ public class DirectCollectionMapping extends CollectionMapping implements RelationalMapping { /** Used for data modification events. */ protected static final String Delete = "delete"; protected static final String Insert = "insert"; protected static final String DeleteAll = "deleteAll"; protected static final String DeleteAtIndex = "deleteAtIndex"; protected static final String UpdateAtIndex = "updateAtIndex"; /** Allows user defined conversion between the object value and the database value. */ protected Converter valueConverter; protected String valueConverterClassName; protected List orderByExpressions; /** Stores the reference table*/ protected DatabaseTable referenceTable; /** The direct field name is converted and stored */ protected DatabaseField directField; protected Vector sourceKeyFields; protected Vector referenceKeyFields; /** Used for insertion for m-m and dc, not used in 1-m. 
*/ protected DataModifyQuery insertQuery; /** Used for deletion when ChangeSets are used */ protected ModifyQuery changeSetDeleteQuery; protected transient ModifyQuery changeSetDeleteNullQuery; // Bug 306075 protected boolean hasCustomDeleteQuery; protected boolean hasCustomInsertQuery; protected HistoryPolicy historyPolicy; /** Used (only in case listOrderField != null) to delete object with particular orderFieldValue */ protected ModifyQuery deleteAtIndexQuery; /** Used (only in case listOrderField != null) to update orderFieldValue of object with particular orderFieldValue */ protected ModifyQuery updateAtIndexQuery; protected boolean hasCustomDeleteAtIndexQuery; protected boolean hasCustomUpdateAtIndexQuery; /** * @since Java Persistence API 2.0 * Referenced by MapAttributeImpl to pick up the BasicMap value parameter type * To specify the conversion type * */ protected transient Class attributeClassification; protected String attributeClassificationName; /** * PUBLIC: * Default constructor. */ public DirectCollectionMapping() { this.insertQuery = new DataModifyQuery(); this.orderByExpressions = new ArrayList(); this.sourceKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1); this.referenceKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1); this.selectionQuery = new DirectReadQuery(); this.hasCustomInsertQuery = false; this.isPrivateOwned = true; this.isListOrderFieldSupported = true; } /** * PUBLIC: * Provide ascending order support for this direct collection mapping. */ public void addAscendingOrdering() { this.hasOrderBy = true; orderByExpressions.add(new ExpressionBuilder().getField(getDirectFieldName()).ascending()); } /** * PUBLIC: * Provide descending order support for this direct collection mapping. 
*/ public void addDescendingOrdering() { this.hasOrderBy = true; orderByExpressions.add(new ExpressionBuilder().getField(getDirectFieldName()).descending()); } /** * ADVANCED: * Used this method to add custom ordering expressions when fetching * the collection. This could be things like expressions using a functions * like UPPER or NULLS LAST etc. */ public void addOrdering(Expression expression) { this.orderByExpressions.add(expression); } @Override public boolean isRelationalMapping() { return true; } /** * PUBLIC: * Return the converter on the mapping. * A converter can be used to convert between the direct collection's object value and database value. */ public Converter getValueConverter() { return valueConverter; } /** * PUBLIC: * Set the converter on the mapping. * A converter can be used to convert between the direct collection's object value and database value. */ public void setValueConverter(Converter valueConverter) { this.valueConverter = valueConverter; } /** * PUBLIC: * Set the converter class name on the mapping. Initialized in * convertClassNamesToClasses. * A converter can be used to convert between the direct collection's object value and database value. */ public void setValueConverterClassName(String valueConverterClassName) { this.valueConverterClassName = valueConverterClassName; } /** * PUBLIC: * Add the reference key field. * This is used for composite reference keys. * This is the foreign key field in the direct table referencing the primary key of the source object. * Both the reference field and the source field that it references must be provided. */ public void addReferenceKeyField(DatabaseField referenceForeignKeyField, DatabaseField sourcePrimaryKeyField) { getSourceKeyFields().addElement(sourcePrimaryKeyField); getReferenceKeyFields().addElement(referenceForeignKeyField); } /** * PUBLIC: * Add the name of the reference key field. * This is used for composite reference keys. 
* This is the foreign key field in the direct table referencing the primary key of the source object.
 * Both the reference field name and the name of the source field that it references must be provided.
 */
public void addReferenceKeyFieldName(String referenceForeignKeyFieldName, String sourcePrimaryKeyFieldName) {
    addReferenceKeyField(new DatabaseField(referenceForeignKeyFieldName), new DatabaseField(sourcePrimaryKeyFieldName));
}

/**
 * INTERNAL:
 * Clone and prepare the selection query as a nested batch read query.
 * This is used for nested batch reading.
 */
@Override
public ReadQuery prepareNestedBatchQuery(ObjectLevelReadQuery query) {
    // For CR#2646-S.M. In case of inheritance the descriptor to use may not be that
    // of the source query (the base class descriptor), but that of the subclass, if the
    // attribute is only of the subclass. Thus in this case use the descriptor from the mapping.
    // Also: for Bug 5478648 - Do not switch the descriptor if the query's descriptor is an aggregate
    ClassDescriptor descriptorToUse = query.getDescriptor();
    if ((descriptorToUse != this.descriptor) && (!descriptorToUse.getMappings().contains(this)) && (!this.descriptor.isDescriptorTypeAggregate())) {
        descriptorToUse = this.descriptor;
    }
    DataReadQuery batchQuery = new DataReadQuery();
    batchQuery.setName(getAttributeName());
    // Join the query where clause with the mappings,
    // this will cause a join that should bring in all of the target objects.
    ExpressionBuilder builder;
    Expression originalSelectionCriteria = null;
    // 2612538 - the default size of Map (32) is appropriate
    Map clonedExpressions = new IdentityHashMap();
    builder = new ExpressionBuilder();
    // For flashback.
    if (query.hasAsOfClause()) {
        builder.asOf(query.getAsOfClause());
    }
    Expression batchSelectionCriteria = null;
    // Build the batch query, either using joining, or an exist sub-select.
    // The mapping's own batch-fetch type, when set, overrides the query's policy.
    BatchFetchType batchType = query.getBatchFetchPolicy().getType();
    if (this.batchFetchType != null) {
        batchType = this.batchFetchType;
    }
    if (batchType == BatchFetchType.EXISTS) {
        // Using an EXISTS sub-select: WHERE EXISTS (sub-query joined by the mapping's
        // selection criteria AND the original query's selection criteria).
        ExpressionBuilder subBuilder = new ExpressionBuilder(descriptorToUse.getJavaClass());
        subBuilder.setQueryClassAndDescriptor(descriptorToUse.getJavaClass(), descriptorToUse);
        ReportQuery subQuery = new ReportQuery(descriptorToUse.getJavaClass(), subBuilder);
        subQuery.setDescriptor(descriptorToUse);
        subQuery.setShouldRetrieveFirstPrimaryKey(true);
        Expression subCriteria = subBuilder.twist(getSelectionCriteria(), builder);
        if (query.getSelectionCriteria() != null) {
            // For bug 2612567, any query can have batch attributes, so the
            // original selection criteria can be quite complex, with multiple
            // builders (i.e. for parallel selects).
            // Now uses cloneUsing(newBase) instead of rebuildOn(newBase).
            subCriteria = query.getSelectionCriteria().cloneUsing(subBuilder).and(subCriteria);
        }
        subQuery.setSelectionCriteria(subCriteria);
        batchSelectionCriteria = builder.exists(subQuery);
    } else if (batchType == BatchFetchType.IN) {
        // Using a IN with foreign key values (WHERE FK IN :QUERY_BATCH_PARAMETER)
        batchSelectionCriteria = buildBatchCriteria(builder, query);
    } else {
        // For 2729729 must clone the original selection criteria first,
        // otherwise the original query will be corrupted.
        if (query.getSelectionCriteria() != null) {
            originalSelectionCriteria = query.getSelectionCriteria().copiedVersionFrom(clonedExpressions);
            builder = originalSelectionCriteria.getBuilder();
        }
        // Using a join: WHERE the mapping's selection criteria AND the original
        // query's selection criteria.
        if (this.selectionQuery.isReadAllQuery()) {
            batchSelectionCriteria = builder.twist(this.selectionQuery.getSelectionCriteria(), builder);
        } else {
            batchSelectionCriteria = builder.twist(this.selectionQuery.getSQLStatement().getWhereClause(), builder);
        }
        // For 2729729, rebuildOn is not needed as the base is still the same.
        if (originalSelectionCriteria != null) {
            batchSelectionCriteria = batchSelectionCriteria.and(originalSelectionCriteria);
        }
        if (descriptorToUse.getQueryManager().getAdditionalJoinExpression() != null) {
            batchSelectionCriteria = batchSelectionCriteria.and(query.getDescriptor().getQueryManager().getAdditionalJoinExpression().rebuildOn(builder));
        }
        // Historical (flashback) queries also constrain the batch to the as-of time.
        if (this.historyPolicy != null) {
            if (query.getSession().getAsOfClause() != null) {
                builder.asOf(query.getSession().getAsOfClause());
            } else if (builder.getAsOfClause() == null) {
                builder.asOf(AsOfClause.NO_CLAUSE);
            }
            batchSelectionCriteria = batchSelectionCriteria.and(this.historyPolicy.additionalHistoryExpression(builder, builder));
        }
    }
    // Select the reference (foreign key) fields plus the direct value field.
    SQLSelectStatement batchStatement = new SQLSelectStatement();
    for (DatabaseField keyField : getReferenceKeyFields()) {
        batchStatement.addField(builder.getTable(this.referenceTable).getField(keyField));
    }
    batchStatement.addField(builder.getTable(this.referenceTable).getField(this.directField));
    batchStatement.setWhereClause(batchSelectionCriteria);
    batchQuery.setSQLStatement(batchStatement);
    this.containerPolicy.addAdditionalFieldsToQuery(batchQuery, getAdditionalFieldsBaseExpression(batchQuery));
    batchStatement.normalize(query.getSession(), descriptorToUse, clonedExpressions);
    return batchQuery;
}

/**
 * INTERNAL:
 * Clone and prepare the joined direct query.
 * Since direct-collection does not build objects a nest query is not required.
 */
@Override
public ObjectLevelReadQuery prepareNestedJoins(JoinedAttributeManager joinManager, ObjectBuildingQuery baseQuery, AbstractSession session) {
    return null;
}

/**
 * INTERNAL:
 * Return the value of the field from the row or a value holder on the query to obtain the object.
*/ @Override protected Object valueFromRowInternalWithJoin(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey parentCacheKey, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException { ContainerPolicy policy = getContainerPolicy(); Object value = policy.containerInstance(); ObjectBuilder objectBuilder = this.descriptor.getObjectBuilder(); // Extract the primary key of the source object, to filter only the joined rows for that object. Object sourceKey = objectBuilder.extractPrimaryKeyFromRow(row, executionSession); // If the query was using joining, all of the result rows by primary key will have been computed. List rows = joinManager.getDataResultsByPrimaryKey().get(sourceKey); // If no 1-m rows were fetch joined, then get the value normally, // this can occur with pagination where the last row may not be complete. if (rows == null) { return valueFromRowInternal(row, joinManager, sourceQuery, executionSession); } int size = rows.size(); if(size > 0) { // A set of direct values must be maintained to avoid duplicates from multiple 1-m joins. Set directValues = new HashSet(); ArrayList directValuesList = null; ArrayList targetRows = null; boolean shouldAddAll = policy.shouldAddAll(); if(shouldAddAll) { directValuesList = new ArrayList(size); targetRows = new ArrayList(size); } Converter valueConverter = getValueConverter(); // indicates if collection contains null boolean containsNull = false; // For each rows, extract the target row and build the target object and add to the collection. for (int index = 0; index < size; index++) { AbstractRecord sourceRow = rows.get(index); AbstractRecord targetRow = sourceRow; // The field for many objects may be in the row, // so build the subpartion of the row through the computed values in the query, // this also helps the field indexing match. 
targetRow = trimRowForJoin(targetRow, joinManager, executionSession); // Partial object queries must select the primary key of the source and related objects. // If the target joined rows in null (outerjoin) means an empty collection. Object directValue = targetRow.get(this.directField); if (directValue == null) { if (size == 1) { // A null direct value means an empty collection returned as nulls from an outerjoin. return getIndirectionPolicy().valueFromRow(value); } else { containsNull = true; } } // Only build/add the target object once, skip duplicates from multiple 1-m joins. if (!directValues.contains(directValue)) { directValues.add(directValue); // Allow for value conversion. if (valueConverter != null) { directValue = valueConverter.convertDataValueToObjectValue(directValue, executionSession); } if (shouldAddAll) { directValuesList.add(directValue); targetRows.add(targetRow); } else { policy.addInto(directValue, value, executionSession, targetRow, sourceQuery, parentCacheKey, isTargetProtected); } } } if (shouldAddAll) { // if collection contains a single element which is null then return an empty collection if (!(containsNull && targetRows.size() == 1)) { policy.addAll(directValuesList, value, executionSession, targetRows, sourceQuery, parentCacheKey, isTargetProtected); } } else { // if collection contains a single element which is null then return an empty collection if (containsNull && policy.sizeFor(value) == 1) { policy.clear(value); } } } return getIndirectionPolicy().valueFromRow(value); } /** * INTERNAL: * Copy of the attribute of the object. * This is NOT used for unit of work but for templatizing an object. 
*/ @Override public void buildCopy(Object copy, Object original, CopyGroup group) { Object attributeValue = getRealCollectionAttributeValueFromObject(original, group.getSession()); attributeValue = getContainerPolicy().cloneFor(attributeValue); // if value holder is used, then the value holder shared with original substituted for a new ValueHolder. getIndirectionPolicy().reset(copy); setRealAttributeValueInObject(copy, attributeValue); } /** * INTERNAL: * Clone the element, if necessary. * DirectCollections hold on to objects that do not have Descriptors * (e.g. int, String). These objects do not need to be cloned, unless they use a converter - they * are immutable. */ @Override public Object buildElementClone(Object element, Object parent, CacheKey parentCacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isExisting, boolean isFromSharedCache) { Object cloneValue = element; if ((getValueConverter() != null) && getValueConverter().isMutable()) { cloneValue = getValueConverter().convertDataValueToObjectValue(getValueConverter().convertObjectValueToDataValue(cloneValue, cloningSession), cloningSession); } return cloneValue; } /** * INTERNAL: * Verifies listOrderField's table: it must be reference table. * Precondition: listOrderField != null. 
*/ @Override protected void buildListOrderField() { if(this.listOrderField.hasTableName()) { if(!getReferenceTable().equals(this.listOrderField.getTable())) { throw DescriptorException.listOrderFieldTableIsWrong(this.getDescriptor(), this, this.listOrderField.getTable(), getReferenceTable()); } } else { this.listOrderField.setTable(getReferenceTable()); } this.listOrderField = getDescriptor().buildField(this.listOrderField, getReferenceTable()); } /** * INTERNAL: * Cascade perform delete through mappings that require the cascade */ @Override public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { //as this mapping type references primitive objects this method does not apply } /** * INTERNAL: * Cascade perform removal of orphaned private owned objects from the UnitOfWorkChangeSet */ @Override public void cascadePerformRemovePrivateOwnedObjectFromChangeSetIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { // as this mapping type references primitive objects this method does not apply } /** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade */ @Override public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { //as this mapping type references primitive objects this method does not apply } /** * INTERNAL: * Cascade discover and persist new objects during commit. */ @Override public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) { // Direct mappings do not require any cascading. } /** * INTERNAL: * The mapping clones itself to create deep copy. 
*/
@Override
public Object clone() {
    // Deep-copy the key field vectors and any custom modify queries so the clone is independent.
    DirectCollectionMapping clone = (DirectCollectionMapping)super.clone();
    clone.setSourceKeyFields(cloneFields(getSourceKeyFields()));
    clone.setReferenceKeyFields(cloneFields(getReferenceKeyFields()));
    if(this.changeSetDeleteQuery != null) {
        clone.changeSetDeleteQuery = (ModifyQuery)this.changeSetDeleteQuery.clone();
    }
    // Bug 306075
    if(this.changeSetDeleteNullQuery != null) {
        clone.changeSetDeleteNullQuery = (ModifyQuery)this.changeSetDeleteNullQuery.clone();
    }
    if(this.deleteAtIndexQuery != null) {
        clone.deleteAtIndexQuery = (ModifyQuery)this.deleteAtIndexQuery.clone();
    }
    if(this.updateAtIndexQuery != null) {
        clone.updateAtIndexQuery = (ModifyQuery)this.updateAtIndexQuery.clone();
    }
    return clone;
}

/**
 * INTERNAL:
 * This method is used to calculate the differences between two collections.
 */
@Override
public void compareCollectionsForChange(Object oldCollection, Object newCollection, ChangeRecord changeRecord, AbstractSession session) {
    // Ordered lists are diffed positionally by compareListsForChange instead.
    if(this.listOrderField != null) {
        compareListsForChange((List)oldCollection, (List)newCollection, changeRecord, session);
        return;
    }
    ContainerPolicy cp = getContainerPolicy();
    // Decremented per null in the old collection, incremented per null in the new one.
    int numberOfNewNulls = 0;
    // object -> occurrence count in the old (backup) collection.
    HashMap originalKeyValues = new HashMap(10);
    // object -> occurrence count of additions present only in the new (clone) collection.
    HashMap cloneKeyValues = new HashMap(10);
    if (oldCollection != null) {
        Object backUpIter = cp.iteratorFor(oldCollection);
        while (cp.hasNext(backUpIter)) { // Make a lookup of the objects
            Object secondObject = cp.next(backUpIter, session);
            // For CR#2258/CR#2378 handle null values inserted in a collection.
            if (secondObject == null) {
                numberOfNewNulls--;
            } else {
                Integer count = (Integer)originalKeyValues.get(secondObject);
                if (count == null) {
                    originalKeyValues.put(secondObject, Integer.valueOf(1));
                } else {
                    originalKeyValues.put(secondObject, Integer.valueOf(count.intValue() + 1));
                }
            }
        }
    }
    // should a removal occur this is the original count of objects on the database.
    // this value is used to determine how many objects to re-insert after the delete as a
    // delete will delete all of the objects not just one.
    HashMap databaseCount = (HashMap)originalKeyValues.clone();
    int databaseNullCount = Math.abs(numberOfNewNulls);
    if (newCollection != null) {
        Object cloneIter = cp.iteratorFor(newCollection);
        /* The following code is used to compare objects in a direct collection.
           Because objects in a direct collection are primitives and may be the same object
           the following code must count the number of instances in the collection
           not just the existence of an object.
        */
        while (cp.hasNext(cloneIter)) { //Compare them with the objects from the clone
            Object firstObject = cp.next(cloneIter, session);
            // For CR#2258/CR#2378 handle null values inserted in a collection.
            if (firstObject == null) {
                numberOfNewNulls++;
            } else {
                Integer count = (Integer)originalKeyValues.get(firstObject);
                if (count == null) { //the object was not in the backup
                    Integer cloneCount = (Integer)cloneKeyValues.get(firstObject);
                    //Add it to the additions hashtable
                    if (cloneCount == null) {
                        cloneKeyValues.put(firstObject, Integer.valueOf(1));
                    } else {
                        cloneKeyValues.put(firstObject, Integer.valueOf(cloneCount.intValue() + 1));
                    }
                } else if (count.intValue() == 1) {
                    //There is only one object so remove the whole reference
                    originalKeyValues.remove(firstObject);
                } else {
                    originalKeyValues.put(firstObject, Integer.valueOf(count.intValue() - 1));
                }
            }
        }
    }
    // Nothing added, nothing removed, no null churn, and an existing owner: no change to record.
    if (cloneKeyValues.isEmpty() && originalKeyValues.isEmpty() && (numberOfNewNulls == 0) && (!changeRecord.getOwner().isNew())) {
        return;
    }
    ((DirectCollectionChangeRecord)changeRecord).clearChanges();
    ((DirectCollectionChangeRecord)changeRecord).addAdditionChange(cloneKeyValues, databaseCount);
    ((DirectCollectionChangeRecord)changeRecord).addRemoveChange(originalKeyValues, databaseCount);
    ((DirectCollectionChangeRecord)changeRecord).setIsDeferred(false);
    ((DirectCollectionChangeRecord)changeRecord).setLatestCollection(null);
    //For CR#2258, produce a changeRecord which reflects the addition and removal of null values.
    if (numberOfNewNulls != 0) {
        ((DirectCollectionChangeRecord)changeRecord).getCommitAddMap().put(null, Integer.valueOf(databaseNullCount));
        if (numberOfNewNulls > 0) {
            ((DirectCollectionChangeRecord)changeRecord).addAdditionChange(null, Integer.valueOf(numberOfNewNulls));
        } else {
            numberOfNewNulls *= -1;
            ((DirectCollectionChangeRecord)changeRecord).addRemoveChange(null, Integer.valueOf(numberOfNewNulls));
        }
    }
}

/**
 * INTERNAL:
 * This method is used to calculate the differences between two Lists.
 */
public void compareListsForChange(List oldList, List newList, ChangeRecord changeRecord, AbstractSession session) {
    // Maps objects (null included) in newList and oldList to an array of two Sets:
    // the first one contains indexes of the object in oldList, the second - in newList.
    // Contains only the objects for which the set of indexes in newList and oldList are different;
    // only changed indexes appear in the sets (therefore the old index set and new index set don't intersect).
    // Examples:
    // obj was first (index 0) in oldList; first and second (indexes 0 and 1)in newList: obj -> {{}, {1}};
    // obj was not in oldList; first in newList: obj -> {null, {0}};
    // obj was first in oldList; not in newList: obj -> {{0}, null};
    // obj was first and second in oldList; first in newList: obj -> {{1}, {}};
    // Note the difference between null and empty set:
    // empty set means there's at least one index (the same in oldList and newList - otherwise it would've been in the set);
    // null means there's no indexes.
    // That helps during deletion - if we know there is no remaining duplicates for the object to be removed
    // we can delete it without checking its index (which allows delete several duplicates in one sql).
    // Map entry sets with no new and no old indexes removed.
HashMap changedIndexes = new HashMap(Math.max(oldList.size(), newList.size())); int nOldSize = 0; // for each object in oldList insert all its indexes in oldList into the old indexes set corresponding to each object. if (oldList != null) { nOldSize = oldList.size(); for(int i=0; i < nOldSize; i++) { Object obj = oldList.get(i); Set[] indexes = (Set[])changedIndexes.get(obj); if (indexes == null) { // the first index found for the object. indexes = new Set[]{new HashSet(), null}; changedIndexes.put(obj, indexes); } indexes[0].add(i); } } // helper set to store objects for which entries into changedIndexes has been removed: // if an entry for the object is created again, it will have an empty old indexes set (rather than null) // to indicate that the object has been on the oldList, too. HashSet removedFromChangedIndexes = new HashSet(); HashSet dummySet = new HashSet(0); // for each object in newList, for each its index in newList: // if the object has the same index in oldList - remove the index from old indexes set; // if the object doesn't have the same index in oldList - insert the index into new indexes set. int nNewSize = 0; if (newList != null) { nNewSize = newList.size(); for(int i=0; i < nNewSize; i++) { Object obj = newList.get(i); Set[] indexes = (Set[])changedIndexes.get(obj); if (indexes == null) { // the first index found for the object - or was found and removed before. if(removedFromChangedIndexes.contains(obj)) { // the object also exists in oldList indexes = new Set[]{dummySet, new HashSet()}; } else { // the object does not exist in oldList indexes = new Set[]{null, new HashSet()}; } changedIndexes.put(obj, indexes); // the object doesn't have this index in oldList - add the index to new indexes set. indexes[1].add(i); } else { if(indexes[0] == null || !indexes[0].contains(i)) { // the object doesn't have this index in oldList - add the index to new indexes set. 
if(indexes[1] == null) { indexes[1] = new HashSet(); } indexes[1].add(i); } else { // the object has this index in oldList - remove the index from the old indexes set. indexes[0].remove(i); if(indexes[0].isEmpty()) { // no old indexes left for the object. if(indexes[1] == null || indexes[1].isEmpty()) { // no new indexes left, too - remove the entry for the object. changedIndexes.remove(obj); // store the object in case it has another index on newList removedFromChangedIndexes.add(obj); } } } } } } ((DirectCollectionChangeRecord)changeRecord).setChangedIndexes(changedIndexes); ((DirectCollectionChangeRecord)changeRecord).setOldSize(nOldSize); ((DirectCollectionChangeRecord)changeRecord).setNewSize(nNewSize); } /** * INTERNAL: * This method compares the changes between two direct collections. Comparisons are made on equality * not identity. */ @Override public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) { Object cloneAttribute = getAttributeValueFromObject(clone); Object backUpAttribute = null; if ((cloneAttribute != null) && (!getIndirectionPolicy().objectIsInstantiated(cloneAttribute))) { return null; } Object cloneObjectCollection = getRealCollectionAttributeValueFromObject(clone, session); Object backUpCollection = null; if (!owner.isNew()) { backUpAttribute = getAttributeValueFromObject(backUp); if ((backUpAttribute == null) && (cloneAttribute == null)) { return null; } backUpCollection = getRealCollectionAttributeValueFromObject(backUp, session); } DirectCollectionChangeRecord changeRecord = new DirectCollectionChangeRecord(owner); changeRecord.setAttribute(getAttributeName()); changeRecord.setMapping(this); if(this.listOrderField != null) { changeRecord.setLatestCollection(cloneObjectCollection); } compareCollectionsForChange(backUpCollection, cloneObjectCollection, changeRecord, session); if (changeRecord.hasChanges()) { changeRecord.setOriginalCollection(backUpCollection); return changeRecord; } 
return null;
}

/**
 * INTERNAL:
 * Compare the attributes belonging to this mapping for the objects.
 * Compares by equality; an ordered list also requires identical element order.
 */
@Override
public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
    Object firstCollection = getRealCollectionAttributeValueFromObject(firstObject, session);
    Object secondCollection = getRealCollectionAttributeValueFromObject(secondObject, session);
    if(this.listOrderField != null) {
        return compareLists((List)firstCollection, (List)secondCollection);
    }
    ContainerPolicy containerPolicy = getContainerPolicy();
    if (containerPolicy.sizeFor(firstCollection) != containerPolicy.sizeFor(secondCollection)) {
        return false;
    }
    // Count occurrences of each value on both sides; duplicates matter for direct collections.
    HashMap firstCounter = new HashMap();
    HashMap secondCounter = new HashMap();
    for (Object iter = containerPolicy.iteratorFor(firstCollection); containerPolicy.hasNext(iter);) {
        Object object = containerPolicy.next(iter, session);
        if (firstCounter.containsKey(object)) {
            int count = ((Integer)firstCounter.get(object)).intValue();
            firstCounter.put(object, Integer.valueOf(++count));
        } else {
            firstCounter.put(object, Integer.valueOf(1));
        }
    }
    for (Object iter = containerPolicy.iteratorFor(secondCollection); containerPolicy.hasNext(iter);) {
        Object object = containerPolicy.next(iter, session);
        if (secondCounter.containsKey(object)) {
            int count = ((Integer)secondCounter.get(object)).intValue();
            secondCounter.put(object, Integer.valueOf(++count));
        } else {
            secondCounter.put(object, Integer.valueOf(1));
        }
    }
    // Match each counted value across the two maps, removing matched entries.
    for (Iterator iterator = firstCounter.keySet().iterator(); iterator.hasNext();) {
        Object object = iterator.next();
        if (!secondCounter.containsKey(object) || (((Integer)secondCounter.get(object)).intValue() != ((Integer)firstCounter.get(object)).intValue())) {
            // containsKey(object) will fail when the objects are arrays.
            boolean found = false;
            for (Iterator ii = secondCounter.keySet().iterator(); ii.hasNext();) {
                Object otherObject = ii.next();
                if(object == otherObject) {
                    found = true;
                } else if(object == null || otherObject == null) {
                    found = false;
                } else {
                    // Falls back to element-wise array comparison for array values.
                    found = Helper.comparePotentialArrays(object, otherObject);
                }
                if (found) {
                    iterator.remove();
                    secondCounter.remove(otherObject);
                    break;
                }
            }
            if (!found) {
                return false;
            }
        } else {
            iterator.remove();
            secondCounter.remove(object);
        }
    }
    // Any unmatched leftover on either side means the collections differ.
    if (!firstCounter.isEmpty() || !secondCounter.isEmpty()) {
        return false;
    }
    return true;
}

/**
 * Compare two lists. For equality the order of the elements should be the same.
 * Used only if listOrderField != null
 */
protected boolean compareLists(List firstList, List secondList) {
    if (firstList.size() != secondList.size()) {
        return false;
    }
    int size = firstList.size();
    for(int i=0; i < size; i++) {
        Object firstObject = firstList.get(i);
        Object secondObject = secondList.get(i);
        // Identity short-circuit; otherwise null-safe equals per position.
        if(firstObject != secondObject) {
            if(firstObject==null || secondObject==null) {
                return false;
            } else {
                if(!firstObject.equals(secondObject)) {
                    return false;
                }
            }
        }
    }
    return true;
}

/**
 * INTERNAL:
 * Convert all the class-name-based settings in this mapping to actual class-based
 * settings
 * This method is implemented by subclasses as necessary.
 * @param classLoader
 */
@Override
public void convertClassNamesToClasses(ClassLoader classLoader) {
    super.convertClassNamesToClasses(classLoader);
    // Tell the direct field to convert any class names (type name).
    directField.convertClassNamesToClasses(classLoader);
    // Convert and any Converter class names.
convertConverterClassNamesToClasses(valueConverter, classLoader);

    // Instantiate any custom converter class
    if (valueConverterClassName != null) {
        Class valueConverterClass;
        // NOTE: local shadows the valueConverter field; the result is installed via setValueConverter below.
        Converter valueConverter;
        try {
            if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
                // Running under a SecurityManager: load and instantiate inside doPrivileged.
                try {
                    valueConverterClass = (Class) AccessController.doPrivileged(new PrivilegedClassForName(valueConverterClassName, true, classLoader));
                } catch (PrivilegedActionException exception) {
                    throw ValidationException.classNotFoundWhileConvertingClassNames(valueConverterClassName, exception.getException());
                }
                try {
                    valueConverter = (Converter) AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(valueConverterClass));
                } catch (PrivilegedActionException exception) {
                    throw ValidationException.classNotFoundWhileConvertingClassNames(valueConverterClassName, exception.getException());
                }
            } else {
                valueConverterClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(valueConverterClassName, true, classLoader);
                valueConverter = (Converter) org.eclipse.persistence.internal.security.PrivilegedAccessHelper.newInstanceFromClass(valueConverterClass);
            }
        } catch (ClassNotFoundException exc) {
            throw ValidationException.classNotFoundWhileConvertingClassNames(valueConverterClassName, exc);
        } catch (Exception e) {
            // Catches IllegalAccessException and InstantiationException
            throw ValidationException.classNotFoundWhileConvertingClassNames(valueConverterClassName, e);
        }
        setValueConverter(valueConverter);
    }

    // Check if the attribute classification is set (either directly or through a type conversion converter)
    if (attributeClassification == null) {
        // Look for an attribute classification name
        if (attributeClassificationName != null) {
            try {
                if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
                    try {
                        attributeClassification = (Class) AccessController.doPrivileged(new PrivilegedClassForName(attributeClassificationName, true, classLoader));
                    } catch (PrivilegedActionException pae) {
                        throw ValidationException.classNotFoundWhileConvertingClassNames(attributeClassificationName, pae.getException());
                    }
                } else {
                    attributeClassification = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(attributeClassificationName, true, classLoader);
                }
            } catch (Exception exception) {
                throw ValidationException.classNotFoundWhileConvertingClassNames(attributeClassificationName, exception);
            }
        } else {
            // Still nothing, default to the type from the direct field.
            attributeClassification = getDirectField().getType();
        }
    }
}

/**
 * INTERNAL:
 * Extract the source primary key value from the reference direct row.
 * Used for batch reading, most following same order and fields as in the mapping.
 */
@Override
protected Object extractKeyFromTargetRow(AbstractRecord row, AbstractSession session) {
    int size = this.referenceKeyFields.size();
    Object[] key = new Object[size];
    ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager();
    for (int index = 0; index < size; index++) {
        DatabaseField relationField = this.referenceKeyFields.get(index);
        DatabaseField sourceField = this.sourceKeyFields.get(index);
        Object value = row.get(relationField);
        // Must ensure the classification gets a cache hit.
        try {
            value = conversionManager.convertObject(value, sourceField.getType());
        } catch (ConversionException e) {
            throw ConversionException.couldNotBeConverted(this, getDescriptor(), e);
        }
        key[index] = value;
    }
    return new CacheId(key);
}

/**
 * INTERNAL:
 * Extract the primary key value from the source row.
 * Used for batch reading, most following same order and fields as in the mapping.
*/
@Override
protected Object extractBatchKeyFromRow(AbstractRecord row, AbstractSession session) {
    int size = this.sourceKeyFields.size();
    Object[] key = new Object[size];
    ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager();
    for (int index = 0; index < size; index++) {
        DatabaseField field = this.sourceKeyFields.get(index);
        Object value = row.get(field);
        // Must ensure the classification to get a cache hit.
        try {
            value = conversionManager.convertObject(value, field.getType());
        } catch (ConversionException exception) {
            throw ConversionException.couldNotBeConverted(this, this.descriptor, exception);
        }
        key[index] = value;
    }
    return new CacheId(key);
}

/**
 * INTERNAL:
 * Return the selection criteria used to IN batch fetching.
 */
@Override
protected Expression buildBatchCriteria(ExpressionBuilder builder, ObjectLevelReadQuery query) {
    int size = this.referenceKeyFields.size();
    Expression table = builder.getTable(this.referenceTable);
    if (size > 1) {
        // Support composite keys using nested IN.
        List fields = new ArrayList(size);
        for (DatabaseField referenceKeyField : this.referenceKeyFields) {
            fields.add(table.getField(referenceKeyField));
        }
        return query.getSession().getPlatform().buildBatchCriteriaForComplexId(builder, fields);
    } else {
        return query.getSession().getPlatform().buildBatchCriteria(builder, table.getField(this.referenceKeyFields.get(0)));
    }
}

/**
 * INTERNAL:
 * Prepare and execute the batch query and store the
 * results for each source object in a map keyed by the
 * mappings source keys of the source objects.
 */
@Override
protected void executeBatchQuery(DatabaseQuery query, CacheKey parentCacheKey, Map referenceDataByKey, AbstractSession session, AbstractRecord translationRow) {
    // Execute query and index resulting object sets by key.
List rows = (List)session.executeQuery(query, translationRow); int size = rows.size(); if (this.containerPolicy.shouldAddAll()) { if (size > 0) { Map referenceDataAndRowsByKey = new HashMap(); for (int index = 0; index < size; index++) { AbstractRecord referenceRow = rows.get(index); Object referenceValue = referenceRow.get(this.directField); Object eachReferenceKey = extractKeyFromTargetRow(referenceRow, session); // Allow for value conversion. if (this.valueConverter != null) { referenceValue = this.valueConverter.convertDataValueToObjectValue(referenceValue, query.getSession()); } List[] valuesAndRows = referenceDataAndRowsByKey.get(eachReferenceKey); if (valuesAndRows == null) { valuesAndRows = new List[]{new ArrayList(), new ArrayList()}; referenceDataAndRowsByKey.put(eachReferenceKey, valuesAndRows); } valuesAndRows[0].add(referenceValue); valuesAndRows[1].add(referenceRow); } Iterator> iterator = referenceDataAndRowsByKey.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry entry = iterator.next(); Object eachReferenceKey = entry.getKey(); List referenceValues = entry.getValue()[0]; List referenceRows = entry.getValue()[1]; Object container = this.containerPolicy.containerInstance(referenceValues.size()); this.containerPolicy.addAll(referenceValues, container, query.getSession(), referenceRows, (DataReadQuery)query, parentCacheKey, true); referenceDataByKey.put(eachReferenceKey, container); } } } else { for (int index = 0; index < size; index++) { AbstractRecord referenceRow = rows.get(index); Object referenceValue = referenceRow.get(this.directField); Object eachReferenceKey = extractKeyFromTargetRow(referenceRow, session); Object container = referenceDataByKey.get(eachReferenceKey); if ((container == null) || (container == Helper.NULL_VALUE)) { container = this.containerPolicy.containerInstance(); referenceDataByKey.put(eachReferenceKey, container); } // Allow for value conversion. 
if (this.valueConverter != null) { referenceValue = this.valueConverter.convertDataValueToObjectValue(referenceValue, query.getSession()); } this.containerPolicy.addInto(referenceValue, container, query.getSession()); } } } /** * INTERNAL: * At this point, we realize we don't have indirection; * so we need to replace the reference object(s) with * the corresponding object(s) from the remote session. * * The reference objects for a DirectCollectionMapping * are primitives, so they do not need to be replaced. */ @Override public void fixRealObjectReferences(Object object, Map objectInformation, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { // do nothing } /** * PUBLIC: * Some databases do not properly support all of the base data types. For these databases, * the base data type must be explicitly specified in the mapping to tell EclipseLink to force * the instance variable value to that data type. * @since Java Persistence API 2.0 */ public Class getAttributeClassification() { return attributeClassification; } /** * INTERNAL: * Return the class name of the attribute type. * This is only used by the MW. 
* @since Java Persistence API 2.0 */ public String getAttributeClassificationName() { if ((null == attributeClassificationName) && (attributeClassification != null)) { attributeClassificationName = attributeClassification.getName(); } return attributeClassificationName; } protected ModifyQuery getDeleteQuery() { if (changeSetDeleteQuery == null) { changeSetDeleteQuery = new DataModifyQuery(); } return changeSetDeleteQuery; } // Bug 306075 protected ModifyQuery getDeleteNullQuery() { if (changeSetDeleteNullQuery == null) { changeSetDeleteNullQuery = new DataModifyQuery(); } return changeSetDeleteNullQuery; } protected ModifyQuery getDeleteAtIndexQuery() { if (deleteAtIndexQuery == null) { deleteAtIndexQuery = new DataModifyQuery(); } return deleteAtIndexQuery; } protected ModifyQuery getUpdateAtIndexQuery() { if (updateAtIndexQuery == null) { updateAtIndexQuery = new DataModifyQuery(); } return updateAtIndexQuery; } /** * INTERNAL: * Returns the set of fields that should be selected to build this mapping's value(s). * This is used by expressions to determine which fields to include in the select clause for non-object expressions. */ @Override public Vector getSelectFields() { Vector fields = new NonSynchronizedVector(2); fields.add(getDirectField()); return fields; } /** * INTERNAL: * Returns the table(s) that should be selected to build this mapping's value(s). * This is used by expressions to determine which tables to include in the from clause for non-object expressions. */ @Override public Vector getSelectTables() { Vector tables = new NonSynchronizedVector(0); tables.add(getReferenceTable()); return tables; } /** * INTERNAL: * Return the direct field. * This is the field in the direct table to store the values. */ public DatabaseField getDirectField() { return directField; } /** * PUBLIC: * Returns the name of the field name in the reference table. 
*/ public String getDirectFieldName() { if (getDirectField() == null) { return null; } return getDirectField().getQualifiedName(); } protected DataModifyQuery getInsertQuery() { return insertQuery; } /** * INTERNAL: * Returns the join criteria stored in the mapping selection query. This criteria * is used to read reference objects across the tables from the database. */ @Override public Expression getJoinCriteria(ObjectExpression context, Expression base) { if (getHistoryPolicy() != null) { Expression result = super.getJoinCriteria(context, base); Expression historyCriteria = getHistoryPolicy().additionalHistoryExpression(context, base); if (result != null) { return result.and(historyCriteria); } else if (historyCriteria != null) { return historyCriteria; } else { return null; } } else { return super.getJoinCriteria(context, base); } } /** * INTERNAL: * return the object on the client corresponding to the specified object. * DirectCollections do not have to worry about * maintaining object identity. */ @Override public Object getObjectCorrespondingTo(Object object, DistributedSession session, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query) { return object; } /** * PUBLIC: * Return the order by expression. */ public List getOrderByExpressions() { return orderByExpressions; } /** * PUBLIC: * Allow history support on the reference table. */ public HistoryPolicy getHistoryPolicy() { return historyPolicy; } /** * INTERNAL: * Get the container policy from the selection query for this mapping. */ @Override protected ContainerPolicy getSelectionQueryContainerPolicy() { return ((DataReadQuery) getSelectionQuery()).getContainerPolicy(); } /** * INTERNAL: * This cannot be used with direct collection mappings. */ @Override public Class getReferenceClass() { return null; } @Override public String getReferenceClassName() { return null; } /** * INTERNAL: * There is none on direct collection. 
*/ @Override public ClassDescriptor getReferenceDescriptor() { return null; } /** * INTERNAL: * Return the reference key field names associated with the mapping. * These are in-order with the sourceKeyFieldNames. */ public Vector getReferenceKeyFieldNames() { Vector fieldNames = new Vector(getReferenceKeyFields().size()); for (Enumeration fieldsEnum = getReferenceKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return the reference key fields. */ public Vector getReferenceKeyFields() { return referenceKeyFields; } /** * INTERNAL: * Return the direct table. * This is the table to store the values. */ public DatabaseTable getReferenceTable() { return referenceTable; } /** * PUBLIC: * Returns the name of the reference table */ public String getReferenceTableName() { if (getReferenceTable() == null) { return null; } return getReferenceTable().getName(); } //This method is added to include table qualifier. /** * PUBLIC: * Returns the qualified name of the reference table. */ public String getReferenceTableQualifiedName() {//CR#2407 if (getReferenceTable() == null) { return null; } return getReferenceTable().getQualifiedName(); } /** * INTERNAL: * Return the relationshipPartner mapping for this bi-directional mapping. If the relationshipPartner is null then * this is a uni-directional mapping. * DirectCollectionMapping can not be part of a bi-directional mapping */ @Override public DatabaseMapping getRelationshipPartner() { return null; } /** * PUBLIC: * Return the source key field names associated with the mapping. * These are in-order with the referenceKeyFieldNames. 
*/
public Vector getSourceKeyFieldNames() {
    Vector fieldNames = new Vector(getSourceKeyFields().size());
    for (Enumeration fieldsEnum = getSourceKeyFields().elements(); fieldsEnum.hasMoreElements();) {
        fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName());
    }
    return fieldNames;
}

/**
 * INTERNAL:
 * Return the source key fields.
 * FIX: restore the element type parameter lost in extraction - in-file callers
 * (initializeDeleteAllQuery, createWhereClauseForDeleteQuery) assign elements
 * directly to DatabaseField without a cast, which requires a typed Vector.
 */
public Vector<DatabaseField> getSourceKeyFields() {
    return sourceKeyFields;
}

protected boolean hasCustomDeleteQuery() {
    return hasCustomDeleteQuery;
}

protected boolean hasCustomInsertQuery() {
    return hasCustomInsertQuery;
}

protected boolean hasCustomDeleteAtIndexQuery() {
    return hasCustomDeleteAtIndexQuery;
}

protected boolean hasCustomUpdateAtIndexQuery() {
    return hasCustomUpdateAtIndexQuery;
}

/**
 * INTERNAL:
 * Indicates whether the mapping (or at least one of its nested mappings, at any nested depth)
 * references an entity.
 * To return true the mapping (or nested mapping) should be ForeignReferenceMapping with non-null and non-aggregate reference descriptor.
 */
@Override
public boolean hasNestedIdentityReference() {
    return false;
}

/**
 * INTERNAL:
 * Initialize and validate the mapping properties.
 */
@Override
public void initialize(AbstractSession session) throws DescriptorException {
    if (session.hasBroker()) {
        if (getInsertQuery().hasSessionName()) {
            // substitute session that owns the mapping for the session that owns reference table.
            session = session.getBroker().getSessionForName(getInsertQuery().getSessionName());
        }
    }
    // Resolve source keys (explicit or defaulted), then the reference table/keys/field.
    if (isKeyForSourceSpecified()) {
        initializeSourceKeys(session);
    } else {
        initializeSourceKeysWithDefaults(session);
    }
    initializeReferenceTable(session);
    initializeReferenceKeys(session);
    initializeDirectField(session);
    if (getReferenceTable().getName().indexOf(' ') != -1) {
        //table names contains a space so needs to be quoted.
String beginQuote = ((DatasourcePlatform)session.getDatasourcePlatform()).getStartDelimiter();
        String endQuote = ((DatasourcePlatform)session.getDatasourcePlatform()).getEndDelimiter();
        //Ensure this tablename hasn't already been quoted.
        if (getReferenceTable().getName().indexOf(beginQuote) == -1) {
            getReferenceTable().setName(beginQuote + getReferenceTable().getName() + endQuote);
        }
    }
    if (this.listOrderField != null) {
        this.initializeListOrderField(session);
    }
    getContainerPolicy().initialize(session, this.referenceTable);
    // Initialize the value converter sooner since it likely will finish
    // configuring field and attribute classifications.
    if (getValueConverter() != null) {
        getValueConverter().initialize(this, session);
    }
    // Build the selection query/criteria unless the user supplied a custom one.
    if (!hasCustomSelectionQuery()){
        initOrRebuildSelectQuery();
        getSelectionQuery().setName(getAttributeName());
        if (shouldInitializeSelectionCriteria()) {
            initializeSelectionCriteria(session);
            initializeSelectionStatement(session);
        }
    }
    if (!getSelectionQuery().hasSessionName()) {
        getSelectionQuery().setSessionName(session.getName());
    }
    if (getSelectionQuery().getPartitioningPolicy() == null) {
        getSelectionQuery().setPartitioningPolicy(getPartitioningPolicy());
    }
    getSelectionQuery().setSourceMapping(this);
    if ((getValueConverter() != null) && (getSelectionQuery() instanceof DirectReadQuery)) {
        ((DirectReadQuery)getSelectionQuery()).setValueConverter(getValueConverter());
    }
    // Build all the modify queries used to maintain the reference table.
    initializeDeleteAllQuery(session);
    initializeDeleteQuery(session);
    initializeDeleteNullQuery(session); // Bug 306075
    initializeInsertQuery(session);
    initializeDeleteAtIndexQuery(session);
    initializeUpdateAtIndexQuery(session);
    if (getHistoryPolicy() != null) {
        getHistoryPolicy().initialize(session);
    }
    super.initialize(session);
}

/**
 * INTERNAL:
 * Initializes listOrderField.
 * Precondition: listOrderField != null.
 */
@Override
protected void initializeListOrderField(AbstractSession session) {
    // This method is called twice. The second call (by CollectionMapping.initialize)
    // should be ignored because initialization has been already done.
    if(!getContainerPolicy().isOrderedListPolicy() || ((OrderedListContainerPolicy)getContainerPolicy()).getListOrderField() == null) {
        super.initializeListOrderField(session);
    }
}

/**
 * Initialize delete all query. This query is used to delete the collection of objects from the
 * reference table.
 */
protected void initializeDeleteAllQuery(AbstractSession session) {
    if (!getDeleteAllQuery().hasSessionName()) {
        getDeleteAllQuery().setSessionName(session.getName());
    }
    if (getDeleteAllQuery().getPartitioningPolicy() == null) {
        getDeleteAllQuery().setPartitioningPolicy(getPartitioningPolicy());
    }
    // A user-defined query takes precedence over the generated SQL.
    if (hasCustomDeleteAllQuery()) {
        return;
    }
    Expression expression = null;
    Expression subExp1;
    Expression subExp2;
    Expression subExpression;
    Expression builder = new ExpressionBuilder();
    SQLDeleteStatement statement = new SQLDeleteStatement();

    // Construct an expression to delete from the relation table.
for (int index = 0; index < getReferenceKeyFields().size(); index++) {
        DatabaseField referenceKey = getReferenceKeyFields().elementAt(index);
        DatabaseField sourceKey = getSourceKeyFields().elementAt(index);
        // referenceKey = :sourceKey, ANDed over every key field pair.
        subExp1 = builder.getField(referenceKey);
        subExp2 = builder.getParameter(sourceKey);
        subExpression = subExp1.equal(subExp2);
        if (expression == null) {
            expression = subExpression;
        } else {
            expression = expression.and(subExpression);
        }
    }
    statement.setWhereClause(expression);
    statement.setTable(getReferenceTable());
    getDeleteAllQuery().setSQLStatement(statement);
}

/**
 * Initialize the delete query used to remove a single value row from the reference table.
 */
protected void initializeDeleteQuery(AbstractSession session) {
    if (!getDeleteQuery().hasSessionName()) {
        getDeleteQuery().setSessionName(session.getName());
    }
    if (getDeleteQuery().getPartitioningPolicy() == null) {
        getDeleteQuery().setPartitioningPolicy(getPartitioningPolicy());
    }
    // A user-defined query takes precedence over the generated SQL.
    if (hasCustomDeleteQuery()) {
        return;
    }
    SQLDeleteStatement statement = new SQLDeleteStatement();
    ExpressionBuilder builder = new ExpressionBuilder();
    Expression expression = createWhereClauseForDeleteQuery(builder);
    statement.setWhereClause(expression);
    statement.setTable(getReferenceTable());
    getDeleteQuery().setSQLStatement(statement);
}

// Bug 306075 - for deleting a null value from a collection
protected void initializeDeleteNullQuery(AbstractSession session) {
    if (!getDeleteNullQuery().hasSessionName()) {
        getDeleteNullQuery().setSessionName(session.getName());
    }
    if (getDeleteNullQuery().getPartitioningPolicy() == null) {
        getDeleteNullQuery().setPartitioningPolicy(getPartitioningPolicy());
    }
    SQLDeleteStatement statement = new SQLDeleteStatement();
    ExpressionBuilder builder = new ExpressionBuilder();
    // Same key clause as the regular delete, but matches IS NULL on the value field.
    Expression expression = createWhereClauseForDeleteNullQuery(builder);
    statement.setWhereClause(expression);
    statement.setTable(getReferenceTable());
    getDeleteNullQuery().setSQLStatement(statement);
}

/**
 * Initialize the delete-at-index query used when a list order field is maintained.
 */
protected void initializeDeleteAtIndexQuery(AbstractSession session) {
    if (!getDeleteAtIndexQuery().hasSessionName()) {
getDeleteAtIndexQuery().setSessionName(session.getName()); } if (getDeleteAtIndexQuery().getPartitioningPolicy() == null) { getDeleteAtIndexQuery().setPartitioningPolicy(getPartitioningPolicy()); } if (hasCustomDeleteAtIndexQuery()) { return; } SQLDeleteStatement statement = new SQLDeleteStatement(); ExpressionBuilder builder = new ExpressionBuilder(); Expression expression = createWhereClauseForDeleteQuery(builder); expression = expression.and(builder.getField(this.listOrderField).equal(builder.getParameter(this.listOrderField))); statement.setWhereClause(expression); statement.setTable(getReferenceTable()); getDeleteAtIndexQuery().setSQLStatement(statement); } protected void initializeUpdateAtIndexQuery(AbstractSession session) { if (!getUpdateAtIndexQuery().hasSessionName()) { getUpdateAtIndexQuery().setSessionName(session.getName()); } if (getUpdateAtIndexQuery().getPartitioningPolicy() == null) { getUpdateAtIndexQuery().setPartitioningPolicy(getPartitioningPolicy()); } if (hasCustomUpdateAtIndexQuery()) { return; } SQLUpdateStatement statement = new SQLUpdateStatement(); ExpressionBuilder builder = new ExpressionBuilder(); Expression expression = createWhereClauseForDeleteQuery(builder); expression = expression.and(builder.getField(this.listOrderField).equal(builder.getParameter(this.listOrderField))); statement.setWhereClause(expression); statement.setTable(getReferenceTable()); AbstractRecord modifyRow = new DatabaseRecord(); modifyRow.add(this.listOrderField, null); statement.setModifyRow(modifyRow); getUpdateAtIndexQuery().setSQLStatement(statement); } /** * INTERNAL: * Indicates whether getListOrderFieldExpression method should create field expression on table expression. 
*/ @Override public boolean shouldUseListOrderFieldTableExpression() { return true; } protected Expression createWhereClauseForDeleteQuery(ExpressionBuilder builder) { Expression directExp = builder.getField(getDirectField()).equal(builder.getParameter(getDirectField())); Expression expression = null; // Construct an expression to delete from the relation table. for (int index = 0; index < getReferenceKeyFields().size(); index++) { DatabaseField referenceKey = getReferenceKeyFields().get(index); DatabaseField sourceKey = getSourceKeyFields().get(index); Expression subExp1 = builder.getField(referenceKey); Expression subExp2 = builder.getParameter(sourceKey); Expression subExpression = subExp1.equal(subExp2); expression = subExpression.and(expression); } expression = expression.and(directExp); return expression; } // Bug 306075 - for deleting a null value from a collection protected Expression createWhereClauseForDeleteNullQuery(ExpressionBuilder builder) { Expression directExp = builder.getField(getDirectField()).isNull(); Expression expression = null; // Construct an expression to delete from the relation table. for (int index = 0; index < getReferenceKeyFields().size(); index++) { DatabaseField referenceKey = getReferenceKeyFields().get(index); DatabaseField sourceKey = getSourceKeyFields().get(index); Expression subExp1 = builder.getField(referenceKey); Expression subExp2 = builder.getParameter(sourceKey); Expression subExpression = subExp1.equal(subExp2); expression = subExpression.and(expression); } expression = expression.and(directExp); return expression; } /** * The field name on the reference table is initialized and cached. */ protected void initializeDirectField(AbstractSession session) throws DescriptorException { if (getDirectField() == null) { throw DescriptorException.directFieldNameNotSet(this); } getDirectField().setTable(getReferenceTable()); getDirectField().setIndex(0); } /** * Initialize insert query. 
This query is used to insert the collection of objects into the * reference table. */ protected void initializeInsertQuery(AbstractSession session) { if (!getInsertQuery().hasSessionName()) { getInsertQuery().setSessionName(session.getName()); } if (getInsertQuery().getPartitioningPolicy() == null) { getInsertQuery().setPartitioningPolicy(getPartitioningPolicy()); } if (hasCustomInsertQuery()) { return; } SQLInsertStatement statement = new SQLInsertStatement(); statement.setTable(getReferenceTable()); AbstractRecord directRow = new DatabaseRecord(); for (Enumeration referenceEnum = getReferenceKeyFields().elements(); referenceEnum.hasMoreElements();) { directRow.put((DatabaseField)referenceEnum.nextElement(), null); } directRow.put(getDirectField(), null); if(listOrderField != null) { directRow.put(listOrderField, null); } statement.setModifyRow(directRow); getInsertQuery().setSQLStatement(statement); getInsertQuery().setModifyRow(directRow); } /** * There is no reference descriptor */ @Override protected void initializeReferenceDescriptor(AbstractSession session) { // no-op. } /** * The reference keys on the reference table are initialized */ protected void initializeReferenceKeys(AbstractSession session) throws DescriptorException { if (getReferenceKeyFields().size() == 0) { throw DescriptorException.noReferenceKeyIsSpecified(this); } for (Enumeration referenceEnum = getReferenceKeyFields().elements(); referenceEnum.hasMoreElements();) { DatabaseField field = (DatabaseField)referenceEnum.nextElement(); // Update the field first if the mapping is on a table per tenant entity. 
if (getDescriptor().hasTablePerMultitenantPolicy()) { field.setTable(((TablePerMultitenantPolicy) getDescriptor().getMultitenantPolicy()).getTable(field.getTable())); } if (field.hasTableName() && (!(field.getTableName().equals(getReferenceTable().getName())))) { throw DescriptorException.referenceKeyFieldNotProperlySpecified(field, this); } field.setTable(getReferenceTable()); } } /** * Set the table qualifier on the reference table if required */ protected void initializeReferenceTable(AbstractSession session) throws DescriptorException { Platform platform = session.getDatasourcePlatform(); if (getReferenceTable() == null) { throw DescriptorException.referenceTableNotSpecified(this); } if (platform.getTableQualifier().length() > 0) { if (getReferenceTable().getTableQualifier().length() == 0) { getReferenceTable().setTableQualifier(platform.getTableQualifier()); } } } protected void initializeSelectionCriteria(AbstractSession session) { Expression criteria = null; ExpressionBuilder base = new ExpressionBuilder(); TableExpression table = (TableExpression)base.getTable(getReferenceTable()); Iterator referenceKeys = getReferenceKeyFields().iterator(); Iterator sourceKeys = getSourceKeyFields().iterator(); while (referenceKeys.hasNext()) { DatabaseField referenceKey = referenceKeys.next(); DatabaseField sourceKey = sourceKeys.next(); Expression expression = table.getField(referenceKey).equal(base.getParameter(sourceKey)); if (criteria == null) { criteria = expression; } else { criteria = expression.and(criteria); } } setSelectionCriteria(criteria); } /** * The selection query is initialized */ @Override protected void initializeSelectionQuery(AbstractSession session) { // Nothing required. 
} protected void initializeSelectionStatement(AbstractSession session) { SQLSelectStatement statement = new SQLSelectStatement(); statement.addTable(getReferenceTable()); statement.addField(getDirectField().clone()); statement.setWhereClause(getSelectionCriteria()); statement.setOrderByExpressions(orderByExpressions); getSelectionQuery().setSQLStatement(statement); getContainerPolicy().addAdditionalFieldsToQuery(selectionQuery, getAdditionalFieldsBaseExpression(getSelectionQuery())); statement.normalize(session, null); } /** * The source keys are initialized */ protected void initializeSourceKeys(AbstractSession session) { for (int index = 0; index < getSourceKeyFields().size(); index++) { DatabaseField field = getDescriptor().buildField(getSourceKeyFields().get(index)); if (usesIndirection()) { field.setKeepInRow(true); } getSourceKeyFields().set(index, field); } } /** * INTERNAL: * If a user does not specify the source key then the primary keys of the source table are used. */ protected void initializeSourceKeysWithDefaults(AbstractSession session) { List primaryKeyFields = getDescriptor().getPrimaryKeyFields(); for (int index = 0; index < primaryKeyFields.size(); index++) { DatabaseField field = primaryKeyFields.get(index); if (usesIndirection()) { field.setKeepInRow(true); } getSourceKeyFields().addElement(field); } } /** * INTERNAL: * Return the base expression to use for adding fields to the query. * This is the reference table. */ @Override protected Expression getAdditionalFieldsBaseExpression(ReadQuery query) { if (query.isReadAllQuery()) { return ((ReadAllQuery)query).getExpressionBuilder(); } else { return ((DataReadQuery)query).getSQLStatement().getBuilder().getTable(getReferenceTable()); } } /** * INTERNAL: */ @Override public boolean isDirectCollectionMapping() { return true; } /** * INTERNAL: */ @Override public boolean isElementCollectionMapping() { return true; } /** * INTERNAL: * Return if this mapping support joining. 
*/ @Override public boolean isJoiningSupported() { return true; } /** * INTERNAL: * Checks if source and target keys are mentioned by the user or not. */ protected boolean isKeyForSourceSpecified() { return !getSourceKeyFields().isEmpty(); } /** * INTERNAL: * Return whether this mapping should be traversed when we are locking * @return */ public boolean isLockableMapping(){ return false; } /** * INTERNAL: */ @Override public boolean isOwned(){ return true; } /** * INTERNAL: * Iterate on the attribute value. * The value holder has already been processed. * PERF: Avoid iteration if not required. */ @Override public void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue) { if (iterator.shouldIterateOnPrimitives()) { super.iterateOnRealAttributeValue(iterator, realAttributeValue); } } /** * INTERNAL: * Iterate on the specified element. */ @Override public void iterateOnElement(DescriptorIterator iterator, Object element) { iterator.iteratePrimitiveForMapping(element, this); } /** * INTERNAL: * Merge changes from the source to the target object. 
* Because this is a collection mapping, values are added to or removed from the * collection based on the changeset */ @Override public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { if (this.descriptor.getCachePolicy().isProtectedIsolation()&& !this.isCacheable && !targetSession.isProtectedSession()){ setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null))); return; } ContainerPolicy containerPolicy = getContainerPolicy(); Object valueOfTarget = null; AbstractSession session = mergeManager.getSession(); DirectCollectionChangeRecord directCollectionChangeRecord = (DirectCollectionChangeRecord) changeRecord; //Check to see if the target has an instantiated collection if ((isAttributeValueInstantiated(target)) && (!changeRecord.getOwner().isNew())) { if (isSynchronizeOnMerge) { valueOfTarget = getRealCollectionAttributeValueFromObject(target, session); } else { valueOfTarget = containerPolicy.cloneFor(getRealCollectionAttributeValueFromObject(target, session)); } } else { //if not create an instance of the collection valueOfTarget = containerPolicy.containerInstance(directCollectionChangeRecord.getAddObjectMap().size()); } if (!isAttributeValueInstantiated(target)) { if (mergeManager.shouldMergeChangesIntoDistributedCache()) { return; } for (Object iterator = containerPolicy.iteratorFor(getRealCollectionAttributeValueFromObject(source, session)); containerPolicy.hasNext(iterator);) { containerPolicy.addInto(containerPolicy.next(iterator, session), valueOfTarget, session); } } else { Object synchronizationTarget = valueOfTarget; // For indirect containers the delegate must be synchronized on, // not the wrapper as the clone synchs on the delegate, see bug#5685287. 
if (valueOfTarget instanceof IndirectCollection) { synchronizationTarget = ((IndirectCollection)valueOfTarget).getDelegateObject(); if (((DirectCollectionChangeRecord)changeRecord).orderHasBeenRepaired() && (valueOfTarget instanceof IndirectList)) { ((IndirectList)valueOfTarget).setIsListOrderBrokenInDb(false); } } if (isSynchronizeOnMerge) { synchronized(synchronizationTarget) { mergeAddRemoveChanges(valueOfTarget, synchronizationTarget, directCollectionChangeRecord, mergeManager, session); } } else { mergeAddRemoveChanges(valueOfTarget, synchronizationTarget, directCollectionChangeRecord, mergeManager, session); } } setRealAttributeValueInObject(target, valueOfTarget); } /** * INTERNAL: * Merge changes by adding and removing from the change record to the * target object, and its delegate object if instance of IndirectCollection. * It will also reorder the collection if required. */ protected void mergeAddRemoveChanges(Object valueOfTarget, Object delegateTarget, DirectCollectionChangeRecord changeRecord, MergeManager mergeManager, AbstractSession session) { //collect the changes into a vector HashMap addObjects = changeRecord.getAddObjectMap(); HashMap removeObjects = changeRecord.getRemoveObjectMap(); // Next iterate over the changes and add them to the container for (Iterator iterator = addObjects.keySet().iterator(); iterator.hasNext();) { Object object = iterator.next(); int objectCount = ((Integer)addObjects.get(object)).intValue(); for (int i = 0; i < objectCount; ++i) { if (mergeManager.shouldMergeChangesIntoDistributedCache()) { //bug#4458089 and 4544532- check if collection contains new item before adding during merge into distributed cache if (!containerPolicy.contains(object, valueOfTarget, session)) { containerPolicy.addInto(object, valueOfTarget, session); } } else { containerPolicy.addInto(object, valueOfTarget, session); } } } for (Iterator iterator = removeObjects.keySet().iterator(); iterator.hasNext();) { Object object = iterator.next(); int 
objectCount = ((Integer)removeObjects.get(object)).intValue(); for (int i = 0; i < objectCount; ++i) { containerPolicy.removeFrom(object, valueOfTarget, session); } } if(this.listOrderField != null && changeRecord.getChangedIndexes() == null) { this.compareListsForChange((List)changeRecord.getOriginalCollection(), (List)changeRecord.getLatestCollection(), changeRecord, session); } if(changeRecord.getChangedIndexes() != null) { int oldSize = changeRecord.getOldSize(); int newSize = changeRecord.getNewSize(); int delta = newSize - oldSize; Object newTail[] = null; if(delta > 0) { newTail = new Object[delta]; } Iterator> it = changeRecord.getChangedIndexes().entrySet().iterator(); while(it.hasNext()) { Map.Entry entry = it.next(); Object value = entry.getKey(); Set[] indexes = entry.getValue(); Set indexesAfter = indexes[1]; if(indexesAfter != null) { Iterator itIndexesAfter = indexesAfter.iterator(); while(itIndexesAfter.hasNext()) { int index = itIndexesAfter.next(); if(index < oldSize) { ((List)delegateTarget).set(index, value); } else { newTail[index - oldSize] = value; } } } } if(delta > 0) { for(int i=0; i < delta; i++) { ((List)delegateTarget).add(newTail[i]); } } else if(delta < 0) { for(int i=oldSize -1 ; i >= newSize; i--) { ((List)delegateTarget).remove(i); } } } } /** * INTERNAL: * Merge changes from the source to the target object. 
*/ @Override public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) { if (this.descriptor.getCachePolicy().isProtectedIsolation() && !this.isCacheable && !targetSession.isProtectedSession()){ setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null))); return; } if (isTargetUnInitialized) { // This will happen if the target object was removed from the cache before the commit was attempted if (mergeManager.shouldMergeWorkingCopyIntoOriginal() && (!isAttributeValueInstantiated(source))) { setAttributeValueInObject(target, getIndirectionPolicy().getOriginalIndirectionObject(getAttributeValueFromObject(source), targetSession)); return; } } if (!shouldMergeCascadeReference(mergeManager)) { // This is only going to happen on mergeClone, and we should not attempt to merge the reference return; } if (mergeManager.shouldRefreshRemoteObject() && usesIndirection()) { mergeRemoteValueHolder(target, source, mergeManager); return; } if (mergeManager.isForRefresh()) { if (!isAttributeValueInstantiated(target)) { // This will occur when the clone's value has not been instantiated yet and we do not need // the refresh that attribute return; } } else if (!isAttributeValueInstantiatedOrChanged(source)) { // I am merging from a clone into an original. 
No need to do merge if the attribute was never // modified return; } ContainerPolicy containerPolicy = getContainerPolicy(); Object valueOfSource = getRealCollectionAttributeValueFromObject(source, mergeManager.getSession()); // trigger instantiation of target attribute Object valueOfTarget = getRealCollectionAttributeValueFromObject(target, mergeManager.getSession()); Object newContainer = containerPolicy.containerInstance(containerPolicy.sizeFor(valueOfSource)); boolean fireCollectionChangeEvents = false; boolean firePropertyChangeEvent = false; ObjectChangeListener listener = null; if ((this.descriptor.getObjectChangePolicy().isObjectChangeTrackingPolicy()) && (target instanceof ChangeTracker) && (((ChangeTracker)target)._persistence_getPropertyChangeListener() != null)) { listener = (ObjectChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener(); if(this.listOrderField == null) { fireCollectionChangeEvents = true; //Collections may not be indirect list or may have been replaced with user collection. Object iterator = containerPolicy.iteratorFor(valueOfTarget); Integer zero = Integer.valueOf(0);//remove does not seem to use index. 
while (containerPolicy.hasNext(iterator)) { // Bug304251: let the containerPolicy build the proper remove CollectionChangeEvent CollectionChangeEvent event = containerPolicy.createChangeEvent(target, getAttributeName(), valueOfTarget, containerPolicy.next(iterator, mergeManager.getSession()), CollectionChangeEvent.REMOVE, zero, false); listener.internalPropertyChange(event); } if (newContainer instanceof ChangeTracker) { ((ChangeTracker)newContainer)._persistence_setPropertyChangeListener(((ChangeTracker)target)._persistence_getPropertyChangeListener()); } if (valueOfTarget instanceof ChangeTracker) { ((ChangeTracker)valueOfTarget)._persistence_setPropertyChangeListener(null);//remove listener } } else { firePropertyChangeEvent = true; } } Object originalValueOfTarget = valueOfTarget; valueOfTarget = newContainer; int i = 0; for (Object sourceValuesIterator = containerPolicy.iteratorFor(valueOfSource); containerPolicy.hasNext(sourceValuesIterator);) { Object sourceValue = containerPolicy.next(sourceValuesIterator, mergeManager.getSession()); if (fireCollectionChangeEvents) { // Bug304251: let the containerPolicy build the proper remove CollectionChangeEvent CollectionChangeEvent event = containerPolicy.createChangeEvent(target, getAttributeName(), valueOfTarget, sourceValue, CollectionChangeEvent.ADD, Integer.valueOf(i), false); listener.internalPropertyChange(event); } containerPolicy.addInto(sourceValue, valueOfTarget, mergeManager.getSession()); i++; } if (fireCollectionChangeEvents && (this.descriptor.getObjectChangePolicy().isAttributeChangeTrackingPolicy())) { // check that there were changes, if not then remove the record. 
ObjectChangeSet changeSet = ((AttributeChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener()).getObjectChangeSet(); if (changeSet != null) { DirectCollectionChangeRecord changeRecord = (DirectCollectionChangeRecord)changeSet.getChangesForAttributeNamed(getAttributeName()); if (changeRecord != null) { if (!changeRecord.isDeferred()) { if (!changeRecord.hasChanges()) { changeSet.removeChange(getAttributeName()); } } else { // Must reset the latest collection. changeRecord.setLatestCollection(valueOfTarget); } } } } if(firePropertyChangeEvent) { ((ObjectChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener()).internalPropertyChange(new PropertyChangeEvent(target, getAttributeName(), originalValueOfTarget, valueOfTarget)); if (valueOfTarget instanceof ChangeTracker) { ((ChangeTracker)valueOfTarget)._persistence_setPropertyChangeListener(((ChangeTracker)target)._persistence_getPropertyChangeListener()); } if (originalValueOfTarget instanceof ChangeTracker) { ((ChangeTracker)originalValueOfTarget)._persistence_setPropertyChangeListener(null);//remove listener } } // Must re-set variable to allow for set method to re-morph changes if the collection is not being stored directly. setRealAttributeValueInObject(target, valueOfTarget); } /** * INTERNAL: * Perform the commit event. * This is used in the uow to delay data modifications. */ @Override public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException { // Hey I might actually want to use an inner class here... ok array for now. 
if (event[0] == Delete) { session.executeQuery((DataModifyQuery)event[1], (AbstractRecord)event[2]); if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) { getHistoryPolicy().mappingLogicalDelete((DataModifyQuery)event[1], (AbstractRecord)event[2], session); } } else if (event[0] == Insert) { session.executeQuery((DataModifyQuery)event[1], (AbstractRecord)event[2]); if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) { getHistoryPolicy().mappingLogicalInsert((DataModifyQuery)event[1], (AbstractRecord)event[2], session); } } else if (event[0] == DeleteAll) { preDelete((DeleteObjectQuery)event[1]); } else if (event[0] == DeleteAtIndex) { session.executeQuery((DataModifyQuery)event[1], (AbstractRecord)event[2]); } else if (event[0] == UpdateAtIndex) { DataModifyQuery updateAtIndexQuery = (DataModifyQuery)((DataModifyQuery)event[1]).clone(); updateAtIndexQuery.setModifyRow((AbstractRecord)event[3]); updateAtIndexQuery.setHasModifyRow(true); updateAtIndexQuery.setIsExecutionClone(true); session.executeQuery(updateAtIndexQuery, (AbstractRecord)event[2]); } else { throw DescriptorException.invalidDataModificationEventCode(event[0], this); } } /** * INTERNAL: * Overridden by mappings that require additional processing of the change record after the record has been calculated. */ @Override public void postCalculateChanges(org.eclipse.persistence.sessions.changesets.ChangeRecord changeRecord, UnitOfWorkImpl uow) { //no -op for this collection type } /** * INTERNAL: * Insert the private owned object. 
*/ @Override public void postInsert(WriteObjectQuery query) throws DatabaseException { Object objects; AbstractRecord databaseRow = new DatabaseRecord(); if (isReadOnly()) { return; } objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); ContainerPolicy containerPolicy = getContainerPolicy(); if (containerPolicy.isEmpty(objects)) { return; } prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession()); // Extract primary key and value from the source. for (int index = 0; index < getReferenceKeyFields().size(); index++) { DatabaseField referenceKey = getReferenceKeyFields().get(index); DatabaseField sourceKey = getSourceKeyFields().get(index); Object sourceKeyValue = query.getTranslationRow().get(sourceKey); databaseRow.put(referenceKey, sourceKeyValue); } int orderIndex = 0; // Extract target field and its value. Construct insert statement and execute it for (Object iter = containerPolicy.iteratorFor(objects); containerPolicy.hasNext(iter);) { Object wrappedObject = containerPolicy.nextEntry(iter, query.getSession()); Object object = containerPolicy.unwrapIteratorResult(wrappedObject); if (getValueConverter() != null) { object = getValueConverter().convertObjectValueToDataValue(object, query.getSession()); } databaseRow.put(getDirectField(), object); // In the uow data queries are cached until the end of the commit. if (query.shouldCascadeOnlyDependentParts()) { // Hey I might actually want to use an inner class here... ok array for now. 
Object[] event = new Object[3]; event[0] = Insert; event[1] = getInsertQuery(); event[2] = databaseRow.clone(); if(listOrderField != null) { ((AbstractRecord)event[2]).put(listOrderField, orderIndex++); } query.getSession().getCommitManager().addDataModificationEvent(this, event); } else { query.getSession().executeQuery(getInsertQuery(), databaseRow); if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) { getHistoryPolicy().mappingLogicalInsert(getInsertQuery(), databaseRow, query.getSession()); } } containerPolicy.propogatePostInsert(query, wrappedObject); } } /** * INTERNAL: * Convert the attribute value to a field value. * Process any converter if defined. */ public Object getFieldValue(Object attributeValue, AbstractSession session) { if (this.valueConverter != null) { return this.valueConverter.convertObjectValueToDataValue(attributeValue, session); } return attributeValue; } /** * INTERNAL: * Return source key fields for translation by an AggregateObjectMapping */ @Override public Vector getFieldsForTranslationInAggregate() { return getSourceKeyFields(); } /** * INTERNAL: * Update private owned part. */ @Override public void postUpdate(WriteObjectQuery writeQuery) throws DatabaseException { if (isReadOnly()) { return; } if (writeQuery.getObjectChangeSet() != null) { if(this.listOrderField != null) { postUpdateWithChangeSetListOrder(writeQuery); } else { postUpdateWithChangeSet(writeQuery); } return; } // If objects are not instantiated that means they are not changed. 
if (!isAttributeValueInstantiatedOrChanged(writeQuery.getObject())) { return; } if (writeQuery.getSession().isUnitOfWork()) { if (compareObjects(writeQuery.getObject(), writeQuery.getBackupClone(), writeQuery.getSession())) { return;// Nothing has changed, no work required } } DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); deleteQuery.setObject(writeQuery.getObject()); deleteQuery.setSession(writeQuery.getSession()); deleteQuery.setTranslationRow(writeQuery.getTranslationRow()); if (writeQuery.shouldCascadeOnlyDependentParts()) { // Hey I might actually want to use an inner class here... ok array for now. Object[] event = new Object[3]; event[0] = DeleteAll; event[1] = deleteQuery; writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event); } else { preDelete(deleteQuery); } postInsert(writeQuery); } /** * INTERNAL: * Update private owned part. */ protected void postUpdateWithChangeSet(WriteObjectQuery writeQuery) throws DatabaseException { ObjectChangeSet changeSet = writeQuery.getObjectChangeSet(); DirectCollectionChangeRecord changeRecord = (DirectCollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName()); if (changeRecord == null) { return; } for (int index = 0; index < getReferenceKeyFields().size(); index++) { DatabaseField referenceKey = getReferenceKeyFields().get(index); DatabaseField sourceKey = getSourceKeyFields().get(index); Object sourceKeyValue = writeQuery.getTranslationRow().get(sourceKey); writeQuery.getTranslationRow().put(referenceKey, sourceKeyValue); } for (Iterator iterator = changeRecord.getRemoveObjectMap().keySet().iterator(); iterator.hasNext();) { Object object = iterator.next(); AbstractRecord thisRow = writeQuery.getTranslationRow().clone(); Object value = getFieldValue(object, writeQuery.getSession()); // Hey I might actually want to use an inner class here... ok array for now. 
Object[] event = new Object[3]; event[0] = Delete; if (value == null) { // Bug 306075 - for deleting a null value from a collection event[1] = getDeleteNullQuery(); } else { thisRow.add(getDirectField(), value); event[1] = getDeleteQuery(); } event[2] = thisRow; writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event); Integer count = (Integer)changeRecord.getCommitAddMap().get(object); if (count != null) { for (int counter = count.intValue(); counter > 0; --counter) { thisRow = writeQuery.getTranslationRow().clone(); thisRow.add(getDirectField(), value); // Hey I might actually want to use an inner class here... ok array for now. event = new Object[3]; event[0] = Insert; event[1] = getInsertQuery(); event[2] = thisRow; writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event); } } } for (Iterator iterator = changeRecord.getAddObjectMap().keySet().iterator(); iterator.hasNext();) { Object object = iterator.next(); Integer count = (Integer)changeRecord.getAddObjectMap().get(object); for (int counter = count.intValue(); counter > 0; --counter) { AbstractRecord thisRow = writeQuery.getTranslationRow().clone(); Object value = object; if (getValueConverter() != null) { value = getValueConverter().convertObjectValueToDataValue(value, writeQuery.getSession()); } thisRow.add(getDirectField(), value); // Hey I might actually want to use an inner class here... ok array for now. Object[] event = new Object[3]; event[0] = Insert; event[1] = getInsertQuery(); event[2] = thisRow; writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event); } } } /** * INTERNAL: * Update private owned part. 
*/
protected void postUpdateWithChangeSetListOrder(WriteObjectQuery writeQuery) throws DatabaseException {
    ObjectChangeSet changeSet = writeQuery.getObjectChangeSet();
    DirectCollectionChangeRecord changeRecord = (DirectCollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName());
    if (changeRecord == null) {
        // Nothing changed for this attribute - no rows to touch.
        return;
    }
    // Copy each source key value into the translation row under the corresponding reference key
    // so the generated insert/delete/update rows join back to the owner.
    for (int index = 0; index < getReferenceKeyFields().size(); index++) {
        DatabaseField referenceKey = getReferenceKeyFields().get(index);
        DatabaseField sourceKey = getSourceKeyFields().get(index);
        Object sourceKeyValue = writeQuery.getTranslationRow().get(sourceKey);
        writeQuery.getTranslationRow().put(referenceKey, sourceKeyValue);
    }
    boolean shouldRepairOrder = false;
    if ((List)changeRecord.getLatestCollection() instanceof IndirectList) {
        shouldRepairOrder = ((IndirectList)changeRecord.getLatestCollection()).isListOrderBrokenInDb();
    }
    if (shouldRepairOrder) {
        // The order values stored in the db are broken: delete all members of the collection,
        // then re-insert the whole list with freshly computed order values.
        DeleteObjectQuery deleteQuery = new DeleteObjectQuery();
        deleteQuery.setObject(writeQuery.getObject());
        deleteQuery.setSession(writeQuery.getSession());
        deleteQuery.setTranslationRow(writeQuery.getTranslationRow());
        // Hey I might actually want to use an inner class here... ok array for now.
        Object[] eventDeleteAll = new Object[2];
        eventDeleteAll[0] = DeleteAll;
        eventDeleteAll[1] = deleteQuery;
        writeQuery.getSession().getCommitManager().addDataModificationEvent(this, eventDeleteAll);
        // re-insert them back
        for (int i = 0; i < ((List)changeRecord.getLatestCollection()).size(); i++) {
            Object value = ((List)changeRecord.getLatestCollection()).get(i);
            value = getFieldValue(value, writeQuery.getSession());
            AbstractRecord insertRow = writeQuery.getTranslationRow().clone();
            insertRow.add(getDirectField(), value);
            insertRow.add(this.listOrderField, i);
            // Hey I might actually want to use an inner class here... ok array for now.
            Object[] event = new Object[3];
            event[0] = Insert;
            event[1] = getInsertQuery();
            event[2] = insertRow;
            writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event);
        }
        ((IndirectList)changeRecord.getLatestCollection()).setIsListOrderBrokenInDb(false);
        changeRecord.setOrderHasBeenRepaired(true);
        return;
    }
    if (changeRecord.getChangedIndexes() == null) {
        compareListsForChange((List)changeRecord.getOriginalCollection(), (List)changeRecord.getLatestCollection(), changeRecord, writeQuery.getSession());
    }
    // NOTE(review): the generic type parameters of this declaration were garbled in the source
    // ("Iterator>"); restored to <Map.Entry<Object, Set[]>>, which is required for the
    // "Set[] indexes = entry.getValue();" assignment below to compile. Each map value is a
    // two-element array: {indexes before the change, indexes after the change}.
    Iterator<Map.Entry<Object, Set[]>> it = changeRecord.getChangedIndexes().entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<Object, Set[]> entry = it.next();
        Object value = entry.getKey();
        if (getValueConverter() != null) {
            value = getValueConverter().convertObjectValueToDataValue(value, writeQuery.getSession());
        }
        Set[] indexes = entry.getValue();
        Set indexesBefore = indexes[0];
        Set indexesAfter = indexes[1];
        if (indexesAfter == null) {
            // All copies of the target object deleted - don't need to verify order field contents.
            AbstractRecord deleteRow = writeQuery.getTranslationRow().clone();
            // Hey I might actually want to use an inner class here... ok array for now.
            Object[] event = new Object[3];
            event[0] = Delete;
            if (value == null) {
                // Bug 306075 - for deleting a null value from a collection
                event[1] = getDeleteNullQuery();
            } else {
                deleteRow.add(getDirectField(), value);
                event[1] = getDeleteQuery();
            }
            event[2] = deleteRow;
            writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event);
        } else if (indexesAfter.isEmpty()) {
            // Some copies of the target objects should be deleted, some left in the db
            Iterator itBefore = indexesBefore.iterator();
            while (itBefore.hasNext()) {
                AbstractRecord deleteAtIndexRow = writeQuery.getTranslationRow().clone();
                deleteAtIndexRow.add(getDirectField(), value);
                deleteAtIndexRow.add(this.listOrderField, itBefore.next());
                // Hey I might actually want to use an inner class here... ok array for now.
                Object[] event = new Object[3];
                event[0] = DeleteAtIndex;
                event[1] = deleteAtIndexQuery;
                event[2] = deleteAtIndexRow;
                writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event);
            }
        } else {
            if (indexesBefore == null || indexesBefore.isEmpty()) {
                // insert the object for each index in indexesAfter
                Iterator itAfter = indexesAfter.iterator();
                while (itAfter.hasNext()) {
                    AbstractRecord insertRow = writeQuery.getTranslationRow().clone();
                    insertRow.add(getDirectField(), value);
                    insertRow.add(this.listOrderField, itAfter.next());
                    // Hey I might actually want to use an inner class here... ok array for now.
                    Object[] event = new Object[3];
                    event[0] = Insert;
                    event[1] = getInsertQuery();
                    event[2] = insertRow;
                    writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event);
                }
            } else {
                // Pair up surviving before/after indexes: matched pairs become order-value
                // updates, unmatched before-indexes become deletes, unmatched after-indexes
                // become inserts.
                Iterator itBefore = indexesBefore.iterator();
                Iterator itAfter = indexesAfter.iterator();
                while (itBefore.hasNext() || itAfter.hasNext()) {
                    if (itBefore.hasNext()) {
                        if (itAfter.hasNext()) {
                            // update the object changing index from indexBefore to indexAfter
                            AbstractRecord updateAtIndexRow = writeQuery.getTranslationRow().clone();
                            updateAtIndexRow.add(getDirectField(), value);
                            updateAtIndexRow.add(this.listOrderField, itBefore.next());
                            // Hey I might actually want to use an inner class here... ok array for now.
                            Object[] event = new Object[4];
                            event[0] = UpdateAtIndex;
                            event[1] = updateAtIndexQuery;
                            event[2] = updateAtIndexRow;
                            DatabaseRecord modifyRow = new DatabaseRecord(1);
                            modifyRow.add(this.listOrderField, itAfter.next());
                            event[3] = modifyRow;
                            writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event);
                        } else {
                            // delete the object at indexBefore
                            AbstractRecord deleteAtIndexRow = writeQuery.getTranslationRow().clone();
                            deleteAtIndexRow.add(getDirectField(), value);
                            deleteAtIndexRow.add(this.listOrderField, itBefore.next());
                            // Hey I might actually want to use an inner class here... ok array for now.
                            Object[] event = new Object[3];
                            event[0] = DeleteAtIndex;
                            event[1] = deleteAtIndexQuery;
                            event[2] = deleteAtIndexRow;
                            writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event);
                        }
                    } else {
                        // itAfter.hasNext() must be true
                        // insert the object at indexAfter
                        AbstractRecord insertRow = writeQuery.getTranslationRow().clone();
                        insertRow.add(getDirectField(), value);
                        insertRow.add(this.listOrderField, itAfter.next());
                        // Hey I might actually want to use an inner class here... ok array for now.
                        Object[] event = new Object[3];
                        event[0] = Insert;
                        event[1] = getInsertQuery();
                        event[2] = insertRow;
                        writeQuery.getSession().getCommitManager().addDataModificationEvent(this, event);
                    }
                }
            }
        }
    }
}

/**
 * INTERNAL:
 * Delete private owned part. Which is a collection of objects from the reference table.
 */
@Override
public void preDelete(DeleteObjectQuery query) throws DatabaseException {
    if (this.isReadOnly) {
        return;
    }
    if (!this.isCascadeOnDeleteSetOnDatabase) {
        // Not cascaded on the database - issue the delete-all ourselves.
        prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession());
        query.getSession().executeQuery(this.deleteAllQuery, query.getTranslationRow());
    }
    if ((this.historyPolicy != null) && this.historyPolicy.shouldHandleWrites()) {
        if (this.isCascadeOnDeleteSetOnDatabase) {
            // Translation row was not prepared above; history still needs it.
            prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession());
        }
        this.historyPolicy.mappingLogicalDelete(this.deleteAllQuery, query.getTranslationRow(), query.getSession());
    }
}

/**
 * INTERNAL:
 * The translation row may require additional fields than the primary key if the mapping in not on the primary key.
 */
@Override
protected void prepareTranslationRow(AbstractRecord translationRow, Object object, ClassDescriptor descriptor, AbstractSession session) {
    // Make sure that each source key field is in the translation row.
    for (Enumeration sourceFieldsEnum = getSourceKeyFields().elements(); sourceFieldsEnum.hasMoreElements();) {
        DatabaseField sourceKey = (DatabaseField)sourceFieldsEnum.nextElement();
        if (!translationRow.containsKey(sourceKey)) {
            Object value = descriptor.getObjectBuilder().extractValueFromObjectForField(object, sourceKey, session);
            translationRow.put(sourceKey, value);
        }
    }
}

/**
 * INTERNAL:
 * Used by DirectMapMapping to rebuild select query.
 */
protected void initOrRebuildSelectQuery() {
    this.selectionQuery.setSQLStatement(new SQLSelectStatement());
}

/**
 * INTERNAL:
 * Overridden by mappings that require additional processing of the change record after the record has been calculated.
 */
@Override
public void recordPrivateOwnedRemovals(Object object, UnitOfWorkImpl uow) {
    // Intentional no-op for direct collections.
}

/**
 * INTERNAL:
 * Once descriptors are serialized to the remote session. All its mappings and reference descriptors are traversed. Usually
 * mappings are initialized and serialized reference descriptors are replaced with local descriptors if they already exist on the
 * remote session.
 */
@Override
public void remoteInitialization(DistributedSession session) {
    // Remote mappings is initialized here again because while serializing only the uninitialized data is passed
    // as the initialized data is not serializable.
    if (!isRemotelyInitialized()) {
        getAttributeAccessor().initializeAttributes(getDescriptor().getJavaClass());
        remotelyInitialized();
    }
}

/**
 * INTERNAL:
 * replace the value holders in the specified reference object(s)
 */
@Override
public Map replaceValueHoldersIn(Object object, RemoteSessionController controller) {
    // do nothing, since direct collections do not hold onto other domain objects
    return null;
}

/**
 * PUBLIC:
 * Some databases do not properly support all of the base data types. For these databases,
 * the base data type must be explicitly specified in the mapping to tell EclipseLink to force
 * the instance variable value to that data type.
 * @since Java Persistence API 2.0
 * Migrated from AbstractDirectMapping
 */
public void setAttributeClassification(Class attributeClassification) {
    this.attributeClassification = attributeClassification;
}

/**
 * INTERNAL:
 * Set the name of the class for MW usage.
 * @since Java Persistence API 2.0
 * Migrated from AbstractDirectMapping
 */
public void setAttributeClassificationName(String attributeClassificationName) {
    this.attributeClassificationName = attributeClassificationName;
}

/**
 * INTERNAL:
 * Set the delete query used when a change set removes individual members of the collection.
 */
protected void setDeleteQuery(ModifyQuery query) {
    this.changeSetDeleteQuery = query;
}

/**
 * PUBLIC:
 * Set the receiver's delete SQL string. This allows the user to override the SQL
 * generated by TopLink, with their own SQL or procedure call. The arguments are
 * translated from the fields of the source row, through replacing the field names
 * marked by '#' with the values for those fields.
 * This SQL is responsible for doing the deletion required by the mapping,
 * such as deletion from join table for M-M.
 * Example, 'delete from RESPONS where EMP_ID = #EMP_ID and DESCRIP = #DESCRIP'.
 */
public void setDeleteSQLString(String sqlString) {
    DataModifyQuery query = new DataModifyQuery();
    query.setSQLString(sqlString);
    setCustomDeleteQuery(query);
}

/**
 * ADVANCED:
 * Configure the mapping to use a container policy.
 * The policy manages the access to the collection.
 */
@Override
public void setContainerPolicy(ContainerPolicy containerPolicy) {
    this.containerPolicy = containerPolicy;
    // Keep the selection query's container policy in sync so results are built
    // into the same collection type.
    if (this.selectionQuery.isDataReadQuery()){
        ((DataReadQuery) getSelectionQuery()).setContainerPolicy(containerPolicy);
    }
}

/**
 * PUBLIC:
 * The default delete query for this mapping can be overridden by specifying the new query.
 * This query is responsible for doing the deletion required by the mapping,
 * such as deletion from join table for M-M. The query should delete a specific row from the
 * DirectCollectionTable based on the DirectField.
 */
public void setCustomDeleteQuery(ModifyQuery query) {
    setDeleteQuery(query);
    setHasCustomDeleteQuery(true);
}

/**
 * PUBLIC:
 * The default delete by index query for this mapping can be overridden by specifying the new query.
 * This query is used (only in case listOrderField != null) to delete an object with a particular orderFieldValue.
 */
public void setCustomDeleteAtIndexQuery(ModifyQuery query) {
    this.deleteAtIndexQuery = query;
    hasCustomDeleteAtIndexQuery = true;
}

/**
 * PUBLIC:
 * The default insert query for mapping can be overridden by specifying the new query.
 * This query inserts the row into the direct table.
 */
public void setCustomInsertQuery(DataModifyQuery query) {
    setInsertQuery(query);
    setHasCustomInsertQuery(true);
}

/**
 * PUBLIC:
 * The default update by index query for this mapping can be overridden by specifying the new query.
 * This query is used (only in case listOrderField != null) to update the orderFieldValue of an object with a particular orderFieldValue.
 */
public void setCustomUpdateAtIndexQuery(ModifyQuery query) {
    this.updateAtIndexQuery = query;
    hasCustomUpdateAtIndexQuery = true;
}

/**
 * PUBLIC:
 * Set the direct field in the reference table.
 * This is the field that the primitive data value is stored in.
 */
public void setDirectField(DatabaseField field) {
    directField = field;
}

/**
 * ADVANCED:
 * Set the class type of the field value.
 * This can be used if field value differs from the object value,
 * has specific typing requirements such as usage of java.sql.Blob or NChar.
 * This must be called after the field name has been set.
 */
public void setDirectFieldClassification(Class fieldType) {
    getDirectField().setType(fieldType);
}

/**
 * ADVANCED:
 * Set the class type of the field value, by class name.
 * This can be used if field value differs from the object value,
 * has specific typing requirements such as usage of java.sql.Blob or NChar.
 * This must be called after the field name has been set.
 */
public void setDirectFieldClassificationName(String className) {
    getDirectField().setTypeName(className);
}

/**
 * PUBLIC:
 * Set the direct field name in the reference table.
 * This is the field that the primitive data value is stored in.
 */
public void setDirectFieldName(String fieldName) {
    setDirectField(new DatabaseField(fieldName));
}

// Record whether the user supplied a custom delete query.
protected void setHasCustomDeleteQuery(boolean bool) {
    hasCustomDeleteQuery = bool;
}

// Record whether the user supplied a custom insert query.
protected void setHasCustomInsertQuery(boolean bool) {
    hasCustomInsertQuery = bool;
}

// Set the query used to insert rows into the direct table.
protected void setInsertQuery(DataModifyQuery insertQuery) {
    this.insertQuery = insertQuery;
}

/**
 * PUBLIC:
 * Set the receiver's insert SQL string. This allows the user to override the SQL
 * generated by TopLink, with their own SQL or procedure call. The arguments are
 * translated from the fields of the source row, through replacing the field names
 * marked by '#' with the values for those fields.
 * This is used to insert an entry into the direct table.
 *
 Example, 'insert into RESPONS (EMP_ID, RES_DESC) values (#EMP_ID, #RES_DESC)'.
 */
public void setInsertSQLString(String sqlString) {
    DataModifyQuery query = new DataModifyQuery();
    query.setSQLString(sqlString);
    setCustomInsertQuery(query);
}

/**
 * INTERNAL:
 * This cannot be used with direct collection mappings.
 */
@Override
public void setReferenceClass(Class referenceClass) {
    // Intentional no-op: a direct collection targets simple values, not a reference class.
    return;
}

// Intentional no-op for the same reason as setReferenceClass.
@Override
public void setReferenceClassName(String referenceClassName) {
    return;
}

/**
 * PUBLIC:
 * Set the name of the reference key field.
 * This is the foreign key field in the direct table referencing the primary key of the source object.
 * This method is used if the reference key consists of only a single field.
 */
public void setReferenceKeyFieldName(String fieldName) {
    getReferenceKeyFields().addElement(new DatabaseField(fieldName));
}

/**
 * INTERNAL:
 * Set the reference key field names associated with the mapping.
 * These must be in-order with the sourceKeyFieldNames.
 */
public void setReferenceKeyFieldNames(Vector fieldNames) {
    Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
    for (Enumeration fieldNamesEnum = fieldNames.elements(); fieldNamesEnum.hasMoreElements();) {
        fields.addElement(new DatabaseField((String)fieldNamesEnum.nextElement()));
    }
    setReferenceKeyFields(fields);
}

/**
 * INTERNAL:
 * Set the reference fields.
 */
public void setReferenceKeyFields(Vector aVector) {
    this.referenceKeyFields = aVector;
}

/**
 * INTERNAL:
 * Set the reference table.
 */
public void setReferenceTable(DatabaseTable table) {
    referenceTable = table;
}

/**
 * PUBLIC:
 * Sets the selection criteria to be used as a where clause to read
 * reference objects. This criteria is automatically generated by the
 * TopLink if not explicitly specified by the user.
 */
@Override
public void setSelectionCriteria(Expression anExpression) {
    if (getSelectionQuery().isReadAllQuery()){
        ((ReadAllQuery)getSelectionQuery()).setSelectionCriteria(anExpression);
    } else {
        getSelectionQuery().getSQLStatement().setWhereClause(anExpression);
    }
}

/**
 * PUBLIC:
 * Set the reference table name.
 */
public void setReferenceTableName(String tableName) {
    if (tableName == null) {
        setReferenceTable(null);
    } else {
        setReferenceTable(new DatabaseTable(tableName));
    }
}

/**
 * INTERNAL:
 * Set the container policy on the selection query for this mapping.
 */
@Override
protected void setSelectionQueryContainerPolicy(ContainerPolicy containerPolicy) {
    ((DataReadQuery) getSelectionQuery()).setContainerPolicy(containerPolicy);
}

/**
 * PUBLIC:
 * Support history on the reference table.
 */
public void setHistoryPolicy(HistoryPolicy policy) {
    this.historyPolicy = policy;
    if (policy != null) {
        // Back-reference so the policy can locate this mapping's tables/queries.
        policy.setMapping(this);
    }
}

/**
 * PUBLIC:
 * Set the name of the session to execute the mapping's queries under.
 * This can be used by the session broker to override the default session
 * to be used for the target class.
 */
@Override
public void setSessionName(String name) {
    super.setSessionName(name);
    getInsertQuery().setSessionName(name);
}

/**
 * INTERNAL:
 * Set the source key field names associated with the mapping.
 * These must be in-order with the referenceKeyFieldNames.
 */
public void setSourceKeyFieldNames(Vector fieldNames) {
    Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
    for (Enumeration fieldNamesEnum = fieldNames.elements(); fieldNamesEnum.hasMoreElements();) {
        fields.addElement(new DatabaseField((String)fieldNamesEnum.nextElement()));
    }
    setSourceKeyFields(fields);
}

/**
 * INTERNAL:
 * Set the source fields.
 */
public void setSourceKeyFields(Vector sourceKeyFields) {
    this.sourceKeyFields = sourceKeyFields;
}

/**
 * INTERNAL:
 * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings
 * the FK field values will be used to re-issue the query when cloning the shared cache entity
 */
@Override
public void collectQueryParameters(Set cacheFields){
    for (DatabaseField field : getSourceKeyFields()) {
        cacheFields.add(field);
    }
}

/**
 * INTERNAL:
 * Used by AttributeLevelChangeTracking to update a changeRecord with calculated changes
 * as opposed to detected changes. If an attribute can not be change tracked it's
 * changes can be detected through this process.
 */
@Override
public void calculateDeferredChanges(ChangeRecord changeRecord, AbstractSession session) {
    DirectCollectionChangeRecord collectionRecord = (DirectCollectionChangeRecord)changeRecord;
    // TODO: Handle events that fired after collection was replaced.
    compareCollectionsForChange(collectionRecord.getOriginalCollection(), collectionRecord.getLatestCollection(), collectionRecord, session);
}

/**
 * ADVANCED:
 * This method is used to have an object add to a collection once the changeSet is applied
 * The referenceKey parameter should only be used for direct Maps.
 */
@Override
public void simpleAddToCollectionChangeRecord(Object referenceKey, Object objectToAdd, ObjectChangeSet changeSet, AbstractSession session) {
    // referenceKey is unused here; direct collections have no map key.
    simpleAddToCollectionChangeRecord(objectToAdd, null, false, changeSet, session, true);
}

// index: position of the added element, or null when the position is unknown
//   (a null index is treated as "appended at the end" below).
// isChangeApplied: true when the in-memory collection already contains objectToAdd.
// NOTE(review): isSet appears unused in this overload's body - presumably kept for
// signature symmetry with simpleRemoveFromCollectionChangeRecord; verify against callers.
protected void simpleAddToCollectionChangeRecord(Object objectToAdd, Integer index, boolean isSet, ObjectChangeSet changeSet, AbstractSession session, boolean isChangeApplied) {
    DirectCollectionChangeRecord collectionChangeRecord = (DirectCollectionChangeRecord)changeSet.getChangesForAttributeNamed(getAttributeName());
    if (collectionChangeRecord == null) {
        // First change recorded for this attribute - create the record and capture
        // the original (pre-change) state of the collection.
        collectionChangeRecord = new DirectCollectionChangeRecord(changeSet);
        collectionChangeRecord.setAttribute(getAttributeName());
        collectionChangeRecord.setMapping(this);
        changeSet.addChange(collectionChangeRecord);
        Object collection = getRealAttributeValueFromObject(changeSet.getUnitOfWorkClone(), session);
        if(this.listOrderField != null) {
            List originalListCopy = new ArrayList((List)collection);
            // collection already contains the added object - to bring it to the original state it should be removed
            if(index == null) {
                originalListCopy.remove(originalListCopy.size() - 1);
            } else {
                // intValue() is essential - otherwise invokes remove(Object)
                originalListCopy.remove(index.intValue());
            }
            collectionChangeRecord.setOriginalCollection(originalListCopy);
            collectionChangeRecord.setLatestCollection(collection);
        } else {
            collectionChangeRecord.storeDatabaseCounts(collection, getContainerPolicy(), session);
            collectionChangeRecord.setFirstToAddAlreadyInCollection(isChangeApplied);
        }
    }
    // Without a list order field the change is recorded as an occurrence-count delta.
    if(!collectionChangeRecord.isDeferred() && this.listOrderField == null) {
        collectionChangeRecord.addAdditionChange(objectToAdd, Integer.valueOf(1));
    }
}

/**
 * ADVANCED:
 * This method is used to have an object removed from a collection once the changeSet is applied
 * The referenceKey parameter should only be used for direct Maps.
 */
@Override
public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object objectToRemove, ObjectChangeSet changeSet, AbstractSession session) {
    // referenceKey is unused here; direct collections have no map key.
    simpleRemoveFromCollectionChangeRecord(objectToRemove, null, false, changeSet, session, true);
}

// index: position the element was removed from (non-null when a list order field is used).
// isSet: true when the removal was the first half of a set (replace) operation.
// isChangeApplied: true when the in-memory collection no longer contains objectToRemove.
protected void simpleRemoveFromCollectionChangeRecord(Object objectToRemove, Integer index, boolean isSet, ObjectChangeSet changeSet, AbstractSession session, boolean isChangeApplied) {
    DirectCollectionChangeRecord collectionChangeRecord = (DirectCollectionChangeRecord)changeSet.getChangesForAttributeNamed(getAttributeName());
    if (collectionChangeRecord == null) {
        // First change recorded for this attribute - create the record and capture
        // the original (pre-change) state of the collection.
        collectionChangeRecord = new DirectCollectionChangeRecord(changeSet);
        collectionChangeRecord.setAttribute(getAttributeName());
        collectionChangeRecord.setMapping(this);
        changeSet.addChange(collectionChangeRecord);
        Object collection = getRealAttributeValueFromObject(changeSet.getUnitOfWorkClone(), session);
        if(this.listOrderField != null) {
            List originalListCopy = new ArrayList((List)collection);
            // collection already doesn't contain the removed object - to bring it to the original state it should be added or set back.
            // index is not null because IndirectList does remove through indexOf.
            if(isSet) {
                originalListCopy.set(index, objectToRemove);
            } else {
                originalListCopy.add(index, objectToRemove);
            }
            collectionChangeRecord.setOriginalCollection(originalListCopy);
            collectionChangeRecord.setLatestCollection(collection);
        } else {
            collectionChangeRecord.storeDatabaseCounts(collection, getContainerPolicy(), session);
            collectionChangeRecord.setFirstToRemoveAlreadyOutCollection(isChangeApplied);
            if(isSet) {
                collectionChangeRecord.setFirstToAddAlreadyInCollection(isChangeApplied);
            }
        }
    }
    // Without a list order field the change is recorded as an occurrence-count delta.
    if(!collectionChangeRecord.isDeferred() && this.listOrderField == null) {
        collectionChangeRecord.addRemoveChange(objectToRemove, Integer.valueOf(1));
    }
}

/**
 * INTERNAL:
 * Either create a new change record or update with the new value. This is used
 * by attribute change tracking.
 * Specifically in a collection mapping this will be called when the customer
 * sets a new collection. In this case we will need to mark the change record
 * with the new and the old versions of the collection.
 * And mark the ObjectChangeSet with the attribute name then when the changes are calculated
 * force a compare on the collections to determine changes.
 */
@Override
public void updateChangeRecord(Object clone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) {
    DirectCollectionChangeRecord collectionChangeRecord = (DirectCollectionChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName());
    if (collectionChangeRecord == null) {
        collectionChangeRecord = new DirectCollectionChangeRecord(objectChangeSet);
        collectionChangeRecord.setAttribute(getAttributeName());
        collectionChangeRecord.setMapping(this);
        objectChangeSet.addChange(collectionChangeRecord);
    }
    // The whole collection was replaced: defer change computation so old and new
    // collections are compared when changes are calculated.
    collectionChangeRecord.setIsDeferred(true);
    objectChangeSet.deferredDetectionRequiredOn(getAttributeName());
    if (collectionChangeRecord.getOriginalCollection() == null) {
        // Only snapshot the original once - the first replacement wins.
        collectionChangeRecord.recreateOriginalCollection(oldValue, uow);
    }
    collectionChangeRecord.setLatestCollection(newValue);
}

/**
 * INTERNAL:
 * Add or removes a new value and its change set to the collection change record based on the event passed in. This is used by
 * attribute change tracking.
 */
@Override
public void updateCollectionChangeRecord(CollectionChangeEvent event, ObjectChangeSet changeSet, UnitOfWorkImpl uow) {
    if (event != null ) {
        //Letting the mapping create and add the ChangeSet to the ChangeRecord rather
        // than the policy, since the policy doesn't know how to handle DirectCollectionChangeRecord.
        // if ordering is to be supported in the future, check how the method in CollectionMapping is implemented
        Object value = event.getNewValue();
        if (event.getChangeType() == CollectionChangeEvent.ADD) {
            simpleAddToCollectionChangeRecord(value, event.getIndex(), event.isSet(), changeSet, uow, event.isChangeApplied());
        } else if (event.getChangeType() == CollectionChangeEvent.REMOVE) {
            simpleRemoveFromCollectionChangeRecord(value, event.getIndex(), event.isSet(), changeSet, uow, event.isChangeApplied());
        } else {
            throw ValidationException.wrongCollectionChangeEventType(event.getChangeType());
        }
    }
}

/**
 * PUBLIC:
 * It is illegal to use a Map as the container of a DirectCollectionMapping. Only
 * Collection containers are supported for DirectCollectionMappings.
 * @see org.eclipse.persistence.mappings.DirectMapMapping
 */
public void useMapClass(Class concreteClass, String methodName) {
    throw ValidationException.illegalUseOfMapInDirectCollection(this, concreteClass, methodName);
}

/**
 * INTERNAL:
 * Return the value of the reference attribute or a value holder.
 * Check whether the mapping's attribute should be optimized through batch and joining.
 * Overridden to support flashback/historical queries.
 */
@Override
public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey cacheKey, AbstractSession session, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException {
    if (this.descriptor.getCachePolicy().isProtectedIsolation()) {
        if (this.isCacheable && isTargetProtected && cacheKey != null) {
            //cachekey will be null when isolating to uow
            //used cached collection
            Object result = null;
            Object cached = cacheKey.getObject();
            if (cached != null) {
                if (wasCacheUsed != null){
                    // Report back to the caller that the cached value was used.
                    wasCacheUsed[0] = Boolean.TRUE;
                }
                return this.getAttributeValueFromObject(cached);
            }
            return result;
        } else if (!this.isCacheable && !isTargetProtected && cacheKey != null) {
            // Non-cacheable attribute on an unprotected target: hand back an
            // empty value holder rather than reading through the cache.
            return this.indirectionPolicy.buildIndirectObject(new ValueHolder(null));
        }
    }
    if (row.hasSopObject()) {
        // Serialized object policy: the full object is embedded in the row.
        return getAttributeValueFromObject(row.getSopObject());
    }
    if (sourceQuery.isObjectLevelReadQuery() && (((ObjectLevelReadQuery)sourceQuery).isAttributeBatchRead(this.descriptor, getAttributeName()) || (sourceQuery.isReadAllQuery() && shouldUseBatchReading()))) {
        return batchedValueFromRow(row, (ObjectLevelReadQuery)sourceQuery, cacheKey);
    }
    if (shouldUseValueFromRowWithJoin(joinManager, sourceQuery)) {
        return valueFromRowInternalWithJoin(row, joinManager, sourceQuery, cacheKey, session, isTargetProtected);
    }
    // if the query uses batch reading, return a special value holder
    // or retrieve the object from the query property.
    ReadQuery targetQuery = getSelectionQuery();
    boolean extendingPessimisticLockScope = isExtendingPessimisticLockScope(sourceQuery) && extendPessimisticLockScope == ExtendPessimisticLockScope.TARGET_QUERY;
    // History, as-of (flashback), or extended lock scope all require a customized
    // selection statement instead of the shared prepared one.
    if ((getHistoryPolicy() != null) || (sourceQuery.getSession().getAsOfClause() != null) || ((sourceQuery.isObjectLevelReadQuery() && ((ObjectLevelReadQuery)sourceQuery).hasAsOfClause()) && (sourceQuery.shouldCascadeAllParts() || (sourceQuery.shouldCascadePrivateParts() && isPrivateOwned()) || (sourceQuery.shouldCascadeByMapping() && this.cascadeRefresh))) || extendingPessimisticLockScope) {
        // Clone so the shared selection query is not mutated.
        targetQuery = (ReadQuery)targetQuery.clone();
        // Code copied roughly from initializeSelectionStatement.
        SQLSelectStatement statement = new SQLSelectStatement();
        statement.addTable(getReferenceTable());
        statement.addField(getDirectField().clone());
        if (isDirectMapMapping()) {
            statement.addField(((DirectMapMapping)this).getDirectKeyField().clone());
        }
        statement.setWhereClause((Expression)getSelectionCriteria().clone());
        if (sourceQuery.isObjectLevelReadQuery()) {
            statement.getBuilder().asOf(((ObjectLevelReadQuery)sourceQuery).getAsOfClause());
        }
        if (extendingPessimisticLockScope) {
            statement.setLockingClause(new ForUpdateClause(sourceQuery.getLockMode()));
        }
        if (getHistoryPolicy() != null) {
            ExpressionBuilder builder = statement.getBuilder();
            if (sourceQuery.getSession().getAsOfClause() != null) {
                builder.asOf(sourceQuery.getSession().getAsOfClause());
            } else if (builder.getAsOfClause() == null) {
                builder.asOf(AsOfClause.NO_CLAUSE);
            }
            Expression temporalExpression = getHistoryPolicy().additionalHistoryExpression(builder, builder);
            statement.setWhereClause(statement.getWhereClause().and(temporalExpression));
            if (builder.hasAsOfClause()) {
                // Read from the history table instead of the current table.
                statement.getTables().set(0, getHistoryPolicy().getHistoricalTables().get(0));
            }
        }
        statement.normalize(sourceQuery.getSession(), null);
        targetQuery.setSQLStatement(statement);
    }
    return getIndirectionPolicy().valueFromQuery(targetQuery, row, sourceQuery.getSession());
}

/**
 * INTERNAL:
 * Checks if object is deleted from the database or not.
 */
@Override
public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException {
    // Row is built for translation
    if (isReadOnly()) {
        return true;
    }
    AbstractRecord row = getDescriptor().getObjectBuilder().buildRowForTranslation(object, session);
    Object value = session.executeQuery(getSelectionQuery(), row);
    // Deleted when the selection query returns an empty collection.
    return getContainerPolicy().isEmpty(value);
}

/**
 * INTERNAL:
 * DirectCollectionMapping contents should not be considered for addition to the UnitOfWork
 * private owned objects list for removal.
 */
@Override
public boolean isCandidateForPrivateOwnedRemoval() {
    return false;
}

/**
 * INTERNAL
 * Return true if this mapping supports cascaded version optimistic locking.
 */
@Override
public boolean isCascadedLockingSupported() {
    return true;
}
}
eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/CollectionMapping.java0000664000000000000000000037570112216173130024510 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 *     02/26/2009-2.0 Guy Pelletier
 *       - 264001: dot notation for mapped-by and order-by
 *     08/23/2010-2.2 Michael O'Brien
 *       - 323043: application.xml module ordering may cause weaving not to occur causing an NPE.
 *                       warn if expected "_persistence_*_vh" method not found
 *                       instead of throwing NPE during deploy validation.
* 07/19/2011-2.2.1 Guy Pelletier * - 338812: ManyToMany mapping in aggregate object violate integrity constraint on deletion * 04/09/2012-2.4 Guy Pelletier * - 374377: OrderBy with ElementCollection doesn't work * 14/05/2012-2.4 Guy Pelletier * - 376603: Provide for table per tenant support for multitenant applications ******************************************************************************/ package org.eclipse.persistence.mappings; import java.beans.PropertyChangeListener; import java.util.*; import org.eclipse.persistence.annotations.OrderCorrectionType; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.changetracking.*; import org.eclipse.persistence.internal.descriptors.changetracking.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.expressions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.indirection.*; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.sessions.remote.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.sessions.CopyGroup; import org.eclipse.persistence.sessions.DatabaseRecord; import org.eclipse.persistence.sessions.Project; /** *

Purpose: Abstract class for relationship mappings which store collection of objects * * @author Sati * @since TOPLink/Java 1.0 */ public abstract class CollectionMapping extends ForeignReferenceMapping implements ContainerMapping { /** Used for delete all in m-m, dc and delete all optimization in 1-m. */ protected transient ModifyQuery deleteAllQuery; protected transient boolean hasCustomDeleteAllQuery; protected ContainerPolicy containerPolicy; protected boolean hasOrderBy; /** Field holds the order of elements in the list in the db, requires collection of type List, may be not null only in case isListOrderFieldSupported==true */ protected DatabaseField listOrderField; /** Indicates whether the mapping supports listOrderField, if it doesn't attempt to set listOrderField throws exception. */ protected boolean isListOrderFieldSupported; /** Query used when order of list members is changed. Used only if listOrderField!=null */ protected transient DataModifyQuery changeOrderTargetQuery; /** * Specifies what should be done if the list of values read from listOrserField is invalid * (there should be no nulls, no duplicates, no "holes"). **/ protected OrderCorrectionType orderCorrectionType; /** Store if the mapping can batch delete reference objects. */ protected Boolean mustDeleteReferenceObjectsOneByOne = null; /** Flag to indicate if collection needs to be synchronized instead of cloning during merge. */ protected static boolean isSynchronizeOnMerge = Boolean.getBoolean("eclipselink.synchronizeCollectionOnMerge"); /** * PUBLIC: * Default constructor. 
 */
public CollectionMapping() {
    // Default state: plain read-all selection, default container policy,
    // no ordering, no custom delete-all query, no list order field support.
    this.selectionQuery = new ReadAllQuery();
    this.hasCustomDeleteAllQuery = false;
    this.containerPolicy = ContainerPolicy.buildDefaultPolicy();
    this.hasOrderBy = false;
    this.isListOrderFieldSupported = false;
}

/**
 * PUBLIC:
 * Provide order support for queryKeyName in ascending order
 */
public void addAscendingOrdering(String queryKeyName) {
    this.hasOrderBy = true;
    if (queryKeyName == null) {
        return;
    }
    ((ReadAllQuery)getSelectionQuery()).addAscendingOrdering(queryKeyName);
}

/**
 * PUBLIC:
 * Provide order support for queryKeyName in descending order.
 */
public void addDescendingOrdering(String queryKeyName) {
    this.hasOrderBy = true;
    if (queryKeyName == null) {
        return;
    }
    ((ReadAllQuery)getSelectionQuery()).addDescendingOrdering(queryKeyName);
}

/**
 * PUBLIC:
 * Provide order support for queryKeyName in descending or ascending order.
 * Called from the jpa metadata processing of an order by value.
 */
public void addOrderBy(String queryKeyName, boolean isDescending) {
    if (isDescending) {
        addDescendingOrdering(queryKeyName);
    } else {
        addAscendingOrdering(queryKeyName);
    }
}

/**
 * PUBLIC:
 * Provide order support for queryKeyName in ascending or descending order.
 * Called from the jpa metadata processing of an order by value. The
 * aggregate name may be chained through the dot notation.
*/ public void addAggregateOrderBy(String aggregateName, String queryKeyName, boolean isDescending) { this.hasOrderBy = true; ReadAllQuery readAllQuery = (ReadAllQuery) getSelectionQuery(); ExpressionBuilder builder = readAllQuery.getExpressionBuilder(); Expression expression = null; if (aggregateName.contains(".")) { StringTokenizer st = new StringTokenizer(aggregateName, "."); while (st.hasMoreTokens()) { if (expression == null) { expression = builder.get(st.nextToken()); } else { expression = expression.get(st.nextToken()); } } expression = expression.get(queryKeyName); } else { // Single level aggregate if (aggregateName.equals("")) { expression = builder.get(queryKeyName); } else { expression = builder.get(aggregateName).get(queryKeyName); } } if (isDescending) { readAllQuery.addOrdering(expression.descending()); } else { readAllQuery.addOrdering(expression.ascending()); } } /** * INTERNAL: * Used during building the backup shallow copy to copy * the vector without re-registering the target objects. */ @Override public Object buildBackupCloneForPartObject(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { // Check for null if (attributeValue == null) { return this.containerPolicy.containerInstance(1); } else { return this.containerPolicy.cloneFor(attributeValue); } } /** * INTERNAL: * Require for cloning, the part must be cloned. * Ignore the objects, use the attribute value. 
*/
    @Override
    public Object buildCloneForPartObject(Object attributeValue, Object original, CacheKey cacheKey, Object clone, AbstractSession cloningSession, Integer refreshCascade, boolean isExisting, boolean isFromSharedCache) {
        ContainerPolicy containerPolicy = this.containerPolicy;
        if (attributeValue == null) {
            // No source collection: hand back an empty container, wired up for
            // change tracking if the clone is being tracked in this unit of work.
            Object container = containerPolicy.containerInstance(1);
            if (cloningSession.isUnitOfWork() && (this.getDescriptor().getObjectChangePolicy().isObjectChangeTrackingPolicy()) && ((clone != null) && (((ChangeTracker)clone)._persistence_getPropertyChangeListener() != null)) && (container instanceof CollectionChangeTracker)) {
                ((CollectionChangeTracker)container).setTrackedAttributeName(this.getAttributeName());
                ((CollectionChangeTracker)container)._persistence_setPropertyChangeListener(((ChangeTracker)clone)._persistence_getPropertyChangeListener());
            }
            return container;
        }
        Object clonedAttributeValue = containerPolicy.containerInstance(containerPolicy.sizeFor(attributeValue));
        Object temporaryCollection = null;
        if (isSynchronizeOnMerge) {
            // I need to synchronize here to prevent the collection from changing while I am cloning it.
            // This will occur when I am merging into the cache and I am instantiating a UOW valueHolder at the same time
            // I can not synchronize around the clone, as this will cause deadlocks, so I will need to copy the collection then create the clones
            // I will use a temporary collection to help speed up the process
            synchronized (attributeValue) {
                temporaryCollection = containerPolicy.cloneFor(attributeValue);
            }
        } else {
            // Clone is used while merging into cache. It can operate directly without synchronize/clone.
            temporaryCollection = attributeValue;
        }
        // Clone each element into the new container via the container policy.
        for (Object valuesIterator = containerPolicy.iteratorFor(temporaryCollection); containerPolicy.hasNext(valuesIterator);) {
            containerPolicy.addNextValueFromIteratorInto(valuesIterator, clone, cacheKey, clonedAttributeValue, this, refreshCascade, cloningSession, isExisting, isFromSharedCache);
        }
        // Attach change tracking to the cloned container when the owning clone is tracked.
        if (cloningSession.isUnitOfWork() && (this.getDescriptor().getObjectChangePolicy().isObjectChangeTrackingPolicy()) && ((clone != null) && (((ChangeTracker)clone)._persistence_getPropertyChangeListener() != null)) && (clonedAttributeValue instanceof CollectionChangeTracker)) {
            ((CollectionChangeTracker)clonedAttributeValue).setTrackedAttributeName(this.getAttributeName());
            ((CollectionChangeTracker)clonedAttributeValue)._persistence_setPropertyChangeListener(((ChangeTracker)clone)._persistence_getPropertyChangeListener());
        }
        // Propagate the broken-list-order marker so the clone repairs order on write too.
        if (temporaryCollection instanceof IndirectList) {
            ((IndirectList)clonedAttributeValue).setIsListOrderBrokenInDb(((IndirectList)temporaryCollection).isListOrderBrokenInDb());
        }
        return clonedAttributeValue;
    }

    /**
     * INTERNAL:
     * Performs a first level clone of the attribute.  This generally means on the container will be cloned.
     */
    public Object buildContainerClone(Object attributeValue, AbstractSession cloningSession) {
        // Shallow copy: new container, same element references.
        Object newContainer = this.containerPolicy.containerInstance(this.containerPolicy.sizeFor(attributeValue));
        Object valuesIterator = this.containerPolicy.iteratorFor(attributeValue);
        while (this.containerPolicy.hasNext(valuesIterator)) {
            Object originalValue = this.containerPolicy.next(valuesIterator, cloningSession);
            this.containerPolicy.addInto(originalValue, newContainer, cloningSession);
        }
        return newContainer;
    }

    /**
     * INTERNAL:
     * Copy of the attribute of the object.
     * This is NOT used for unit of work but for templatizing an object.
*/
    @Override
    public void buildCopy(Object copy, Object original, CopyGroup group) {
        // Snapshot the original collection, then rebuild a new container of the same size.
        Object attributeValue = getRealCollectionAttributeValueFromObject(original, group.getSession());
        Object valuesIterator = this.containerPolicy.iteratorFor(attributeValue);
        attributeValue = this.containerPolicy.containerInstance(this.containerPolicy.sizeFor(attributeValue));
        while (this.containerPolicy.hasNext(valuesIterator)) {
            Object originalValue = this.containerPolicy.next(valuesIterator, group.getSession());
            Object copyValue = originalValue;
            Object originalKey = this.containerPolicy.keyFromIterator(valuesIterator);
            Object copyKey = originalKey;
            if (group.shouldCascadeAllParts() || (group.shouldCascadePrivateParts() && isPrivateOwned()) || group.shouldCascadeTree()) {
                // Cascading: deep-copy both the element and (for map containers) its key.
                copyValue = copyElement(originalValue, group);
                copyKey = group.getSession().copyInternal(originalKey, group);
            } else {
                // Check for backrefs to copies.
                copyValue = group.getCopies().get(originalValue);
                if (copyValue == null) {
                    copyValue = originalValue;
                }
            }
            this.containerPolicy.addInto(copyKey, copyValue, attributeValue, group.getSession());
        }
        // if value holder is used, then the value holder shared with original substituted for a new ValueHolder.
        getIndirectionPolicy().reset(copy);
        setRealAttributeValueInObject(copy, attributeValue);
    }

    /**
     * INTERNAL:
     * Copies member's value
     */
    protected Object copyElement(Object original, CopyGroup group) {
        return group.getSession().copyInternal(original, group);
    }

    /**
     * INTERNAL:
     * Clone the element, if necessary.
*/ public Object buildElementUnitOfWorkClone(Object element, Object parent, Integer refreshCascade, UnitOfWorkImpl unitOfWork, boolean isExisting, boolean isFromSharedCache) { // optimize registration to knowledge of existence if (refreshCascade != null ){ switch(refreshCascade){ case ObjectBuildingQuery.CascadeAllParts : return unitOfWork.mergeClone(element, MergeManager.CASCADE_ALL_PARTS, true); case ObjectBuildingQuery.CascadePrivateParts : return unitOfWork.mergeClone(element, MergeManager.CASCADE_PRIVATE_PARTS, true); case ObjectBuildingQuery.CascadeByMapping : return unitOfWork.mergeClone(element, MergeManager.CASCADE_BY_MAPPING, true); default: return unitOfWork.mergeClone(element, MergeManager.NO_CASCADE, true); } }else{ if (isExisting) { return unitOfWork.registerExistingObject(element, isFromSharedCache); } else {// not known whether existing or not return unitOfWork.registerObject(element); } } } /** * INTERNAL: * Clone the element, if necessary. */ public Object buildElementClone(Object element, Object parent, CacheKey parentCacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isExisting, boolean isFromSharedCache) { if (cloningSession.isUnitOfWork()){ return buildElementUnitOfWorkClone(element, parent, refreshCascade, (UnitOfWorkImpl)cloningSession, isExisting, isFromSharedCache); } if (referenceDescriptor.getCachePolicy().isProtectedIsolation()){ return cloningSession.createProtectedInstanceFromCachedData(element, refreshCascade, referenceDescriptor); } return element; } /** * INTERNAL: * This method will access the target relationship and create a list of information to rebuild the relationship. * This method is used in combination with the CachedValueHolder to store references to PK's to be loaded * from a cache instead of a query. 
*/
    @Override
    public Object[] buildReferencesPKList(Object entity, Object attribute, AbstractSession session) {
        Object container = indirectionPolicy.getRealAttributeValueFromObject(entity, attribute);
        return containerPolicy.buildReferencesPKList(container, session);
    }

    /**
     * INTERNAL:
     * Cascade perform delete through mappings that require the cascade
     */
    @Override
    public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        if (!this.cascadeRemove) {
            return;
        }
        Object cloneAttribute = getAttributeValueFromObject(object);
        if (cloneAttribute == null) {
            return;
        }
        // PERF: If private owned and not instantiated, then avoid instantiating, delete-all will handle deletion.
        if ((this.isPrivateOwned) && usesIndirection() && (!mustDeleteReferenceObjectsOneByOne())) {
            if (!this.indirectionPolicy.objectIsEasilyInstantiated(cloneAttribute)) {
                return;
            }
        }
        ContainerPolicy cp = this.containerPolicy;
        Object cloneObjectCollection = null;
        cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow);
        Object cloneIter = cp.iteratorFor(cloneObjectCollection);
        while (cp.hasNext(cloneIter)) {
            Object wrappedObject = cp.nextEntry(cloneIter, uow);
            Object nextObject = cp.unwrapIteratorResult(wrappedObject);
            // visitedObjects guards against re-processing (and cycles).
            if ((nextObject != null) && (!visitedObjects.containsKey(nextObject))) {
                visitedObjects.put(nextObject, nextObject);
                if (this.isCascadeOnDeleteSetOnDatabase && isOneToManyMapping()) {
                    // Database ON DELETE CASCADE will remove the row; just record the object.
                    uow.getCascadeDeleteObjects().add(nextObject);
                }
                uow.performRemove(nextObject, visitedObjects);
                cp.cascadePerformRemoveIfRequired(wrappedObject, uow, visitedObjects);
            }
        }
    }

    /**
     * INTERNAL:
     * Cascade perform removal of orphaned private owned objects from the UnitOfWorkChangeSet
     */
    @Override
    public void cascadePerformRemovePrivateOwnedObjectFromChangeSetIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        // if the object is not instantiated, do not instantiate or cascade
        Object attributeValue = getAttributeValueFromObject(object);
        if (attributeValue != null && this.indirectionPolicy.objectIsInstantiated(attributeValue)) {
            Object realObjectCollection = getRealCollectionAttributeValueFromObject(object, uow);
            ContainerPolicy cp = this.containerPolicy;
            for (Object cloneIter = cp.iteratorFor(realObjectCollection); cp.hasNext(cloneIter);) {
                Object nextObject = cp.next(cloneIter, uow);
                if (nextObject != null && !visitedObjects.containsKey(nextObject)) {
                    visitedObjects.put(nextObject, nextObject);
                    // remove the object from the UnitOfWork ChangeSet
                    uow.performRemovePrivateOwnedObjectFromChangeSet(nextObject, visitedObjects);
                }
            }
        }
    }

    /**
     * INTERNAL:
     * Cascade discover and persist new objects during commit.
     */
    @Override
    public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) {
        Object cloneAttribute = getAttributeValueFromObject(object);
        if ((cloneAttribute == null) || (!this.indirectionPolicy.objectIsInstantiated(cloneAttribute))) {
            // Not instantiated: still cascade over elements that were added via
            // deferred changes on an indirect collection, without triggering instantiation.
            if (cloneAttribute instanceof IndirectCollection) {
                IndirectCollection collection = (IndirectCollection)cloneAttribute;
                if (collection.hasDeferredChanges()) {
                    Iterator iterator = collection.getAddedElements().iterator();
                    boolean cascade = isCascadePersist();
                    while (iterator.hasNext()) {
                        Object nextObject = iterator.next();
                        // remove private owned object from uow list
                        if (isCandidateForPrivateOwnedRemoval()) {
                            uow.removePrivateOwnedObject(this, nextObject);
                        }
                        uow.discoverAndPersistUnregisteredNewObjects(nextObject, cascade, newObjects, unregisteredExistingObjects, visitedObjects, cascadeErrors);
                    }
                }
            }
            return;
        }
        ContainerPolicy containerPolicy = this.containerPolicy;
        Object cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow);
        Object iterator = containerPolicy.iteratorFor(cloneObjectCollection);
        boolean cascade = isCascadePersist();
        while (containerPolicy.hasNext(iterator)) {
            Object wrappedObject = containerPolicy.nextEntry(iterator, uow);
            Object nextObject = containerPolicy.unwrapIteratorResult(wrappedObject);
            // remove private owned object from uow list
            if (isCandidateForPrivateOwnedRemoval()) {
                uow.removePrivateOwnedObject(this, nextObject);
            }
            uow.discoverAndPersistUnregisteredNewObjects(nextObject, cascade, newObjects, unregisteredExistingObjects, visitedObjects, cascadeErrors);
            containerPolicy.cascadeDiscoverAndPersistUnregisteredNewObjects(wrappedObject, newObjects, unregisteredExistingObjects, visitedObjects, uow, cascadeErrors);
        }
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade
     */
    @Override
    public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        if (!this.cascadePersist) {
            return;
        }
        Object attributeValue = getAttributeValueFromObject(object);
        if ((attributeValue == null)
                // Also check if the source is new, then must always cascade.
                || (!this.indirectionPolicy.objectIsInstantiated(attributeValue) && !uow.isCloneNewObject(object))) {
            return;
        }
        ContainerPolicy cp = this.containerPolicy;
        Object cloneObjectCollection = null;
        cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow);
        Object cloneIter = cp.iteratorFor(cloneObjectCollection);
        // add private owned objects to uow list if mapping is a candidate and uow should discover new objects and the source object is new.
        boolean shouldAddPrivateOwnedObject = isCandidateForPrivateOwnedRemoval() && uow.shouldDiscoverNewObjects() && uow.isCloneNewObject(object);
        while (cp.hasNext(cloneIter)) {
            Object wrappedObject = cp.nextEntry(cloneIter, uow);
            Object nextObject = cp.unwrapIteratorResult(wrappedObject);
            if (shouldAddPrivateOwnedObject && nextObject != null) {
                uow.addPrivateOwnedObject(this, nextObject);
            }
            uow.registerNewObjectForPersist(nextObject, visitedObjects);
            cp.cascadeRegisterNewIfRequired(wrappedObject, uow, visitedObjects);
        }
    }

    /**
     * INTERNAL:
     * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings
     * the FK field values will be used to re-issue the query when cloning the shared cache entity
     */
    @Override
    public void collectQueryParameters(Set record) {
        //no-op for mappings that do not support PROTECTED cache isolation
    }

    /**
     * INTERNAL:
     * Used by AttributeLevelChangeTracking to update a changeRecord with calculated changes
     * as apposed to detected changes.  If an attribute can not be change tracked it's
     * changes can be detected through this process.
     */
    @Override
    public void calculateDeferredChanges(ChangeRecord changeRecord, AbstractSession session) {
        CollectionChangeRecord collectionRecord = (CollectionChangeRecord)changeRecord;
        // TODO: Handle events that fired after collection was replaced.
        compareCollectionsForChange(collectionRecord.getOriginalCollection(), collectionRecord.getLatestCollection(), collectionRecord, session);
        if (this.isPrivateOwned()) {
            postCalculateChanges(collectionRecord, (UnitOfWorkImpl)session);
        }
    }

    /**
     * INTERNAL:
     * The mapping clones itself to create deep copy.
*/
    @Override
    public Object clone() {
        CollectionMapping clone = (CollectionMapping)super.clone();
        // Deep-copy the mutable query/field state so the clone is independent.
        clone.setDeleteAllQuery((ModifyQuery)getDeleteAllQuery().clone());
        if (this.listOrderField != null) {
            clone.listOrderField = this.listOrderField.clone();
        }
        if (this.changeOrderTargetQuery != null) {
            clone.changeOrderTargetQuery = (DataModifyQuery)this.changeOrderTargetQuery.clone();
        }
        // Clone the container policy.
        clone.containerPolicy = (ContainerPolicy) this.containerPolicy.clone();
        return clone;
    }

    /**
     * INTERNAL:
     * This method is used to calculate the differences between two collections.
     */
    public void compareCollectionsForChange(Object oldCollection, Object newCollection, ChangeRecord changeRecord, AbstractSession session) {
        this.containerPolicy.compareCollectionsForChange(oldCollection, newCollection, (CollectionChangeRecord) changeRecord, session, getReferenceDescriptor());
    }

    /**
     * INTERNAL:
     * This method is used to create a change record from comparing two collections.
     * Returns null when nothing changed (or when an uninstantiated attribute makes
     * comparison unnecessary).
     */
    @Override
    public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) {
        Object cloneAttribute = null;
        Object backUpAttribute = null;
        Object backUpObjectCollection = null;
        cloneAttribute = getAttributeValueFromObject(clone);
        // An uninstantiated indirect attribute cannot have changed.
        if ((cloneAttribute != null) && (!this.indirectionPolicy.objectIsInstantiated(cloneAttribute))) {
            return null;
        }
        if (!owner.isNew()) {// if the changeSet is for a new object then we must record all of the attributes
            backUpAttribute = getAttributeValueFromObject(backUp);
            if ((cloneAttribute == null) && (backUpAttribute == null)) {
                return null;
            }
            backUpObjectCollection = getRealCollectionAttributeValueFromObject(backUp, session);
        }
        Object cloneObjectCollection = null;
        if (cloneAttribute != null) {
            cloneObjectCollection = getRealCollectionAttributeValueFromObject(clone, session);
        } else {
            cloneObjectCollection = this.containerPolicy.containerInstance(1);
        }
        CollectionChangeRecord changeRecord = new CollectionChangeRecord(owner);
        changeRecord.setAttribute(getAttributeName());
        changeRecord.setMapping(this);
        compareCollectionsForChange(backUpObjectCollection, cloneObjectCollection, changeRecord, session);
        if (changeRecord.hasChanges()) {
            changeRecord.setOriginalCollection(backUpObjectCollection);
            return changeRecord;
        }
        return null;
    }

    /**
     * INTERNAL:
     * Compare the attributes belonging to this mapping for the objects.
     */
    @Override
    public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
        Object firstObjectCollection = getRealCollectionAttributeValueFromObject(firstObject, session);
        Object secondObjectCollection = getRealCollectionAttributeValueFromObject(secondObject, session);
        return super.compareObjects(firstObjectCollection, secondObjectCollection, session);
    }

    /**
     * INTERNAL:
     * Write the changes defined in the change set for the mapping.
     * Mapping added or removed events are raised to allow the mapping to write the changes as required.
     */
    public void writeChanges(ObjectChangeSet changeSet, WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        CollectionChangeRecord record = (CollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName());
        if (record != null) {
            // Removals first, then adds, then (if a list-order field is used) index repairs.
            for (ObjectChangeSet removedChangeSet : record.getRemoveObjectList().values()) {
                objectRemovedDuringUpdate(query, this.containerPolicy.getCloneDataFromChangeSet(removedChangeSet), null);
                if (removedChangeSet.getOldKey() != null){
                    this.containerPolicy.propogatePostUpdate(query, removedChangeSet.getOldKey());
                }
            }
            Map extraData = null;
            Object currentObjects = null;
            for (ObjectChangeSet addedChangeSet : record.getAddObjectList().values()) {
                if (this.listOrderField != null) {
                    // Pass the list index along with the insert via extraData.
                    extraData = new HashMap(1);
                    Integer addedIndexInList = record.getOrderedAddObjectIndices().get(addedChangeSet);
                    if (addedIndexInList == null) {
                        if (currentObjects == null) {
                            currentObjects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
                        }
                        addedIndexInList = ((List)currentObjects).indexOf(addedChangeSet.getUnitOfWorkClone());
                    }
                    extraData.put(this.listOrderField, addedIndexInList);
                }
                objectAddedDuringUpdate(query, this.containerPolicy.getCloneDataFromChangeSet(addedChangeSet), addedChangeSet, extraData);
                if (addedChangeSet.getNewKey() != null){
                    this.containerPolicy.propogatePostUpdate(query, addedChangeSet.getNewKey());
                }
            }
            if (this.listOrderField != null) {
                // This is a hacky check for attribute change tracking, if the backup clone is different, then is using deferred.
                List previousList = (List)getRealCollectionAttributeValueFromObject(query.getBackupClone(), query.getSession());;
                int previousSize = previousList.size();
                if (currentObjects == null) {
                    currentObjects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
                }
                List currentList = (List)currentObjects;
                int currentSize = currentList.size();
                boolean shouldRepairOrder = false;
                if (currentList instanceof IndirectList) {
                    shouldRepairOrder = ((IndirectList)currentList).isListOrderBrokenInDb();
                }
                if (previousList == currentList) {
                    // previousList is not available
                    // The same size as previous list,
                    // at the i-th position holds the index of the i-th original object in the current list (-1 if the object was removed):
                    // for example: {0, -1, 1, -1, 3} means that:
                    //   previous(0) == current(0);
                    //   previous(1) was removed;
                    //   previous(2) == current(1);
                    //   previous(3) was removed;
                    //   previous(4) == current(3);
                    // current(1) and current(3) were also on previous list, but with different indexes: they are the ones that should have their index changed.
                    List<Integer> currentIndexes = record.getCurrentIndexesOfOriginalObjects(currentList);
                    for (int i=0; i < currentIndexes.size(); i++) {
                        int currentIndex = currentIndexes.get(i);
                        if ((currentIndex >= 0) && (currentIndex != i || shouldRepairOrder)) {
                            objectOrderChangedDuringUpdate(query, currentList.get(currentIndex), currentIndex);
                        }
                    }
                } else {
                    for (int i=0; i < previousSize; i++) {
                        // TODO: should we check for previousObject != null?
                        Object prevObject = previousList.get(i);
                        Object currentObject = null;
                        if (i < currentSize) {
                            currentObject = currentList.get(i);
                        }
                        if (prevObject != currentObject || shouldRepairOrder) {
                            // object has either been removed or its index in the List has changed
                            int newIndex = currentList.indexOf(prevObject);
                            if (newIndex >= 0) {
                                objectOrderChangedDuringUpdate(query, prevObject, newIndex);
                            }
                        }
                    }
                }
                if (shouldRepairOrder) {
                    ((IndirectList)currentList).setIsListOrderBrokenInDb(false);
                    record.setOrderHasBeenRepaired(true);
                }
            }
        }
    }

    /**
     * INTERNAL:
     * The memory objects are compared and only the changes are written to the database.
     */
    protected void compareObjectsAndWrite(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        Object currentObjects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
        Object previousObjects = readPrivateOwnedForObject(query);
        if (previousObjects == null) {
            previousObjects = getContainerPolicy().containerInstance(1);
        }
        if (this.listOrderField != null && this.isAggregateCollectionMapping()) {
            compareListsAndWrite((List)previousObjects, (List)currentObjects, query);
            return;
        }
        ContainerPolicy cp = this.containerPolicy;
        Map previousObjectsByKey = new HashMap(cp.sizeFor(previousObjects)); // Read from db or from backup in uow.
        Map currentObjectsByKey = new HashMap(cp.sizeFor(currentObjects)); // Current value of object's attribute (clone in uow).
        Map keysOfCurrentObjects = new IdentityHashMap(cp.sizeFor(currentObjects) + 1);
        // First index the current objects by their primary key.
        for (Object currentObjectsIter = cp.iteratorFor(currentObjects); cp.hasNext(currentObjectsIter);) {
            Object currentObject = cp.next(currentObjectsIter, query.getSession());
            try {
                Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(currentObject, query.getSession());
                currentObjectsByKey.put(primaryKey, currentObject);
                keysOfCurrentObjects.put(currentObject, primaryKey);
            } catch (NullPointerException e) {
                // For CR#2646 quietly discard nulls added to a collection mapping.
                // This try-catch is essentially a null check on currentObject, for
                // ideally the customer should check for these themselves.
                if (currentObject != null) {
                    throw e;
                }
            }
        }
        // Next index the previous objects (read from db or from backup in uow)
        // and process the difference to current (optimized in same loop).
        for (Object previousObjectsIter = cp.iteratorFor(previousObjects); cp.hasNext(previousObjectsIter);) {
            Object wrappedObject = cp.nextEntry(previousObjectsIter, query.getSession());
            Map mapKeyFields = containerPolicy.getKeyMappingDataForWriteQuery(wrappedObject, query.getSession());
            Object previousObject = containerPolicy.unwrapIteratorResult(wrappedObject);
            Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(previousObject, query.getSession());
            previousObjectsByKey.put(primaryKey, previousObject);
            // Delete must occur first, in case object with same pk is removed and added,
            // (technically should not happen, but same applies to unique constraints)
            if (!currentObjectsByKey.containsKey(primaryKey)) {
                objectRemovedDuringUpdate(query, wrappedObject, mapKeyFields);
                cp.propogatePostUpdate(query, wrappedObject);
            }
        }
        for (Object currentObjectsIter = cp.iteratorFor(currentObjects); cp.hasNext(currentObjectsIter);) {
            Object wrappedObject = cp.nextEntry(currentObjectsIter, query.getSession());
            Object currentObject = containerPolicy.unwrapIteratorResult(wrappedObject);
            try {
                Map mapKeyFields = containerPolicy.getKeyMappingDataForWriteQuery(wrappedObject, query.getSession());
                Object primaryKey = keysOfCurrentObjects.get(currentObject);
                if (!(previousObjectsByKey.containsKey(primaryKey))) {
                    objectAddedDuringUpdate(query, currentObject, null, mapKeyFields);
                    cp.propogatePostUpdate(query, wrappedObject);
                } else {
                    objectUnchangedDuringUpdate(query, currentObject, previousObjectsByKey, primaryKey);
                }
            } catch (NullPointerException e) {
                // For CR#2646 skip currentObject if it is null.
                if (currentObject != null) {
                    throw e;
                }
            }
        }
    }

    /**
     * INTERNAL:
     * Old and new lists are compared and only the changes are written to the database.
     * Currently there's no support for listOrderField in CollectionMapping in case there's no change sets,
     * so this method currently never called (currently only overriding method in AggregateCollectionMapping is called).
     * This method should be implemented to support listOrderField functionality without change sets.
     */
    protected void compareListsAndWrite(List previousList, List currentList, WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
    }

    /**
     * Compare two objects if their parts are not private owned
     */
    @Override
    protected boolean compareObjectsWithoutPrivateOwned(Object firstCollection, Object secondCollection, AbstractSession session) {
        if (this.listOrderField != null) {
            // Ordered lists must match element-by-element.
            return compareLists((List)firstCollection, (List)secondCollection, session, false);
        }
        ContainerPolicy cp = this.containerPolicy;
        if (cp.sizeFor(firstCollection) != cp.sizeFor(secondCollection)) {
            return false;
        }
        Object firstIter = cp.iteratorFor(firstCollection);
        Object secondIter = cp.iteratorFor(secondCollection);
        Map keyValues = new HashMap();
        if (isMapKeyMapping()) {
            // Compare by map key -> target primary key.
            while (cp.hasNext(secondIter)) {
                Map.Entry secondObject = (Map.Entry)cp.nextEntry(secondIter, session);
                Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(secondObject.getValue(), session);
                Object key = secondObject.getKey();
                keyValues.put(key, primaryKey);
            }
            while (cp.hasNext(firstIter)) {
                Map.Entry firstObject = (Map.Entry)cp.nextEntry(firstIter, session);
                Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(firstObject.getValue(), session);
                Object key = firstObject.getKey();
                if (!primaryKey.equals(keyValues.get(key))) {
                    return false;
                }
            }
        } else {
            // Compare as a set of target primary keys.
            while (cp.hasNext(secondIter)) {
                Object secondObject = cp.next(secondIter, session);
                Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(secondObject, session);
                keyValues.put(primaryKey, primaryKey);
            }
            while (cp.hasNext(firstIter)) {
                Object firstObject = cp.next(firstIter, session);
                Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(firstObject, session);
                if (!keyValues.containsKey(primaryKey)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Compare two objects if their parts are private owned
     */
    @Override
    protected boolean compareObjectsWithPrivateOwned(Object firstCollection, Object secondCollection, AbstractSession session) {
        if (this.listOrderField != null) {
            return compareLists((List)firstCollection, (List)secondCollection, session, true);
        }
        ContainerPolicy cp = this.containerPolicy;
        if (cp.sizeFor(firstCollection) != cp.sizeFor(secondCollection)) {
            return false;
        }
        Object firstIter = cp.iteratorFor(firstCollection);
        Object secondIter = cp.iteratorFor(secondCollection);
        Map keyValueToObject = new HashMap(cp.sizeFor(firstCollection));
        while (cp.hasNext(secondIter)) {
            Object secondObject = cp.next(secondIter, session);
            Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(secondObject, session);
            keyValueToObject.put(primaryKey, secondObject);
        }
        while (cp.hasNext(firstIter)) {
            Object firstObject = cp.next(firstIter, session);
            Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(firstObject, session);
            if (keyValueToObject.containsKey(primaryKey)) {
                // Private owned: compare the full object state, not just the key.
                Object object = keyValueToObject.get(primaryKey);
                if (!session.compareObjects(firstObject, object)) {
                    return false;
                }
            } else {
                return false;
            }
        }
        return true;
    }

    /**
     * Compare two lists. For equality the order of the elements should be the same.
     * Used only if listOrderField != null
     */
    protected boolean compareLists(List firstList, List secondList, AbstractSession session, boolean withPrivateOwned) {
        if (firstList.size() != secondList.size()) {
            return false;
        }
        int size = firstList.size();
        for (int i=0; i < size; i++) {
            Object firstObject = firstList.get(i);
            Object secondObject = secondList.get(i);
            if (withPrivateOwned) {
                if (!session.compareObjects(firstObject, secondObject)) {
                    return false;
                }
            } else {
                Object firstKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(firstObject, session);
                Object secondKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(secondObject, session);
                if (!firstKey.equals(secondKey)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * INTERNAL:
     * Convert all the class-name-based settings in this mapping to actual class-based
     * settings
     * This method is implemented by subclasses as necessary.
     * @param classLoader
     */
    @Override
    public void convertClassNamesToClasses(ClassLoader classLoader) {
        super.convertClassNamesToClasses(classLoader);
        containerPolicy.convertClassNamesToClasses(classLoader);
    }

    /**
     * INTERNAL:
     * Extract the value from the batch optimized query, this should be supported by most query types.
     */
    @Override
    public Object extractResultFromBatchQuery(ReadQuery batchQuery, CacheKey parentCacheKey, AbstractRecord sourceRow, AbstractSession session, ObjectLevelReadQuery originalQuery) throws QueryException {
        Object result = super.extractResultFromBatchQuery(batchQuery, parentCacheKey, sourceRow, session, originalQuery);
        // The source object might not have any target objects.
        if (result == null) {
            return this.containerPolicy.containerInstance();
        } else {
            return result;
        }
    }

    /**
     * INTERNAL:
     * Prepare and execute the batch query and store the
     * results for each source object in a map keyed by the
     * mappings source keys of the source objects.
     */
    @Override
    protected void executeBatchQuery(DatabaseQuery query, CacheKey parentCacheKey, Map referenceObjectsByKey, AbstractSession session, AbstractRecord translationRow) {
        // Execute query and index resulting object sets by key.
        ReadAllQuery batchQuery = (ReadAllQuery)query;
        ComplexQueryResult complexResult = (ComplexQueryResult)session.executeQuery(batchQuery, translationRow);
        Object results = complexResult.getResult();
        Iterator<AbstractRecord> rowsIterator = ((List)complexResult.getData()).iterator();
        ContainerPolicy queryContainerPolicy = batchQuery.getContainerPolicy();
        if (this.containerPolicy.shouldAddAll()) {
            // Indexed list mappings require special add that include the row data with the index.
            Map<Object, List[]> referenceObjectsAndRowsByKey = new HashMap();
            for (Object objectsIterator = queryContainerPolicy.iteratorFor(results); queryContainerPolicy.hasNext(objectsIterator);) {
                Object eachReferenceObject = queryContainerPolicy.next(objectsIterator, session);
                AbstractRecord row = rowsIterator.next();
                Object eachReferenceKey = extractKeyFromTargetRow(row, session);
                List[] objectsAndRows = referenceObjectsAndRowsByKey.get(eachReferenceKey);
                if (objectsAndRows == null) {
                    objectsAndRows = new List[]{new ArrayList(), new ArrayList()};
                    referenceObjectsAndRowsByKey.put(eachReferenceKey, objectsAndRows);
                }
                objectsAndRows[0].add(eachReferenceObject);
                objectsAndRows[1].add(row);
            }
            Iterator<Map.Entry<Object, List[]>> iterator = referenceObjectsAndRowsByKey.entrySet().iterator();
            while (iterator.hasNext()) {
                Map.Entry<Object, List[]> entry = iterator.next();
                Object eachReferenceKey = entry.getKey();
                List objects = entry.getValue()[0];
                List rows = entry.getValue()[1];
                Object container = this.containerPolicy.containerInstance(objects.size());
                this.containerPolicy.addAll(objects, container, query.getSession(), rows, batchQuery, parentCacheKey, true);
                referenceObjectsByKey.put(eachReferenceKey, container);
            }
        } else {
            // Non-indexed list, either normal collection, or a map key.
            for (Object objectsIterator = queryContainerPolicy.iteratorFor(results); queryContainerPolicy.hasNext(objectsIterator);) {
                Object eachReferenceObject = queryContainerPolicy.next(objectsIterator, session);
                AbstractRecord row = rowsIterator.next();
                // Handle duplicate rows in the ComplexQueryResult being replaced with null, as a
                // result of duplicate filtering being true for constructing the ComplexQueryResult
                while (row == null && rowsIterator.hasNext()) {
                    row = rowsIterator.next();
                }
                Object eachReferenceKey = extractKeyFromTargetRow(row, session);
                Object container = referenceObjectsByKey.get(eachReferenceKey);
                if ((container == null) || (container == Helper.NULL_VALUE)) {
                    container = this.containerPolicy.containerInstance();
                    referenceObjectsByKey.put(eachReferenceKey, container);
                }
                this.containerPolicy.addInto(eachReferenceObject, container, session, row, batchQuery, parentCacheKey, true);
            }
        }
    }

    /**
     * INTERNAL:
     * Extract the source primary key value from the target row.
     * Used for batch reading, most following same order and fields as in the mapping.
     * The method should be overridden by classes that support batch reading.
     */
    protected Object extractKeyFromTargetRow(AbstractRecord row, AbstractSession session) {
        throw QueryException.batchReadingNotSupported(this, null);
    }

    /**
     * INTERNAL:
     * We are not using a remote valueholder
     * so we need to replace the reference object(s) with
     * the corresponding object(s) from the remote session.
     */
    @Override
    public void fixRealObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) {
        //bug 4147755 getRealAttribute.../setReal
        Object attributeValue = getRealAttributeValueFromObject(object, session);
        // the object collection could be null, check here to avoid NPE
        if (attributeValue == null) {
            setAttributeValueInObject(object, null);
            return;
        }
        ObjectLevelReadQuery tempQuery = query;
        if (!tempQuery.shouldMaintainCache()) {
            if ((!tempQuery.shouldCascadeParts()) || (tempQuery.shouldCascadePrivateParts() && (!isPrivateOwned()))) {
                tempQuery = null;
            }
        }
        Object remoteAttributeValue = session.getObjectsCorrespondingToAll(attributeValue, objectDescriptors, processedObjects, tempQuery, this.containerPolicy);
        setRealAttributeValueInObject(object, remoteAttributeValue);
    }

    /**
     * INTERNAL:
     * Returns the receiver's containerPolicy.
     */
    public ContainerPolicy getContainerPolicy() {
        return containerPolicy;
    }

    // Lazily creates a default DataModifyQuery the first time it is requested.
    protected ModifyQuery getDeleteAllQuery() {
        if (deleteAllQuery == null) {
            deleteAllQuery = new DataModifyQuery();
        }
        return deleteAllQuery;
    }

    /**
     * INTERNAL:
     * Returns the join criteria stored in the mapping selection query. This criteria
     * is used to read reference objects across the tables from the database.
     */
    @Override
    public Expression getJoinCriteria(ObjectExpression context, Expression base) {
        Expression selectionCriteria = getSelectionCriteria();
        Expression keySelectionCriteria = this.containerPolicy.getKeySelectionCriteria();
        if (keySelectionCriteria != null) {
            selectionCriteria = selectionCriteria.and(keySelectionCriteria);
        }
        return context.getBaseExpression().twist(selectionCriteria, base);
    }

    /**
     * INTERNAL:
     * return the object on the client corresponding to the specified object.
     * CollectionMappings have to worry about
     * maintaining object identity.
*/ @Override public Object getObjectCorrespondingTo(Object object, DistributedSession session, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query) { return session.getObjectsCorrespondingToAll(object, objectDescriptors, processedObjects, query, this.containerPolicy); } /** * INTERNAL: * Return the ordering query keys. * Used for Workbench integration. */ public List getOrderByQueryKeyExpressions() { List expressions = new ArrayList (); if ((getSelectionQuery() != null) && getSelectionQuery().isReadAllQuery()) { for (Expression orderExpression : ((ReadAllQuery)getSelectionQuery()).getOrderByExpressions()) { if (orderExpression.isFunctionExpression() && ((FunctionExpression)orderExpression).getBaseExpression().isQueryKeyExpression()) { expressions.add(orderExpression); } } } return expressions; } /** * INTERNAL: * Get the container policy from the selection query for this mapping. This * method is overridden in DirectCollectionMapping since its selection * query is a DataReadQuery. */ protected ContainerPolicy getSelectionQueryContainerPolicy() { return ((ReadAllQuery) getSelectionQuery()).getContainerPolicy(); } /** * Convenience method. * Return the value of an attribute, unwrapping value holders if necessary. * If the value is null, build a new container. */ @Override public Object getRealCollectionAttributeValueFromObject(Object object, AbstractSession session) throws DescriptorException { Object value = getRealAttributeValueFromObject(object, session); if (value == null) { value = this.containerPolicy.containerInstance(1); } return value; } /** * PUBLIC: * Field holds the order of elements in the list in the db, requires collection of type List; * may be not null only in case isListOrderFieldSupported==true. */ public DatabaseField getListOrderField() { return listOrderField; } /** * INTERNAL: * Returns list of primary key fields from the reference descriptor. 
*/ public List getTargetPrimaryKeyFields() { return getReferenceDescriptor().getPrimaryKeyFields(); } /** * PUBLIC: * Specifies what should be done if the list of values read from listOrserField is invalid * (there should be no nulls, no duplicates, no "holes"). */ public OrderCorrectionType getOrderCorrectionType() { return this.orderCorrectionType; } protected boolean hasCustomDeleteAllQuery() { return hasCustomDeleteAllQuery; } /** * INTERNAL: * Return true if ascending or descending ordering has been set on this * mapping via the @OrderBy annotation. */ public boolean hasOrderBy() { return hasOrderBy; } /** * INTERNAL: * Initialize the state of mapping. */ @Override public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); setFields(collectFields()); this.containerPolicy.prepare(getSelectionQuery(), session); // Check that the container policy is correct for the collection type. if ((!usesIndirection()) && (!getAttributeAccessor().getAttributeClass().isAssignableFrom(this.containerPolicy.getContainerClass()))) { throw DescriptorException.incorrectCollectionPolicy(this, getAttributeAccessor().getAttributeClass(), this.containerPolicy.getContainerClass()); } if(listOrderField != null) { initializeListOrderField(session); } } /** * INTERNAL: * Initializes listOrderField. * Precondition: listOrderField != null. 
*/ protected void initializeListOrderField(AbstractSession session) { if(!List.class.isAssignableFrom(getAttributeAccessor().getAttributeClass())) { throw DescriptorException.listOrderFieldRequiersList(getDescriptor(), this); } boolean isAttributeAssignableFromIndirectList = getAttributeAccessor().getAttributeClass().isAssignableFrom(IndirectList.class); if(this.orderCorrectionType == null) { // set default validation mode if(isAttributeAssignableFromIndirectList) { this.orderCorrectionType = OrderCorrectionType.READ_WRITE; } else { this.orderCorrectionType = OrderCorrectionType.READ; } } else if(this.orderCorrectionType == OrderCorrectionType.READ_WRITE) { //OrderValidationMode.CORRECTION sets container class to IndirectList, make sure the attribute is of compatible type. if(!isAttributeAssignableFromIndirectList) { throw DescriptorException.listOrderFieldRequiersIndirectList(getDescriptor(), this); } } ContainerPolicy originalQueryContainerPolicy = getSelectionQueryContainerPolicy(); if(!this.containerPolicy.isOrderedListPolicy()) { setContainerPolicy(new OrderedListContainerPolicy(this.containerPolicy.getContainerClass())); // re-prepare replaced container policy as we are initializing getContainerPolicy().prepare(getSelectionQuery(), session); } OrderedListContainerPolicy orderedListContainerPolicy = (OrderedListContainerPolicy)this.containerPolicy; orderedListContainerPolicy.setListOrderField(this.listOrderField); orderedListContainerPolicy.setOrderCorrectionType(this.orderCorrectionType); // If ContainerPolicy's container class is IndirectList, originalQueryContainerPolicy's container class is not (likely Vector) // and orderCorrectionType doesn't require query to use IndirectList - then query will keep a separate container policy // that uses its original container class (likely Vector) - this is the same optimization as used in useTransparentList method. 
if(this.containerPolicy.getContainerClass().isAssignableFrom(IndirectList.class) && !IndirectList.class.isAssignableFrom(originalQueryContainerPolicy.getContainerClass()) && this.orderCorrectionType != OrderCorrectionType.READ_WRITE || originalQueryContainerPolicy == this.getSelectionQueryContainerPolicy()) { OrderedListContainerPolicy queryOrderedListContainerPolicy; if(originalQueryContainerPolicy.getClass().equals(orderedListContainerPolicy.getClass())) { // original query container policy queryOrderedListContainerPolicy = (OrderedListContainerPolicy)originalQueryContainerPolicy; queryOrderedListContainerPolicy.setListOrderField(this.listOrderField); queryOrderedListContainerPolicy.setOrderCorrectionType(this.orderCorrectionType); } else { // clone mapping's container policy queryOrderedListContainerPolicy = (OrderedListContainerPolicy)orderedListContainerPolicy.clone(); queryOrderedListContainerPolicy.setContainerClass(originalQueryContainerPolicy.getContainerClass()); setSelectionQueryContainerPolicy(queryOrderedListContainerPolicy); } } if(this.listOrderField.getType() == null) { this.listOrderField.setType(Integer.class); } buildListOrderField(); // DirectCollectMap - that uses DataReadQuery - adds listOrderField to selection query in initializeSelectionStatement method. if (getSelectionQuery().isReadAllQuery()) { if(shouldUseListOrderFieldTableExpression()) { initializeListOrderFieldTable(session); } } initializeChangeOrderTargetQuery(session); } /** * INTERNAL: * Initializes listOrderField's table, does nothing by default. * Precondition: listOrderField != null. */ protected void initializeListOrderFieldTable(AbstractSession session) { } /** * INTERNAL: * Verifies listOrderField's table, if none found sets the default one. * Precondition: listOrderField != null. 
*/ protected void buildListOrderField() { if(this.listOrderField.hasTableName()) { if(!this.getReferenceDescriptor().getDefaultTable().equals(this.listOrderField.getTable())) { throw DescriptorException.listOrderFieldTableIsWrong(this.getDescriptor(), this, this.listOrderField.getTable(), this.getReferenceDescriptor().getDefaultTable()); } } else { this.listOrderField.setTable(this.getReferenceDescriptor().getDefaultTable()); } this.listOrderField = this.getReferenceDescriptor().buildField(this.listOrderField); } /** * ADVANCED: * This method should only be called after this mapping's indirection policy has been set * * IndirectList and IndirectSet can be configured not to instantiate the list from the * database when you add and remove from them. IndirectList defaults to this behavior. When * Set to true, the collection associated with this TransparentIndirection will be setup so as * not to instantiate for adds and removes. The weakness of this setting for an IndirectSet is * that when the set is not instantiated, if a duplicate element is added, it will not be * detected until commit time. */ public Boolean shouldUseLazyInstantiationForIndirectCollection() { if (getIndirectionPolicy() == null){ return null; } return getIndirectionPolicy().shouldUseLazyInstantiation(); } /** * INTERNAL: * Indicates whether getListOrderFieldExpression method should create field expression based on table expression. */ public boolean shouldUseListOrderFieldTableExpression() { return false; } /** * INTERNAL: * Initialize changeOrderTargetQuery. */ protected void initializeChangeOrderTargetQuery(AbstractSession session) { } /** * INTERNAL: * Return whether this mapping is a Collection type. */ @Override public boolean isCollectionMapping() { return true; } /** * INTERNAL: * Return if this mapping has a mapped key that uses a OneToOne (object). 
*/ public boolean isMapKeyObjectRelationship() { return this.containerPolicy.isMapKeyObject(); } /** * INTERNAL: * The referenced object is checked if it is instantiated or not, * also check if it has been changed (as indirect collections avoid instantiation on add/remove. */ public boolean isAttributeValueInstantiatedOrChanged(Object object) { return this.indirectionPolicy.objectIsInstantiatedOrChanged(getAttributeValueFromObject(object)); } /** * INTERNAL: * Iterate on the specified element. */ public void iterateOnElement(DescriptorIterator iterator, Object element) { iterator.iterateReferenceObjectForMapping(element, this); } /** * INTERNAL: * Iterate on the attribute value. * The value holder has already been processed. */ @Override public void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue) { if (realAttributeValue == null) { return; } ContainerPolicy cp = this.containerPolicy; for (Object iter = cp.iteratorFor(realAttributeValue); cp.hasNext(iter);) { Object wrappedObject = cp.nextEntry(iter, iterator.getSession()); Object object = cp.unwrapIteratorResult(wrappedObject); iterateOnElement(iterator, object); cp.iterateOnMapKey(iterator, wrappedObject); } } /** * Force instantiation of the load group. 
*/ @Override public void load(final Object object, AttributeItem item, final AbstractSession session, final boolean fromFetchGroup) { instantiateAttribute(object, session); if (item.getGroup() != null && (!fromFetchGroup || session.isUnitOfWork()) ){ //if UOW make sure the nested attributes are loaded as the clones will not be instantiated Object value = getRealAttributeValueFromObject(object, session); ContainerPolicy cp = this.containerPolicy; for (Object iterator = cp.iteratorFor(value); cp.hasNext(iterator);) { Object wrappedObject = cp.nextEntry(iterator, session); Object nestedObject = cp.unwrapIteratorResult(wrappedObject); session.load(nestedObject, item.getGroup(nestedObject.getClass()), getReferenceDescriptor(), fromFetchGroup); } } } /** * Force instantiation of all indirections. */ @Override public void loadAll(Object object, AbstractSession session, IdentityHashSet loaded) { instantiateAttribute(object, session); ClassDescriptor referenceDescriptor = getReferenceDescriptor(); if (referenceDescriptor != null) { boolean hasInheritance = referenceDescriptor.hasInheritance(); Object value = getRealAttributeValueFromObject(object, session); ContainerPolicy cp = this.containerPolicy; for (Object iterator = cp.iteratorFor(value); cp.hasNext(iterator);) { Object wrappedObject = cp.nextEntry(iterator, session); Object nestedObject = cp.unwrapIteratorResult(wrappedObject); if (hasInheritance && !nestedObject.getClass().equals(referenceDescriptor.getJavaClass())){ ClassDescriptor concreteReferenceDescriptor = referenceDescriptor.getInheritancePolicy().getDescriptor(nestedObject.getClass()); concreteReferenceDescriptor.getObjectBuilder().loadAll(nestedObject, session, loaded); } else { referenceDescriptor.getObjectBuilder().loadAll(nestedObject, session, loaded); } } } } /** * ADVANCED: * Return whether the reference objects must be deleted * one by one, as opposed to with a single DELETE statement. 
*/ public boolean mustDeleteReferenceObjectsOneByOne() { return this.mustDeleteReferenceObjectsOneByOne == null || this.mustDeleteReferenceObjectsOneByOne; } /** * INTERNAL: * Merge changes from the source to the target object. * Because this is a collection mapping, values are added to or removed from the * collection based on the changeset */ @Override public void mergeChangesIntoObject(Object target, ChangeRecord chgRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { if (this.descriptor.getCachePolicy().isProtectedIsolation()&& !this.isCacheable && !targetSession.isProtectedSession()){ setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null))); return; } Object valueOfTarget = null; Object valueOfSource = null; ContainerPolicy containerPolicy = this.containerPolicy; CollectionChangeRecord changeRecord = (CollectionChangeRecord) chgRecord; UnitOfWorkChangeSet uowChangeSet = (UnitOfWorkChangeSet)changeRecord.getOwner().getUOWChangeSet(); // Collect the changes into a vector. Check to see if the target has an instantiated // collection, if it does then iterate over the changes and merge the collections. if (isAttributeValueInstantiated(target)) { // If it is new will need a new collection. if (changeRecord.getOwner().isNew()) { valueOfTarget = containerPolicy.containerInstance(changeRecord.getAddObjectList().size()); } else { if (isSynchronizeOnMerge) { valueOfTarget = getRealCollectionAttributeValueFromObject(target, mergeManager.getSession()); } else { // Clone instead of synchronization to avoid possible deadlocks. 
valueOfTarget = containerPolicy.cloneFor(getRealCollectionAttributeValueFromObject(target, mergeManager.getSession())); } } containerPolicy.mergeChanges(changeRecord, valueOfTarget, shouldMergeCascadeParts(mergeManager), mergeManager, targetSession, isSynchronizeOnMerge); } else { // The valueholder has not been instantiated if (mergeManager.shouldMergeChangesIntoDistributedCache()) { return; // do nothing } // PERF: Also avoid merge if source has not been instantiated for indirect collection adds. if (!isAttributeValueInstantiated(source)) { return; } // If I'm not merging on another server then create instance of the collection valueOfSource = getRealCollectionAttributeValueFromObject(source, mergeManager.getSession()); Object iterator = containerPolicy.iteratorFor(valueOfSource); valueOfTarget = containerPolicy.containerInstance(containerPolicy.sizeFor(valueOfSource)); while (containerPolicy.hasNext(iterator)) { // CR#2195 Problem with merging Collection mapping in unit of work and inheritance. Object objectToMerge = containerPolicy.next(iterator, mergeManager.getSession()); if (shouldMergeCascadeParts(mergeManager) && (valueOfSource != null)) { ObjectChangeSet changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(objectToMerge); mergeManager.mergeChanges(objectToMerge, changeSet, targetSession); } // Let the mergemanager get it because I don't have the change for the object. // CR#2188 Problem with merging Collection mapping in unit of work and transparent indirection. containerPolicy.addInto(mergeManager.getTargetVersionOfSourceObject(objectToMerge, referenceDescriptor, targetSession), valueOfTarget, mergeManager.getSession()); } } if (valueOfTarget == null) { valueOfTarget = containerPolicy.containerInstance(); } setRealAttributeValueInObject(target, valueOfTarget); } /** * INTERNAL: * Merge changes from the source to the target object. 
This merge is only called when a changeSet for the target * does not exist or the target is uninitialized */ @Override public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) { if (this.descriptor.getCachePolicy().isProtectedIsolation() && !this.isCacheable && !targetSession.isProtectedSession()){ setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null))); return; } if (isTargetUnInitialized) { // This will happen if the target object was removed from the cache before the commit was attempted if (mergeManager.shouldMergeWorkingCopyIntoOriginal() && (!isAttributeValueInstantiated(source))) { setAttributeValueInObject(target, this.indirectionPolicy.getOriginalIndirectionObject(getAttributeValueFromObject(source), targetSession)); return; } } if (!shouldMergeCascadeReference(mergeManager)) { // This is only going to happen on mergeClone, and we should not attempt to merge the reference return; } if (mergeManager.shouldRefreshRemoteObject() && usesIndirection()) { mergeRemoteValueHolder(target, source, mergeManager); return; } if (mergeManager.isForRefresh()) { if (!isAttributeValueInstantiated(target)) { if(shouldRefreshCascadeParts(mergeManager)){ // We must clone and set the value holder from the source to the target. // This ensures any cascaded refresh will be applied to the UOW backup valueholder Object attributeValue = getAttributeValueFromObject(source); Object clonedAttributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, source, null, target, null, mergeManager.getSession(), false); // building clone from an original not a row. 
setAttributeValueInObject(target, clonedAttributeValue); } // This will occur when the clone's value has not been instantiated yet and we do not need // the refresh that attribute return; } } else if (!isAttributeValueInstantiatedOrChanged(source)) { // I am merging from a clone into an original. No need to do merge if the attribute was never // modified return; } Object valueOfSource = getRealCollectionAttributeValueFromObject(source, mergeManager.getSession()); // There is a very special case when merging into the shared cache that the original // has been refreshed and now has non-instantiated indirection objects. // Force instantiation is not necessary and can cause problem with JTS drivers. AbstractSession mergeSession = mergeManager.getSession(); Object valueOfTarget = getRealCollectionAttributeValueFromObject(target, mergeSession); ContainerPolicy containerPolicy = this.containerPolicy; // BUG#5190470 Must force instantiation of indirection collections. containerPolicy.sizeFor(valueOfTarget); boolean fireChangeEvents = false; ObjectChangeListener listener = null; Object valueOfSourceCloned = null; if (!mergeManager.isForRefresh()) { // EL Bug 338504 - No Need to clone in this case. valueOfSourceCloned = valueOfSource; // if we are copying from original to clone then the source will be // instantiated anyway and we must continue to use the UnitOfWork // valueholder in the case of transparent indirection Object newContainer = containerPolicy.containerInstance(containerPolicy.sizeFor(valueOfSourceCloned)); if ((this.descriptor.getObjectChangePolicy().isObjectChangeTrackingPolicy()) && (target instanceof ChangeTracker) && (((ChangeTracker)target)._persistence_getPropertyChangeListener() != null)) { // Avoid triggering events if we are dealing with the same list. 
// We rebuild the new container though since any cascade merge // activity such as lifecycle methods etc will be captured on // newly registered objects and not the clones and we need to // make sure the target has these updates once we are done. fireChangeEvents = (valueOfSourceCloned != valueOfTarget); // Collections may not be indirect list or may have been replaced with user collection. Object iterator = containerPolicy.iteratorFor(valueOfTarget); listener = (ObjectChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener(); if (fireChangeEvents) { // Objects removed from the first position in the list, so the index of the removed object is always 0. // When event is processed the index is used only in listOrderField case, ignored otherwise. Integer zero = Integer.valueOf(0); while (containerPolicy.hasNext(iterator)) { CollectionChangeEvent event = containerPolicy.createChangeEvent(target, getAttributeName(), valueOfTarget, containerPolicy.next(iterator, mergeSession), CollectionChangeEvent.REMOVE, zero, false); listener.internalPropertyChange(event); } } if (newContainer instanceof ChangeTracker) { ((CollectionChangeTracker)newContainer).setTrackedAttributeName(getAttributeName()); ((CollectionChangeTracker)newContainer)._persistence_setPropertyChangeListener(listener); } if (valueOfTarget instanceof ChangeTracker) { ((ChangeTracker)valueOfTarget)._persistence_setPropertyChangeListener(null);//remove listener } } valueOfTarget = newContainer; } else { if (isSynchronizeOnMerge) { // EL Bug 338504 - It needs to iterate on object which can possibly // cause a deadlock scenario while merging changes from original // to the working copy during rollback of the transaction. So, clone // the original object instead of synchronizing on it and use cloned // object to iterate and merge changes to the working copy. 
synchronized(valueOfSource) { valueOfSourceCloned = containerPolicy.cloneFor(valueOfSource); } } else { valueOfSourceCloned = valueOfSource; } //bug 3953038 - set a new collection in the object until merge completes, this // prevents rel-maint. from adding duplicates. setRealAttributeValueInObject(target, containerPolicy.containerInstance(containerPolicy.sizeFor(valueOfSourceCloned))); containerPolicy.clear(valueOfTarget); } Object sourceIterator = containerPolicy.iteratorFor(valueOfSourceCloned); // Index of the added object - objects are added to the end of the list. // When event is processed the index is used only in listOrderField case, ignored otherwise. int i = 0; while (containerPolicy.hasNext(sourceIterator)) { Object wrappedObject = containerPolicy.nextEntry(sourceIterator, mergeManager.getSession()); Object object = containerPolicy.unwrapIteratorResult(wrappedObject); if (object == null) { continue;// skip the null } if (shouldMergeCascadeParts(mergeManager)) { Object mergedObject = null; if ((mergeManager.getSession().isUnitOfWork()) && (((UnitOfWorkImpl)mergeManager.getSession()).getUnitOfWorkChangeSet() != null)) { // If it is a unit of work, we have to check if I have a change Set for this object mergedObject = mergeManager.mergeChanges(mergeManager.getObjectToMerge(object, referenceDescriptor, targetSession), (ObjectChangeSet)((UnitOfWorkImpl)mergeManager.getSession()).getUnitOfWorkChangeSet().getObjectChangeSetForClone(object), targetSession); if (listener != null && !fireChangeEvents && mergedObject != object){ // we are merging a collection into itself that contained detached or new Entities. 
make sure to remove the // old change records // bug 302293 this.descriptor.getObjectChangePolicy().updateListenerForSelfMerge(listener, this, object, mergedObject, (UnitOfWorkImpl) mergeManager.getSession()); } } else { mergedObject = mergeManager.mergeChanges(mergeManager.getObjectToMerge(object, referenceDescriptor, targetSession), null, targetSession); } } wrappedObject = containerPolicy.createWrappedObjectFromExistingWrappedObject(wrappedObject, source, referenceDescriptor, mergeManager, targetSession); if (isSynchronizeOnMerge) { synchronized (valueOfTarget) { if (fireChangeEvents) { //Collections may not be indirect list or may have been replaced with user collection. //bug 304251: let the ContainerPolicy decide what changeevent object to create CollectionChangeEvent event = containerPolicy.createChangeEvent(target, getAttributeName(), valueOfTarget, wrappedObject, CollectionChangeEvent.ADD, i++, false); listener.internalPropertyChange(event); } containerPolicy.addInto(wrappedObject, valueOfTarget, mergeManager.getSession()); } } else { if (fireChangeEvents) { //Collections may not be indirect list or may have been replaced with user collection. //bug 304251: let the ContainerPolicy decide what changeevent object to create CollectionChangeEvent event = containerPolicy.createChangeEvent(target, getAttributeName(), valueOfTarget, wrappedObject, CollectionChangeEvent.ADD, i++, false); listener.internalPropertyChange(event); } containerPolicy.addInto(wrappedObject, valueOfTarget, mergeManager.getSession()); } } if (fireChangeEvents && (this.descriptor.getObjectChangePolicy().isAttributeChangeTrackingPolicy())) { // check that there were changes, if not then remove the record. 
ObjectChangeSet changeSet = ((AttributeChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener()).getObjectChangeSet(); //Bug4910642 Add NullPointer check if (changeSet != null) { CollectionChangeRecord changeRecord = (CollectionChangeRecord)changeSet.getChangesForAttributeNamed(getAttributeName()); if (changeRecord != null) { if (!changeRecord.isDeferred()) { if (!changeRecord.hasChanges()) { changeSet.removeChange(getAttributeName()); } } else { // Must reset the latest collection. changeRecord.setLatestCollection(valueOfTarget); } } } } // Must re-set variable to allow for set method to re-morph changes if the collection is not being stored directly. setRealAttributeValueInObject(target, valueOfTarget); } /** * INTERNAL: * An object was added to the collection during an update, insert it if private. */ protected void objectAddedDuringUpdate(ObjectLevelModifyQuery query, Object objectAdded, ObjectChangeSet changeSet, Map extraData) throws DatabaseException, OptimisticLockException { if (!shouldObjectModifyCascadeToParts(query)) {// Called always for M-M return; } // Only cascade dependents writes in uow. if (query.shouldCascadeOnlyDependentParts()) { return; } // Insert must not be done for uow or cascaded queries and we must cascade to cascade policy. // We should distinguish between insert and write (optimization/paraniod). if (isPrivateOwned()) { InsertObjectQuery insertQuery = new InsertObjectQuery(); insertQuery.setIsExecutionClone(true); insertQuery.setObject(containerPolicy.unwrapIteratorResult(objectAdded)); insertQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(insertQuery); } else { // Always write for updates, either private or in uow if calling this method. 
UnitOfWorkChangeSet uowChangeSet = null; if ((changeSet == null) && query.getSession().isUnitOfWork() && (((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet() != null)) { uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet(); changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(query.getObject()); } WriteObjectQuery writeQuery = new WriteObjectQuery(); writeQuery.setIsExecutionClone(true); writeQuery.setObject(containerPolicy.unwrapIteratorResult(objectAdded)); writeQuery.setObjectChangeSet(changeSet); writeQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(writeQuery); } } protected void objectOrderChangedDuringUpdate(WriteObjectQuery query, Object orderChangedObject, int orderIndex) { prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession()); AbstractRecord databaseRow = new DatabaseRecord(); // Extract target field and its value. Construct insert statement and execute it List targetPrimaryKeyFields = getTargetPrimaryKeyFields(); int size = targetPrimaryKeyFields.size(); for (int index = 0; index < size; index++) { DatabaseField targetPrimaryKey = targetPrimaryKeyFields.get(index); Object targetKeyValue = getReferenceDescriptor().getObjectBuilder().extractValueFromObjectForField(orderChangedObject, targetPrimaryKey, query.getSession()); databaseRow.put(targetPrimaryKey, targetKeyValue); } databaseRow.put(listOrderField, orderIndex); query.getSession().executeQuery(changeOrderTargetQuery, databaseRow); } /** * INTERNAL: * An object was removed to the collection during an update, delete it if private. */ protected void objectRemovedDuringUpdate(ObjectLevelModifyQuery query, Object objectDeleted, Map extraData) throws DatabaseException, OptimisticLockException { if (isPrivateOwned()) {// Must check ownership for uow and cascading. 
if (!query.shouldCascadeOnlyDependentParts()) { containerPolicy.deleteWrappedObject(objectDeleted, query.getSession()); } } } /** * INTERNAL: * An object is still in the collection, update it as it may have changed. */ protected void objectUnchangedDuringUpdate(ObjectLevelModifyQuery query, Object object) throws DatabaseException, OptimisticLockException { if (!shouldObjectModifyCascadeToParts(query)) {// Called always for M-M return; } // Only cascade dependents writes in uow. if (query.shouldCascadeOnlyDependentParts()) { return; } // Always write for updates, either private or in uow if calling this method. WriteObjectQuery writeQuery = new WriteObjectQuery(); writeQuery.setIsExecutionClone(true); writeQuery.setObject(object); writeQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(writeQuery); } /** * INTERNAL: * Overridden by mappings that require additional processing of the change record after the record has been calculated. */ @Override public void postCalculateChanges(org.eclipse.persistence.sessions.changesets.ChangeRecord changeRecord, UnitOfWorkImpl uow) { // no need for private owned check. This code is only registered for private owned mappings. // targets are added to and/or removed to/from the source. CollectionChangeRecord collectionChangeRecord = (CollectionChangeRecord)changeRecord; Iterator it = collectionChangeRecord.getRemoveObjectList().values().iterator(); while(it.hasNext()) { ObjectChangeSet ocs = (ObjectChangeSet)it.next(); containerPolicy.postCalculateChanges(ocs, referenceDescriptor, this, uow); } } /** * INTERNAL: * Overridden by mappings that require additional processing of the change record after the record has been calculated. */ @Override public void recordPrivateOwnedRemovals(Object object, UnitOfWorkImpl uow) { // no need for private owned check. This code is only registered for private owned mappings. // targets are added to and/or removed to/from the source. 
if (mustDeleteReferenceObjectsOneByOne()) {
        // Walk the real (instantiated) collection and record each element's removal.
        Iterator it = (Iterator) containerPolicy.iteratorFor(getRealAttributeValueFromObject(object, uow));
        while (it.hasNext()) {
            Object clone = it.next();
            containerPolicy.recordPrivateOwnedRemovals(clone, referenceDescriptor, uow);
        }
    }
}

/**
 * INTERNAL:
 * Add additional fields required by the container policy (e.g. join-table or
 * list-order fields) to the nested batch query.
 */
@Override
protected void postPrepareNestedBatchQuery(ReadQuery batchQuery, ObjectLevelReadQuery query) {
    super.postPrepareNestedBatchQuery(batchQuery, query);
    ReadAllQuery mappingBatchQuery = (ReadAllQuery)batchQuery;
    mappingBatchQuery.setShouldIncludeData(true);
    this.containerPolicy.addAdditionalFieldsToQuery(mappingBatchQuery, getAdditionalFieldsBaseExpression(mappingBatchQuery));
}

/**
 * INTERNAL:
 * Return the base expression to use for adding fields to the query.
 * Normally this is the query's builder, but may be the join table for m-m.
 */
protected Expression getAdditionalFieldsBaseExpression(ReadQuery query) {
    return ((ReadAllQuery)query).getExpressionBuilder();
}

/**
 * INTERNAL:
 * Copies the non primary key information into the row; currently used only in ManyToMany.
 */
protected void prepareTranslationRow(AbstractRecord translationRow, Object object, ClassDescriptor descriptor, AbstractSession session) {
    //Do nothing for the generic Collection Mapping
}

/**
 * INTERNAL:
 * A subclass should implement this method if it wants different behavior.
 * Recurse thru the parts to delete the reference objects after the actual object is deleted.
*/
@Override
public void postDelete(DeleteObjectQuery query) throws DatabaseException {
    // Propagate the delete event to each wrapped element when the container
    // policy requires per-element event propagation.
    if (this.containerPolicy.propagatesEventsToCollection()){
        Object queryObject = query.getObject();
        Object values = getAttributeValueFromObject(queryObject);
        Object iterator = containerPolicy.iteratorFor(values);
        while (containerPolicy.hasNext(iterator)){
            Object wrappedObject = containerPolicy.nextEntry(iterator, query.getSession());
            containerPolicy.propogatePostDelete(query, wrappedObject);
        }
    }
}

/**
 * INTERNAL:
 * Ensure the container policy is post initialized.
 */
@Override
public void postInitialize(AbstractSession session) {
    super.postInitialize(session);
    this.containerPolicy.postInitialize(session);
    // Compute (once) whether targets must be deleted row-by-row rather than with a
    // single delete-all statement; any of these descriptor features forces one-by-one.
    if (this.referenceDescriptor != null && this.mustDeleteReferenceObjectsOneByOne == null) {
        this.mustDeleteReferenceObjectsOneByOne = this.referenceDescriptor.hasDependencyOnParts()
                || this.referenceDescriptor.usesOptimisticLocking()
                || (this.referenceDescriptor.hasInheritance() && this.referenceDescriptor.getInheritancePolicy().shouldReadSubclasses())
                || this.referenceDescriptor.hasMultipleTables()
                || this.containerPolicy.propagatesEventsToCollection()
                || this.referenceDescriptor.hasRelationshipsExceptBackpointer(descriptor);
    } else if (this.mustDeleteReferenceObjectsOneByOne == null) {
        this.mustDeleteReferenceObjectsOneByOne = false;
    }
}

/**
 * INTERNAL:
 * A subclass should implement this method if it wants different behavior.
 * Recurse thru the parts to delete the reference objects after the actual object is deleted.
*/
@Override
public void postInsert(WriteObjectQuery query) throws DatabaseException {
    // Propagate the post-insert event to each wrapped element when required.
    if (this.containerPolicy.propagatesEventsToCollection()){
        Object queryObject = query.getObject();
        Object values = getAttributeValueFromObject(queryObject);
        Object iterator = containerPolicy.iteratorFor(values);
        while (containerPolicy.hasNext(iterator)){
            Object wrappedObject = containerPolicy.nextEntry(iterator, query.getSession());
            containerPolicy.propogatePostInsert(query, wrappedObject);
        }
    }
}

/**
 * INTERNAL:
 * Propagate preInsert event to container policy if necessary.
 */
@Override
public void preInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
    if (this.containerPolicy.propagatesEventsToCollection()){
        Object queryObject = query.getObject();
        Object values = getAttributeValueFromObject(queryObject);
        Object iterator = containerPolicy.iteratorFor(values);
        while (containerPolicy.hasNext(iterator)){
            Object wrappedObject = containerPolicy.nextEntry(iterator, query.getSession());
            containerPolicy.propogatePreInsert(query, wrappedObject);
        }
    }
}

/**
 * INTERNAL:
 * Propagate preUpdate event to container policy if necessary.
 */
@Override
public void preUpdate(WriteObjectQuery query) throws DatabaseException {
    if (this.containerPolicy.propagatesEventsToCollection()){
        Object queryObject = query.getObject();
        Object values = getAttributeValueFromObject(queryObject);
        Object iterator = containerPolicy.iteratorFor(values);
        while (containerPolicy.hasNext(iterator)){
            Object wrappedObject = containerPolicy.nextEntry(iterator, query.getSession());
            containerPolicy.propogatePreUpdate(query, wrappedObject);
        }
    }
}

/**
 * INTERNAL:
 * An object is still in the collection, update it as it may have changed.
*/
protected void objectUnchangedDuringUpdate(ObjectLevelModifyQuery query, Object object, Map backupclones, Object key) throws DatabaseException, OptimisticLockException {
    // Delegates to the two-argument form; the backup clones and key are unused here.
    objectUnchangedDuringUpdate(query, object);
}

/**
 * INTERNAL:
 * All the privately owned parts are read.
 */
protected Object readPrivateOwnedForObject(ObjectLevelModifyQuery modifyQuery) throws DatabaseException {
    if (modifyQuery.getSession().isUnitOfWork()) {
        // In a unit of work the backup clone already holds the previous contents.
        return getRealCollectionAttributeValueFromObject(modifyQuery.getBackupClone(), modifyQuery.getSession());
    } else {
        // cr 3819
        prepareTranslationRow(modifyQuery.getTranslationRow(), modifyQuery.getObject(), modifyQuery.getDescriptor(), modifyQuery.getSession());
        return modifyQuery.getSession().executeQuery(getSelectionQuery(), modifyQuery.getTranslationRow());
    }
}

/**
 * INTERNAL:
 * Replace the value holders in the specified reference object(s).
 */
@Override
public Map replaceValueHoldersIn(Object object, RemoteSessionController controller) {
    return controller.replaceValueHoldersInAll(object, this.containerPolicy);
}

/**
 * ADVANCED:
 * Configure the mapping to use a container policy.
 * The policy manages the access to the collection.
 */
public void setContainerPolicy(ContainerPolicy containerPolicy) {
    this.containerPolicy = containerPolicy;
    // Keep the selection query's container policy in sync with the mapping's.
    ((ReadAllQuery)getSelectionQuery()).setContainerPolicy(containerPolicy);
}

/**
 * PUBLIC:
 * The default delete all query for mapping can be overridden by specifying the new query.
 * This query is responsible for doing the deletion required by the mapping,
 * such as deletion of all the rows from join table for M-M, or optimized delete all of target objects for 1-M.
 */
public void setCustomDeleteAllQuery(ModifyQuery query) {
    setDeleteAllQuery(query);
    setHasCustomDeleteAllQuery(true);
}

// Sets the delete-all query without flagging it as custom (internal use).
protected void setDeleteAllQuery(ModifyQuery query) {
    deleteAllQuery = query;
}

/**
 * PUBLIC:
 * Set the receiver's delete all SQL string.
* This allows the user to override the SQL
 * generated by TopLink, with their own SQL or procedure call. The arguments are
 * translated from the fields of the source row, through replacing the field names
 * marked by '#' with the values for those fields.
 * This SQL is responsible for doing the deletion required by the mapping,
 * such as deletion of all the rows from join table for M-M, or optimized delete all of target objects for 1-M.
 * Example, 'delete from PROJ_EMP where EMP_ID = #EMP_ID'.
 */
public void setDeleteAllSQLString(String sqlString) {
    DataModifyQuery query = new DataModifyQuery();
    query.setSQLString(sqlString);
    setCustomDeleteAllQuery(query);
}

/**
 * PUBLIC:
 * Set the receiver's delete all call. This allows the user to override the SQL
 * generated by TopLink, with their own SQL or procedure call. The arguments are
 * translated from the fields of the source row.
 * This call is responsible for doing the deletion required by the mapping,
 * such as deletion of all the rows from join table for M-M, or optimized delete all of target objects for 1-M.
 * Example, 'new SQLCall("delete from PROJ_EMP where EMP_ID = #EMP_ID")'.
 */
public void setDeleteAllCall(Call call) {
    DataModifyQuery query = new DataModifyQuery();
    query.setCall(call);
    setCustomDeleteAllQuery(query);
}

// Records whether a user-supplied delete-all query is in use.
protected void setHasCustomDeleteAllQuery(boolean bool) {
    hasCustomDeleteAllQuery = bool;
}

/**
 * INTERNAL:
 * Set the container policy on the selection query for this mapping. This
 * method is overridden in DirectCollectionMapping since its selection
 * query is a DataReadQuery.
 */
protected void setSelectionQueryContainerPolicy(ContainerPolicy containerPolicy) {
    ((ReadAllQuery) getSelectionQuery()).setContainerPolicy(containerPolicy);
}

/**
 * PUBLIC:
 * Set the name of the session to execute the mapping's queries under.
 * This can be used by the session broker to override the default session
 * to be used for the target class.
*/
public void setSessionName(String name) {
    // Both the delete-all and selection queries must target the same session.
    getDeleteAllQuery().setSessionName(name);
    getSelectionQuery().setSessionName(name);
}

/**
 * ADVANCED:
 * Calling this method will only affect behavior of mappings using transparent indirection.
 * This method should only be called after this mapping's indirection policy has been set.
 *
 * IndirectList and IndirectSet can be configured not to instantiate the list from the
 * database when you add and remove from them. IndirectList defaults to this behavior. When
 * set to true, the collection associated with this TransparentIndirection will be setup so as
 * not to instantiate for adds and removes. The weakness of this setting for an IndirectSet is
 * that when the set is not instantiated, if a duplicate element is added, it will not be
 * detected until commit time.
 */
public void setUseLazyInstantiationForIndirectCollection(Boolean useLazyInstantiation) {
    if (getIndirectionPolicy() != null){
        getIndirectionPolicy().setUseLazyInstantiation(useLazyInstantiation);
    }
}

/**
 * ADVANCED:
 * This method is used to have an object added to a collection once the changeSet is applied.
 * The referenceKey parameter should only be used for direct Maps.
*/
public void simpleAddToCollectionChangeRecord(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) {
    CollectionChangeRecord collectionChangeRecord = (CollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName());
    if (collectionChangeRecord == null) {
        // First change for this attribute: create and register its change record.
        collectionChangeRecord = new CollectionChangeRecord(changeSet);
        collectionChangeRecord.setAttribute(getAttributeName());
        collectionChangeRecord.setMapping(this);
        changeSet.addChange(collectionChangeRecord);
    }
    this.containerPolicy.recordAddToCollectionInChangeRecord((ObjectChangeSet)changeSetToAdd, collectionChangeRecord);
    if (referenceKey != null) {
        ((ObjectChangeSet)changeSetToAdd).setNewKey(referenceKey);
    }
}

/**
 * ADVANCED:
 * This method is used to have an object removed from a collection once the changeSet is applied.
 * The referenceKey parameter should only be used for direct Maps.
 */
public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object changeSetToRemove, ObjectChangeSet changeSet, AbstractSession session) {
    CollectionChangeRecord collectionChangeRecord = (CollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName());
    if (collectionChangeRecord == null) {
        collectionChangeRecord = new CollectionChangeRecord(changeSet);
        collectionChangeRecord.setAttribute(getAttributeName());
        collectionChangeRecord.setMapping(this);
        changeSet.addChange(collectionChangeRecord);
    }
    this.containerPolicy.recordRemoveFromCollectionInChangeRecord((ObjectChangeSet)changeSetToRemove, collectionChangeRecord);
    if (referenceKey != null) {
        ((ObjectChangeSet)changeSetToRemove).setOldKey(referenceKey);
    }
}

/**
 * INTERNAL:
 * Either create a new change record or update with the new value. This is used
 * by attribute change tracking.
 * Specifically in a collection mapping this will be called when the customer
 * set a new collection.
* In this case we will need to mark the change record
 * with the new and the old versions of the collection.
 * And mark the ObjectChangeSet with the attribute name then when the changes are calculated
 * force a compare on the collections to determine changes.
 */
@Override
public void updateChangeRecord(Object clone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) {
    CollectionChangeRecord collectionChangeRecord = (CollectionChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName());
    if (collectionChangeRecord == null) {
        collectionChangeRecord = new CollectionChangeRecord(objectChangeSet);
        collectionChangeRecord.setAttribute(getAttributeName());
        collectionChangeRecord.setMapping(this);
        objectChangeSet.addChange(collectionChangeRecord);
    }
    // the order is essential - the record should be set to deferred before recreateOriginalCollection is called -
    // otherwise will keep altering the change record while adding/removing each element into/from the original collection.
    collectionChangeRecord.setIsDeferred(true);
    objectChangeSet.deferredDetectionRequiredOn(getAttributeName());
    if (collectionChangeRecord.getOriginalCollection() == null) {
        collectionChangeRecord.recreateOriginalCollection(oldValue, uow);
    }
    collectionChangeRecord.setLatestCollection(newValue);
}

/**
 * INTERNAL:
 * Update a ChangeRecord to replace the ChangeSet for the old entity with the changeSet for the new Entity. This is
 * used when an Entity is merged into itself and the Entity reference new or detached entities.
 */
public void updateChangeRecordForSelfMerge(ChangeRecord changeRecord, Object source, Object target, UnitOfWorkChangeSet parentUOWChangeSet, UnitOfWorkImpl unitOfWork){
    getContainerPolicy().updateChangeRecordForSelfMerge(changeRecord, source, target, this, parentUOWChangeSet, unitOfWork);
}

/**
 * INTERNAL:
 * Adds or removes a new value and its change set to the collection change record based on the event passed in.
* This is used by
 * attribute change tracking.
 */
public void updateCollectionChangeRecord(CollectionChangeEvent event, ObjectChangeSet changeSet, UnitOfWorkImpl uow) {
    if (event !=null && event.getNewValue() != null) {
        Object newValue = event.getNewValue();
        ClassDescriptor descriptor;

        //PERF: Use referenceDescriptor if it does not have inheritance
        if (!getReferenceDescriptor().hasInheritance()) {
            descriptor = getReferenceDescriptor();
        } else {
            descriptor = uow.getDescriptor(newValue);
        }
        newValue = descriptor.getObjectBuilder().unwrapObject(newValue, uow);
        ObjectChangeSet changeSetToAdd = descriptor.getObjectBuilder().createObjectChangeSet(newValue, (UnitOfWorkChangeSet)changeSet.getUOWChangeSet(), uow);
        CollectionChangeRecord collectionChangeRecord = (CollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName());
        if (collectionChangeRecord == null) {
            collectionChangeRecord = new CollectionChangeRecord(changeSet);
            collectionChangeRecord.setAttribute(getAttributeName());
            collectionChangeRecord.setMapping(this);
            changeSet.addChange(collectionChangeRecord);
        }
        // Deferred records are compared wholesale at commit time; skip incremental recording.
        if(!collectionChangeRecord.isDeferred()) {
            this.containerPolicy.recordUpdateToCollectionInChangeRecord(event, changeSetToAdd, collectionChangeRecord);
        }
    }
}

/**
 * INTERNAL:
 * Set the change listener in the collection.
 * If the collection is not indirect it must be re-built.
 * This is used for resuming or flushing units of work.
*/
@Override
public void setChangeListener(Object clone, PropertyChangeListener listener, UnitOfWorkImpl uow) {
    if (this.indirectionPolicy.usesTransparentIndirection() && isAttributeValueInstantiated(clone)) {
        Object attributeValue = getRealAttributeValueFromObject(clone, uow);
        if (!(attributeValue instanceof CollectionChangeTracker)) {
            // The current collection cannot track changes: copy its contents into a
            // change-tracking container and install that container on the clone.
            Object container = attributeValue;
            ContainerPolicy containerPolicy = this.containerPolicy;
            if (attributeValue == null) {
                container = containerPolicy.containerInstance(1);
            } else {
                container = containerPolicy.containerInstance(containerPolicy.sizeFor(attributeValue));
                for (Object iterator = containerPolicy.iteratorFor(attributeValue); containerPolicy.hasNext(iterator);) {
                    containerPolicy.addInto(containerPolicy.nextEntry(iterator, uow), container, uow);
                }
            }
            setRealAttributeValueInObject(clone, container);
            ((CollectionChangeTracker)container).setTrackedAttributeName(getAttributeName());
            ((CollectionChangeTracker)container)._persistence_setPropertyChangeListener(listener);
        } else {
            // Already a change tracker: just (re)wire the attribute name and listener.
            ((CollectionChangeTracker)attributeValue).setTrackedAttributeName(getAttributeName());
            ((CollectionChangeTracker)attributeValue)._persistence_setPropertyChangeListener(listener);
        }
    }
    if (this.indirectionPolicy.usesTransparentIndirection()){
        ((IndirectCollection)getRealAttributeValueFromObject(clone, uow)).clearDeferredChanges();
    }
}

/**
 * PUBLIC:
 * Indicates whether the mapping supports listOrderField; if it doesn't, an
 * attempt to set listOrderField throws an exception.
 */
public boolean isListOrderFieldSupported() {
    return isListOrderFieldSupported;
}

/**
 * PUBLIC:
 * Field holds the order of elements in the list in the db, requires collection of type List.
 * Throws exception if the mapping doesn't support listOrderField.
*/
public void setListOrderField(DatabaseField field) {
    if(field != null) {
        if(isListOrderFieldSupported) {
            this.listOrderField = field;
        } else {
            // This mapping type cannot maintain a list order column.
            throw ValidationException.listOrderFieldNotSupported(this);
        }
    } else {
        this.listOrderField = null;
    }
}

/**
 * PUBLIC:
 * Field holds the order of elements in the list in the db, requires collection of type List.
 * Throws exception if the mapping doesn't support listOrderField.
 */
public void setListOrderFieldName(String fieldName) {
    setListOrderField(new DatabaseField(fieldName));
}

/**
 * ADVANCED:
 * Set whether the reference objects must be deleted
 * one by one, as opposed to with a single DELETE statement.
 * Note: Calling this method disables an optimization of the delete
 * behavior.
 */
public void setMustDeleteReferenceObjectsOneByOne(Boolean deleteOneByOne) {
    this.mustDeleteReferenceObjectsOneByOne = deleteOneByOne;
}

/**
 * PUBLIC:
 * Specifies what should be done if the list of values read from listOrderField is invalid
 * (there should be no nulls, no duplicates, no "holes").
 */
public void setOrderCorrectionType(OrderCorrectionType orderCorrectionType) {
    this.orderCorrectionType = orderCorrectionType;
}

/**
 * PUBLIC:
 * Configure the mapping to use an instance of the specified container class
 * to hold the target objects.
 * Note that if listOrderField is used then setListOrderField method
 * should be called before this method.
 *
 * The container class must implement (directly or indirectly) the
 * java.util.Collection interface.
 */
public void useCollectionClass(Class concreteClass) {
    ContainerPolicy policy = ContainerPolicy.buildPolicyFor(concreteClass, hasOrderBy() || listOrderField != null);
    setContainerPolicy(policy);
}

/**
 * PUBLIC:
 * Configure the mapping to use an instance of the specified container class
 * to hold the target objects.
 *

* The container class must implement (directly or indirectly) the
 * java.util.SortedSet interface.
 */
public void useSortedSetClass(Class concreteClass, Comparator comparator) {
    try {
        SortedCollectionContainerPolicy policy = (SortedCollectionContainerPolicy)ContainerPolicy.buildPolicyFor(concreteClass);
        policy.setComparator(comparator);
        setContainerPolicy(policy);
    } catch (ClassCastException e) {
        // Not a sorted-set policy: fall back to the plain collection configuration.
        useCollectionClass(concreteClass);
    }
}

/**
 * INTERNAL:
 * Configure the mapping to use an instance of the specified container class name
 * to hold the target objects. This method is used by MW.
 *
 * The container class must implement (directly or indirectly) the
 * java.util.SortedSet interface.
 */
public void useSortedSetClassName(String className) {
    this.useSortedSetClassName(className, null);
}

/**
 * INTERNAL:
 * Configure the mapping to use an instance of the specified container class name
 * to hold the target objects. This method is used by MW.
 *
 * The container class must implement (directly or indirectly) the
 * java.util.SortedSet interface.
 */
public void useSortedSetClassName(String className, String comparatorClassName) {
    SortedCollectionContainerPolicy policy = new SortedCollectionContainerPolicy(className);
    policy.setComparatorClassName(comparatorClassName);
    setContainerPolicy(policy);
}

/**
 * INTERNAL:
 * Used to set the collection class by name.
 * This is required when building from metadata to allow the correct class loader to be used.
 */
public void useCollectionClassName(String concreteClassName) {
    setContainerPolicy(new CollectionContainerPolicy(concreteClassName));
}

/**
 * INTERNAL:
 * Used to set the collection class by name.
 * This is required when building from metadata to allow the correct class loader to be used.
 */
public void useListClassName(String concreteClassName) {
    setContainerPolicy(new ListContainerPolicy(concreteClassName));
}

/**
 * PUBLIC:
 * Configure the mapping to use an instance of the specified container class
 * to hold the target objects. The key used to index a value in the
 * Map is the value returned by a call to the specified
 * zero-argument method.
 * The method must be implemented by the class (or a superclass) of any
 * value to be inserted into the Map.
 *

* The container class must implement (directly or indirectly) the
 * java.util.Map interface.
 *
 * To facilitate resolving the method, the mapping's referenceClass
 * must be set before calling this method.
 */
public void useMapClass(Class concreteClass, String keyName) {
    // the reference class has to be specified before coming here
    if (getReferenceClassName() == null) {
        throw DescriptorException.referenceClassNotSpecified(this);
    }
    ContainerPolicy policy = ContainerPolicy.buildPolicyFor(concreteClass);
    policy.setKeyName(keyName, getReferenceClassName());
    setContainerPolicy(policy);
}

/**
 * PUBLIC:
 * Configure the mapping to use an instance of the specified container
 * class to hold the target objects. The key used to index a value in the
 * Map is an instance of the composite primary key class.
 *
 * To facilitate resolving the primary key class, the mapping's
 * referenceClass must be set before calling this method.
 *
 * The container class must implement (directly or indirectly) the
 * java.util.Map interface.
 */
public void useMapClass(Class concreteClass) {
    useMapClass(concreteClass, null);
}

/**
 * INTERNAL:
 * Not sure where this is used, MW?
 */
public void useMapClassName(String concreteClassName, String methodName) {
    // the reference class has to be specified before coming here
    if (getReferenceClassName() == null) {
        throw DescriptorException.referenceClassNotSpecified(this);
    }
    MapContainerPolicy policy = new MapContainerPolicy(concreteClassName);
    policy.setKeyName(methodName, getReferenceClass().getName());
    setContainerPolicy(policy);
}

/**
 * PUBLIC:
 * If transparent indirection is used, a special collection will be placed in the source
 * object's attribute.
 * Fetching of the contents of the collection from the database will be delayed
 * until absolutely necessary. (Any message sent to the collection will cause
 * the contents to be faulted in from the database.)
 * This can result in rather significant performance gains, without having to change
 * the source object's attribute from Collection (or List or Vector) to
 * ValueHolderInterface.
 */
public void useTransparentCollection() {
    setIndirectionPolicy(new TransparentIndirectionPolicy());
    useCollectionClass(ClassConstants.IndirectList_Class);
}

/**
 * PUBLIC:
 * If transparent indirection is used, a special collection will be placed in the source
 * object's attribute.
 * Fetching of the contents of the collection from the database will be delayed
 * until absolutely necessary. (Any message sent to the collection will cause
 * the contents to be faulted in from the database.)
 * This can result in rather significant performance gains, without having to change
 * the source object's attribute from Set to
 * ValueHolderInterface.
*/
public void useTransparentSet() {
    setIndirectionPolicy(new TransparentIndirectionPolicy());
    useCollectionClass(IndirectSet.class);
    // The selection query reads into a plain HashSet; the indirection policy
    // wraps the result transparently.
    setSelectionQueryContainerPolicy(ContainerPolicy.buildPolicyFor(HashSet.class));
}

/**
 * PUBLIC:
 * If transparent indirection is used, a special collection will be placed in the source
 * object's attribute.
 * Fetching of the contents of the collection from the database will be delayed
 * until absolutely necessary. (Any message sent to the collection will cause
 * the contents to be faulted in from the database.)
 * This can result in rather significant performance gains, without having to change
 * the source object's attribute from List to
 * ValueHolderInterface.
 */
public void useTransparentList() {
    setIndirectionPolicy(new TransparentIndirectionPolicy());
    useCollectionClass(ClassConstants.IndirectList_Class);
    setSelectionQueryContainerPolicy(ContainerPolicy.buildPolicyFor(Vector.class, hasOrderBy() || listOrderField != null));
}

/**
 * PUBLIC:
 * If transparent indirection is used, a special map will be placed in the source
 * object's attribute.
 * Fetching of the contents of the map from the database will be delayed
 * until absolutely necessary. (Any message sent to the map will cause
 * the contents to be faulted in from the database.)
 * This can result in rather significant performance gains, without having to change
 * the source object's attribute from Map (or Map or Hashtable) to
 * ValueHolderInterface.
 *
 * The key used in the Map is the value returned by a call to the zero parameter
 * method named methodName. The method should be a zero argument method implemented (or
 * inherited) by the value to be inserted into the Map.
 */
public void useTransparentMap(String methodName) {
    setIndirectionPolicy(new TransparentIndirectionPolicy());
    useMapClass(ClassConstants.IndirectMap_Class, methodName);
    ContainerPolicy policy = ContainerPolicy.buildPolicyFor(Hashtable.class);
    policy.setKeyName(methodName, getReferenceClass());
    setSelectionQueryContainerPolicy(policy);
}

/**
 * INTERNAL:
 * To validate mappings declaration.
 */
@Override
public void validateBeforeInitialization(AbstractSession session) throws DescriptorException {
    super.validateBeforeInitialization(session);
    this.indirectionPolicy.validateContainerPolicy(session.getIntegrityChecker());
    // Validate that the declared attribute (field or property accessor) type is
    // compatible with the indirection policy in use.
    if (getAttributeAccessor() instanceof InstanceVariableAttributeAccessor) {
        Class attributeType = ((InstanceVariableAttributeAccessor)getAttributeAccessor()).getAttributeType();
        this.indirectionPolicy.validateDeclaredAttributeTypeForCollection(attributeType, session.getIntegrityChecker());
    } else if (getAttributeAccessor().isMethodAttributeAccessor()) {
        // 323403
        Class returnType = ((MethodAttributeAccessor)getAttributeAccessor()).getGetMethodReturnType();
        this.indirectionPolicy.validateGetMethodReturnTypeForCollection(returnType, session.getIntegrityChecker());
        Class parameterType = ((MethodAttributeAccessor)getAttributeAccessor()).getSetMethodParameterType();
        this.indirectionPolicy.validateSetMethodParameterTypeForCollection(parameterType, session.getIntegrityChecker());
    }
}

/**
 * INTERNAL:
 * Checks if object is deleted from the database or not.
*/
@Override
public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException {
    // Row is built for translation
    if (isReadOnly()) {
        return true;
    }
    if (isPrivateOwned() || isCascadeRemove()) {
        // Verify each privately owned / cascade-removed target was also deleted.
        Object objects = getRealCollectionAttributeValueFromObject(object, session);
        ContainerPolicy containerPolicy = this.containerPolicy;
        for (Object iter = containerPolicy.iteratorFor(objects); containerPolicy.hasNext(iter);) {
            if (!session.verifyDelete(containerPolicy.next(iter, session))) {
                return false;
            }
        }
    }
    AbstractRecord row = getDescriptor().getObjectBuilder().buildRowForTranslation(object, session);
    //cr 3819 added the line below to fix the translationtable to ensure that it
    // contains the required values
    prepareTranslationRow(row, object, getDescriptor(), session);
    Object value = session.executeQuery(getSelectionQuery(), row);
    // Deleted means re-reading the collection yields no rows.
    return this.containerPolicy.isEmpty(value);
}

/**
 * INTERNAL:
 * Return if this mapping supports change tracking.
 */
@Override
public boolean isChangeTrackingSupported(Project project) {
    return this.indirectionPolicy.usesTransparentIndirection();
}

/**
 * INTERNAL:
 * Directly build a change record without comparison.
 */
@Override
public ChangeRecord buildChangeRecord(Object clone, ObjectChangeSet owner, AbstractSession session) {
    Object cloneAttribute = null;
    cloneAttribute = getAttributeValueFromObject(clone);
    if ((cloneAttribute != null) && (!this.indirectionPolicy.objectIsInstantiated(cloneAttribute))) {
        // Uninstantiated indirection: the collection was never read, so no changes.
        return null;
    }

    // 2612538 - the default size of Map (32) is appropriate
    IdentityHashMap cloneKeyValues = new IdentityHashMap();
    ContainerPolicy cp = this.containerPolicy;
    Object cloneObjectCollection = null;
    if (cloneAttribute != null) {
        cloneObjectCollection = getRealCollectionAttributeValueFromObject(clone, session);
    } else {
        cloneObjectCollection = cp.containerInstance(1);
    }
    // Record every non-null element as an addition (identity-keyed).
    Object cloneIter = cp.iteratorFor(cloneObjectCollection);
    while (cp.hasNext(cloneIter)) {
        Object firstObject = cp.next(cloneIter, session);
        if (firstObject != null) {
            cloneKeyValues.put(firstObject, firstObject);
        }
    }
    CollectionChangeRecord changeRecord = new CollectionChangeRecord(owner);
    changeRecord.setAttribute(getAttributeName());
    changeRecord.setMapping(this);
    changeRecord.addAdditionChange(cloneKeyValues, cp, (UnitOfWorkChangeSet)owner.getUOWChangeSet(), session);
    if (changeRecord.hasChanges()) {
        return changeRecord;
    }
    return null;
}

/**
 * INTERNAL:
 * This method is used to load a relationship from a list of PKs. This list
 * may be available if the relationship has been cached.
 */
@Override
public Object valueFromPKList(Object[] pks, AbstractRecord foreignKeys, AbstractSession session){
    ContainerPolicy cp = this.containerPolicy;
    return cp.valueFromPKList(pks, foreignKeys, this, session);
}

/**
 * INTERNAL:
 * Return the value of the field from the row or a value holder on the query to obtain the object.
 * To get here the mapping's isJoiningSupported() should return true.
 */
@Override
protected Object valueFromRowInternalWithJoin(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey parentCacheKey, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException {
    Object value = this.containerPolicy.containerInstance();
    // Extract the primary key of the source object, to filter only the joined rows for that object.
    Object sourceKey = this.descriptor.getObjectBuilder().extractPrimaryKeyFromRow(row, executionSession);
    // If the query was using joining, all of the result rows by primary key will have been computed.
    List rows = joinManager.getDataResultsByPrimaryKey().get(sourceKey);
    // If no 1-m rows were fetch joined, then get the value normally,
    // this can occur with pagination where the last row may not be complete.
    if (rows == null) {
        return valueFromRowInternal(row, joinManager, sourceQuery, executionSession);
    }
    int size = rows.size();
    if (size > 0) {
        // A nested query must be built to pass to the descriptor that looks like the real query execution would,
        // these should be cached on the query during prepare.
        ObjectLevelReadQuery nestedQuery = prepareNestedJoinQueryClone(row, rows, joinManager, sourceQuery, executionSession);
        // A set of target cache keys must be maintained to avoid duplicates from multiple 1-m joins.
        Set targetPrimaryKeys = new HashSet();
        ArrayList targetObjects = null;
        ArrayList targetRows = null;
        boolean shouldAddAll = this.containerPolicy.shouldAddAll();
        if (shouldAddAll) {
            targetObjects = new ArrayList(size);
            targetRows = new ArrayList(size);
        }
        // For each rows, extract the target row and build the target object and add to the collection.
        for (int index = 0; index < size; index++) {
            AbstractRecord sourceRow = rows.get(index);
            AbstractRecord targetRow = sourceRow;
            // The field for many objects may be in the row,
            // so build the subpartion of the row through the computed values in the query,
            // this also helps the field indexing match.
            targetRow = trimRowForJoin(targetRow, joinManager, executionSession);
            // Partial object queries must select the primary key of the source and related objects.
            // If the target joined rows in null (outerjoin) means an empty collection.
            Object targetKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromRow(targetRow, executionSession);
            if (targetKey == null) {
                // A null primary key means an empty collection returned as nulls from an outerjoin.
                return this.indirectionPolicy.valueFromRow(value);
            }
            // Only build/add the target object once, skip duplicates from multiple 1-m joins.
if (!targetPrimaryKeys.contains(targetKey)) { nestedQuery.setTranslationRow(targetRow); targetPrimaryKeys.add(targetKey); Object targetObject = getReferenceDescriptor().getObjectBuilder().buildObject(nestedQuery, targetRow); Object targetMapKey = this.containerPolicy.buildKeyFromJoinedRow(targetRow, joinManager, nestedQuery, parentCacheKey, executionSession, isTargetProtected); nestedQuery.setTranslationRow(null); if (targetMapKey == null){ if (shouldAddAll) { targetObjects.add(targetObject); targetRows.add(targetRow); } else { this.containerPolicy.addInto(targetObject, value, executionSession); } } else { this.containerPolicy.addInto(targetMapKey, targetObject, value, executionSession); } } } if (shouldAddAll) { this.containerPolicy.addAll(targetObjects, value, executionSession, targetRows, nestedQuery, parentCacheKey, isTargetProtected); } } return this.indirectionPolicy.valueFromRow(value); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/ContainerMapping.java0000664000000000000000000000744712216173130024336 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; import org.eclipse.persistence.internal.queries.*; /** * Interface used by clients to interact * with the assorted mappings that use ContainerPolicy. 
* * @see org.eclipse.persistence.internal.queries.ContainerPolicy * * @author Big Country * @since TOPLink/Java 4.0 */ public interface ContainerMapping { /** * PUBLIC: * Return the mapping's container policy. */ ContainerPolicy getContainerPolicy(); /** * PUBLIC: * Set the mapping's container policy. */ void setContainerPolicy(ContainerPolicy containerPolicy); /** * PUBLIC: * Configure the mapping to use an instance of the specified container class * to hold the target objects. *

The container class must implement (directly or indirectly) the * java.util.Collection interface. */ void useCollectionClass(Class concreteClass); /** * PUBLIC: * Configure the mapping to use an instance of the specified container class * to hold the target objects. *

The container class must implement (directly or indirectly) the * java.util.Collection interface. */ void useCollectionClassName(String concreteClass); /** * PUBLIC: * Configure the mapping to use an instance of the specified container class * to hold the target objects. *

The container class must implement (directly or indirectly) the * java.util.List interface. */ void useListClassName(String concreteClass); /** * PUBLIC: * Configure the mapping to use an instance of the specified container class * to hold the target objects. The key used to index a value in the * Map is the value returned by a call to the specified * zero-argument method. * The method must be implemented by the class (or a superclass) of any * value to be inserted into the Map. *

The container class must implement (directly or indirectly) the * java.util.Map interface. *

To facilitate resolving the method, the mapping's referenceClass * must set before calling this method. */ void useMapClass(Class concreteClass, String methodName); /** * PUBLIC: * Configure the mapping to use an instance of the specified container class * to hold the target objects. The key used to index a value in the * Map is the value returned by a call to the specified * zero-argument method. * The method must be implemented by the class (or a superclass) of any * value to be inserted into the Map. *

The container class must implement (directly or indirectly) the * java.util.Map interface. *

To facilitate resolving the method, the mapping's referenceClass * must set before calling this method. */ void useMapClassName(String concreteClass, String methodName); } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/AggregateObjectMapping.java0000664000000000000000000030231412216173130025420 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 07/19/2011-2.2.1 Guy Pelletier * - 338812: ManyToMany mapping in aggregate object violate integrity constraint on deletion * 08/01/2012-2.5 Chris Delahunt * - 371950: Metadata caching * 10/25/2012-2.5 Guy Pelletier * - 374688: JPA 2.1 Converter support * 09 Jan 2013-2.5 Gordon Yorke * - 397772: JPA 2.1 Entity Graph Support * 02/11/2013-2.5 Guy Pelletier * - 365931: @JoinColumn(name="FK_DEPT",insertable = false, updatable = true) causes INSERT statement to include this data value that it is associated with * 06/03/2013-2.5.1 Guy Pelletier * - 402380: 3 jpa21/advanced tests failed on server with * "java.lang.NoClassDefFoundError: org/eclipse/persistence/testing/models/jpa21/advanced/enums/Gender" ******************************************************************************/ package org.eclipse.persistence.mappings; import java.beans.PropertyChangeListener; import java.util.*; import java.util.Map.Entry; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.FetchGroupManager; import 
org.eclipse.persistence.descriptors.changetracking.AttributeChangeTrackingPolicy; import org.eclipse.persistence.descriptors.changetracking.DeferredChangeDetectionPolicy; import org.eclipse.persistence.descriptors.changetracking.ObjectChangeTrackingPolicy; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.internal.queries.EntityFetchGroup; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.internal.queries.MappedKeyMapContainerPolicy; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.internal.descriptors.DescriptorIterator; import org.eclipse.persistence.internal.descriptors.ObjectBuilder; import org.eclipse.persistence.internal.expressions.SQLSelectStatement; import org.eclipse.persistence.logging.SessionLog; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; import org.eclipse.persistence.mappings.foundation.MapKeyMapping; import org.eclipse.persistence.mappings.querykeys.DirectQueryKey; import org.eclipse.persistence.mappings.querykeys.QueryKey; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.DatabaseRecord; import org.eclipse.persistence.sessions.Project; /** *

Purpose:Two objects can be considered to be related by aggregation if there is a strict * 1:1 relationship between the objects. This means that the target (child or owned) object * cannot exist without the source (parent) object. * * In TopLink, it also means the data for the owned object is stored in the same table as * the parent. * * @author Sati * @since TOPLink/Java 1.0 */ public class AggregateObjectMapping extends AggregateMapping implements RelationalMapping, MapKeyMapping, EmbeddableMapping { /** * If all the fields in the database row for the aggregate object are NULL, * then, by default, the mapping will place a null in the appropriate source object * (as opposed to an aggregate object filled with nulls). * To change this behavior, set the value of this variable to false. Then the mapping * will build a new instance of the aggregate object that is filled with nulls * and place it in the source object. */ protected boolean isNullAllowed; protected DatabaseTable aggregateKeyTable = null; /** Map the name of a field in the aggregate descriptor to a field in the source table. */ /** 322233 - changed to store the source DatabaseField to hold Case and other colunm info*/ protected Map aggregateToSourceFields; /** * Map of nested attributes that need to apply an override name to their * a nested aggregate mapping's database field. Aggregate to source fields * map is the existing EclipseLink functionality and works well when all * embeddable mappings have unique database fields. This map adds specific * attribute to database field override. * @see addFieldTranslation() */ protected Map nestedFieldTranslations; /** * List of many to many mapping overrides to apply at initialize time to * their cloned aggregate mappings. */ protected List overrideManyToManyMappings; /** * List of unidirectional one to many mapping overrides to apply at * initialize time to their cloned aggregate mappings. 
*/ protected List overrideUnidirectionalOneToManyMappings; /** * List of converters to apply at initialize time to their cloned aggregate mappings. */ protected Map converters; /** * List of maps id mappings that need to be set to read only at initialize * time on their cloned aggregate mappings. */ protected List mapsIdMappings; /** * Default constructor. */ public AggregateObjectMapping() { aggregateToSourceFields = new HashMap(5); nestedFieldTranslations = new HashMap(); mapsIdMappings = new ArrayList(); overrideManyToManyMappings = new ArrayList(); overrideUnidirectionalOneToManyMappings = new ArrayList(); converters = new HashMap(); isNullAllowed = true; } /** * INTERNAL: */ public boolean isRelationalMapping() { return true; } /** * INTERNAL: * Used when initializing queries for mappings that use a Map * Called when the selection query is being initialized to add the fields for the map key to the query */ public void addAdditionalFieldsToQuery(ReadQuery selectionQuery, Expression baseExpression){ for (DatabaseField field : getReferenceDescriptor().getAllFields()) { if (selectionQuery.isObjectLevelReadQuery()) { ((ObjectLevelReadQuery)selectionQuery).addAdditionalField(baseExpression.getField(field)); } else if (selectionQuery.isDataReadQuery()) { ((SQLSelectStatement)((DataReadQuery)selectionQuery).getSQLStatement()).addField(baseExpression.getField(field)); } } } /** * Add a converter to be applied to a mapping of the aggregate descriptor. 
*/ public void addConverter(Converter converter, String attributeName) { converters.put(attributeName, converter); } /** * INTERNAL: * Used when initializing queries for mappings that use a Map * Called when the insert query is being initialized to ensure the fields for the map key are in the insert query */ public void addFieldsForMapKey(AbstractRecord joinRow){ for (DatabaseMapping mapping : getReferenceDescriptor().getMappings()) { if (!mapping.isReadOnly()) { for (DatabaseField field : mapping.getFields()) { if (field.isUpdatable()){ joinRow.put(field, null); } } } } } /** * PUBLIC: * Add a field name translation that maps from a field name in the * source table to a field name in the aggregate descriptor. */ public void addFieldNameTranslation(String sourceFieldName, String aggregateFieldName) { // 322233 - changed to store the sourceField instead of sourceFieldName addFieldTranslation(new DatabaseField(sourceFieldName), aggregateFieldName); } /** * PUBLIC: * Add a field translation that maps from a field in the * source table to a field name in the aggregate descriptor. */ public void addFieldTranslation(DatabaseField sourceField, String aggregateFieldName) { //AggregateObjectMapping does not seem to support Aggregates on multiple tables String unQualifiedAggregateFieldName = aggregateFieldName.substring(aggregateFieldName.lastIndexOf('.') + 1);// -1 is returned for no ".". getAggregateToSourceFields().put(unQualifiedAggregateFieldName, sourceField); } /** * INTERNAL: * In JPA users may specify a maps id mapping on a shared embeddable * descriptor. These mappings need to be set to read-only at initialize * time, after the reference descriptor is cloned. */ public void addMapsIdMapping(DatabaseMapping mapping) { mapsIdMappings.add(mapping); } /** * INTERNAL: * Add a nested field translation that maps from a field in the source table * to a field name in a nested aggregate descriptor. 
These are handled * slightly different that regular field translations in that they are * unique based on the attribute name. It solves the case where multiple * nested embeddables have mappings to similarly named default columns. */ public void addNestedFieldTranslation(String attributeName, DatabaseField sourceField, String aggregateFieldName) { // Aggregate field name is redundant here as we will look up the field // through the attribute name. This method signature is to satisfy the // Embeddable interface. AggregateCollectionMapping uses the aggregate // field name. nestedFieldTranslations.put(attributeName, new Object[]{sourceField, aggregateFieldName}); } /** * INTERNAL: * In JPA users may specify overrides to apply to a many to many mapping * on a shared embeddable descriptor. These settings are applied at * initialize time, after the reference descriptor is cloned. */ public void addOverrideManyToManyMapping(ManyToManyMapping mapping) { overrideManyToManyMappings.add(mapping); } /** * INTERNAL: * In JPA users may specify overrides to apply to a unidirectional one to * many mapping on a shared embeddable descriptor. These settings are * applied at initialize time, after the reference descriptor is cloned. */ public void addOverrideUnidirectionalOneToManyMapping(UnidirectionalOneToManyMapping mapping) { overrideUnidirectionalOneToManyMappings.add(mapping); } /** * INTERNAL: * For mappings used as MapKeys in MappedKeyContainerPolicy. Add the target of this mapping to the deleted * objects list if necessary * * This method is used for removal of private owned relationships. * AggregateObjectMappings are dealt with in their parent delete, so this is a no-op. * * @param object * @param manager */ public void addKeyToDeletedObjectsList(Object object, Map deletedObjects){ } /** * INTERNAL: * Return whether all the aggregate fields in the specified * row are NULL. 
*/ protected boolean allAggregateFieldsAreNull(AbstractRecord databaseRow) { Vector fields = getReferenceFields(); int size = fields.size(); for (int index = 0; index < size; index++) { DatabaseField field = (DatabaseField)fields.get(index); Object value = databaseRow.get(field); if (value != null) { return false; } } return true; } /** * PUBLIC: * If all the fields in the database row for the aggregate object are NULL, * then, by default, the mapping will place a null in the appropriate source object * (as opposed to an aggregate object filled with nulls). This behavior can be * explicitly set by calling #allowNull(). * To change this behavior, call #dontAllowNull(). Then the mapping * will build a new instance of the aggregate object that is filled with nulls * and place it in the source object. * In either situation, when writing, the mapping will place a NULL in all the * fields in the database row for the aggregate object. * * Note: Any aggregate that has a relationship mapping automatically does not allow * null. */ public void allowNull() { setIsNullAllowed(true); } /** * INTERNAL: * Return whether the query's backup object has an attribute * value of null. */ protected boolean backupAttributeValueIsNull(WriteObjectQuery query) { if (query.getSession().isUnitOfWork()) { Object backupAttributeValue = getAttributeValueFromObject(query.getBackupClone()); if (backupAttributeValue == null) { return true; } } return false; } /** * INTERNAL: * Build and return an aggregate object from the specified row. * If a null value is allowed and all the appropriate fields in the row are NULL, return a null. * If an aggregate is referenced by the target object, return it (maintain identity) * Otherwise, simply create a new aggregate object and return it. 
*/ public Object buildAggregateFromRow(AbstractRecord databaseRow, Object targetObject, CacheKey cacheKey, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, boolean buildShallowOriginal, AbstractSession executionSession, boolean targetIsProtected) throws DatabaseException { if (databaseRow.hasSopObject()) { Object sopAggregate = getAttributeValueFromObject(databaseRow.getSopObject()); if ((targetObject != null) && (targetObject != databaseRow.getSopObject())) { setAttributeValueInObject(targetObject, sopAggregate); } return sopAggregate; } // check for all NULLs if (isNullAllowed() && allAggregateFieldsAreNull(databaseRow)) { return null; } // maintain object identity (even if not refreshing) if target object references the aggregate // if aggregate is not referenced by the target object, construct a new aggregate Object aggregate = null; ClassDescriptor descriptor = getReferenceDescriptor(); boolean refreshing = true; if (targetObject != null){ if (descriptor.hasInheritance()) { Class newAggregateClass = descriptor.getInheritancePolicy().classFromRow(databaseRow, executionSession); descriptor = getReferenceDescriptor(newAggregateClass, executionSession); aggregate = getMatchingAttributeValueFromObject(databaseRow, targetObject, executionSession, descriptor); if ((aggregate != null) && (aggregate.getClass() != newAggregateClass)) { // if the class has changed out from underneath us, we cannot preserve object identity // build a new instance of the *new* class aggregate = descriptor.getObjectBuilder().buildNewInstance(); refreshing = false; } } else { aggregate = getMatchingAttributeValueFromObject(databaseRow, targetObject, executionSession, descriptor); } } if (aggregate == null) { aggregate = descriptor.getObjectBuilder().buildNewInstance(); refreshing = false; } ObjectBuildingQuery nestedQuery = sourceQuery; FetchGroup targetFetchGroup = null; if (sourceQuery.isObjectLevelReadQuery()) { ObjectLevelReadQuery objectQuery = 
(ObjectLevelReadQuery)sourceQuery; ObjectLevelReadQuery nestedObjectQuery = (ObjectLevelReadQuery)nestedQuery; String attributeName = getAttributeName(); if ((objectQuery.isPartialAttribute(attributeName) || ((joinManager != null) && joinManager.isAttributeJoined(this.descriptor, this)))) { // A nested query must be built to pass to the descriptor that looks like the real query execution would. nestedObjectQuery = (ObjectLevelReadQuery)objectQuery.deepClone(); // Must cascade the nested partial/join expression and filter the nested ones. if (objectQuery.hasPartialAttributeExpressions()) { nestedObjectQuery.setPartialAttributeExpressions(extractNestedExpressions(objectQuery.getPartialAttributeExpressions(), nestedObjectQuery.getExpressionBuilder(), false)); } else if ( nestedObjectQuery.getJoinedAttributeManager().isToManyJoin()) { // need the data results to build the child object(s). List dataResults = new ArrayList(); //setDataResults does processing and calculations (such as DataResultsByPrimaryKey) that we do not want to do on the //actual data, but it has no other direct set method nestedObjectQuery.getJoinedAttributeManager().setDataResults(dataResults, executionSession); //set the dataResults and DataResultsByPrimaryKey directly from the parent dataResults.addAll(joinManager.getDataResults_()); nestedObjectQuery.getJoinedAttributeManager().getDataResultsByPrimaryKey().putAll(joinManager.getDataResultsByPrimaryKey()); } nestedObjectQuery.setDescriptor(descriptor); //need to use the new joinManager which has the proper aggregate descriptor set joinManager = nestedObjectQuery.getJoinedAttributeManager(); } if (objectQuery.isAttributeBatchRead(this.descriptor, attributeName)) { if(nestedObjectQuery == objectQuery) { // A nested query must be built to pass to the descriptor that looks like the real query execution would. nestedObjectQuery = (ObjectLevelReadQuery)nestedObjectQuery.clone(); } // Must carry over properties for batching to work. 
nestedObjectQuery.setProperties(objectQuery.getProperties()); // Computed nested batch attribute expressions. nestedObjectQuery.getBatchFetchPolicy().setAttributeExpressions(extractNestedExpressions(objectQuery.getBatchReadAttributeExpressions(), nestedObjectQuery.getExpressionBuilder(), false)); } FetchGroup sourceFG = sourceQuery.getExecutionFetchGroup(descriptor); if (sourceFG != null) { if(nestedObjectQuery == objectQuery) { // A nested query must be built to pass to the descriptor that looks like the real query execution would. nestedObjectQuery = (ObjectLevelReadQuery)nestedObjectQuery.clone(); } targetFetchGroup = sourceFG.getGroup(getAttributeName()); if(targetFetchGroup != null && sourceQuery.getDescriptor().hasFetchGroupManager()) { //if the parent object has a fetchgroup manager then aggregates can support a fetchgroup manager ((ObjectLevelReadQuery)nestedObjectQuery).setFetchGroup(targetFetchGroup); }else{ targetFetchGroup = null; nestedObjectQuery.setFetchGroup(null); nestedObjectQuery.setFetchGroupName(null); } nestedObjectQuery.setShouldUseDefaultFetchGroup(false); nestedObjectQuery.prepareFetchGroup(); } if (descriptor.hasFetchGroupManager()){ descriptor.getFetchGroupManager().unionEntityFetchGroupIntoObject(aggregate, descriptor.getFetchGroupManager().getEntityFetchGroup(targetFetchGroup), executionSession, true); //merge fetchgroup into aggregate fetchgroup that may have been there from previous read. 
} nestedQuery = nestedObjectQuery; } if (buildShallowOriginal) { descriptor.getObjectBuilder().buildAttributesIntoShallowObject(aggregate, databaseRow, nestedQuery); } else if (executionSession.isUnitOfWork()) { descriptor.getObjectBuilder().buildAttributesIntoWorkingCopyClone(aggregate, buildWrapperCacheKeyForAggregate(cacheKey, targetIsProtected), nestedQuery, joinManager, databaseRow, (UnitOfWorkImpl)executionSession, refreshing); } else { descriptor.getObjectBuilder().buildAttributesIntoObject(aggregate, buildWrapperCacheKeyForAggregate(cacheKey, targetIsProtected), databaseRow, nestedQuery, joinManager, sourceQuery.getExecutionFetchGroup(descriptor), refreshing, executionSession); } if (sourceQuery.shouldMaintainCache() && ! sourceQuery.shouldStoreBypassCache()) { // Set the fetch group to the domain object, after built. if ((targetFetchGroup != null) && descriptor.hasFetchGroupManager()) { EntityFetchGroup entityFetchGroup = (EntityFetchGroup) descriptor.getFetchGroupManager().getEntityFetchGroup(targetFetchGroup).clone(); if (entityFetchGroup !=null){ entityFetchGroup.setRootEntity((FetchGroupTracker) cacheKey.getObject()); entityFetchGroup.setOnEntity(aggregate, executionSession); } } } return aggregate; } /** * INTERNAL: * Wrap the aggregate represented by this mapping in a cachekey so it can be processed my * methods down the stack. 
* @param owningCacheKey - the cache key holding the object to extract the aggregate from * @return */ protected CacheKey buildWrapperCacheKeyForAggregate(CacheKey owningCacheKey, boolean targetIsProtected) { if (!this.descriptor.getCachePolicy().isProtectedIsolation()) { return owningCacheKey; } if (!targetIsProtected || this.isMapKeyMapping || (owningCacheKey == null)) { return owningCacheKey; } CacheKey aggregateKey = owningCacheKey; Object object = owningCacheKey.getObject(); if (owningCacheKey.getObject() != null) { Object aggregate = getAttributeValueFromObject(object); aggregateKey = new CacheKey(null, aggregate, null); aggregateKey.setProtectedForeignKeys(owningCacheKey.getProtectedForeignKeys()); aggregateKey.setRecord(owningCacheKey.getRecord()); aggregateKey.setIsolated(owningCacheKey.isIsolated()); aggregateKey.setReadTime(owningCacheKey.getReadTime()); } return aggregateKey; } /** * INTERNAL: * Write null values for all aggregate fields into the parent row. */ protected void writeNullReferenceRow(AbstractRecord record) { List fields = getReferenceFields(); int size = fields.size(); for (int index = 0; index < size; index++) { record.put(fields.get(index), null); } if (size > 0) { // EL Bug 319759 - if a field is null, then the update call cache should not be used record.setNullValueInFields(true); } } /** * INTERNAL: * Used to allow object level comparisons. * In the case of an Aggregate which has no primary key must do an attribute * by attribute comparison. */ public Expression buildObjectJoinExpression(Expression expression, Object value, AbstractSession session) { Expression attributeByAttributeComparison = null; Expression join = null; Object attributeValue = null; // value need not be unwrapped as it is an aggregate, nor should it // influence a call to getReferenceDescriptor. 
ClassDescriptor referenceDescriptor = getReferenceDescriptor(); if ((value != null) && !referenceDescriptor.getJavaClass().isInstance(value)) { throw QueryException.incorrectClassForObjectComparison(expression, value, this); } Enumeration mappings = referenceDescriptor.getMappings().elements(); for (; mappings.hasMoreElements();) { DatabaseMapping mapping = (DatabaseMapping)mappings.nextElement(); if (value == null) { attributeValue = null; } else { attributeValue = mapping.getAttributeValueFromObject(value); } join = expression.get(mapping.getAttributeName()).equal(attributeValue); if (attributeByAttributeComparison == null) { attributeByAttributeComparison = join; } else { attributeByAttributeComparison = attributeByAttributeComparison.and(join); } } return attributeByAttributeComparison; } /** * INTERNAL: * Used to allow object level comparisons. */ public Expression buildObjectJoinExpression(Expression expression, Expression argument, AbstractSession session) { Expression attributeByAttributeComparison = null; //Enumeration mappingsEnum = getSourceToTargetKeyFields().elements(); Enumeration mappingsEnum = getReferenceDescriptor().getMappings().elements(); for (; mappingsEnum.hasMoreElements();) { DatabaseMapping mapping = (DatabaseMapping)mappingsEnum.nextElement(); String attributeName = mapping.getAttributeName(); Expression join = expression.get(attributeName).equal(argument.get(attributeName)); if (attributeByAttributeComparison == null) { attributeByAttributeComparison = join; } else { attributeByAttributeComparison = attributeByAttributeComparison.and(join); } } return attributeByAttributeComparison; } /** * INTERNAL: * Write the aggregate values into the parent row. 
*/ protected void writeToRowFromAggregate(AbstractRecord record, Object object, Object attributeValue, AbstractSession session, WriteType writeType) throws DescriptorException { if (attributeValue == null) { if (this.isNullAllowed) { writeNullReferenceRow(record); } else { throw DescriptorException.nullForNonNullAggregate(object, this); } } else { if (!session.isClassReadOnly(attributeValue.getClass())) { getObjectBuilder(attributeValue, session).buildRow(record, attributeValue, session, writeType); } } } /** * INTERNAL: * Write the aggregate values into the parent row for shallow insert. */ protected void writeToRowFromAggregateForShallowInsert(AbstractRecord record, Object object, Object attributeValue, AbstractSession session) throws DescriptorException { if (attributeValue == null) { if (this.isNullAllowed) { writeNullReferenceRow(record); } else { throw DescriptorException.nullForNonNullAggregate(object, this); } } else { if (!session.isClassReadOnly(attributeValue.getClass())) { getObjectBuilder(attributeValue, session).buildRowForShallowInsert(record, attributeValue, session); } } } /** * INTERNAL: * Write the aggregate values into the parent row for update after shallow insert. */ protected void writeToRowFromAggregateForUpdateAfterShallowInsert(AbstractRecord record, Object object, Object attributeValue, AbstractSession session, DatabaseTable table) throws DescriptorException { if (attributeValue == null) { if (!this.isNullAllowed) { throw DescriptorException.nullForNonNullAggregate(object, this); } } else { if (!session.isClassReadOnly(attributeValue.getClass()) && !isPrimaryKeyMapping()) { getObjectBuilder(attributeValue, session).buildRowForUpdateAfterShallowInsert(record, attributeValue, session, table); } } } /** * INTERNAL: * Write the aggregate values into the parent row for update before shallow delete. 
*/ protected void writeToRowFromAggregateForUpdateBeforeShallowDelete(AbstractRecord record, Object object, Object attributeValue, AbstractSession session, DatabaseTable table) throws DescriptorException { if (attributeValue == null) { if (!this.isNullAllowed) { throw DescriptorException.nullForNonNullAggregate(object, this); } } else { if (!session.isClassReadOnly(attributeValue.getClass()) && !isPrimaryKeyMapping()) { getObjectBuilder(attributeValue, session).buildRowForUpdateBeforeShallowDelete(record, attributeValue, session, table); } } } /** * INTERNAL: * Build and return a database row built with the values from * the specified attribute value. */ protected void writeToRowFromAggregateWithChangeRecord(AbstractRecord record, ChangeRecord changeRecord, ObjectChangeSet objectChangeSet, AbstractSession session, WriteType writeType) throws DescriptorException { if (objectChangeSet == null) { if (this.isNullAllowed) { writeNullReferenceRow(record); } else { Object object = ((ObjectChangeSet)changeRecord.getOwner()).getUnitOfWorkClone(); throw DescriptorException.nullForNonNullAggregate(object, this); } } else { if (!session.isClassReadOnly(objectChangeSet.getClassType(session))) { getReferenceDescriptor(objectChangeSet.getClassType(session), session).getObjectBuilder().buildRowWithChangeSet(record, objectChangeSet, session, writeType); } } } /** * INTERNAL: * Build and return a database row built with the changed values from * the specified attribute value. 
*/ protected void writeToRowFromAggregateForUpdate(AbstractRecord record, WriteObjectQuery query, Object attributeValue) throws DescriptorException { if (attributeValue == null) { if (this.isNullAllowed) { if (backupAttributeValueIsNull(query)) { // both attributes are null - no update required } else { writeNullReferenceRow(record); } } else { throw DescriptorException.nullForNonNullAggregate(query.getObject(), this); } } else if ((query.getBackupClone() != null) && ((getMatchingBackupAttributeValue(query, attributeValue) == null) || !(attributeValue.getClass().equals(getMatchingBackupAttributeValue(query, attributeValue).getClass())))) { getObjectBuilder(attributeValue, query.getSession()).buildRow(record, attributeValue, query.getSession(), WriteType.UPDATE); } else { if (!query.getSession().isClassReadOnly(attributeValue.getClass())) { WriteObjectQuery clonedQuery = (WriteObjectQuery)query.clone(); clonedQuery.setObject(attributeValue); if (query.getSession().isUnitOfWork()) { Object backupAttributeValue = getMatchingBackupAttributeValue(query, attributeValue); if (backupAttributeValue == null) { backupAttributeValue = getObjectBuilder(attributeValue, query.getSession()).buildNewInstance(); } clonedQuery.setBackupClone(backupAttributeValue); } getObjectBuilder(attributeValue, query.getSession()).buildRowForUpdate(record, clonedQuery); } } } /** * INTERNAL: * Clone the attribute from the original and assign it to the clone. 
*/
@Override
public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) {
    Object attributeValue = getAttributeValueFromObject(original);
    Object aggregateClone = buildClonePart(original, clone, cacheKey, attributeValue, refreshCascade, cloningSession);
    if (aggregateClone != null && cloningSession.isUnitOfWork()) {
        // Attach the change listener so edits to the aggregate are tracked
        // against the owning clone's attribute.
        ClassDescriptor descriptor = getReferenceDescriptor(aggregateClone, cloningSession);
        descriptor.getObjectChangePolicy().setAggregateChangeListener(clone, aggregateClone, (UnitOfWorkImpl)cloningSession, descriptor, getAttributeName());
    }
    setAttributeValueInObject(clone, aggregateClone);
}

/**
 * INTERNAL:
 * Build a clone of the given element in a unitOfWork.
 * @param attributeValue the aggregate value being cloned
 * @param parent the clone that will own the aggregate clone
 * @param cloningSession session performing the clone (listener attached when it is a unit of work)
 * @param isExisting true when the parent already exists (controls deep-clone behavior in buildClonePart)
 * @return the cloned aggregate, or null when the attribute was null
 */
public Object buildElementClone(Object attributeValue, Object parent, CacheKey parentCacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isExisting, boolean isFromSharedCache){
    Object aggregateClone = buildClonePart(attributeValue, parent, parentCacheKey, refreshCascade, cloningSession, !isExisting);
    if (aggregateClone != null && cloningSession.isUnitOfWork()) {
        ClassDescriptor descriptor = getReferenceDescriptor(aggregateClone, cloningSession);
        descriptor.getObjectChangePolicy().setAggregateChangeListener(parent, aggregateClone, (UnitOfWorkImpl)cloningSession, descriptor, getAttributeName());
    }
    return aggregateClone;
}

/**
 * INTERNAL:
 * Set the change listener in the aggregate. The supplied listener itself is
 * not installed directly; the reference descriptor's change policy wires an
 * aggregate-aware listener keyed on this mapping's attribute name.
 */
public void setChangeListener(Object clone, PropertyChangeListener listener, UnitOfWorkImpl uow) {
    Object attributeValue = getAttributeValueFromObject(clone);
    if (attributeValue != null) {
        ClassDescriptor descriptor = getReferenceDescriptor(attributeValue, uow);
        descriptor.getObjectChangePolicy().setAggregateChangeListener(clone, attributeValue, uow, descriptor, getAttributeName());
    }
}

/**
 * INTERNAL:
 * A combination of readFromRowIntoObject and buildClone.
 * <p>
 * buildClone assumes the attribute value exists on the original and can
 * simply be copied.
 * <p>
 * readFromRowIntoObject assumes that one is building an original.
 * <p>
 * Both of the above assumptions are false in this method, which actually
 * attempts to do both at the same time.
 * <p>
 * Extract value from the row and set the attribute to this value in the
 * working copy clone.
 * In order to bypass the shared cache when in transaction a UnitOfWork must
 * be able to populate working copies directly from the row.
 */
public void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) {
    // This method is a combination of buildAggregateFromRow and
    // buildClonePart on the super class.
    // None of buildClonePart is used, as this is not an original new object, nor
    // do we worry about creating heavy clones for aggregate objects.
    Object clonedAttributeValue = buildAggregateFromRow(databaseRow, clone, null, joinManager, sourceQuery, false, executionSession, true);
    // NOTE(review): descriptor is looked up before the null check; presumably
    // getReferenceDescriptor(Object, session) tolerates a null value - verify.
    ClassDescriptor descriptor = getReferenceDescriptor(clonedAttributeValue, unitOfWork);
    if (clonedAttributeValue != null) {
        descriptor.getObjectChangePolicy().setAggregateChangeListener(clone, clonedAttributeValue, unitOfWork, descriptor, getAttributeName());
    }
    setAttributeValueInObject(clone, clonedAttributeValue);
    return;
}

/**
 * INTERNAL:
 * Builds a shallow original object. Only direct attributes and primary
 * keys are populated. In this way the minimum original required for
 * instantiating a working copy clone can be built without placing it in
 * the shared cache (no concern over cycles).
 */
public void buildShallowOriginalFromRow(AbstractRecord databaseRow, Object original, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) {
    Object aggregate = buildAggregateFromRow(databaseRow, original, null, joinManager, sourceQuery, true, executionSession, true);// shallow only.
    setAttributeValueInObject(original, aggregate);
}

/**
 * INTERNAL:
 * Certain key mappings favor different types of selection query.
Return the appropriate
 * type of selection query.
 * @return a ReadAllQuery configured for the reference class and the given container policy
 */
public ReadQuery buildSelectionQueryForDirectCollectionKeyMapping(ContainerPolicy containerPolicy){
    ReadAllQuery query = new ReadAllQuery();
    query.setReferenceClass(referenceClass);
    query.setDescriptor(getReferenceDescriptor());
    query.setContainerPolicy(containerPolicy);
    return query;
}

/**
 * INTERNAL:
 * Build and return a "template" database row with all the fields
 * set to null.
 */
protected AbstractRecord buildTemplateInsertRow(AbstractSession session) {
    AbstractRecord result = getReferenceDescriptor().getObjectBuilder().buildTemplateInsertRow(session);
    List processedMappings = (List)getReferenceDescriptor().getMappings().clone();
    if (getReferenceDescriptor().hasInheritance()) {
        for (ClassDescriptor child : getReferenceDescriptor().getInheritancePolicy().getChildDescriptors()) {
            for (DatabaseMapping mapping : child.getMappings()) {
                // Only write mappings once - subclasses share inherited mappings.
                if (!processedMappings.contains(mapping)) {
                    mapping.writeInsertFieldsIntoRow(result, session);
                    processedMappings.add(mapping);
                }
            }
        }
    }
    return result;
}

/**
 * INTERNAL:
 * Cascade discover and persist new objects during commit to the map key.
 * NOTE(review): the boolean getAttributeValueFromObject parameter is not
 * consulted here; the object is cascaded as-is - confirm against callers.
 */
public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, boolean getAttributeValueFromObject, Set cascadeErrors){
    ObjectBuilder builder = getReferenceDescriptor(object.getClass(), uow).getObjectBuilder();
    builder.cascadeDiscoverAndPersistUnregisteredNewObjects(object, newObjects, unregisteredExistingObjects, visitedObjects, uow, cascadeErrors);
}

/**
 * INTERNAL:
 * Cascade perform delete through mappings that require the cascade.
 * @param getAttributeValueFromObject when true, object is the owner and the
 *        aggregate value is extracted from it; when false, object already is the aggregate
 */
public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject) {
    Object objectReferenced = null;
    // Objects referenced by this mapping are not registered as they have no
    // identity, however mappings from the referenced object may need cascading.
    if (getAttributeValueFromObject){
        objectReferenced = getAttributeValueFromObject(object);
    } else {
        objectReferenced = object;
    }
    if ((objectReferenced == null)) {
        return;
    }
    if (!visitedObjects.containsKey(objectReferenced)) {
        // Mark visited before recursing to break reference cycles.
        visitedObjects.put(objectReferenced, objectReferenced);
        ObjectBuilder builder = getReferenceDescriptor(objectReferenced.getClass(), uow).getObjectBuilder();
        builder.cascadePerformRemove(objectReferenced, uow, visitedObjects);
    }
}

/**
 * INTERNAL:
 * Cascade perform delete through mappings that require the cascade.
 */
public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
    cascadePerformRemoveIfRequired(object, uow, visitedObjects, true);
}

/**
 * INTERNAL:
 * Cascade perform removal of orphaned private owned objects from the UnitOfWorkChangeSet.
 */
public void cascadePerformRemovePrivateOwnedObjectFromChangeSetIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
    Object attributeValue = getAttributeValueFromObject(object);
    if (attributeValue == null) {
        return;
    }
    if (!visitedObjects.containsKey(attributeValue)) {
        visitedObjects.put(attributeValue, attributeValue);
        ObjectBuilder builder = getReferenceDescriptor(attributeValue, uow).getObjectBuilder();
        // cascade perform remove any related objects via ObjectBuilder for an aggregate object
        builder.cascadePerformRemovePrivateOwnedObjectFromChangeSet(attributeValue, uow, visitedObjects);
    }
}

/**
 * INTERNAL:
 * Cascade registerNew for Create through mappings that require the cascade.
 * @param getAttributeValueFromObject when true, object is the owner and the
 *        aggregate value is extracted from it; when false, object already is the aggregate
 */
public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject) {
    Object objectReferenced = null;
    // Aggregate objects are not registered but their mappings should be cascaded.
    if (getAttributeValueFromObject){
        objectReferenced = getAttributeValueFromObject(object);
    } else {
        objectReferenced = object;
    }
    if ((objectReferenced == null)) {
        return;
    }
    if (!visitedObjects.containsKey(objectReferenced)) {
        // Mark visited before recursing to break reference cycles.
        visitedObjects.put(objectReferenced, objectReferenced);
        ObjectBuilder builder = getReferenceDescriptor(objectReferenced.getClass(), uow).getObjectBuilder();
        builder.cascadeRegisterNewForCreate(objectReferenced, uow, visitedObjects);
    }
}

/**
 * INTERNAL:
 * Cascade registerNew for Create through mappings that require the cascade.
 */
public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
    cascadeRegisterNewIfRequired(object, uow, visitedObjects, true);
}

/**
 * INTERNAL:
 * Clone the aggregate to source field names. AggregateCollectionMapping
 * needs each nested embedded mapping to have its own list of aggregate
 * to source field names so that it can apply nested override names to
 * shared aggregate object mappings.
 */
public Object clone() {
    AggregateObjectMapping mappingObject = (AggregateObjectMapping) super.clone();
    Map aggregateToSourceFields = new HashMap();
    aggregateToSourceFields.putAll(getAggregateToSourceFields());
    mappingObject.setAggregateToSourceFields(aggregateToSourceFields);
    return mappingObject;
}

/**
 * INTERNAL:
 * Return the fields handled by the mapping (all fields of the reference descriptor).
 */
@Override
protected Vector collectFields() {
    return getReferenceFields();
}

/**
 * INTERNAL:
 * Aggregate order by all their fields by default.
*/
@Override
public List getOrderByNormalizedExpressions(Expression base) {
    List orderBys = new ArrayList(this.fields.size());
    for (DatabaseField field : this.fields) {
        orderBys.add(base.getField(field));
    }
    return orderBys;
}

/**
 * INTERNAL:
 * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings;
 * the FK field values will be used to re-issue the query when cloning the shared cache entity.
 */
@Override
public void collectQueryParameters(Set record){
    for (DatabaseMapping mapping : getReferenceDescriptor().getMappings()){
        // Nested aggregates are recursed into when they contain noncacheable mappings.
        if ((mapping.isForeignReferenceMapping() && !mapping.isCacheable()) || (mapping.isAggregateObjectMapping() && mapping.getReferenceDescriptor().hasNoncacheableMappings())){
            ((ForeignReferenceMapping) mapping).collectQueryParameters(record);
        }
    }
}

/**
 * INTERNAL:
 * Convert all the class-name-based settings in this mapping to actual
 * class-based settings. This method is used when converting a project that
 * has been built with class names to a project with classes.
 * @param classLoader loader used to resolve the class names
 */
@Override
public void convertClassNamesToClasses(ClassLoader classLoader) {
    super.convertClassNamesToClasses(classLoader);
    for (Converter converter : converters.values()) {
        // Convert any Converter class names.
        convertConverterClassNamesToClasses(converter, classLoader);
    }
}

/**
 * INTERNAL:
 * Called when a DatabaseMapping is used to map the key in a collection. Returns the key.
 */
public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){
    Object key = buildAggregateFromRow(dbRow, null, parentCacheKey, null, query, false, session, isTargetProtected);
    return key;
}

/**
 * INTERNAL:
 * Creates the Array of simple types used to recreate this map.
 */
public Object createSerializableMapKeyInfo(Object key, AbstractSession session){
    return key; // Embeddables have no identity so they are not reduced to PK.
}

/**
 * INTERNAL:
 * Create an instance of the Key object from the key information extracted from the map.
 * This may return the value directly in case of a simple key or will be used as the FK to load a related entity.
 */
public List createMapComponentsFromSerializableKeyInfo(Object[] keyInfo, AbstractSession session){
    return Arrays.asList(keyInfo); // Embeddables have no identity so they are not reduced to PK.
}

/**
 * INTERNAL:
 * Create an instance of the Key object from the key information extracted from the map.
 * This key object may be a shallow stub of the actual object if the key is an Entity type.
 */
public Object createStubbedMapComponentFromSerializableKeyInfo(Object keyInfo, AbstractSession session){
    return keyInfo;
}

/**
 * INTERNAL:
 * Called when a DatabaseMapping is used to map the key in a collection and a join query is executed. Returns the key.
 */
public Object createMapComponentFromJoinedRow(AbstractRecord dbRow, JoinedAttributeManager joinManger, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){
    return createMapComponentFromRow(dbRow, query, parentCacheKey, session, isTargetProtected);
}

/**
 * INTERNAL:
 * Create a query key that links to the map key.
 * @return null - aggregates do not require a query key for the map key
 */
public QueryKey createQueryKeyForMapKey(){
    return null;
}

/**
 * INTERNAL:
 * For mappings used as MapKeys in MappedKeyContainerPolicy, delete the passed object if necessary.
 *
 * This method is used for removal of private owned relationships.
 * AggregateObjectMappings are dealt with in their parent delete, so this is a no-op.
 *
 * @param objectDeleted the key object being deleted (ignored)
 * @param session current session (ignored)
 */
public void deleteMapKey(Object objectDeleted, AbstractSession session){
}

/**
 * PUBLIC:
 * If all the fields in the database row for the aggregate object are NULL,
 * then, by default, the mapping will place a null in the appropriate source object
 * (as opposed to an aggregate object filled with nulls). This behavior can be
 * explicitly set by calling #allowNull().
* To change this behavior, call #dontAllowNull(). Then the mapping * will build a new instance of the aggregate object that is filled with nulls * and place it in the source object. * In either situation, when writing, the mapping will place a NULL in all the * fields in the database row for the aggregate object. * * Note: Any aggregate that has a relationship mapping automatically does not allow * null. */ public void dontAllowNull() { setIsNullAllowed(false); } /** * INTERNAL: * This method is called to update collection tables prior to commit. */ @Override public void earlyPreDelete(DeleteObjectQuery query, Object object) { // need to go through our reference's pre-delete mappings for (DatabaseMapping mapping : getReferenceDescriptor().getPreDeleteMappings()) { Object nestedObject = getRealAttributeValueFromObject(object, query.getSession()); // If we have an aggregate object, go through the pre-delete. if (nestedObject != null) { mapping.earlyPreDelete(query, nestedObject); } } } /** * INTERNAL: * Extract the fields for the Map key from the object to use in a query. */ public Map extractIdentityFieldsForQuery(Object object, AbstractSession session){ Map keyFields = new HashMap(); ClassDescriptor descriptor =getReferenceDescriptor(); boolean usePrimaryKeyFields = (descriptor.getPrimaryKeyFields() != null && ! descriptor.getPrimaryKeyFields().isEmpty()) ? 
true : false; Iterator i = descriptor.getMappings().iterator(); while (i.hasNext()){ DatabaseMapping mapping = i.next(); if (!mapping.isReadOnly() && (!usePrimaryKeyFields || (usePrimaryKeyFields && mapping.isPrimaryKeyMapping()))){ Iterator fields = mapping.getFields().iterator(); while (fields.hasNext()){ DatabaseField field = fields.next(); if (field.isUpdatable()){ Object value = descriptor.getObjectBuilder().extractValueFromObjectForField(object, field, session); keyFields.put(field, value); } } } } return keyFields; } /** * INTERNAL: * Return any tables that will be required when this mapping is used as part of a join query * @return */ public List getAdditionalTablesForJoinQuery(){ return getReferenceDescriptor().getTables(); } /** * INTERNAL: * Return the selection criteria necessary to select the target object when this mapping * is a map key. * * AggregateObjectMappings do not need any additional selection criteria when they are map keys * @return */ public Expression getAdditionalSelectionCriteriaForMapKey(){ return null; } /** * INTERNAL: * Return a collection of the aggregate to source field associations. */ public Vector getAggregateToSourceFieldAssociations() { Vector associations = new Vector(getAggregateToSourceFields().size()); Iterator aggregateEnum = getAggregateToSourceFields().keySet().iterator(); Iterator sourceEnum = getAggregateToSourceFields().values().iterator(); while (aggregateEnum.hasNext()) { associations.addElement(new Association(aggregateEnum.next(), sourceEnum.next())); } return associations; } /** * INTERNAL: * Return the hashtable that stores aggregate field name to source fields. */ public Map getAggregateToSourceFields() { return aggregateToSourceFields; } /** * PUBLIC: * The classification type for the attribute this mapping represents */ public Class getAttributeClassification() { return getReferenceClass(); } /** * INTERNAL: * Return the classification for the field contained in the mapping. 
* This is used to convert the row value to a consistent Java value.
 */
public Class getFieldClassification(DatabaseField fieldToClassify) {
    DatabaseMapping mapping = getReferenceDescriptor().getObjectBuilder().getMappingForField(fieldToClassify);
    if (mapping == null) {
        return null;// Means that the mapping is read-only
    }
    return mapping.getFieldClassification(fieldToClassify);
}

/**
 * INTERNAL:
 * Return the fields that make up the identity of the mapped object. For mappings with
 * a primary key, it will be the set of fields in the primary key. For mappings without
 * a primary key it will likely be all the fields.
 * @return the identity fields
 */
public List getIdentityFieldsForMapKey(){
    ClassDescriptor descriptor =getReferenceDescriptor();
    if (descriptor.getPrimaryKeyFields() != null){
        return descriptor.getPrimaryKeyFields();
    } else {
        return getAllFieldsForMapKey();
    }
}

/**
 * INTERNAL:
 * Get all the fields for the map key.
 */
public List getAllFieldsForMapKey(){
    return getReferenceDescriptor().getAllFields();
}

/**
 * INTERNAL:
 * Return a Map of any foreign keys defined within the the MapKey.
 * @return null - aggregates define no foreign keys for the map key
 */
public Map getForeignKeyFieldsForMapKey(){
    return null;
}

/**
 * INTERNAL:
 * This is used to preserve object identity during a refreshObject()
 * query. Return the object corresponding to the specified database row.
 * The default is to simply return the attribute value.
 */
protected Object getMatchingAttributeValueFromObject(AbstractRecord row, Object targetObject, AbstractSession session, ClassDescriptor descriptor) {
    return getAttributeValueFromObject(targetObject);
}

/**
 * INTERNAL:
 * This is used to match up objects during an update in a UOW.
 * Return the object corresponding to the specified attribute value.
 * The default is to simply return the backup attribute value.
 */
protected Object getMatchingBackupAttributeValue(WriteObjectQuery query, Object attributeValue) {
    return getAttributeValueFromObject(query.getBackupClone());
}

/**
 * INTERNAL:
 * Return the query that is used when this mapping is part of a joined relationship.
 *
 * This method is used when this mapping is used to map the key in a Map.
 * @return null - aggregates require no nested join query
 */
public ObjectLevelReadQuery getNestedJoinQuery(JoinedAttributeManager joinManager, ObjectLevelReadQuery query, AbstractSession session){
    return null;
}

/**
 * INTERNAL:
 * Since aggregate object mappings clone their descriptors, for inheritance the correct child clone must be found.
 */
public ClassDescriptor getReferenceDescriptor(Class theClass, AbstractSession session) {
    if (this.referenceDescriptor.getJavaClass() == theClass) {
        return this.referenceDescriptor;
    }
    ClassDescriptor subDescriptor = this.referenceDescriptor.getInheritancePolicy().getSubclassDescriptor(theClass);
    if (subDescriptor == null) {
        throw DescriptorException.noSubClassMatch(theClass, this);
    } else {
        return subDescriptor;
    }
}

/**
 * INTERNAL:
 * Return the fields used to build the aggregate object.
 */
protected Vector getReferenceFields() {
    return getReferenceDescriptor().getAllFields();
}

/**
 * INTERNAL:
 * If required, get the targetVersion of the source object from the merge manager.
 *
 * Used with MapKeyContainerPolicy to abstract getting the target version of a source key.
 * @return the cloned aggregate when merging inside a unit of work, otherwise the object itself
 */
public Object getTargetVersionOfSourceObject(Object object, Object parent, MergeManager mergeManager, AbstractSession targetSession){
    if (mergeManager.getSession().isUnitOfWork()){
        UnitOfWorkImpl uow = (UnitOfWorkImpl)mergeManager.getSession();
        Object aggregateObject = buildClonePart(object, parent, null, null, targetSession, uow.isOriginalNewObject(parent));
        return aggregateObject;
    }
    return object;
}

/**
 * INTERNAL:
 * Return the class this key mapping maps or the descriptor for it.
 * @return the reference descriptor
 */
public Object getMapKeyTargetType(){
    return getReferenceDescriptor();
}

/**
 * INTERNAL:
 * Return if the mapping has any ownership or other dependency over its target object(s).
 */
public boolean hasDependency() {
    return getReferenceDescriptor().hasDependencyOnParts();
}

/**
 * INTERNAL:
 * For an aggregate mapping the reference descriptor is cloned. The cloned descriptor is then
 * assigned primary keys and table names before initialize. Once the cloned descriptor is initialized
 * it is assigned as reference descriptor in the aggregate mapping. This is a very specific
 * behavior for aggregate mappings. The original descriptor is used only for creating clones and
 * after that the aggregate mapping never uses it.
 * Some initialization is done in postInitialize to ensure the target descriptor's references are initialized.
*/
public void initialize(AbstractSession session) throws DescriptorException {
    AbstractSession referenceSession = session;
    // In a session broker the reference class may live in a different member session.
    if( session.hasBroker()) {
        if (getReferenceClass() == null) {
            throw DescriptorException.referenceClassNotSpecified(this);
        }
        referenceSession = session.getSessionForClass(getReferenceClass());
    }
    super.initialize(session);
    // Work on a private clone of the reference descriptor; the original is never
    // used by this mapping after cloning.
    ClassDescriptor clonedDescriptor = (ClassDescriptor)getReferenceDescriptor().clone();
    List accessorTree = getDescriptor().getAccessorTree();
    if (accessorTree == null){
        accessorTree = new ArrayList();
    }else{
        // Copy so this mapping's accessor is not appended to the owner's shared list.
        accessorTree = new ArrayList(accessorTree);
    }
    accessorTree.add(getAttributeAccessor());
    clonedDescriptor.setAccessorTree(accessorTree);
    if (isMapKeyMapping() && clonedDescriptor.isAggregateDescriptor()){
        clonedDescriptor.descriptorIsAggregateCollection();
    }
    if (clonedDescriptor.isChildDescriptor()) {
        ClassDescriptor parentDescriptor = session.getDescriptor(clonedDescriptor.getInheritancePolicy().getParentClass());
        initializeParentInheritance(parentDescriptor, clonedDescriptor, session);
    }
    setReferenceDescriptor(clonedDescriptor);
    // Apply any override m2m mappings to their cloned mappings.
    for (ManyToManyMapping overrideMapping : overrideManyToManyMappings) {
        DatabaseMapping mapping = clonedDescriptor.getMappingForAttributeName(overrideMapping.getAttributeName());
        if (mapping.isManyToManyMapping()) {
            ManyToManyMapping mappingClone = (ManyToManyMapping) mapping;
            mappingClone.setRelationTable(overrideMapping.getRelationTable());
            mappingClone.setSourceKeyFields(overrideMapping.getSourceKeyFields());
            mappingClone.setSourceRelationKeyFields(overrideMapping.getSourceRelationKeyFields());
            mappingClone.setTargetKeyFields(overrideMapping.getTargetKeyFields());
            mappingClone.setTargetRelationKeyFields(overrideMapping.getTargetRelationKeyFields());
        }
        // Else, silently ignored for now. These override mappings are set
        // and controlled through JPA metadata processing.
    }
    // Apply any override uni-directional 1-m mappings to their cloned mappings.
    for (UnidirectionalOneToManyMapping overrideMapping : overrideUnidirectionalOneToManyMappings) {
        DatabaseMapping mapping = clonedDescriptor.getMappingForAttributeName(overrideMapping.getAttributeName());
        if (mapping.isUnidirectionalOneToManyMapping()) {
            UnidirectionalOneToManyMapping mappingClone = (UnidirectionalOneToManyMapping) mapping;
            mappingClone.setSourceKeyFields(overrideMapping.getSourceKeyFields());
            mappingClone.setTargetForeignKeyFields(overrideMapping.getTargetForeignKeyFields());
        }
        // Else, silently ignored for now. These override mappings are set
        // and controlled through JPA metadata processing.
    }
    // Mark any mapsId mappings as read-only.
    for (DatabaseMapping mapsIdMapping : mapsIdMappings) {
        DatabaseMapping mapping = clonedDescriptor.getMappingForAttributeName(mapsIdMapping.getAttributeName());
        if (mapping != null) {
            mapping.setIsReadOnly(true);
        }
        // Else, silently ignored for now. Maps id mappings are set and
        // controlled through JPA metadata processing.
    }
    // Disallow null for aggregates with target foreign key relationships.
    if (isNullAllowed) {
        if (getReferenceDescriptor().hasTargetForeignKeyMapping(session)) {
            isNullAllowed = false;
            session.log(SessionLog.WARNING, SessionLog.METADATA, "metadata_warning_ignore_is_null_allowed", new Object[]{this});
        }
    }
    initializeReferenceDescriptor(clonedDescriptor, referenceSession);
    // Must translate before initializing because this mapping may have nested translations.
    translateNestedFields(clonedDescriptor, referenceSession);
    clonedDescriptor.preInitialize(referenceSession);
    clonedDescriptor.initialize(referenceSession);
    // Apply any converters to their cloned mappings (after initialization
    // so we can successfully traverse dot notation names).
    for (String attributeName : converters.keySet()) {
        String attr = attributeName;
        ClassDescriptor desc = clonedDescriptor;
        // Walk dot-notation names down through nested reference descriptors.
        while (attr.contains(".")) {
            desc = desc.getMappingForAttributeName(attr.substring(0, attr.indexOf("."))).getReferenceDescriptor();
            attr = attr.substring(attr.indexOf(".") + 1);
        }
        DatabaseMapping mapping = desc.getMappingForAttributeName(attr);
        if (mapping != null) {
            // Initialize and set the converter on the mapping.
            converters.get(attributeName).initialize(mapping, session);
        }
        // Else, silently ignored for now. These converters are set and
        // controlled through JPA metadata processing.
    }
    translateFields(clonedDescriptor, referenceSession);
    if (clonedDescriptor.hasInheritance() && clonedDescriptor.getInheritancePolicy().hasChildren()) {
        // Clone child descriptors.
        initializeChildInheritance(clonedDescriptor, referenceSession);
    }
    setFields(collectFields());
    // Add the nested pre delete mappings to the source entity.
    if (clonedDescriptor.hasPreDeleteMappings()) {
        getDescriptor().addPreDeleteMapping(this);
    }
}

/**
 * INTERNAL:
 * For an aggregate mapping the reference descriptor is cloned.
 * If the reference descriptor is involved in an inheritance tree,
 * all the parent and child descriptors are cloned also.
 * The cloned descriptors are then assigned primary keys and
 * table names before initialize.
 * This is a very specific behavior for aggregate mappings.
*/ public void initializeChildInheritance(ClassDescriptor parentDescriptor, AbstractSession session) throws DescriptorException { //recursive call to the further children descriptors if (parentDescriptor.getInheritancePolicy().hasChildren()) { //setFields(clonedChildDescriptor.getFields()); List childDescriptors = parentDescriptor.getInheritancePolicy().getChildDescriptors(); List cloneChildDescriptors = new ArrayList(); for (ClassDescriptor child : childDescriptors) { ClassDescriptor clonedChildDescriptor = (ClassDescriptor)child.clone(); clonedChildDescriptor.getInheritancePolicy().setParentDescriptor(parentDescriptor); initializeReferenceDescriptor(clonedChildDescriptor, session); clonedChildDescriptor.preInitialize(session); clonedChildDescriptor.initialize(session); translateFields(clonedChildDescriptor, session); cloneChildDescriptors.add(clonedChildDescriptor); initializeChildInheritance(clonedChildDescriptor, session); } parentDescriptor.getInheritancePolicy().setChildDescriptors(cloneChildDescriptors); } } /** * INTERNAL: * For an aggregate mapping the reference descriptor is cloned. * If the reference descriptor is involved in an inheritance tree, * all the parent and child descriptors are cloned also. * The cloned descriptors are then assigned primary keys and * table names before initialize. * This is a very specific behavior for aggregate mappings. 
*/
public void initializeParentInheritance(ClassDescriptor parentDescriptor, ClassDescriptor childDescriptor, AbstractSession session) throws DescriptorException {
    ClassDescriptor clonedParentDescriptor = (ClassDescriptor)parentDescriptor.clone();
    // Recursive call to the further parent descriptors.
    if (clonedParentDescriptor.getInheritancePolicy().isChildDescriptor()) {
        ClassDescriptor parentToParentDescriptor = session.getDescriptor(clonedParentDescriptor.getJavaClass());
        initializeParentInheritance(parentToParentDescriptor, parentDescriptor, session);
    }
    initializeReferenceDescriptor(clonedParentDescriptor, session);
    Vector children = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
    children.addElement(childDescriptor);
    clonedParentDescriptor.getInheritancePolicy().setChildDescriptors(children);
    clonedParentDescriptor.preInitialize(session);
    clonedParentDescriptor.initialize(session);
    translateFields(clonedParentDescriptor, session);
}

/**
 * INTERNAL:
 * Initialize the cloned reference descriptor with table names and primary keys
 * taken from the owning descriptor (or from the aggregate key table when this
 * mapping is used as a map key).
 */
protected void initializeReferenceDescriptor(ClassDescriptor clonedDescriptor, AbstractSession session) {
    if (aggregateKeyTable != null){
        clonedDescriptor.setDefaultTable(aggregateKeyTable);
        Vector tables = new Vector(1);
        tables.add(aggregateKeyTable);
        clonedDescriptor.setTables(tables);
    } else {
        // Must ensure default tables remains the same.
        clonedDescriptor.setDefaultTable(getDescriptor().getDefaultTable());
        clonedDescriptor.setTables(getDescriptor().getTables());
        clonedDescriptor.setPrimaryKeyFields(getDescriptor().getPrimaryKeyFields());
        if (clonedDescriptor.hasTargetForeignKeyMapping(session) && !isJPAIdNested(session)) {
            for (DatabaseField pkField : getDescriptor().getPrimaryKeyFields()) {
                if (!getAggregateToSourceFields().containsKey(pkField.getName())) {
                    // PK field from the source descriptor will have its type set by source descriptor.
                    // This only could be done if there is no aggregate field with the same name as pk field.
                    clonedDescriptor.getObjectBuilder().getFieldsMap().put(pkField, pkField);
                }
            }
        }
    }
    // Propagate fetch group support when the aggregate class is weaving-enabled.
    if (this.getDescriptor().hasFetchGroupManager() && FetchGroupTracker.class.isAssignableFrom(clonedDescriptor.getJavaClass())){
        if (clonedDescriptor.getFetchGroupManager() == null) {
            clonedDescriptor.setFetchGroupManager(new FetchGroupManager());
        }
    }
}

/**
 * INTERNAL:
 * Called when iterating through descriptors to handle iteration on this mapping when it is used as a MapKey.
 * @param iterator the descriptor iterator driving the traversal
 * @param element the map key element to iterate on
 */
public void iterateOnMapKey(DescriptorIterator iterator, Object element){
    super.iterateOnAttributeValue(iterator, element);
}

/**
 * INTERNAL:
 * Return whether this mapping should be traversed when we are locking.
 * @return true - aggregates are always traversed for locking
 */
public boolean isLockableMapping(){
    return true;
}

/**
 * INTERNAL:
 * Related mapping should implement this method to return true.
 */
public boolean isAggregateObjectMapping() {
    return true;
}

/**
 * INTERNAL:
 * Return if this mapping supports change tracking.
 * Requires the reference descriptor (and all of its subclasses, if any)
 * to support change tracking.
 */
public boolean isChangeTrackingSupported(Project project) {
    // This can be called before and after initialization.
    // Use the mapping reference descriptor when initialized, otherwise find the uninitialized one.
    ClassDescriptor referencedDescriptor = getReferenceDescriptor();
    if (referencedDescriptor == null) {
        // Not initialized yet: locate the descriptor by class name in the project.
        Iterator ordered = project.getOrderedDescriptors().iterator();
        while (ordered.hasNext() && referencedDescriptor == null){
            ClassDescriptor descriptor = (ClassDescriptor)ordered.next();
            if (descriptor.getJavaClassName().equals(getReferenceClassName())){
                referencedDescriptor = descriptor;
            }
        }
    }
    if (referencedDescriptor != null) {
        if (!referencedDescriptor.supportsChangeTracking(project)) {
            return false;
        }
        // Also check subclasses.
        if (referencedDescriptor.hasInheritance()) {
            for (Iterator iterator = referencedDescriptor.getInheritancePolicy().getChildDescriptors().iterator(); iterator.hasNext(); ) {
                ClassDescriptor subclassDescriptor = (ClassDescriptor)iterator.next();
                if (!subclassDescriptor.supportsChangeTracking(project)) {
                    return false;
                }
            }
        }
        return true;
    }
    return false;
}

/**
 * INTERNAL:
 * Return true if this mapping supports cascaded version optimistic locking.
 */
public boolean isCascadedLockingSupported() {
    return true;
}

/**
 * INTERNAL:
 * Flags that either this mapping or a nested aggregate mapping is a JPA id mapping.
 */
public boolean isJPAIdNested(AbstractSession session) {
    if (isJPAId()) {
        return true;
    } else {
        ClassDescriptor referenceDescriptor = getReferenceDescriptor();
        if (referenceDescriptor == null) {
            // The mapping has not been initialized yet.
            referenceDescriptor = session.getDescriptor(getReferenceClass());
        }
        // Recurse into nested aggregates looking for a JPA id.
        for (DatabaseMapping mapping : referenceDescriptor.getMappings()) {
            if (mapping.isAggregateObjectMapping() && ((AggregateObjectMapping)mapping).isJPAIdNested(session)) {
                return true;
            }
        }
        return false;
    }
}

/**
 * PUBLIC:
 * Return if all the fields in the database row for the aggregate object are NULL,
 * then, by default, the mapping will place a null in the appropriate source object
 * (as opposed to an aggregate object filled with nulls).
 * To change this behavior, set the value of this variable to false.
Then the mapping
 * will build a new instance of the aggregate object that is filled with nulls
 * and place it in the source object.
 *
 * Note: Any aggregate that has a relationship mapping automatically does not allow
 * null.
 */
public boolean isNullAllowed() {
    return isNullAllowed;
}

/**
 * INTERNAL:
 * For an aggregate mapping the reference descriptor is cloned. The cloned descriptor is then
 * assigned primary keys and table names before initialize. Once the cloned descriptor is initialized
 * it is assigned as reference descriptor in the aggregate mapping. This is a very specific
 * behavior for aggregate mappings. The original descriptor is used only for creating clones and
 * after that the aggregate mapping never uses it.
 * Some initialization is done in postInitialize to ensure the target descriptor's references are initialized.
 */
public void postInitialize(AbstractSession session) throws DescriptorException {
    super.postInitialize(session);
    if (getReferenceDescriptor() != null) {
        // The aggregate shares the owner's cache isolation level.
        getReferenceDescriptor().getCachePolicy().setCacheIsolation(this.descriptor.getCachePolicy().getCacheIsolation());
        // Changed as part of fix for bug#4410581: aggregate mapping can not be set to use change
        // tracking if owning descriptor does not use it. Basically the policies should be the
        // same, but we also allow deferred with attribute for CMP2 (coarser grained).
        if (getDescriptor().getObjectChangePolicy().getClass().equals(DeferredChangeDetectionPolicy.class)) {
            getReferenceDescriptor().setObjectChangePolicy(new DeferredChangeDetectionPolicy());
        } else if (getDescriptor().getObjectChangePolicy().getClass().equals(ObjectChangeTrackingPolicy.class) && getReferenceDescriptor().getObjectChangePolicy().getClass().equals(AttributeChangeTrackingPolicy.class)) {
            getReferenceDescriptor().setObjectChangePolicy(new ObjectChangeTrackingPolicy());
        }
        // Need to set the primary key classification as the mappings for the pk fields might not be available.
        if (getReferenceDescriptor().isAggregateDescriptor()){
            getReferenceDescriptor().getObjectBuilder().setPrimaryKeyClassifications(this.getDescriptor().getObjectBuilder().getPrimaryKeyClassifications());
            getReferenceDescriptor().setHasSimplePrimaryKey(this.getDescriptor().hasSimplePrimaryKey());
        }
        getReferenceDescriptor().postInitialize(session);
    }
}

/**
 * INTERNAL:
 * Making any mapping changes necessary to use the mapping as a map key prior to initializing the mapping.
 */
public void preinitializeMapKey(DatabaseTable table) {
    setTableForAggregateMappingKey(table);
}

/**
 * INTERNAL:
 * Making any mapping changes necessary to use the mapping as a map key after initializing the mapping.
 * No-op for aggregate mappings.
 */
public void postInitializeMapKey(MappedKeyMapContainerPolicy policy) {
    return;
}

/**
 * INTERNAL:
 * Build an aggregate object from the specified return row and put it
 * in the specified target object.
 * Return row is merged into object after execution of insert or update call
 * according to ReturningPolicy.
 * If not null changeSet must correspond to targetObject. changeSet is updated with all of the field values in the row.
*/ public Object readFromReturnRowIntoObject(AbstractRecord row, Object targetObject, ReadObjectQuery query, Collection handledMappings, ObjectChangeSet changeSet) throws DatabaseException { Object aggregate = getAttributeValueFromObject(targetObject); ObjectChangeSet aggregateChangeSet = null; if (aggregate == null) { aggregate = readFromRowIntoObject(row, null, targetObject, null, query, query.getSession(), true); } else { if(changeSet != null && (!changeSet.isNew() || (query.getDescriptor() != null && query.getDescriptor().shouldUseFullChangeSetsForNewObjects()))) { aggregateChangeSet = getReferenceDescriptor(aggregate, query.getSession()).getObjectBuilder().createObjectChangeSet(aggregate, (UnitOfWorkChangeSet)((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet(), true, query.getSession()); } AbstractRecord aggregateRow = new DatabaseRecord(); int size = row.size(); List fields = row.getFields(); List values = row.getValues(); List aggregateFields = getReferenceFields(); for(int i=0; i < size; i++) { DatabaseField field = (DatabaseField)fields.get(i); if(aggregateFields.contains(field)) { aggregateRow.add(field, values.get(i)); } } getObjectBuilder(aggregate, query.getSession()).assignReturnRow(aggregate, query.getSession(), aggregateRow, aggregateChangeSet); } if (aggregate != null && isNullAllowed()) { boolean allAttributesNull = true; int nAggregateFields = this.fields.size(); for (int i = 0; (i < nAggregateFields) && allAttributesNull; i++) { DatabaseField field = this.fields.elementAt(i); if (row.containsKey(field)) { allAttributesNull = row.get(field) == null; } else { Object fieldValue = valueFromObject(targetObject, field, query.getSession()); if (fieldValue == null) { Object baseValue = getDescriptor().getObjectBuilder().getBaseValueForField(field, targetObject); if (baseValue != null) { DatabaseMapping baseMapping = getDescriptor().getObjectBuilder().getBaseMappingForField(field); if (baseMapping.isForeignReferenceMapping()) { 
ForeignReferenceMapping refMapping = (ForeignReferenceMapping)baseMapping; if (refMapping.usesIndirection()) { allAttributesNull = refMapping.getIndirectionPolicy().objectIsInstantiated(baseValue); } } else if (baseMapping.isTransformationMapping()) { AbstractTransformationMapping transMapping = (AbstractTransformationMapping)baseMapping; if (transMapping.usesIndirection()) { allAttributesNull = transMapping.getIndirectionPolicy().objectIsInstantiated(baseValue); } } } } else { allAttributesNull = false; } } } if (allAttributesNull) { aggregate = null; setAttributeValueInObject(targetObject, aggregate); } } if(changeSet != null && (!changeSet.isNew() || (query.getDescriptor() != null && query.getDescriptor().shouldUseFullChangeSetsForNewObjects()))) { AggregateChangeRecord record = (AggregateChangeRecord)changeSet.getChangesForAttributeNamed(getAttributeName()); if(aggregate == null) { if(record != null) { record.setChangedObject(null); } } else { if (record == null) { record = new AggregateChangeRecord(changeSet); record.setAttribute(getAttributeName()); record.setMapping(this); changeSet.addChange(record); } if (aggregateChangeSet == null) { // the old aggregate value was null aggregateChangeSet = getReferenceDescriptor(aggregate, query.getSession()).getObjectBuilder().createObjectChangeSet(aggregate, (UnitOfWorkChangeSet)((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet(), true, query.getSession()); } record.setChangedObject(aggregateChangeSet); } } if (handledMappings != null) { handledMappings.add(this); } return aggregate; } /** * INTERNAL: * Build an aggregate object from the specified row and put it * in the specified target object. 
*/ public Object readFromRowIntoObject(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object targetObject, CacheKey parentCacheKey, ObjectBuildingQuery sourceQuery, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException { Object aggregate = buildAggregateFromRow(databaseRow, targetObject, parentCacheKey, joinManager, sourceQuery, false, executionSession, isTargetProtected);// don't just build a shallow original setAttributeValueInObject(targetObject, aggregate); return aggregate; } /** * INTERNAL: * Rehash any hashtables based on fields. * This is used to clone descriptors for aggregates, which hammer field names. */ public void rehashFieldDependancies(AbstractSession session) { getReferenceDescriptor().rehashFieldDependancies(session); } /** * INTERNAL: * Return whether this mapping requires extra queries to update the rows if it is * used as a key in a map. This will typically be true if there are any parts to this mapping * that are not read-only. */ public boolean requiresDataModificationEventsForMapKey(){ if (getReferenceDescriptor() != null){ Iterator i = getReferenceDescriptor().getMappings().iterator(); while (i.hasNext()){ DatabaseMapping mapping = i.next(); if (!mapping.isReadOnly()){ Iterator fields = mapping.getFields().iterator(); while (fields.hasNext()){ DatabaseField field = fields.next(); if (field.isUpdatable()){ return true; } } } } return false; } return true; } /** * INTERNAL: * Set a collection of the aggregate to source field name associations. 
*/ public void setAggregateToSourceFieldAssociations(Vector fieldAssociations) { Hashtable fieldNames = new Hashtable(fieldAssociations.size() + 1); for (Enumeration associationsEnum = fieldAssociations.elements(); associationsEnum.hasMoreElements();) { Association association = (Association)associationsEnum.nextElement(); fieldNames.put(association.getKey(), association.getValue()); } setAggregateToSourceFields(fieldNames); } /** * INTERNAL: * Set the hashtable that stores target field name to the source field name. */ public void setAggregateToSourceFields(Map aggregateToSource) { aggregateToSourceFields = aggregateToSource; } /** * PUBLIC: * Configure if all the fields in the database row for the aggregate object are NULL, * then, by default, the mapping will place a null in the appropriate source object * (as opposed to an aggregate object filled with nulls). * To change this behavior, set the value of this variable to false. Then the mapping * will build a new instance of the aggregate object that is filled with nulls * and place it in the source object. * * Note: Any aggregate that has a relationship mapping automatically does not allow * null. */ public void setIsNullAllowed(boolean isNullAllowed) { this.isNullAllowed = isNullAllowed; } /** * INTERNAL: * If this mapping is used as the key of a CollectionTableMapMapping, the table used by this * mapping will be the relation table. Set this table. */ public void setTableForAggregateMappingKey(DatabaseTable table){ aggregateKeyTable = table; } /** * INTERNAL: * Apply the field translation from the sourceField to the mappingField. */ protected void translateField(DatabaseField sourceField, DatabaseField mappingField, ClassDescriptor clonedDescriptor) { // Do not modify non-translated fields. 
if (sourceField != null) { //merge fieldInSource into the field from the Aggregate descriptor mappingField.setName(sourceField.getName()); mappingField.setUseDelimiters(sourceField.shouldUseDelimiters()); mappingField.useUpperCaseForComparisons(sourceField.getUseUpperCaseForComparisons()); mappingField.setNameForComparisons(sourceField.getNameForComparisons()); //copy type information mappingField.setNullable(sourceField.isNullable()); mappingField.setUpdatable(sourceField.isUpdatable()); mappingField.setInsertable(sourceField.isInsertable()); mappingField.setUnique(sourceField.isUnique()); mappingField.setScale(sourceField.getScale()); mappingField.setLength(sourceField.getLength()); mappingField.setPrecision(sourceField.getPrecision()); mappingField.setColumnDefinition(sourceField.getColumnDefinition()); // Check if the translated field specified a table qualifier. if (sourceField.hasTableName()) { mappingField.setTable(clonedDescriptor.getTable(sourceField.getTable().getName())); } // Tag this field as translated. Some mapping care to know which // have been translated in the rehashFieldDependancies call. mappingField.setIsTranslated(true); } } /** * INTERNAL: * If field names are different in the source and aggregate objects then the translation * is done here. The aggregate field name is converted to source field name from the * field name mappings stored. */ protected void translateNestedFields(ClassDescriptor clonedDescriptor, AbstractSession session) { if (nestedFieldTranslations == null){ //this only happens when using Metadata Caching return; } // Once the cloned descriptor is initialized, go through our nested // field name translations. 
Any errors are silently ignored as // validation is assumed to be done before hand (JPA metadata processing // does validate any nested field translation) for (Entry translations : nestedFieldTranslations.entrySet()) { String attributeName = translations.getKey(); DatabaseMapping mapping = null; ClassDescriptor nestedDescriptor = clonedDescriptor; String currentAttributeName = attributeName.substring(0, attributeName.indexOf(".")); String remainingAttributeName = attributeName.substring(attributeName.indexOf(".")+ 1); mapping = clonedDescriptor.getMappingForAttributeName(currentAttributeName); if (mapping.isAggregateObjectMapping()){ if (remainingAttributeName != null && remainingAttributeName.contains(".")){ //This should be the case otherwise the metadata validation would have validated ((AggregateObjectMapping)mapping).addNestedFieldTranslation(remainingAttributeName, (DatabaseField)translations.getValue()[0], (String)translations.getValue()[1]); }else{ ((AggregateObjectMapping)mapping).addFieldTranslation((DatabaseField) translations.getValue()[0], (String)translations.getValue()[1]); } } } } /** * INTERNAL: * If field names are different in the source and aggregate objects then the translation * is done here. The aggregate field name is converted to source field name from the * field name mappings stored. 
*/ protected void translateFields(ClassDescriptor clonedDescriptor, AbstractSession session) { // EL Bug 326977 Vector fieldsToTranslate = (Vector) clonedDescriptor.getFields().clone(); for (Iterator qkIterator = clonedDescriptor.getQueryKeys().values().iterator(); qkIterator.hasNext();) { QueryKey queryKey = (QueryKey)qkIterator.next(); if (queryKey.isDirectQueryKey()) { DatabaseField field = ((DirectQueryKey)queryKey).getField(); fieldsToTranslate.add(field); } } // EL Bug 332080 - translate foreign reference mapping source key fields if (!clonedDescriptor.getObjectBuilder().isSimple()) { for (Iterator dcIterator = clonedDescriptor.getMappings().iterator(); dcIterator.hasNext();) { DatabaseMapping mapping = (DatabaseMapping)dcIterator.next(); if (mapping.isForeignReferenceMapping()) { Collection fkFields = ((ForeignReferenceMapping)mapping).getFieldsForTranslationInAggregate(); if (fkFields != null && !fkFields.isEmpty()) { fieldsToTranslate.addAll(fkFields); } } } } for (Iterator entry = fieldsToTranslate.iterator(); entry.hasNext();) { DatabaseField field = (DatabaseField)entry.next(); //322233 - get the source DatabaseField from the translation map. translateField(getAggregateToSourceFields().get(field.getName()), field, clonedDescriptor); } clonedDescriptor.rehashFieldDependancies(session); } /** * INTERNAL: * Allow the key mapping to unwrap the object * @param key * @param session * @return */ public Object unwrapKey(Object key, AbstractSession session){ return key; } /** * INTERNAL: * Allow the key mapping to wrap the object * @param key * @param session * @return */ public Object wrapKey(Object key, AbstractSession session){ return key; } /** * INTERNAL: * A subclass should implement this method if it wants different behavior. * Write the foreign key values from the attribute to the row. 
*/ @Override public void writeFromAttributeIntoRow(Object attribute, AbstractRecord row, AbstractSession session){ writeToRowFromAggregate(row, null, attribute, session, WriteType.UNDEFINED); } /** * INTERNAL: * Extract value of the field from the object */ @Override public Object valueFromObject(Object object, DatabaseField field, AbstractSession session) throws DescriptorException { Object attributeValue = getAttributeValueFromObject(object); if (attributeValue == null) { if (isNullAllowed()) { return null; } else { throw DescriptorException.nullForNonNullAggregate(object, this); } } else { return getObjectBuilder(attributeValue, session).extractValueFromObjectForField(attributeValue, field, session); } } /** * INTERNAL: * Get the attribute value from the object and add the appropriate * values to the specified database row. */ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord databaseRow, AbstractSession session, WriteType writeType) throws DescriptorException { if (isReadOnly()) { return; } writeToRowFromAggregate(databaseRow, object, getAttributeValueFromObject(object), session, writeType); } /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. */ @Override public void writeFromObjectIntoRowForShallowInsert(Object object, AbstractRecord row, AbstractSession session) { if (isReadOnly()) { return; } writeToRowFromAggregateForShallowInsert(row, object, getAttributeValueFromObject(object), session); } /** * INTERNAL: * This row is built for update after shallow insert which happens in case of bidirectional inserts. * It contains the foreign keys with non null values that were set to null for shallow insert. 
*/ @Override public void writeFromObjectIntoRowForUpdateAfterShallowInsert(Object object, AbstractRecord row, AbstractSession session, DatabaseTable table) { if (isReadOnly() || !getFields().get(0).getTable().equals(table) || isPrimaryKeyMapping()) { return; } writeToRowFromAggregateForUpdateAfterShallowInsert(row, object, getAttributeValueFromObject(object), session, table); } /** * INTERNAL: * This row is built for update before shallow delete which happens in case of bidirectional inserts. * It contains the same fields as the row built by writeFromObjectIntoRowForUpdateAfterShallowInsert, but all the values are null. */ @Override public void writeFromObjectIntoRowForUpdateBeforeShallowDelete(Object object, AbstractRecord row, AbstractSession session, DatabaseTable table) { if (isReadOnly() || !getFields().get(0).getTable().equals(table) || isPrimaryKeyMapping()) { return; } writeToRowFromAggregateForUpdateBeforeShallowDelete(row, object, getAttributeValueFromObject(object), session, table); } /** * INTERNAL: * Get the attribute value from the object and add the appropriate * values to the specified database row. */ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord databaseRow, AbstractSession session, WriteType writeType) throws DescriptorException { if (isReadOnly()) { return; } writeToRowFromAggregateWithChangeRecord(databaseRow, changeRecord, (ObjectChangeSet)((AggregateChangeRecord)changeRecord).getChangedObject(), session, writeType); } /** * INTERNAL: * Get the attribute value from the object and add the changed * values to the specified database row. */ public void writeFromObjectIntoRowForUpdate(WriteObjectQuery query, AbstractRecord databaseRow) throws DescriptorException { if (isReadOnly()) { return; } writeToRowFromAggregateForUpdate(databaseRow, query, getAttributeValueFromObject(query.getObject())); } /** * INTERNAL: * Write fields needed for insert into the template for with null values. 
*/ public void writeInsertFieldsIntoRow(AbstractRecord databaseRow, AbstractSession session) { if (isReadOnly()) { return; } AbstractRecord targetRow = buildTemplateInsertRow(session); for (Enumeration keyEnum = targetRow.keys(); keyEnum.hasMoreElements();) { DatabaseField field = (DatabaseField)keyEnum.nextElement(); Object value = targetRow.get(field); //CR-3286097 - Should use add not put, to avoid linear search. databaseRow.add(field, value); } } /** * INTERNAL: * Add a primary key join column (secondary field). * If this contain primary keys and the descriptor(or its subclass) has multiple tables * (secondary tables or joined inheritance strategy), this should also know the primary key * join columns to handle some cases properly. */ public void addPrimaryKeyJoinField(DatabaseField primaryKeyField, DatabaseField secondaryField) { // now it doesn't need to manage this as a separate table here, // it's enough just to add the mapping to ObjectBuilder.mappingsByField ObjectBuilder builder = getReferenceDescriptor().getObjectBuilder(); DatabaseMapping mapping = builder.getMappingForField(primaryKeyField); if (mapping != null) { builder.getMappingsByField().put(secondaryField, mapping); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/0000775000000000000000000000000012216174372022757 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/FieldTransformer.java0000664000000000000000000000400512216173130027056 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.transformers; import java.io.*; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; /** * PUBLIC: * This interface is used by the Transformation Mapping to build the value for a * specific field. The user must provide implementations of this interface to the * Transformation Mapping. * @author mmacivor * @since 10.1.3 */ public interface FieldTransformer extends Serializable { /** * Initialize this transformer. Only required if the user needs some special * information from the mapping in order to do the transformation * @param mapping - the mapping this transformer is associated with. */ public void initialize(AbstractTransformationMapping mapping); /** * @param instance - an instance of the domain class which contains the attribute * @param session - the current session * @param fieldName - the name of the field being transformed. Used if the user wants to use this transformer for multiple fields. 
* @return - The value to be written for the field associated with this transformer */ public Object buildFieldValue(Object instance, String fieldName, Session session); } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/MethodBasedFieldTransformer.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/MethodBasedFieldTransformer.jav0000664000000000000000000001465212216173130031026 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.transformers; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.internal.helper.ClassConstants; import org.eclipse.persistence.internal.helper.Helper; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedGetMethodParameterTypes; import org.eclipse.persistence.internal.security.PrivilegedMethodInvoker; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; /** * @version $Header: MethodBasedFieldTransformer.java 18-sep-2006.16:20:59 gyorke Exp $ * @author mmacivor * @since 10 * This class is used to preserve the old method of doing Field Transformations * on a transformation mapping. 
It is used internally when the older API is used on * a TransformationMapping, and handles doing invocations on the user's domain class */ public class MethodBasedFieldTransformer implements FieldTransformer { protected transient Method fieldTransformationMethod; protected String methodName; protected AbstractTransformationMapping mapping; public MethodBasedFieldTransformer(String methodName) { this.methodName = methodName; } public void initialize(AbstractTransformationMapping mapping) { this.mapping = mapping; try { // look for the zero-argument version first fieldTransformationMethod = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodName, new Class[0]); } catch (Exception ex) { try { // if the zero-argument version is not there, look for the one-argument version Class[] methodParameterTypes = new Class[1]; methodParameterTypes[0] = ClassConstants.PublicInterfaceSession_Class; fieldTransformationMethod = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodName, methodParameterTypes); } catch (Exception ex2) { try { //if the one-argument version is absent, try with sessions.Session Class[] methodParameterTypes = new Class[1]; methodParameterTypes[0] = ClassConstants.SessionsSession_Class; fieldTransformationMethod = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodName, methodParameterTypes); } catch (NoSuchMethodException exception) { throw DescriptorException.noSuchMethodWhileConvertingToMethod(methodName, mapping, exception); } catch (SecurityException exception) { throw DescriptorException.securityWhileConvertingToMethod(methodName, mapping, exception); } } } } /** * Return the Java class type of the field value. * This uses the method return type. 
*/ public Class getFieldType() { if (this.fieldTransformationMethod != null) { return this.fieldTransformationMethod.getReturnType(); } return null; } public Object buildFieldValue(Object object, String fieldName, Session session) { Class[] parameterTypes = null; if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try{ parameterTypes = (Class [])AccessController.doPrivileged(new PrivilegedGetMethodParameterTypes(fieldTransformationMethod)); }catch (PrivilegedActionException ex){ throw (RuntimeException) ex.getCause(); } }else{ parameterTypes = PrivilegedAccessHelper.getMethodParameterTypes(fieldTransformationMethod); } Object[] parameters = new Object[parameterTypes.length]; if (parameters.length == 1) { parameters[0] = session; } try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try{ return AccessController.doPrivileged(new PrivilegedMethodInvoker(fieldTransformationMethod, object, parameters)); }catch (PrivilegedActionException ex){ if (ex.getCause() instanceof IllegalAccessException){ throw (IllegalAccessException) ex.getCause(); } if (ex.getCause() instanceof InvocationTargetException){ throw (InvocationTargetException) ex.getCause(); } throw (RuntimeException) ex.getCause(); } }else{ return PrivilegedAccessHelper.invokeMethod(fieldTransformationMethod, object, parameters); } } catch (IllegalAccessException exception) { throw DescriptorException.illegalAccessWhileInvokingFieldToMethod(fieldTransformationMethod.getName(), mapping, exception); } catch (IllegalArgumentException exception) { throw DescriptorException.illegalArgumentWhileInvokingFieldToMethod(fieldTransformationMethod.getName(), mapping, exception); } catch (InvocationTargetException exception) { throw DescriptorException.targetInvocationWhileInvokingFieldToMethod(fieldTransformationMethod.getName(), mapping, exception); } } public String getMethodName() { return methodName; } public void setMethodName(String name) { methodName = name; } } 
eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/FieldTransformerAdapter.java0000664000000000000000000000310212216173130030354 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.transformers; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; /** *

Purpose: Provides an empty implementation of FieldTransformer. * Users who do not require the full FieldTransformer API can subclass this class * and implement only the methods required. * @see org.eclipse.persistence.mappings.FieldTransformer * @version $Header: FieldTransformerAdapter.java 11-jul-2006.10:33:44 gyorke Exp $ * @author mmacivor * @since 10 */ public class FieldTransformerAdapter implements FieldTransformer { public void initialize(AbstractTransformationMapping mapping) { } public Object buildFieldValue(Object object, String fieldName, Session session) { return null; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/AttributeTransformer.java0000664000000000000000000000376012216173130030005 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.transformers; import java.io.*; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.sessions.Record; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; /** * PUBLIC: * @version $Header: AttributeTransformer.java 11-jul-2006.10:33:44 gyorke Exp $ * @author mmacivor * @since 10 * This interface is used by the Transformation Mapping to build the value for a * the mapped attribute on a read. 
The user must provide an implementation of this interface to the * Transformation Mapping. */ public interface AttributeTransformer extends Serializable { /** * @param mapping - The mapping associated with this transformer. Only used if some special information is required. */ public void initialize(AbstractTransformationMapping mapping); /** * @param record - The metadata being used to build the object. * @param session - the current session * @param object - The current object that the attribute is being built for. * @return - The attribute value to be built into the object containing this mapping. */ public Object buildAttributeValue(Record record, Object object, Session session); } ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/MethodBasedAttributeTransformer.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/MethodBasedAttributeTransformer0000664000000000000000000001734712216173130031173 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.transformers; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.internal.helper.ClassConstants; import org.eclipse.persistence.internal.helper.Helper; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedGetMethodParameterTypes; import org.eclipse.persistence.internal.security.PrivilegedMethodInvoker; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; import org.eclipse.persistence.sessions.Record; import org.eclipse.persistence.sessions.Session; /** * @version $Header: MethodBasedAttributeTransformer.java 18-sep-2006.14:33:54 gyorke Exp $ * @author mmacivor * @since release specific (what release of product did this appear in) * This class is used to preserve the old method of doing Attribute Transformations * on a transformation mapping. 
It is used internally when the older API is used on * a TransformationMapping, and handles doing invocations on the user's domain class */ public class MethodBasedAttributeTransformer implements AttributeTransformer { protected transient Method attributeTransformationMethod; protected AbstractTransformationMapping mapping; protected String methodName; public MethodBasedAttributeTransformer() { } public MethodBasedAttributeTransformer(String methodName) { this.methodName = methodName; } public String getMethodName() { return methodName; } public void setMethodName(String name) { methodName = name; } public Method getAttributeTransformationMethod() { return attributeTransformationMethod; } public void setAttributeTransformationMethod(Method theMethod) { attributeTransformationMethod = theMethod; } /** * INTERNAL: * Initilizes the transformer. Looks up the transformation method on the * domain class using reflection. This method can have either 1 or 2 parameters. */ public void initialize(AbstractTransformationMapping mapping) { this.mapping = mapping; try { // look for the one-argument version first Class[] parameterTypes = new Class[1]; parameterTypes[0] = ClassConstants.Record_Class; attributeTransformationMethod = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodName, parameterTypes); } catch (Exception ex) { try { //now look for the one-argument version with Record Class[] parameterTypes = new Class[1]; parameterTypes[0] = ClassConstants.Record_Class; attributeTransformationMethod = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodName, parameterTypes); } catch (Exception ex2) { try { // if the one-argument version is not there, look for the two-argument version Class[] parameterTypes = new Class[2]; parameterTypes[0] = ClassConstants.Record_Class; parameterTypes[1] = ClassConstants.PublicInterfaceSession_Class; attributeTransformationMethod = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodName, 
parameterTypes); } catch (Exception ex3) { try { //now look for the 2 argument version using Record and sessions Session Class[] parameterTypes = new Class[2]; parameterTypes[0] = ClassConstants.Record_Class; parameterTypes[1] = ClassConstants.SessionsSession_Class; attributeTransformationMethod = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodName, parameterTypes); } catch (NoSuchMethodException exception) { throw DescriptorException.noSuchMethodOnInitializingAttributeMethod(mapping.getAttributeMethodName(), mapping, exception); } catch (SecurityException exception) { throw DescriptorException.securityOnInitializingAttributeMethod(mapping.getAttributeMethodName(), mapping, exception); } } } } if (attributeTransformationMethod.getReturnType() == ClassConstants.Void_Class) { throw DescriptorException.returnTypeInGetAttributeAccessor(methodName, mapping); } } /** * INTERNAL: * Build the attribute value by invoking the user's transformation method. */ public Object buildAttributeValue(Record record, Object object, Session session) { Class[] parameterTypes = null; if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try{ parameterTypes = (Class[])AccessController.doPrivileged(new PrivilegedGetMethodParameterTypes(attributeTransformationMethod)); }catch (PrivilegedActionException ex){ throw (RuntimeException)ex.getCause(); } }else{ parameterTypes = PrivilegedAccessHelper.getMethodParameterTypes(attributeTransformationMethod); } Object[] parameters = new Object[parameterTypes.length]; parameters[0] = record; if (parameters.length == 2) { parameters[1] = session; } try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try{ return AccessController.doPrivileged(new PrivilegedMethodInvoker(attributeTransformationMethod, object, parameters)); }catch (PrivilegedActionException ex){ if (ex.getCause() instanceof IllegalArgumentException){ throw (IllegalArgumentException) ex.getCause(); } if (ex.getCause() instanceof 
InvocationTargetException){ throw (InvocationTargetException) ex.getCause(); } throw (RuntimeException) ex.getCause(); } }else { return PrivilegedAccessHelper.invokeMethod(attributeTransformationMethod, object, parameters); } } catch (IllegalAccessException exception) { throw DescriptorException.illegalAccessWhileInvokingAttributeMethod(mapping, exception); } catch (IllegalArgumentException exception) { throw DescriptorException.illegalArgumentWhileInvokingAttributeMethod(mapping, exception); } catch (InvocationTargetException exception) { throw DescriptorException.targetInvocationWhileInvokingAttributeMethod(mapping, exception); } } } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/AttributeTransformerAdapter.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/AttributeTransformerAdapter.jav0000664000000000000000000000321412216173130031137 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.transformers; import org.eclipse.persistence.sessions.Record; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; /** *

Purpose: Provides an empty implementation of AttributeTransformer.
 * Users who do not require the full AttributeTransformer API can subclass this class
 * and implement only the methods required.
 * @see org.eclipse.persistence.mappings.transformers.AttributeTransformer
 * @version $Header: AttributeTransformerAdapter.java 11-jul-2006.10:33:44 gyorke Exp $
 * @author mmacivor
 * @since 10
 */
public class AttributeTransformerAdapter implements AttributeTransformer {

    // no-op by design: subclasses override when initialization is needed
    public void initialize(AbstractTransformationMapping mapping) { }

    // default: no value is built; subclasses override to compute the attribute value
    public Object buildAttributeValue(Record record, Object object, Session session) {
        return null;
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/transformers/ConstantTransformer.java0000664000000000000000000000366212216173130027634 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.mappings.transformers;

import org.eclipse.persistence.sessions.Session;

/**
 *

Purpose: Allows a field to always be mapped to a constant value. * This allows default values to be provided for un-mapped fields. * @see org.eclipse.persistence.mappings.FieldTransformer * @author James Sutherland * @since 10.1.3 */ public class ConstantTransformer extends FieldTransformerAdapter { protected Object value; public ConstantTransformer() { super(); } /** * PUBLIC: * Return a constant transformer for the constant value. */ public ConstantTransformer(Object value) { this.value = value; } /** * PUBLIC: * Return the value of the constant. */ public Object getValue() { return value; } /** * PUBLIC: * Set the value of the constant. */ public void setValue(Object value) { this.value = value; } /** * INTERNAL: * Always return the constant value. */ public Object buildFieldValue(Object object, String fieldName, Session session) { return value; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/AggregateMapping.java0000664000000000000000000014413112216173130024272 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * * 30/05/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing * 09 Jan 2013-2.5 Gordon Yorke * - 397772: JPA 2.1 Entity Graph Support ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.*; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.internal.descriptors.changetracking.AggregateAttributeChangeListener; import org.eclipse.persistence.internal.descriptors.changetracking.AttributeChangeListener; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.DescriptorEventManager; import org.eclipse.persistence.descriptors.DescriptorQueryManager; import org.eclipse.persistence.descriptors.changetracking.ChangeTracker; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.helper.IdentityHashSet; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.sessions.CopyGroup; import org.eclipse.persistence.internal.queries.AttributeItem; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; /** * Purpose: Two objects can be considered to be related by aggregation if there is a strict * 1:1 relationship between the objects. This means that if the source (parent)object exists, then * the target (child or owned) object must exist. 
This class implements the behavior common to the * aggregate object and structure mappings. * * @author Sati * @since TopLink for Java 1.0 */ public abstract class AggregateMapping extends DatabaseMapping { /** Stores a reference class */ protected Class referenceClass; protected String referenceClassName; /** The descriptor of the reference class */ protected ClassDescriptor referenceDescriptor; /** * Indicates whether the mapping (or at least one of its nested mappings, at any nested depth) * references an entity. * To return true the mapping (or nested mapping) should be ForeignReferenceMapping with non-null and non-aggregate reference descriptor. * Lazily initialized. */ protected Boolean hasNestedIdentityReference; /** * Default constructor. */ public AggregateMapping() { super(); this.setWeight(WEIGHT_AGGREGATE); } /** * Make a copy of the sourceQuery for the attribute. */ protected DeleteObjectQuery buildAggregateDeleteQuery(DeleteObjectQuery sourceQuery, Object sourceAttributeValue) { DeleteObjectQuery aggregateQuery = new DeleteObjectQuery(); buildAggregateModifyQuery(sourceQuery, aggregateQuery, sourceAttributeValue); return aggregateQuery; } /** * Initialize the aggregate query with the settings from the source query. */ protected void buildAggregateModifyQuery(ObjectLevelModifyQuery sourceQuery, ObjectLevelModifyQuery aggregateQuery, Object sourceAttributeValue) { // If we are map key mapping we can't build a backupAttributeValue // from a back up clone since a map key mapping does not map a field // on the source queries backup clone. if (sourceQuery.getSession().isUnitOfWork() && ! 
isMapKeyMapping()) { Object backupAttributeValue = getAttributeValueFromBackupClone(sourceQuery.getBackupClone()); if (backupAttributeValue == null) { backupAttributeValue = getObjectBuilder(sourceAttributeValue, sourceQuery.getSession()).buildNewInstance(); } aggregateQuery.setBackupClone(backupAttributeValue); } aggregateQuery.setCascadePolicy(sourceQuery.getCascadePolicy()); aggregateQuery.setObject(sourceAttributeValue); aggregateQuery.setTranslationRow(sourceQuery.getTranslationRow()); aggregateQuery.setSession(sourceQuery.getSession()); aggregateQuery.setProperties(sourceQuery.getProperties()); } /** * Make a copy of the sourceQuery for the attribute. */ protected WriteObjectQuery buildAggregateWriteQuery(WriteObjectQuery sourceQuery, Object sourceAttributeValue) { WriteObjectQuery aggregateQuery = new WriteObjectQuery(); buildAggregateModifyQuery(sourceQuery, aggregateQuery, sourceAttributeValue); return aggregateQuery; } /** * INTERNAL: * Clone the attribute from the clone and assign it to the backup. */ public void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork) { Object attributeValue = getAttributeValueFromObject(clone); setAttributeValueInObject(backup, buildBackupClonePart(attributeValue, unitOfWork)); } /** * INTERNAL: * Build and return a backup clone of the attribute. */ protected Object buildBackupClonePart(Object attributeValue, UnitOfWorkImpl unitOfWork) { if (attributeValue == null) { return null; } return getObjectBuilder(attributeValue, unitOfWork).buildBackupClone(attributeValue, unitOfWork); } /** * INTERNAL: * Clone the attribute from the original and assign it to the clone. 
*/ @Override public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) { Object attributeValue = getAttributeValueFromObject(original); setAttributeValueInObject(clone, buildClonePart(original, clone, cacheKey, attributeValue, refreshCascade, cloningSession)); } /** * INTERNAL: * A combination of readFromRowIntoObject and buildClone. *

* buildClone assumes the attribute value exists on the original and can * simply be copied. *

* readFromRowIntoObject assumes that one is building an original. *

* Both of the above assumptions are false in this method, and actually * attempts to do both at the same time. *

* Extract value from the row and set the attribute to this value in the * working copy clone. * In order to bypass the shared cache when in transaction a UnitOfWork must * be able to populate working copies directly from the row. */ public void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) { // automatically returns a uow result from scratch that doesn't need cloning Object cloneAttributeValue = valueFromRow(databaseRow, joinManager, sourceQuery, sharedCacheKey, executionSession, true, null); setAttributeValueInObject(clone, cloneAttributeValue); } /** * INTERNAL: * Build and return a clone of the attribute. */ protected Object buildClonePart(Object original, Object clone, CacheKey cacheKey, Object attributeValue, Integer refreshCascade, AbstractSession cloningSession) { return buildClonePart(attributeValue, clone, cacheKey, refreshCascade, cloningSession, cloningSession.isUnitOfWork() && ((UnitOfWorkImpl)cloningSession).isOriginalNewObject(original)); } /** * INTERNAL: * Build and return a clone of the attribute. */ protected Object buildClonePart(Object attributeValue, Object clone, CacheKey parentCacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isNewObject) { if (attributeValue == null) { return null; } if (cloningSession.isUnitOfWork() && isNewObject) { // only true if cloningSession is UOW as this signature only exists in this mapping. ((UnitOfWorkImpl)cloningSession).addNewAggregate(attributeValue); } // Do not clone for read-only. if (cloningSession.isUnitOfWork() && cloningSession.isClassReadOnly(attributeValue.getClass())){ return attributeValue; } ObjectBuilder aggregateObjectBuilder = getObjectBuilder(attributeValue, cloningSession); // bug 2612602 as we are building the working copy make sure that we call to correct clone method. 
Object clonedAttributeValue = aggregateObjectBuilder.instantiateWorkingCopyClone(attributeValue, cloningSession); aggregateObjectBuilder.populateAttributesForClone(attributeValue, parentCacheKey, clonedAttributeValue, refreshCascade, cloningSession); //also clone the fetch group reference if applied if (aggregateObjectBuilder.getDescriptor().hasFetchGroupManager()) { aggregateObjectBuilder.getDescriptor().getFetchGroupManager().copyAggregateFetchGroupInto(attributeValue, clonedAttributeValue, clone, cloningSession); } return clonedAttributeValue; } /** * INTERNAL: * Copy of the attribute of the object. * This is NOT used for unit of work but for templatizing an object. */ @Override public void buildCopy(Object copy, Object original, CopyGroup group) { Object attributeValue = getAttributeValueFromObject(original); setAttributeValueInObject(copy, buildCopyOfAttributeValue(attributeValue, group)); } /** * Copy of the attribute of the object. * This is NOT used for unit of work but for templatizing an object. */ protected Object buildCopyOfAttributeValue(Object attributeValue, CopyGroup group) { if (attributeValue == null) { return null; } return getObjectBuilder(attributeValue, group.getSession()).copyObject(attributeValue, group); } /** * INTERNAL: * In case Query By Example is used, this method generates an expression from a attribute value pair. Since * this is an Aggregate mapping, a recursive call is made to the buildExpressionFromExample method of * ObjectBuilder. */ public Expression buildExpression(Object queryObject, QueryByExamplePolicy policy, Expression expressionBuilder, Map processedObjects, AbstractSession session) { String attributeName = this.getAttributeName(); Object attributeValue = this.getRealAttributeValueFromObject(queryObject, session); if (!policy.shouldIncludeInQuery(queryObject.getClass(), attributeName, attributeValue)) { //the attribute name and value pair is not to be included in the query. 
return null; } if (attributeValue == null) { //even though it is null, it is to be always included in the query Expression expression = expressionBuilder.get(attributeName); return policy.completeExpressionForNull(expression); } ObjectBuilder objectBuilder = getReferenceDescriptor().getObjectBuilder(); return objectBuilder.buildExpressionFromExample(attributeValue, policy, expressionBuilder.get(attributeName), processedObjects, session); } /** * INTERNAL: * Build and return a new instance of the specified attribute. * This will be populated by a merge. */ protected Object buildNewMergeInstanceOf(Object sourceAttributeValue, AbstractSession session) { return getObjectBuilder(sourceAttributeValue, session).buildNewInstance(); } /** * INTERNAL: * Cascade perform delete through mappings that require the cascade */ // public void cascadePerformDeleteIfRequired(Object object, UnitOfWork uow, Map visitedObjects){ //objects referenced by this mapping are not registered as they have // no identity, this is a no-op. // } /** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade */ // public void cascadeRegisterNewIfRequired(Object object, UnitOfWork uow, Map visitedObjects){ //aggregate objects are not registeres as they have no identity, this is a no-op. // } /** * INTERNAL: * Compare the attributes. Return true if they are alike. */ protected boolean compareAttributeValues(Object attributeValue1, Object attributeValue2, AbstractSession session) { if ((attributeValue1 == null) && (attributeValue2 == null)) { return true; } if ((attributeValue1 == null) || (attributeValue2 == null)) { return false; } if (attributeValue1.getClass() != attributeValue2.getClass()) { return false; } return getObjectBuilder(attributeValue1, session).compareObjects(attributeValue1, attributeValue2, session); } /** * INTERNAL: * Compare the changes between two aggregates. * Return a change record holding the changes. 
*/ public ChangeRecord compareForChange(Object clone, Object backup, ObjectChangeSet owner, AbstractSession session) { Object cloneAttribute = getAttributeValueFromObject(clone); Object backupAttribute = null; if (!owner.isNew()) { backupAttribute = getAttributeValueFromObject(backup); if ((cloneAttribute == null) && (backupAttribute == null)) { return null;// no change } if ((cloneAttribute != null) && (backupAttribute != null) && (!cloneAttribute.getClass().equals(backupAttribute.getClass()))) { backupAttribute = null; } } AggregateChangeRecord changeRecord = new AggregateChangeRecord(owner); changeRecord.setAttribute(getAttributeName()); changeRecord.setMapping(this); changeRecord.setOldValue(backupAttribute); if (cloneAttribute == null) {// the attribute was set to null changeRecord.setChangedObject(null); return changeRecord; } ObjectBuilder builder = getObjectBuilder(cloneAttribute, session); //if the owner is new then the backup will be null, if the owner is new then the aggregate is new //if the backup is null but the owner is not new then this aggregate is new ObjectChangeSet initialChanges = builder.createObjectChangeSet(cloneAttribute, (UnitOfWorkChangeSet)owner.getUOWChangeSet(), (backupAttribute == null), session); ObjectChangeSet changeSet = builder.compareForChange(cloneAttribute, backupAttribute, (UnitOfWorkChangeSet)owner.getUOWChangeSet(), session); if (changeSet == null) { if (initialChanges.isNew()) { // This happens if original aggregate is of class A, the new aggregate // is of class B (B inherits from A) - and neither A nor B has any mapped attributes. // CR3664 changeSet = initialChanges; } else { return null;// no change } } changeRecord.setChangedObject(changeSet); return changeRecord; } /** * INTERNAL: * Compare the attributes belonging to this mapping for the objects. 
*/ public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) { return compareAttributeValues(getAttributeValueFromObject(firstObject), getAttributeValueFromObject(secondObject), session); } /** * INTERNAL: * Convert all the class-name-based settings in this mapping to actual class-based * settings. This method is used when converting a project that has been built * with class names to a project with classes. * @param classLoader */ @Override public void convertClassNamesToClasses(ClassLoader classLoader) { super.convertClassNamesToClasses(classLoader); if (getReferenceClassName() != null) { try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) { try { setReferenceClass((Class)AccessController.doPrivileged(new PrivilegedClassForName(getReferenceClassName(), true, classLoader))); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(getReferenceClassName(), exception.getException()); } } else { setReferenceClass(org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(getReferenceClassName(), true, classLoader)); } } catch (ClassNotFoundException exc) { throw ValidationException.classNotFoundWhileConvertingClassNames(getReferenceClassName(), exc); } } } /** * INTERNAL: * Execute a descriptor event for the specified event code. */ protected void executeEvent(int eventCode, ObjectLevelModifyQuery query) { ClassDescriptor referenceDescriptor = getReferenceDescriptor(query.getObject(), query.getSession()); // PERF: Avoid events if no listeners. if (referenceDescriptor.getEventManager().hasAnyEventListeners()) { referenceDescriptor.getEventManager().executeEvent(new org.eclipse.persistence.descriptors.DescriptorEvent(eventCode, query)); } } /** * INTERNAL: * An object has been serialized from the server to the remote client. * Replace the transient attributes of the remote value holders * with client-side objects. 
*/ protected void fixAttributeValue(Object attributeValue, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { if (attributeValue == null) { return; } getObjectBuilder(attributeValue, query.getSession()).fixObjectReferences(attributeValue, objectDescriptors, processedObjects, query, session); } /** * INTERNAL: * An object has been serialized from the server to the remote client. * Replace the transient attributes of the remote value holders * with client-side objects. */ public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { Object attributeValue = getAttributeValueFromObject(object); fixAttributeValue(attributeValue, objectDescriptors, processedObjects, query, session); } /** * Return the appropriate attribute value. * This method is a hack to allow the aggregate collection * subclass to override.... */ protected Object getAttributeValueFromBackupClone(Object backupClone) { return getAttributeValueFromObject(backupClone); } /** * Convenience method */ protected ObjectBuilder getObjectBuilderForClass(Class javaClass, AbstractSession session) { return getReferenceDescriptor(javaClass, session).getObjectBuilder(); } /** * Convenience method */ protected ObjectBuilder getObjectBuilder(Object attributeValue, AbstractSession session) { return getReferenceDescriptor(attributeValue, session).getObjectBuilder(); } /** * Convenience method */ protected DescriptorQueryManager getQueryManager(Object attributeValue, AbstractSession session) { return getReferenceDescriptor(attributeValue, session).getQueryManager(); } /** * PUBLIC: * Returns the reference class */ public Class getReferenceClass() { return referenceClass; } /** * INTERNAL: * Used by MW. 
*/ public String getReferenceClassName() { if ((referenceClassName == null) && (referenceClass != null)) { referenceClassName = referenceClass.getName(); } return referenceClassName; } /** * INTERNAL: * Return the referenceDescriptor. This is a descriptor which is associated with the reference class. * NOTE: If you are looking for the descriptor for a specific aggregate object, use * #getReferenceDescriptor(Object). This will ensure you get the right descriptor if the object's * descriptor is part of an inheritance tree. */ public ClassDescriptor getReferenceDescriptor() { return referenceDescriptor; } /** * INTERNAL: * For inheritance purposes. */ protected ClassDescriptor getReferenceDescriptor(Class theClass, AbstractSession session) { if (this.referenceDescriptor.getJavaClass() == theClass) { return this.referenceDescriptor; } ClassDescriptor subDescriptor = session.getDescriptor(theClass); if (subDescriptor == null) { throw DescriptorException.noSubClassMatch(theClass, this); } else { return subDescriptor; } } /** * Convenience method */ protected ClassDescriptor getReferenceDescriptor(Object attributeValue, AbstractSession session) { if (attributeValue == null) { return this.referenceDescriptor; } else { return getReferenceDescriptor(attributeValue.getClass(), session); } } /** * INTERNAL: * Indicates whether the mapping (or at least one of its nested mappings, at any nested depth) * references an entity. * To return true the mapping (or nested mapping) should be ForeignReferenceMapping with non-null and non-aggregate reference descriptor. */ @Override public boolean hasNestedIdentityReference() { if (hasNestedIdentityReference == null) { hasNestedIdentityReference = getReferenceDescriptor().hasNestedIdentityReference(true); } return hasNestedIdentityReference; } /** * INTERNAL: * Initialize the reference descriptor. 
*/ public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); if (getReferenceClass() == null) { throw DescriptorException.referenceClassNotSpecified(this); } setReferenceDescriptor(session.getDescriptor(getReferenceClass())); ClassDescriptor refDescriptor = this.getReferenceDescriptor(); if (refDescriptor == null) { session.getIntegrityChecker().handleError(DescriptorException.descriptorIsMissing(getReferenceClass().getName(), this)); return; } if (refDescriptor.isDescriptorTypeAggregate()) { refDescriptor.checkInheritanceTreeAggregateSettings(session, this); } else { session.getIntegrityChecker().handleError(DescriptorException.referenceDescriptorIsNotAggregate(getReferenceClass().getName(), this)); } } /** * INTERNAL: * Related mapping should implement this method to return true. */ public boolean isAggregateMapping() { return true; } /** * INTERNAL: * Iterate on the appropriate attribute value. */ public void iterate(DescriptorIterator iterator) { iterateOnAttributeValue(iterator, getAttributeValueFromObject(iterator.getVisitedParent())); } /** * Iterate on the specified attribute value. */ protected void iterateOnAttributeValue(DescriptorIterator iterator, Object attributeValue) { iterator.iterateForAggregateMapping(attributeValue, this, getReferenceDescriptor(attributeValue, iterator.getSession())); } /** * Force instantiation of the load group. */ @Override public void load(final Object object, AttributeItem item, final AbstractSession session, final boolean fromFetchGroup) { if (item.getGroup() != null) { Object value = getAttributeValueFromObject(object); if (value != null) { getObjectBuilder(value, session).load(value, item.getGroup(), session, fromFetchGroup); } } } /** * Force instantiation of all indirections. 
*/ @Override public void loadAll(Object object, AbstractSession session, IdentityHashSet loaded) { Object value = getAttributeValueFromObject(object); if (value != null) { getObjectBuilder(value, session).loadAll(value, session); } } /** * Merge the attribute values. */ protected void mergeAttributeValue(Object targetAttributeValue, boolean isTargetUnInitialized, Object sourceAttributeValue, MergeManager mergeManager, AbstractSession targetSession) { // don't merge read-only attributes if (mergeManager.getSession().isClassReadOnly(sourceAttributeValue.getClass())) { return; } if (mergeManager.getSession().isClassReadOnly(targetAttributeValue.getClass())) { return; } // Toggle change tracking during the merge. ClassDescriptor descriptor = getReferenceDescriptor(sourceAttributeValue, mergeManager.getSession()); descriptor.getObjectChangePolicy().dissableEventProcessing(targetAttributeValue); try { descriptor.getObjectBuilder().mergeIntoObject(targetAttributeValue, isTargetUnInitialized, sourceAttributeValue, mergeManager, targetSession); } finally { descriptor.getObjectChangePolicy().enableEventProcessing(targetAttributeValue); } } /** * INTERNAL: * Merge changes from the source to the target object. * With aggregates the merge must cascade to the object changes for the aggregate object * because aggregate objects have no identity outside of themselves. 
* The actual aggregate object does not need to be replaced, because even if the clone references * another aggregate it appears the same to TopLink */ public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { ObjectChangeSet aggregateChangeSet = (ObjectChangeSet)((AggregateChangeRecord)changeRecord).getChangedObject(); if (aggregateChangeSet == null) {// the change was to set the value to null setAttributeValueInObject(target, null); return; } Object sourceAggregate = null; if (source != null) { sourceAggregate = getAttributeValueFromObject(source); } ObjectBuilder objectBuilder = getObjectBuilderForClass(aggregateChangeSet.getClassType(mergeManager.getSession()), mergeManager.getSession()); //Bug#4719341 Always obtain aggregate attribute value from the target object regardless of new or not Object targetAggregate = getAttributeValueFromObject(target); boolean wasOriginalNull = false; if (targetAggregate == null || targetAggregate == sourceAggregate) { targetAggregate = objectBuilder.buildNewInstance(); wasOriginalNull = true; } else { //bug 205939 - use the type from the changeset to determine if a new aggregate instance //is needed because of a class change. The old way of using the sourceAggregate will not //work on a remote system after cache sync because the sourceAggregate will not be available if (aggregateChangeSet.getClassType(mergeManager.getSession()) != targetAggregate.getClass()) { targetAggregate = objectBuilder.buildNewInstance(); wasOriginalNull = true; } } objectBuilder.mergeChangesIntoObject(targetAggregate, aggregateChangeSet, sourceAggregate, mergeManager, targetSession,false, wasOriginalNull); setAttributeValueInObject(target, targetAggregate); } /** * INTERNAL: * Merge changes from the source to the target object. 
This merge is only called when a changeSet for the target * does not exist or the target is uninitialized */ public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) { Object sourceAttributeValue = getAttributeValueFromObject(source); if (sourceAttributeValue == null) { setAttributeValueInObject(target, null); return; } Object targetAttributeValue = getAttributeValueFromObject(target); boolean originalWasNull = targetAttributeValue == null; if (targetAttributeValue == null || targetAttributeValue == sourceAttributeValue || !targetAttributeValue.getClass().equals(sourceAttributeValue.getClass())) { // avoid null-pointer/nothing to merge to - create a new instance // (a new clone cannot be used as all changes must be merged) targetAttributeValue = buildNewMergeInstanceOf(sourceAttributeValue, mergeManager.getSession()); mergeAttributeValue(targetAttributeValue, true, sourceAttributeValue, mergeManager, targetSession); // setting new instance so fire event as if set was called by user. // this call will eventually get passed to updateChangeRecord which will //ensure this new aggregates is fully initialized with listeners. // If merge into the unit of work, must only merge and raise the event is the value changed. 
if ((mergeManager.shouldMergeCloneIntoWorkingCopy() || mergeManager.shouldMergeCloneWithReferencesIntoWorkingCopy()) && !mergeManager.isForRefresh()) { this.descriptor.getObjectChangePolicy().raiseInternalPropertyChangeEvent(target, getAttributeName(), getAttributeValueFromObject(target), targetAttributeValue); } } else { mergeAttributeValue(targetAttributeValue, isTargetUnInitialized, sourceAttributeValue, mergeManager, targetSession); } if(this.descriptor.hasFetchGroupManager()) { FetchGroup sourceFetchGroup = this.descriptor.getFetchGroupManager().getObjectFetchGroup(source); FetchGroup targetFetchGroup = this.descriptor.getFetchGroupManager().getObjectFetchGroup(target); if(targetFetchGroup != null) { if(!targetFetchGroup.isSupersetOf(sourceFetchGroup)) { targetFetchGroup.onUnfetchedAttribute((FetchGroupTracker)target, null); } } else if (originalWasNull && sourceFetchGroup != null){ this.descriptor.getFetchGroupManager().setObjectFetchGroup(target, sourceFetchGroup, targetSession); } } // Must re-set variable to allow for set method to re-morph changes. setAttributeValueInObject(target, targetAttributeValue); } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void postDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!isReadOnly()) { postDeleteAttributeValue(query, getAttributeValueFromObject(query.getObject())); } } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void postDeleteAttributeValue(DeleteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } // PERF: Avoid for simple aggregates. 
ClassDescriptor descriptor = getReferenceDescriptor(attributeValue, query.getSession()); if (descriptor.getObjectBuilder().isSimple() && !descriptor.getEventManager().hasAnyEventListeners()) { return; } DeleteObjectQuery aggregateQuery = buildAggregateDeleteQuery(query, attributeValue); descriptor.getQueryManager().postDelete(aggregateQuery); executeEvent(DescriptorEventManager.PostDeleteEvent, aggregateQuery); } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void postInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!isReadOnly()) { postInsertAttributeValue(query, getAttributeValueFromObject(query.getObject())); } } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void postInsertAttributeValue(WriteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } // PERF: Avoid for simple aggregates. ClassDescriptor descriptor = getReferenceDescriptor(attributeValue, query.getSession()); if (descriptor.getObjectBuilder().isSimple() && !descriptor.getEventManager().hasAnyEventListeners()) { return; } WriteObjectQuery aggregateQuery = buildAggregateWriteQuery(query, attributeValue); descriptor.getQueryManager().postInsert(aggregateQuery); executeEvent(DescriptorEventManager.PostInsertEvent, aggregateQuery); // aggregates do not actually use a query to write to the database so the post write must be called here executeEvent(DescriptorEventManager.PostWriteEvent, aggregateQuery); } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void postUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!isReadOnly()) { postUpdateAttributeValue(query, getAttributeValueFromObject(query.getObject())); } } /** * INTERNAL: * The message is passed to its reference class descriptor. 
*/ public void postUpdateAttributeValue(WriteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } // PERF: Avoid for simple aggregates. AbstractSession session = query.getSession(); ClassDescriptor descriptor = getReferenceDescriptor(attributeValue, session); if (descriptor.getObjectBuilder().isSimple() && !descriptor.getEventManager().hasAnyEventListeners()) { return; } ObjectChangeSet changeSet = null; UnitOfWorkChangeSet uowChangeSet = null; if (session.isUnitOfWork() && (((UnitOfWorkImpl)session).getUnitOfWorkChangeSet() != null)) { uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)session).getUnitOfWorkChangeSet(); changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(attributeValue); } WriteObjectQuery aggregateQuery = buildAggregateWriteQuery(query, attributeValue); aggregateQuery.setObjectChangeSet(changeSet); descriptor.getQueryManager().postUpdate(aggregateQuery); executeEvent(DescriptorEventManager.PostUpdateEvent, aggregateQuery); // aggregates do not actually use a query to write to the database so the post write must be called here executeEvent(DescriptorEventManager.PostWriteEvent, aggregateQuery); } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!isReadOnly()) { preDeleteAttributeValue(query, getAttributeValueFromObject(query.getObject())); } } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void preDeleteAttributeValue(DeleteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } // PERF: Avoid for simple aggregates. 
AbstractSession session = query.getSession(); ClassDescriptor descriptor = getReferenceDescriptor(attributeValue, session); if (descriptor.getObjectBuilder().isSimple() && !descriptor.getEventManager().hasAnyEventListeners()) { return; } DeleteObjectQuery aggregateQuery = buildAggregateDeleteQuery(query, attributeValue); executeEvent(DescriptorEventManager.PreDeleteEvent, aggregateQuery); descriptor.getQueryManager().preDelete(aggregateQuery); } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void preInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!isReadOnly()) { preInsertAttributeValue(query, getAttributeValueFromObject(query.getObject())); } } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void preInsertAttributeValue(WriteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } // PERF: Avoid for simple aggregates. 
AbstractSession session = query.getSession(); ClassDescriptor descriptor = getReferenceDescriptor(attributeValue, session); if (descriptor.getObjectBuilder().isSimple() && !descriptor.getEventManager().hasAnyEventListeners()) { return; } WriteObjectQuery aggregateQuery = buildAggregateWriteQuery(query, attributeValue); ObjectChangeSet changeSet = null; if (session.isUnitOfWork() && (((UnitOfWorkImpl)session).getUnitOfWorkChangeSet() != null)) { UnitOfWorkChangeSet uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)session).getUnitOfWorkChangeSet(); changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(aggregateQuery.getObject()); } aggregateQuery.setObjectChangeSet(changeSet); // aggregates do not actually use a query to write to the database so the pre-write must be called here if (changeSet == null) {// then we didn't fire events at calculations executeEvent(DescriptorEventManager.PreWriteEvent, aggregateQuery); executeEvent(DescriptorEventManager.PreInsertEvent, aggregateQuery); } descriptor.getQueryManager().preInsert(aggregateQuery); } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void preUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!isReadOnly()) { preUpdateAttributeValue(query, getAttributeValueFromObject(query.getObject())); } } /** * INTERNAL: * The message is passed to its reference class descriptor. */ public void preUpdateAttributeValue(WriteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } // PERF: Avoid for simple aggregates. 
AbstractSession session = query.getSession(); ClassDescriptor descriptor = getReferenceDescriptor(attributeValue, session); if (descriptor.getObjectBuilder().isSimple() && !descriptor.getEventManager().hasAnyEventListeners()) { return; } WriteObjectQuery aggregateQuery = buildAggregateWriteQuery(query, attributeValue); ObjectChangeSet changeSet = null; UnitOfWorkChangeSet uowChangeSet = null; if (session.isUnitOfWork() && (((UnitOfWorkImpl)session).getUnitOfWorkChangeSet() != null)) { uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)session).getUnitOfWorkChangeSet(); changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(aggregateQuery.getObject()); } aggregateQuery.setObjectChangeSet(changeSet); // aggregates do not actually use a query to write to the database so the pre-write must be called here if (changeSet == null) {// then we didn't fire events at calculations executeEvent(DescriptorEventManager.PreWriteEvent, aggregateQuery); executeEvent(DescriptorEventManager.PreUpdateEvent, aggregateQuery); } descriptor.getQueryManager().preUpdate(aggregateQuery); } /** * INTERNAL: * Once a descriptor is serialized to the remote session, all its mappings and reference descriptors are traversed. * Usually the mappings are initialized and the serialized reference descriptors are replaced with local descriptors * if they already exist in the remote session. */ public void remoteInitialization(DistributedSession session) { super.remoteInitialization(session); ClassDescriptor refDescriptor = getReferenceDescriptor(); if (session.hasCorrespondingDescriptor(refDescriptor)) { ClassDescriptor correspondingDescriptor = session.getDescriptorCorrespondingTo(refDescriptor); setReferenceDescriptor(correspondingDescriptor); } else { session.privilegedAddDescriptor(refDescriptor); refDescriptor.remoteInitialization(session); } } /** * PUBLIC: * This is a reference class whose instances this mapping will store in the domain objects. 
*/ public void setReferenceClass(Class aClass) { referenceClass = aClass; } /** * INTERNAL: * Used by MW. */ public void setReferenceClassName(String aClassName) { referenceClassName = aClassName; } /** * INTERNAL: * Set the referenceDescriptor. This is a descriptor which is associated with * the reference class. */ protected void setReferenceDescriptor(ClassDescriptor aDescriptor) { referenceDescriptor = aDescriptor; } /** * INTERNAL: * Either create a new change record or update the change record with the new value. * This is used by attribute change tracking. */ public void updateChangeRecord(Object sourceClone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) throws DescriptorException { //This method will be called when either the referenced aggregate has //been changed or a component of the referenced aggregate has been changed //this case is determined by the value of the sourceClone boolean isNewRecord = false; AggregateChangeRecord changeRecord = (AggregateChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName()); if (changeRecord == null){ changeRecord = new AggregateChangeRecord(objectChangeSet); changeRecord.setAttribute(this.getAttributeName()); changeRecord.setMapping(this); objectChangeSet.addChange(changeRecord); isNewRecord = true; } if ( sourceClone.getClass().equals(objectChangeSet.getClassType(uow)) ) { if (isNewRecord) { changeRecord.setOldValue(oldValue); } // event was fired on the parent to the aggregate, the attribute value changed. 
ClassDescriptor referenceDescriptor = getReferenceDescriptor(newValue, uow); if ( newValue == null ) { // attribute set to null changeRecord.setChangedObject(null); if (referenceDescriptor.getObjectChangePolicy().isAttributeChangeTrackingPolicy()){ if(((ChangeTracker)oldValue)._persistence_getPropertyChangeListener() != null) { //need to detach listener ((AggregateAttributeChangeListener)((ChangeTracker)oldValue)._persistence_getPropertyChangeListener()).setParentListener(null); } } return; }else{ // attribute set to new aggregate UnitOfWorkChangeSet uowChangeSet = (UnitOfWorkChangeSet)objectChangeSet.getUOWChangeSet(); //force comparison change detection to build changeset. ObjectChangeSet aggregateChangeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(newValue); if (aggregateChangeSet != null) { aggregateChangeSet.clear(true); // old differences must be thrown away because difference is between old value and new value } //make sure the listener is initialized if (referenceDescriptor.getObjectChangePolicy().isAttributeChangeTrackingPolicy()){ if(oldValue != null && ((ChangeTracker)oldValue)._persistence_getPropertyChangeListener() != null) { //need to detach listener ((AggregateAttributeChangeListener)((ChangeTracker)oldValue)._persistence_getPropertyChangeListener()).setParentListener(null); } //need to attach new listener. 
AggregateAttributeChangeListener newListener = (AggregateAttributeChangeListener)((ChangeTracker)newValue)._persistence_getPropertyChangeListener(); if (newListener == null){ newListener = new AggregateAttributeChangeListener(referenceDescriptor, uow, ((AttributeChangeListener)((ChangeTracker)sourceClone)._persistence_getPropertyChangeListener()), this.getAttributeName(), newValue); ((ChangeTracker)newValue)._persistence_setPropertyChangeListener(newListener); } newListener.setParentListener((AttributeChangeListener)((ChangeTracker)sourceClone)._persistence_getPropertyChangeListener()); if (changeRecord.getChangedObject() != null && changeRecord.getChangedObject().hasChanges()) { // the oldValue has been already changed - get the original oldValue. oldValue = changeRecord.getOldValue(); } if (oldValue != null) { if(referenceDescriptor != getReferenceDescriptor(oldValue, uow)) { // oldValue and newValue belong to different types - have to start from scratch. oldValue = null; } } } //force comparison change detection to build changeset. changeRecord.setChangedObject(referenceDescriptor.getObjectChangePolicy().createObjectChangeSetThroughComparison(newValue,oldValue, uowChangeSet, (oldValue == null), uow, referenceDescriptor)); // process nested aggregates for(DatabaseMapping mapping : referenceDescriptor.getMappings()) { if(mapping.isAggregateObjectMapping()) { Object nestedNewValue = mapping.getAttributeValueFromObject(newValue); Object nestedOldValue = null; if(oldValue != null) { nestedOldValue = mapping.getAttributeValueFromObject(oldValue); } mapping.updateChangeRecord(newValue, nestedNewValue, nestedOldValue, (org.eclipse.persistence.internal.sessions.ObjectChangeSet)changeRecord.getChangedObject(), uow); } } referenceDescriptor.getObjectChangePolicy().setChangeSetOnListener((ObjectChangeSet)changeRecord.getChangedObject(), newValue); } } else { //a value was set on the aggregate but the aggregate was not changed. 
if (referenceDescriptor.getObjectChangePolicy().isAttributeChangeTrackingPolicy()){ //The aggregate that is referenced is Attribute Change Tracked as well. changeRecord.setChangedObject(((AggregateAttributeChangeListener)((ChangeTracker)sourceClone)._persistence_getPropertyChangeListener()).getObjectChangeSet()); } else { // not tracked at attribute level, lets force build a changeset then. changeRecord.setChangedObject(referenceDescriptor.getObjectChangePolicy().createObjectChangeSetThroughComparison(sourceClone, null, (UnitOfWorkChangeSet)objectChangeSet.getUOWChangeSet(), true, uow, referenceDescriptor)); } } } /** * INTERNAL: * Return whether the specified object and all its components have been deleted. */ public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException { return verifyDeleteOfAttributeValue(getAttributeValueFromObject(object), session); } /** * INTERNAL: * Return whether the specified object and all its components have been deleted. */ protected boolean verifyDeleteOfAttributeValue(Object attributeValue, AbstractSession session) throws DatabaseException { if (attributeValue == null) { return true; } for (Enumeration mappings = getReferenceDescriptor(attributeValue, session).getMappings().elements(); mappings.hasMoreElements();) { DatabaseMapping mapping = (DatabaseMapping)mappings.nextElement(); if (!mapping.verifyDelete(attributeValue, session)) { return false; } } return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/DirectToFieldMapping.java0000664000000000000000000000460512216173130025066 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 *     11/10/2011-2.4 Guy Pelletier
 *       - 357474: Address primaryKey option from tenant discriminator column
 ******************************************************************************/
package org.eclipse.persistence.mappings;

import org.eclipse.persistence.internal.helper.DatabaseField;
import org.eclipse.persistence.mappings.foundation.AbstractDirectMapping;
import org.eclipse.persistence.internal.sessions.AbstractRecord;

/**
 * <b>Purpose</b>: Maps an attribute to the corresponding database field type.
 * The list of field types that are supported by TopLink's direct to field mapping
 * is dependent on the relational database being used.
 * A converter can be used to convert between the object and data type if they do not match.
 *
 * @see org.eclipse.persistence.mappings.converters.Converter
 * @see org.eclipse.persistence.mappings.converters.ObjectTypeConverter
 * @see org.eclipse.persistence.mappings.converters.TypeConversionConverter
 * @see org.eclipse.persistence.mappings.converters.SerializedObjectConverter
 *
 * @author Sati
 * @since TopLink/Java 1.0
 */
public class DirectToFieldMapping extends AbstractDirectMapping implements RelationalMapping {

    /**
     * Default constructor.
     */
    public DirectToFieldMapping() {
        super();
    }

    /**
     * INTERNAL:
     * Identify this mapping as relational so framework code can branch on mapping kind.
     */
    public boolean isRelationalMapping() {
        return true;
    }

    /**
     * PUBLIC:
     * Set the field name in the mapping.
     *
     * @param fieldName the name of the database column this attribute maps to
     */
    // Parameter renamed from "FieldName" to "fieldName" to follow Java lowerCamelCase
    // convention; parameter names are not part of the method's binary or source interface,
    // so callers are unaffected.
    public void setFieldName(String fieldName) {
        setField(new DatabaseField(fieldName));
    }

    /**
     * INTERNAL:
     * Write this mapping's value into the row.
     * NOTE(review): the passed {@code field} parameter is ignored and {@code getField()}
     * is used instead - presumably intentional for this mapping, but verify against
     * overriding/overridden implementations.
     */
    protected void writeValueIntoRow(AbstractRecord row, DatabaseField field, Object fieldValue) {
        row.add(getField(), fieldValue);
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/0000775000000000000000000000000012216174372022400 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/MapKeyMapping.java0000664000000000000000000002477012216173130025746 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
*
 * Contributors:
 *     tware - initial API check-in for MappedKeyMapContainerPolicy
 ******************************************************************************/
package org.eclipse.persistence.mappings.foundation;

import java.util.List;
import java.util.Map;
import java.util.Set;

import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.exceptions.DescriptorException;
import org.eclipse.persistence.expressions.Expression;
import org.eclipse.persistence.internal.descriptors.DescriptorIterator;
import org.eclipse.persistence.internal.helper.DatabaseField;
import org.eclipse.persistence.internal.helper.DatabaseTable;
import org.eclipse.persistence.internal.identitymaps.CacheKey;
import org.eclipse.persistence.internal.queries.ContainerPolicy;
import org.eclipse.persistence.internal.queries.JoinedAttributeManager;
import org.eclipse.persistence.internal.queries.MappedKeyMapContainerPolicy;
import org.eclipse.persistence.internal.sessions.AbstractRecord;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.internal.sessions.MergeManager;
import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl;
import org.eclipse.persistence.mappings.querykeys.QueryKey;
import org.eclipse.persistence.queries.ObjectBuildingQuery;
import org.eclipse.persistence.queries.ObjectLevelReadQuery;
import org.eclipse.persistence.queries.ReadQuery;

/**
 * MapKeyMapping is implemented by DatabaseMappings that can be used to map the key in a map
 * that uses a MappedKeyMapContainerPolicy. This interface provides the facilities to retrieve data
 * for the key from the database, to get data from the object to put in the database, and to appropriately
 * initialize the mappings.
 *
 * @see MappedKeyMapContainerPolicy
 * @see AbstractDirectMapping
 * @see AggregateObjectMapping
 * @see OneToOneMapping
 * @author tware
 */
public interface MapKeyMapping extends MapComponentMapping {

    /**
     * INTERNAL:
     * Used when initializing queries for mappings that use a Map.
     * Called when the selection query is being initialized to add the fields for the map key to the query.
     */
    public void addAdditionalFieldsToQuery(ReadQuery selectionQuery, Expression baseExpression);

    /**
     * INTERNAL:
     * Used when initializing queries for mappings that use a Map.
     * Called when the insert query is being initialized to ensure the fields for the map key are in the insert query.
     */
    public void addFieldsForMapKey(AbstractRecord joinRow);

    /**
     * INTERNAL:
     * For mappings used as MapKeys in MappedKeyContainerPolicy: add the target of this mapping to the
     * deleted objects list if necessary.
     *
     * This method is used for removal of private owned relationships.
     *
     * @param object the key object to consider for deletion
     * @param deletedObjects the accumulating map of objects to delete
     */
    public void addKeyToDeletedObjectsList(Object object, Map deletedObjects);

    /**
     * Build a clone of the given element in a unitOfWork.
     *
     * @param element the key element to clone
     * @param parent the cloned element's parent
     * @param isExisting whether the element already exists in the cloning session
     * @return the cloned element
     */
    public Object buildElementClone(Object element, Object parent, CacheKey cacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isExisting, boolean isFromSharedCache);

    /**
     * INTERNAL:
     * Depending on the MapKeyMapping, a different selection query may be required to retrieve the
     * map when the map is based on a DirectCollectionMapping.
     *
     * @return the selection query to use for a direct-collection-backed map
     */
    public ReadQuery buildSelectionQueryForDirectCollectionKeyMapping(ContainerPolicy containerPolicy);

    /**
     * INTERNAL:
     * Cascade discover and persist new objects during commit to the map key.
     */
    public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, boolean getAttributeValueFromObject, Set cascadeErrors);

    /**
     * INTERNAL:
     * Cascade perform delete through mappings that require the cascade.
     */
    public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject);

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade.
     */
    public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject);

    /**
     * INTERNAL:
     * Called when a DatabaseMapping is used to map the key in a collection and a join query is used.
     *
     * @return the key built from the joined row
     */
    public Object createMapComponentFromJoinedRow(AbstractRecord dbRow, JoinedAttributeManager joinManager, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected);

    /**
     * INTERNAL:
     * Create a query key that links to the map key.
     *
     * @return the query key for the map key
     */
    public QueryKey createQueryKeyForMapKey();

    /**
     * INTERNAL:
     * Creates the Array of simple types used to recreate this map.
     */
    public Object createSerializableMapKeyInfo(Object key, AbstractSession session);

    /**
     * INTERNAL:
     * Create an instance of the Key object from the key information extracted from the map.
     * This may return the value directly in case of a simple key or will be used as the FK to load a related entity.
     */
    public List createMapComponentsFromSerializableKeyInfo(Object[] keyInfo, AbstractSession session);

    /**
     * INTERNAL:
     * Create an instance of the Key object from the key information extracted from the map.
     * This key object may be a shallow stub of the actual object if the key is an Entity type.
     */
    public Object createStubbedMapComponentFromSerializableKeyInfo(Object keyInfo, AbstractSession session);

    /**
     * INTERNAL:
     * For mappings used as MapKeys in MappedKeyContainerPolicy: delete the passed object if necessary.
     *
     * This method is used for removal of private owned relationships.
     *
     * @param objectDeleted the key object to delete
     * @param session the session to delete in
     */
    public void deleteMapKey(Object objectDeleted, AbstractSession session);

    /**
     * INTERNAL:
     * Return any tables that will be required when this mapping is used as part of a join query.
     *
     * @return the additional tables required for joining
     */
    public List getAdditionalTablesForJoinQuery();

    /**
     * INTERNAL:
     * Get all the fields for the map key.
     */
    public List getAllFieldsForMapKey();

    /**
     * INTERNAL:
     * Return a Map of any foreign keys defined within the the MapKey.
     *
     * @return the foreign key fields for the map key
     */
    public Map getForeignKeyFieldsForMapKey();

    /**
     * INTERNAL:
     * Get the descriptor for the Map Key.
     *
     * @return the reference descriptor of the key
     */
    public ClassDescriptor getReferenceDescriptor();

    /**
     * INTERNAL:
     * Return the fields that make up the identity of the mapped object. For mappings with
     * a primary key, it will be the set of fields in the primary key. For mappings without
     * a primary key it will likely be all the fields.
     *
     * @return the identity fields of the map key
     */
    public List getIdentityFieldsForMapKey();

    /**
     * INTERNAL:
     * Return the query that is used when this mapping is part of a joined relationship.
     *
     * @return the nested join query
     */
    public ObjectLevelReadQuery getNestedJoinQuery(JoinedAttributeManager joinManager, ObjectLevelReadQuery query, AbstractSession session);

    /**
     * INTERNAL:
     * Return the selection criteria necessary to select the target object.
     *
     * @return the additional selection criteria expression
     */
    public Expression getAdditionalSelectionCriteriaForMapKey();

    /**
     * INTERNAL:
     * If required, get the targetVersion of the source object from the merge manager.
     *
     * @return the target version of the source object
     */
    public Object getTargetVersionOfSourceObject(Object object, Object parent, MergeManager mergeManager, AbstractSession targetSession);

    /**
     * INTERNAL:
     * Return the class this key mapping maps or the descriptor for it.
     *
     * @return the key's target class or descriptor
     */
    public Object getMapKeyTargetType();

    /**
     * INTERNAL:
     * Called when iterating through descriptors to handle iteration on this mapping when it is used as a MapKey.
     *
     * @param iterator the descriptor iterator driving the traversal
     * @param element the key element being iterated
     */
    public void iterateOnMapKey(DescriptorIterator iterator, Object element);

    /**
     * INTERNAL:
     * Extract the fields for the Map key from the object to use in a query.
     *
     * @return the identity field values keyed by field
     */
    public Map extractIdentityFieldsForQuery(Object key, AbstractSession session);

    /**
     * INTERNAL:
     * Make any mapping changes necessary to use the mapping as a map key prior to initializing the mapping.
     */
    public void preinitializeMapKey(DatabaseTable table) throws DescriptorException;

    /**
     * INTERNAL:
     * Make any mapping changes necessary to use the mapping as a map key after initializing the mapping.
     */
    public void postInitializeMapKey(MappedKeyMapContainerPolicy policy) throws DescriptorException;

    /**
     * INTERNAL:
     * Return whether this mapping requires extra queries to update the rows if it is
     * used as a key in a map. This will typically be true if there are any parts to this mapping
     * that are not read-only.
     */
    public boolean requiresDataModificationEventsForMapKey();

    /**
     * INTERNAL:
     * Allow the key mapping to unwrap the object.
     *
     * @param key the wrapped key
     * @param session the active session
     * @return the unwrapped key
     */
    public Object unwrapKey(Object key, AbstractSession session);

    /**
     * INTERNAL:
     * Allow the key mapping to wrap the object.
     *
     * @param key the raw key
     * @param session the active session
     * @return the wrapped key
     */
    public Object wrapKey(Object key, AbstractSession session);
}
././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractCompositeDirectCollectionMapping.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractCompositeDirectCollection0000664000000000000000000007277112216173130031125 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 06/03/2013-2.5.1 Guy Pelletier * - 402380: 3 jpa21/advanced tests failed on server with * "java.lang.NoClassDefFoundError: org/eclipse/persistence/testing/models/jpa21/advanced/enums/Gender" ******************************************************************************/ package org.eclipse.persistence.mappings.foundation; import java.util.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.mappings.converters.*; import org.eclipse.persistence.mappings.structures.ArrayCollectionMapping; import org.eclipse.persistence.mappings.structures.ArrayCollectionMappingHelper; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.sessions.CopyGroup; /** * AbstractCompositeDirectCollectionMapping consolidates the behavior of mappings that * map collections of "native" data objects (e.g. Strings). * These are objects that do not have their own descriptor and repeat within the XML record * for the containing object. * * @author Big Country * @since TOPLink/Java 3.0 */ public abstract class AbstractCompositeDirectCollectionMapping extends DatabaseMapping implements ContainerMapping, ArrayCollectionMapping { /** This is the field holding the nested collection. 
*/ protected DatabaseField field; /** This is the "data type" associated with each element in the nested collection. Depending on the data store, this could be optional. */ protected String elementDataTypeName; /** Allows user defined conversion between the object value and the database value. */ protected Converter valueConverter; /** This determines the type of container used to hold the nested collection in the object. */ private ContainerPolicy containerPolicy; /** * Default constructor. */ public AbstractCompositeDirectCollectionMapping() { super(); this.containerPolicy = ContainerPolicy.buildDefaultPolicy(); this.elementDataTypeName = ""; this.setWeight(WEIGHT_AGGREGATE); } /** * PUBLIC: * Return the converter on the mapping. * A converter can be used to convert between the direct collection's object value and database value. */ public Converter getValueConverter() { return valueConverter; } /** * PUBLIC: * Indicates if there is a converter on the mapping. */ public boolean hasValueConverter() { return getValueConverter() != null; } /** * PUBLIC: * Set the converter on the mapping. * A converter can be used to convert between the direct collection's object value and database value. */ public void setValueConverter(Converter valueConverter) { this.valueConverter = valueConverter; } /** * INTERNAL: * Build and return a new element based on the change set. */ public Object buildAddedElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession) { return this.buildElementFromChangeSet(changeSet, mergeManager, targetSession); } /** * INTERNAL: * Clone the attribute from the clone and assign it to the backup. * For these mappings, this is the same as building the first clone. */ public void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork) { this.buildClone(clone, null, backup, null, unitOfWork); } /** * INTERNAL: * Build and return a change set for the specified element. 
* Direct collections simply store the element itself, since it is immutable. */ public Object buildChangeSet(Object element, ObjectChangeSet owner, AbstractSession session) { return element; } /** * INTERNAL: * Clone the attribute from the original and assign it to the clone. */ @Override public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) { Object attributeValue = this.getAttributeValueFromObject(original); this.setAttributeValueInObject(clone, this.buildClonePart(attributeValue, cacheKey, cloningSession)); } /** * INTERNAL: * Extract value from the row and set the attribute to this value in the * working copy clone. * In order to bypass the shared cache when in transaction a UnitOfWork must * be able to populate working copies directly from the row. */ public void buildCloneFromRow(AbstractRecord row, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) { // for direct collection a cloned value is no different from an original value Object cloneAttributeValue = valueFromRow(row, joinManager, sourceQuery, sharedCacheKey, executionSession, true, new Boolean[1]); setAttributeValueInObject(clone, cloneAttributeValue); } /** * Build and return a clone of the specified attribute value. */ protected Object buildClonePart(Object attributeValue, CacheKey parentCacheKey, AbstractSession cloningSession) { if (attributeValue == null) { return this.getContainerPolicy().containerInstance(); } else { if ((getValueConverter() == null) || (!getValueConverter().isMutable())) { return this.getContainerPolicy().cloneFor(attributeValue); } // Clone the values of the collection as well. 
Object cloneContainer = this.getContainerPolicy().containerInstance(); Object iterator = this.getContainerPolicy().iteratorFor(attributeValue); while (this.getContainerPolicy().hasNext(iterator)) { Object originalValue = this.getContainerPolicy().next(iterator, cloningSession); // Bug 4182377 - there was a typo in the conversion logic Object cloneValue = getValueConverter().convertDataValueToObjectValue(getValueConverter().convertObjectValueToDataValue(originalValue, cloningSession), cloningSession); this.getContainerPolicy().addInto(cloneValue, cloneContainer, cloningSession); } return cloneContainer; } } /** * INTERNAL: * Copy of the attribute of the object. * This is NOT used for unit of work but for templatizing an object. */ @Override public void buildCopy(Object copy, Object original, CopyGroup group) { Object attributeValue = getAttributeValueFromObject(original); if (attributeValue == null) { attributeValue = getContainerPolicy().containerInstance(); } else { attributeValue = getContainerPolicy().cloneFor(attributeValue); } setAttributeValueInObject(copy, attributeValue); } /** * Build and return a new element based on the change set. * Direct collections simply store the element itself, since it is immutable. */ protected Object buildElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession) { return changeSet; } /** * INTERNAL: * Build and return a new element based on the specified element. * Direct collections simply return the element itself, since it is immutable. */ public Object buildElementFromElement(Object object, MergeManager mergeManager, AbstractSession targetSession) { return object; } /** * INTERNAL: * Build and return a new element based on the change set. 
*/ public Object buildRemovedElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession) { return this.buildElementFromChangeSet(changeSet, mergeManager, targetSession); } /** * INTERNAL: * Cascade perform delete through mappings that require the cascade */ public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { //objects referenced by this mapping are not registered as they have // no identity, this is a no-op. } /** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade */ public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { //objects referenced by this mapping are not registered as they have // no identity, this is a no-op. } /** * Return the fields handled by the mapping. */ protected Vector collectFields() { Vector fields = new Vector(1); fields.addElement(this.getField()); return fields; } /** * INTERNAL: * Compare the non-null elements. Return true if they are alike. * Use #equals() to determine if two elements are the same. */ public boolean compareElements(Object element1, Object element2, AbstractSession session) { return element1.equals(element2); } /** * INTERNAL: * Compare the non-null elements and return true if they are alike. */ public boolean compareElementsForChange(Object element1, Object element2, AbstractSession session) { return this.compareElements(element1, element2, session); } protected ChangeRecord convertToChangeRecord(Object cloneCollection, ObjectChangeSet owner, AbstractSession session) { //since a minimal update for composites can't be done, we are only recording //an all-or-none change. Therefore, this can be treated as a simple direct //value. 
ContainerPolicy cp = this.getContainerPolicy(); Object container = cp.containerInstance(); Object iter = cp.iteratorFor(cloneCollection); while (cp.hasNext(iter)) { cp.addInto(cp.next(iter, session), container, session); } DirectToFieldChangeRecord changeRecord = new DirectToFieldChangeRecord(owner); changeRecord.setAttribute(getAttributeName()); changeRecord.setMapping(this); changeRecord.setNewValue(container); return changeRecord; } /** * INTERNAL: * An object has been serialized from the server to the client. * Replace the transient attributes of the remote value holders * with client-side objects. */ public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { // Do nothing.... // The nested collection should de-serialize without need for any further manipulation. } /** * PUBLIC: * Return the class each element in the object's * collection should be converted to, before the collection * is inserted into the object. * This is optional - if left null, the elements will be added * to the object's collection unconverted. */ public Class getAttributeElementClass() { if (!(getValueConverter() instanceof TypeConversionConverter)) { return null; } return ((TypeConversionConverter)getValueConverter()).getObjectClass(); } /** * INTERNAL: * Return the mapping's containerPolicy. */ public ContainerPolicy getContainerPolicy() { return containerPolicy; } /** * INTERNAL: * Return the field that holds the nested collection. */ public DatabaseField getField() { return field; } /** * INTERNAL: */ public boolean isAbstractCompositeDirectCollectionMapping() { return true; } /** * PUBLIC: * Return the class each element in the database row's * collection should be converted to, before the collection * is inserted into the database. * This is optional - if left null, the elements will be added * to the database row's collection unconverted. 
*/ public Class getFieldElementClass() { if (!(getValueConverter() instanceof TypeConversionConverter)) { return null; } return ((TypeConversionConverter)getValueConverter()).getDataClass(); } /** * PUBLIC: * Return the name of the field that holds the nested collection. */ public String getFieldName() { return this.getField().getName(); } /** * INTERNAL: * Convenience method. * Return the value of an attribute, unwrapping value holders if necessary. * If the value is null, build a new container. */ public Object getRealCollectionAttributeValueFromObject(Object object, AbstractSession session) throws DescriptorException { Object value = this.getRealAttributeValueFromObject(object, session); if (value == null) { value = this.getContainerPolicy().containerInstance(1); } return value; } /** * INTERNAL: * Initialize the mapping. */ public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); if (getField() == null) { throw DescriptorException.fieldNameNotSetInMapping(this); } setField(getDescriptor().buildField(getField())); setFields(collectFields()); if (getValueConverter() != null) { getValueConverter().initialize(this, session); } } /** * INTERNAL: * Iterate on the appropriate attribute value. */ public void iterate(DescriptorIterator iterator) { // PERF: Only iterate when required. if (iterator.shouldIterateOnPrimitives()) { Object attributeValue = this.getAttributeValueFromObject(iterator.getVisitedParent()); if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { iterator.iteratePrimitiveForMapping(cp.next(iter, iterator.getSession()), this); } } } /** * INTERNAL: * Return whether the element's user-defined Map key has changed * since it was cloned from the original version. * Direct elements are not allowed to have keys. 
*/ public boolean mapKeyHasChanged(Object element, AbstractSession session) { return false; } /** * PUBLIC: * Set the class each element in the object's * collection should be converted to, before the collection * is inserted into the object. * This is optional - if left null, the elements will be added * to the object's collection unconverted. */ public void setAttributeElementClass(Class attributeElementClass) { TypeConversionConverter converter; if (getValueConverter() instanceof TypeConversionConverter) { converter = (TypeConversionConverter)getValueConverter(); } else { converter = new TypeConversionConverter(); setValueConverter(converter); } converter.setObjectClass(attributeElementClass); } /** * PUBLIC: * Set the class each element in the object's * collection should be converted to, before the collection * is inserted into the object. * This is optional - if left null, the elements will be added * to the object's collection unconverted. */ public void setAttributeElementClassName(String attributeElementClass) { TypeConversionConverter converter; if (getValueConverter() instanceof TypeConversionConverter) { converter = (TypeConversionConverter)getValueConverter(); } else { converter = new TypeConversionConverter(); setValueConverter(converter); } converter.setObjectClassName(attributeElementClass); } /** * ADVANCED: * Set the mapping's containerPolicy. */ public void setContainerPolicy(ContainerPolicy containerPolicy) { this.containerPolicy = containerPolicy; } /** * Set the field that holds the nested collection. */ public void setField(DatabaseField field) { this.field = field; } /** * PUBLIC: * Set the class each element in the database row's * collection should be converted to, before the collection * is inserted into the database. * This is optional - if left null, the elements will be added * to the database row's collection unconverted. 
*/ public void setFieldElementClass(Class fieldElementClass) { TypeConversionConverter converter; if (getValueConverter() instanceof TypeConversionConverter) { converter = (TypeConversionConverter)getValueConverter(); } else { converter = new TypeConversionConverter(); setValueConverter(converter); } converter.setDataClass(fieldElementClass); } /** * PUBLIC: * Configure the mapping to use an instance of the specified container class * to hold the nested objects. *

jdk1.2.x: The container class must implement (directly or indirectly) the Collection interface. *

jdk1.1.x: The container class must be a subclass of Vector. */ public void useCollectionClass(Class concreteClass) { this.setContainerPolicy(ContainerPolicy.buildPolicyFor(concreteClass)); } /** * INTERNAL: * Used to set the collection class by name. * This is required when building from metadata to allow the correct class loader to be used. */ public void useCollectionClassName(String concreteClassName) { setContainerPolicy(new CollectionContainerPolicy(concreteClassName)); } /** * INTERNAL: * Used to set the collection class by name. * This is required when building from metadata to allow the correct class loader to be used. */ public void useListClassName(String concreteClassName) { setContainerPolicy(new ListContainerPolicy(concreteClassName)); } /** * PUBLIC: * Mapping does not support Map containers. * It supports only Collection containers. */ public void useMapClass(Class concreteClass, String methodName) { throw new UnsupportedOperationException(this.getClass().getName() + ".useMapClass(Class, String)"); } public void useMapClassName(String concreteContainerClassName, String methodName) { throw new UnsupportedOperationException(this.getClass().getName() + ".useMapClass(String, String)"); } /** * PUBLIC: * Sets whether the mapping uses a single node. * @param True if the items in the collection are in a single node or false if each of the items in the collection is in its own node */ public void setUsesSingleNode(boolean usesSingleNode) { if (getField() instanceof Field) { ((Field)getField()).setUsesSingleNode(usesSingleNode); } } /** * PUBLIC: * Checks whether the mapping uses a single node. * * @returns True if the items in the collection are in a single node or false if each of the items in the collection is in its own node. */ public boolean usesSingleNode() { if (getField() instanceof Field) { return ((Field)getField()).usesSingleNode(); } return false; } /** * INTERNAL: * Build the nested collection from the database row. 
*/ @Override public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey cacheKey, AbstractSession executionSession, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException { if (this.descriptor.getCachePolicy().isProtectedIsolation()){ if (this.isCacheable && isTargetProtected && cacheKey != null){ //cachekey will be null when isolating to uow //used cached collection Object result = null; Object cached = cacheKey.getObject(); if (cached != null){ if (wasCacheUsed != null){ wasCacheUsed[0] = Boolean.TRUE; } Object attributeValue = this.getAttributeValueFromObject(cached); return buildClonePart(attributeValue, cacheKey, executionSession); } return result; }else if (!this.isCacheable && !isTargetProtected && cacheKey != null){ return null; } } if (row.hasSopObject()) { return getAttributeValueFromObject(row.getSopObject()); } ContainerPolicy cp = this.getContainerPolicy(); Object fieldValue = row.getValues(this.getField()); if (fieldValue == null) { return cp.containerInstance(); } Vector fieldValues = this.getDescriptor().buildDirectValuesFromFieldValue(fieldValue); if (fieldValues == null) { return cp.containerInstance(); } Object result = cp.containerInstance(fieldValues.size()); for (Enumeration stream = fieldValues.elements(); stream.hasMoreElements();) { Object element = stream.nextElement(); if (this.getValueConverter() != null) { element = getValueConverter().convertDataValueToObjectValue(element, executionSession); } cp.addInto(element, result, sourceQuery.getSession()); } return result; } /** * INTERNAL: * Get the attribute value from the object and * store it in the appropriate field of the row. 
*/ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord row, AbstractSession session, WriteType writeType) { if (this.isReadOnly()) { return; } Object attributeValue = this.getAttributeValueFromObject(object); if (attributeValue == null) { row.put(this.getField(), null); return; } ContainerPolicy cp = this.getContainerPolicy(); Vector elements = new Vector(cp.sizeFor(attributeValue)); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { Object element = cp.next(iter, session); if (this.getValueConverter() != null) { element = getValueConverter().convertObjectValueToDataValue(element, session); } if (element != null) { elements.addElement(element); } } Object fieldValue = null; if (!elements.isEmpty()) { fieldValue = this.getDescriptor().buildFieldValueFromDirectValues(elements, elementDataTypeName, session); } row.put(this.getField(), fieldValue); } /** * INTERNAL: * If any part of the nested collection has changed, the whole thing is written. */ @Override public void writeFromObjectIntoRowForUpdate(WriteObjectQuery writeQuery, AbstractRecord row) throws DescriptorException { AbstractSession session = writeQuery.getSession(); if (session.isUnitOfWork()) { if (this.compareObjects(writeQuery.getObject(), writeQuery.getBackupClone(), session)) { return;// nothing is changed, no work required } } this.writeFromObjectIntoRow(writeQuery.getObject(), row, session, WriteType.UPDATE); } /** * INTERNAL: * Get the appropriate attribute value from the object * and put it in the appropriate field of the database row. * Loop through the reference objects and extract the * primary keys and put them in the vector of "nested" rows. 
*/ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord row, AbstractSession session, WriteType writeType) { Object object = ((ObjectChangeSet)changeRecord.getOwner()).getUnitOfWorkClone(); this.writeFromObjectIntoRow(object, row, session, writeType); } /** * INTERNAL: * Write the fields needed for insert into the template with null values. */ public void writeInsertFieldsIntoRow(AbstractRecord row, AbstractSession session) { if (this.isReadOnly()) { return; } row.put(this.getField(), null); } /** * INTERNAL: * Return the classifiction for the field contained in the mapping. * This is used to convert the row value to a consistent java value. * By default this is unknown. */ public Class getFieldClassification(DatabaseField fieldToClassify) { return getAttributeElementClass(); } public boolean isCollectionMapping() { return true; } @Override public void convertClassNamesToClasses(ClassLoader classLoader){ super.convertClassNamesToClasses(classLoader); this.containerPolicy.convertClassNamesToClasses(classLoader); // Convert and any Converter class names. convertConverterClassNamesToClasses(valueConverter, classLoader); } /** * INTERNAL: * Build and return the change record that results * from comparing the two direct collection attributes. */ public ChangeRecord compareForChange(Object clone, Object backup, ObjectChangeSet owner, AbstractSession session) { return (new ArrayCollectionMappingHelper(this)).compareForChange(clone, backup, owner, session); } /** * INTERNAL: * Compare the attributes belonging to this mapping for the objects. */ public boolean compareObjects(Object object1, Object object2, AbstractSession session) { return (new ArrayCollectionMappingHelper(this)).compareObjects(object1, object2, session); } /** * INTERNAL: * Merge changes from the source to the target object. 
*/ public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { (new ArrayCollectionMappingHelper(this)).mergeChangesIntoObject(target, changeRecord, source, mergeManager, targetSession); } /** * INTERNAL: * Merge changes from the source to the target object. * Simply replace the entire target collection. */ public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) { (new ArrayCollectionMappingHelper(this)).mergeIntoObject(target, isTargetUnInitialized, source, mergeManager, targetSession); } /** * ADVANCED: * This method is used to have an object add to a collection once the changeSet is applied * The referenceKey parameter should only be used for direct Maps. */ public void simpleAddToCollectionChangeRecord(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) { (new ArrayCollectionMappingHelper(this)).simpleAddToCollectionChangeRecord(referenceKey, changeSetToAdd, changeSet, session); } /** * ADVANCED: * This method is used to have an object removed from a collection once the changeSet is applied * The referenceKey parameter should only be used for direct Maps. */ public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object changeSetToRemove, ObjectChangeSet changeSet, AbstractSession session) { (new ArrayCollectionMappingHelper(this)).simpleRemoveFromCollectionChangeRecord(referenceKey, changeSetToRemove, changeSet, session); } /** * INTERNAL * Called when a DatabaseMapping is used to map the key in a collection. Returns the key. 
*/ public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){ Object key = dbRow.get(getField()); if (getValueConverter() != null){ key = getValueConverter().convertDataValueToObjectValue(key, session); } return key; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractColumnMapping.java0000664000000000000000000002533512216173130027477 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 11/13/2009-2.0 mobrien - 294765: MapKey keyType DirectToField processing * should return attributeClassification class in getMapKeyTargetType when * accessor.attributeField is null in the absence of a MapKey annotation * 11/10/2011-2.4 Guy Pelletier * - 357474: Address primaryKey option from tenant discriminator column * 30/05/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing * 06/03/2013-2.5.1 Guy Pelletier * - 402380: 3 jpa21/advanced tests failed on server with * "java.lang.NoClassDefFoundError: org/eclipse/persistence/testing/models/jpa21/advanced/enums/Gender" ******************************************************************************/ package org.eclipse.persistence.mappings.foundation; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.*; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.internal.descriptors.DescriptorIterator; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.queries.ObjectLevelReadQuery; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.sessions.remote.DistributedSession; /** * Purpose: Maps an attribute or some other property to the corresponding * database field type. 
The list of field types that are supported by * EclipseLink's direct to field mapping is dependent on the relational database * being used. * * @see org.eclipse.persistence.mappings.foundation.AbstractDirectMapping * @see org.eclipse.persistence.mappings.foundation.MultitenantPrimaryKeyMapping * * @author Guy Pelletier * @since TopLink/Java 1.0 */ public abstract class AbstractColumnMapping extends DatabaseMapping { /** DatabaseField which this mapping represents. */ protected DatabaseField field; /** Allows user defined conversion between the object attribute value and the database value. */ protected Converter converter; protected String converterClassName; /** Flag to support insertable JPA setting */ protected boolean isInsertable = true; /** Flag to support updatable JPA setting */ protected boolean isUpdatable = true; /** * Default constructor. */ public AbstractColumnMapping() { super(); this.setWeight(WEIGHT_DIRECT); } /** * INTERNAL: * Cascade perform delete through mappings that require the cascade. */ public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { // objects referenced by this mapping are not registered as they have // no identity, this is a no-op. } /** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade. */ public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { // objects referenced by this mapping are not registered as they have // no identity, this is a no-op. } /** * INTERNAL: * The mapping clones itself to create deep copy. */ @Override public Object clone() { AbstractColumnMapping clone = (AbstractColumnMapping)super.clone(); // Field must be cloned so aggregates do not share fields. clone.setField(getField().clone()); return clone; } /** * Returns the field this mapping represents. 
*/ @Override protected Vector collectFields() { Vector databaseField = new Vector(1); databaseField.addElement(field); return databaseField; } /** * INTERNAL: * Convert all the class-name-based settings in this mapping to actual class-based settings * This method is implemented by subclasses as necessary. */ @Override public void convertClassNamesToClasses(ClassLoader classLoader){ super.convertClassNamesToClasses(classLoader); // Field may have a type name that needs to be initialize. if (field != null) { field.convertClassNamesToClasses(classLoader); } // Convert and any Converter class names. convertConverterClassNamesToClasses(converter, classLoader); // Instantiate any custom converter class if (converterClassName != null) { Class converterClass; Converter converter; try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { converterClass = (Class) AccessController.doPrivileged(new PrivilegedClassForName(converterClassName, true, classLoader)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(converterClassName, exception.getException()); } try { converter = (Converter) AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(converterClass)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(converterClassName, exception.getException()); } } else { converterClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(converterClassName, true, classLoader); converter = (Converter) org.eclipse.persistence.internal.security.PrivilegedAccessHelper.newInstanceFromClass(converterClass); } } catch (ClassNotFoundException exc) { throw ValidationException.classNotFoundWhileConvertingClassNames(converterClassName, exc); } catch (Exception e) { // Catches IllegalAccessException and InstantiationException throw ValidationException.classNotFoundWhileConvertingClassNames(converterClassName, e); } 
setConverter(converter); } } /** * INTERNAL: * An object has been serialized from the server to the client. * Replace the transient attributes of the remote value holders * with client-side objects. */ @Override public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { } /** * PUBLIC: * Return the converter on the mapping. * A converter can be used to convert between the object's value and database value of the attribute. */ public Converter getConverter() { return converter; } /** * INTERNAL: * Returns the field which this mapping represents. */ public DatabaseField getField() { return field; } /** * INTERNAL: * Convert the object (attribute or property) value to a field value. */ public abstract Object getFieldValue(Object objectValue, AbstractSession session); /** * INTERNAL: * Allows for subclasses to convert the the attribute or property value. */ public abstract Object getObjectValue(Object fieldValue, Session session); /** * Indicates if the mapping has a converter set on it. * * @return true if there is a converter set on the mapping, * false otherwise. */ public boolean hasConverter() { return converter != null; } /** * INTERNAL: */ @Override public boolean isAbstractColumnMapping() { return true; } /** * INTERNAL: * Return true if this mapping is insertable. */ protected boolean isInsertable() { return isInsertable; } /** * INTERNAL: * Return true if this mapping is updatable. */ protected boolean isUpdatable() { return isUpdatable; } /** * INTERNAL: * Iterate on the appropriate attribute. */ @Override public void iterate(DescriptorIterator iterator) { // PERF: Only iterate when required. if (iterator.shouldIterateOnPrimitives()) { iterator.iteratePrimitiveForMapping(getAttributeValueFromObject(iterator.getVisitedParent()), this); } } /** * PUBLIC: * Set the converter on the mapping. 
* A converter can be used to convert between the object's value and database value of the attribute. */ public void setConverter(Converter converter) { this.converter = converter; } /** * PUBLIC: * Set the converter class name on the mapping. It will be instantiated * during the convertClassNamesToClasses. * A converter can be used to convert between the object's value and * database value of the attribute. */ public void setConverterClassName(String converterClassName) { this.converterClassName = converterClassName; } /** * ADVANCED: * Set the field in the mapping. * This can be used for advanced field types, such as XML nodes, or to set the field type. */ public void setField(DatabaseField theField) { field = theField; } /** * INTERNAL: */ @Override public String toString() { return getClass().getName() + "[" + getAttributeName() + "-->" + getField() + "]"; } /** * INTERNAL: */ protected abstract void writeValueIntoRow(AbstractRecord row, DatabaseField field, Object value); } ././@LongLink0000000000000000000000000000015300000000000011564 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractCompositeCollectionMapping.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractCompositeCollectionMappin0000664000000000000000000010462212216173130031126 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * * 30/05/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing ******************************************************************************/ package org.eclipse.persistence.mappings.foundation; import java.util.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.mappings.structures.ArrayCollectionMapping; import org.eclipse.persistence.mappings.structures.ArrayCollectionMappingHelper; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.sessions.CopyGroup; /** * Define an embedded collection of objects. * This is used in structured data-types, such as EIS, NoSQL and object-relational Array (varray, nested table) data-types. * The target objects must be aggregate (embedded) and are stored with the parent object. */ public abstract class AbstractCompositeCollectionMapping extends AggregateMapping implements ContainerMapping, ArrayCollectionMapping { /** The aggregate objects are stored in a single field. */ protected DatabaseField field; /** This determines the type of container used to hold the aggregate objects. */ private ContainerPolicy containerPolicy; /** Allows user defined conversion between the object attribute value and the database value. */ protected Converter converter; /** * Default constructor. 
*/ public AbstractCompositeCollectionMapping() { super(); this.containerPolicy = ContainerPolicy.buildDefaultPolicy(); } /** * INTERNAL: * Build and return a new element based on the change set. */ public Object buildAddedElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession) { return this.buildElementFromChangeSet(changeSet, mergeManager, targetSession); } /** * Build and return a backup clone of the attribute. */ @Override protected Object buildBackupClonePart(Object attributeValue, UnitOfWorkImpl unitOfWork) { ContainerPolicy cp = this.getContainerPolicy(); if (attributeValue == null) { return cp.containerInstance(); } Object backupAttributeValue = cp.containerInstance(cp.sizeFor(attributeValue)); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { Object backupElement = super.buildBackupClonePart(cp.next(iter, unitOfWork), unitOfWork); cp.addInto(backupElement, backupAttributeValue, unitOfWork); } return backupAttributeValue; } /** * INTERNAL: * Build and return a change set for the specified element. */ public Object buildChangeSet(Object element, ObjectChangeSet owner, AbstractSession session) { ObjectBuilder objectBuilder = this.getObjectBuilder(element, session); return objectBuilder.compareForChange(element, null, (UnitOfWorkChangeSet)owner.getUOWChangeSet(), session); } /** * Build and return a clone of the attribute. 
*/ @Override protected Object buildClonePart(Object original, Object clone, CacheKey cacheKey, Object attributeValue, Integer refreshCascade, AbstractSession clonningSession) { ContainerPolicy cp = this.getContainerPolicy(); if (attributeValue == null) { return cp.containerInstance(); } Object clonedAttributeValue = cp.containerInstance(cp.sizeFor(attributeValue)); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { Object cloneElement = super.buildClonePart(original, clone, cacheKey, cp.next(iter, clonningSession), refreshCascade, clonningSession); cp.addInto(cloneElement, clonedAttributeValue, clonningSession); } return clonedAttributeValue; } /** * Copy of the attribute of the object. * This is NOT used for unit of work but for templatizing an object. */ @Override protected Object buildCopyOfAttributeValue(Object attributeValue, CopyGroup group) { ContainerPolicy cp = this.getContainerPolicy(); if (attributeValue == null) { return cp.containerInstance(); } Object attributeValueCopy = cp.containerInstance(cp.sizeFor(attributeValue)); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { Object copyElement = super.buildCopyOfAttributeValue(cp.next(iter, group.getSession()), group); cp.addInto(copyElement, attributeValueCopy, group.getSession()); } return attributeValueCopy; } /** * Build and return a new element based on the change set. */ protected Object buildElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession) { ObjectChangeSet objectChangeSet = (ObjectChangeSet)changeSet; ObjectBuilder objectBuilder = this.getObjectBuilderForClass(objectChangeSet.getClassType(mergeManager.getSession()), mergeManager.getSession()); Object result = objectBuilder.buildNewInstance(); objectBuilder.mergeChangesIntoObject(result, objectChangeSet, null, mergeManager, targetSession); return result; } /** * INTERNAL: * Build and return a new element based on the specified element. 
*/ public Object buildElementFromElement(Object element, MergeManager mergeManager, AbstractSession targetSession) { ObjectBuilder objectBuilder = this.getObjectBuilder(element, mergeManager.getSession()); Object result = objectBuilder.buildNewInstance(); objectBuilder.mergeIntoObject(result, true, element, mergeManager, targetSession); return result; } /** * INTERNAL: * Build and return a new element based on the change set. */ public Object buildRemovedElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession) { return this.buildElementFromChangeSet(changeSet, mergeManager, targetSession); } /** * INTERNAL: * Cascade perform delete through mappings that require the cascade */ @Override public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){ Object cloneAttribute = null; cloneAttribute = getAttributeValueFromObject(object); if ( cloneAttribute == null ) { return; } ContainerPolicy cp = getContainerPolicy(); Object cloneObjectCollection = null; cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow); Object cloneIter = cp.iteratorFor(cloneObjectCollection); while (cp.hasNext(cloneIter)) { Object objectToCascadeOn = cp.next(cloneIter, uow); if (objectToCascadeOn != null && (!visitedObjects.containsKey(objectToCascadeOn)) ) { visitedObjects.put(objectToCascadeOn, objectToCascadeOn); ObjectBuilder builder = getReferenceDescriptor(objectToCascadeOn.getClass(), uow).getObjectBuilder(); builder.cascadePerformRemove(objectToCascadeOn, uow, visitedObjects); } } } /** * INTERNAL: * Cascade discover and persist new objects during commit. 
*/ @Override public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) { Object cloneAttribute = getAttributeValueFromObject(object); if (cloneAttribute == null ) { return; } ContainerPolicy containerPolicy = getContainerPolicy(); Object cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow); Object iterator = containerPolicy.iteratorFor(cloneObjectCollection); while (containerPolicy.hasNext(iterator)) { Object nextObject = containerPolicy.next(iterator, uow); if (nextObject != null) { ObjectBuilder builder = getReferenceDescriptor(nextObject.getClass(), uow).getObjectBuilder(); builder.cascadeDiscoverAndPersistUnregisteredNewObjects(nextObject, newObjects, unregisteredExistingObjects, visitedObjects, uow, cascadeErrors); } } } /** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade */ @Override public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){ //aggregate objects are not registered but their mappings should be. Object cloneAttribute = null; cloneAttribute = getAttributeValueFromObject(object); if ( cloneAttribute == null ) { return; } ObjectBuilder builder = null; ContainerPolicy cp = getContainerPolicy(); Object cloneObjectCollection = null; cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow); Object cloneIter = cp.iteratorFor(cloneObjectCollection); while (cp.hasNext(cloneIter)) { Object nextObject = cp.next(cloneIter, uow); if (nextObject != null && (! visitedObjects.containsKey(nextObject)) ) { visitedObjects.put(nextObject, nextObject); builder = getReferenceDescriptor(nextObject.getClass(), uow).getObjectBuilder(); builder.cascadeRegisterNewForCreate(nextObject, uow, visitedObjects); } } } /** * Return the fields handled by the mapping. 
*/ @Override protected Vector collectFields() { Vector fields = new Vector(1); fields.addElement(this.getField()); return fields; } /** * INTERNAL: * Compare the non-null elements and return true if they are alike. */ public boolean compareElements(Object element1, Object element2, AbstractSession session) { if (element1.getClass() != element2.getClass()) { return false; } return this.getObjectBuilder(element1, session).compareObjects(element1, element2, session); } /** * INTERNAL: * Compare the non-null elements and return true if they are alike. */ public boolean compareElementsForChange(Object element1, Object element2, AbstractSession session) { return this.compareElements(element1, element2, session); } /** * INTERNAL: * Convert all the class-name-based settings in this mapping to actual class-based * settings. This method is used when converting a project that has been built * with class names to a project with classes. * @param classLoader */ @Override public void convertClassNamesToClasses(ClassLoader classLoader){ super.convertClassNamesToClasses(classLoader); containerPolicy.convertClassNamesToClasses(classLoader); } protected ChangeRecord convertToChangeRecord(Object cloneCollection, ObjectChangeSet owner, AbstractSession session) { ContainerPolicy cp = getContainerPolicy(); Object cloneIter = cp.iteratorFor(cloneCollection); Vector collectionChanges = new Vector(2); while (cp.hasNext(cloneIter)) { Object aggregateObject = cp.next(cloneIter, session); // For CR#2258 quietly ignore nulls inserted into a collection. 
if (aggregateObject != null) { ObjectChangeSet changes = getReferenceDescriptor(aggregateObject.getClass(), session).getObjectBuilder().compareForChange(aggregateObject, null, (UnitOfWorkChangeSet)owner.getUOWChangeSet(), session); collectionChanges.addElement(changes); } } //cr 3013 Removed if collection is empty return null block, which prevents recording clear() change AggregateCollectionChangeRecord changeRecord = new AggregateCollectionChangeRecord(owner); changeRecord.setAttribute(getAttributeName()); changeRecord.setMapping(this); changeRecord.setChangedValues(collectionChanges); return changeRecord; } /** * An object has been serialized from the server to the remote client. * Replace the transient attributes of the remote value holders * with client-side objects. */ @Override protected void fixAttributeValue(Object attributeValue, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { super.fixAttributeValue(cp.next(iter, session), objectDescriptors, processedObjects, query, session); } } /** * Return the appropriate attribute value. * This method is a hack to allow the aggregate collection * subclass to override.... * The intent is to return the aggregate object in the backup clone * that corresponds to the one in the working copy. * Since we don't know which element in the backup clone * collection corresponds any given element in the working copy * collection (there is no "primary key"); we simply return null, * which will cause a new, empty, instance to be built and used * for comparison. */ @Override protected Object getAttributeValueFromBackupClone(Object backupClone) { return null; } /** * INTERNAL: * Return the mapping's containerPolicy. 
*/ @Override public ContainerPolicy getContainerPolicy() { return containerPolicy; } /** * PUBLIC: * Return the converter on the mapping. * A converter can be used to convert between the object's value and database value of the attribute. */ public Converter getConverter() { return converter; } /** * INTERNAL: * Return the field mapped by this mapping. */ @Override public DatabaseField getField() { return field; } /** * INTERNAL: * Convenience method. * Return the value of an attribute, unwrapping value holders if necessary. * If the value is null, build a new container. */ @Override public Object getRealCollectionAttributeValueFromObject(Object object, AbstractSession session) throws DescriptorException { Object value = this.getRealAttributeValueFromObject(object, session); if (value == null) { value = this.getContainerPolicy().containerInstance(1); } return value; } /** * This is required for ObjectArrayMapping which defines a name for the collection type. * Currently this type name is not required or used in general with the SDK. */ protected String getStructureName() { return ""; } /** * PUBLIC: * Indicates if there is a converter on the mapping. */ public boolean hasConverter() { return getConverter() != null; } /** * INTERNAL: */ @Override public boolean isAbstractCompositeCollectionMapping() { return true; } /** * INTERNAL: * The mapping is initialized with the given session. This mapping is fully initialized * after this. */ @Override public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); if (getField() == null) { throw DescriptorException.fieldNameNotSetInMapping(this); } setField(getDescriptor().buildField(getField())); setFields(collectFields()); if (hasConverter()) { getConverter().initialize(this, session); } } /** * Iterate on the specified attribute value. 
*/ @Override protected void iterateOnAttributeValue(DescriptorIterator descriptorIterator, Object attributeValue) { if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { super.iterateOnAttributeValue(descriptorIterator, cp.next(iter, descriptorIterator.getSession())); } } /** * INTERNAL: * Return whether the element's user-defined Map key has changed * since it was cloned from the original version. * Aggregate elements cannot change their keys without detection. */ public boolean mapKeyHasChanged(Object element, AbstractSession session) { return false; } /** * The message is passed to its reference class descriptor. */ @Override public void postDeleteAttributeValue(DeleteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { super.postDeleteAttributeValue(query, cp.next(iter, query.getSession())); } } /** * The message is passed to its reference class descriptor. */ @Override public void postInsertAttributeValue(WriteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { super.postInsertAttributeValue(query, cp.next(iter, query.getSession())); } } /** * The message is passed to its reference class descriptor. 
*/ @Override public void postUpdateAttributeValue(WriteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { super.postUpdateAttributeValue(query, cp.next(iter, query.getSession())); } } /** * The message is passed to its reference class descriptor. */ @Override public void preDeleteAttributeValue(DeleteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { super.preDeleteAttributeValue(query, cp.next(iter, query.getSession())); } } /** * The message is passed to its reference class descriptor. */ @Override public void preInsertAttributeValue(WriteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { super.preInsertAttributeValue(query, cp.next(iter, query.getSession())); } } /** * The message is passed to its reference class descriptor. */ @Override public void preUpdateAttributeValue(WriteObjectQuery query, Object attributeValue) throws DatabaseException, OptimisticLockException { if (attributeValue == null) { return; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { super.preUpdateAttributeValue(query, cp.next(iter, query.getSession())); } } /** * ADVANCED: * Set the mapping's containerPolicy. */ public void setContainerPolicy(ContainerPolicy containerPolicy) { this.containerPolicy = containerPolicy; } /** * PUBLIC: * Set the converter on the mapping. 
     * A converter can be used to convert between the object's value and database value of the attribute.
     */
    public void setConverter(Converter converter) {
        this.converter = converter;
    }

    /**
     * Set the field in the mapping.
     */
    public void setField(DatabaseField field) {
        this.field = field;
    }

    /**
     * PUBLIC:
     * Configure the mapping to use an instance of the specified container class
     * to hold the target objects.
     * The container class must implement (directly or indirectly) the Collection interface
     * (historically, on jdk1.1.x, a subclass of Vector).
     */
    public void useCollectionClass(Class concreteContainerClass) {
        this.setContainerPolicy(ContainerPolicy.buildPolicyFor(concreteContainerClass));
    }

    // Class-name variant of useCollectionClass; the class is resolved later
    // during convertClassNamesToClasses.
    public void useCollectionClassName(String concreteContainerClassName) {
        this.setContainerPolicy(new CollectionContainerPolicy(concreteContainerClassName));
    }

    // Like useCollectionClassName but forces a List container policy.
    public void useListClassName(String concreteContainerClassName) {
        this.setContainerPolicy(new ListContainerPolicy(concreteContainerClassName));
    }

    /**
     * PUBLIC:
     * Configure the mapping to use an instance of the specified container class
     * to hold the target objects. The key used to index the value in the Map
     * is the value returned by a call to the specified zero-argument method.
     * The method must be implemented by the class (or a superclass) of the
     * value to be inserted into the Map.
     * The container class must implement (directly or indirectly) the Map interface
     * (historically, on jdk1.1.x, a subclass of Hashtable).
     * The referenceClass must be set before calling this method.
     */
    public void useMapClass(Class concreteContainerClass, String methodName) {
        // the reference class has to be specified before coming here
        if (this.getReferenceClassName() == null) {
            throw DescriptorException.referenceClassNotSpecified(this);
        }
        ContainerPolicy policy = ContainerPolicy.buildPolicyFor(concreteContainerClass);
        policy.setKeyName(methodName, getReferenceClass());
        this.setContainerPolicy(policy);
    }

    // Class-name variant of useMapClass; also requires the reference class first.
    public void useMapClassName(String concreteContainerClassName, String methodName) {
        // the reference class has to be specified before coming here
        if (this.getReferenceClassName() == null) {
            throw DescriptorException.referenceClassNotSpecified(this);
        }
        MapContainerPolicy policy = new MapContainerPolicy(concreteContainerClassName);
        policy.setKeyName(methodName, getReferenceClass().getName());
        this.setContainerPolicy(policy);
    }

    /**
     * INTERNAL:
     * Build and return an aggregate collection from the specified row.
     */
    @Override
    public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey cacheKey, AbstractSession executionSession, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException {
        // Protected-isolation cache handling: prefer a clone of the cached
        // collection when the target is protected and cacheable.
        if (this.descriptor.getCachePolicy().isProtectedIsolation()){
            if (this.isCacheable && isTargetProtected && cacheKey != null){
                //cachekey will be null when isolating to uow
                //used cached collection
                Object result = null;
                Object cached = cacheKey.getObject();
                if (cached != null){
                    if (wasCacheUsed != null){
                        // Signal to the caller that the cached value satisfied the read.
                        wasCacheUsed[0] = Boolean.TRUE;
                    }
                    Object attributeValue = this.getAttributeValueFromObject(cached);
                    Integer refreshCascade = null;
                    if (sourceQuery != null && sourceQuery.isObjectBuildingQuery() && sourceQuery.shouldRefreshIdentityMapResult()) {
                        refreshCascade = sourceQuery.getCascadePolicy();
                    }
                    return buildClonePart(cached, null, cacheKey, attributeValue, refreshCascade, executionSession);
                }
                return result;
            }else if (!this.isCacheable && !isTargetProtected && cacheKey != null){
                // Non-cacheable mapping read into a shared cache: do not populate.
                return null;
            }
        }
        if (row.hasSopObject()) {
            // Serialized object policy: the attribute comes from the deserialized object.
            return getAttributeValueFromObject(row.getSopObject());
        }
        ContainerPolicy cp = this.getContainerPolicy();
        Object fieldValue = row.getValues(this.getField());

        // BUG#2667762 there could be whitespace in the row instead of null
        if ((fieldValue == null) || (fieldValue instanceof String)) {
            return cp.containerInstance();
        }

        Vector nestedRows = this.getReferenceDescriptor().buildNestedRowsFromFieldValue(fieldValue, executionSession);
        if (nestedRows == null) {
            return cp.containerInstance();
        }

        Object result = cp.containerInstance(nestedRows.size());
        for (Enumeration stream = nestedRows.elements(); stream.hasMoreElements();) {
            AbstractRecord nestedRow = (AbstractRecord)stream.nextElement();

            ClassDescriptor descriptor = this.getReferenceDescriptor();
            if (descriptor.hasInheritance()) {
                // Resolve the concrete element class from the row's class indicator.
                Class newElementClass = descriptor.getInheritancePolicy().classFromRow(nestedRow, executionSession);
                descriptor = this.getReferenceDescriptor(newElementClass, executionSession);
            }

            Object element = buildCompositeObject(descriptor, nestedRow, sourceQuery, cacheKey, joinManager, executionSession);
            if (hasConverter()) {
                element = getConverter().convertDataValueToObjectValue(element, executionSession);
            }
            cp.addInto(element, result, sourceQuery.getSession());
        }
        return result;
    }

    // Subclasses build a single aggregate element from a nested row.
    protected abstract Object buildCompositeObject(ClassDescriptor descriptor, AbstractRecord nestedRow, ObjectBuildingQuery query, CacheKey parentCacheKey, JoinedAttributeManager joinManger, AbstractSession targetSession);

    /**
     * Return whether the specified object and all its components have been deleted.
*/ @Override protected boolean verifyDeleteOfAttributeValue(Object attributeValue, AbstractSession session) throws DatabaseException { if (attributeValue == null) { return true; } ContainerPolicy cp = this.getContainerPolicy(); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { if (!super.verifyDeleteOfAttributeValue(cp.next(iter, session), session)) { return false; } } return true; } /** * INTERNAL: * Get the attribute value from the object and add the appropriate * values to the specified database row. */ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord row, AbstractSession session, WriteType writeType) throws DescriptorException { if (this.isReadOnly()) { return; } Object attributeValue = this.getAttributeValueFromObject(object); if (attributeValue == null) { row.put(this.getField(), null); return; } ContainerPolicy cp = this.getContainerPolicy(); Vector nestedRows = new Vector(cp.sizeFor(attributeValue)); for (Object iter = cp.iteratorFor(attributeValue); cp.hasNext(iter);) { Object element = cp.next(iter, session); // convert the value - if necessary if (hasConverter()) { element = getConverter().convertObjectValueToDataValue(element, session); } nestedRows.addElement(buildCompositeRow(element, session, row, writeType)); } Object fieldValue = null; if (!nestedRows.isEmpty()) { fieldValue = this.getReferenceDescriptor().buildFieldValueFromNestedRows(nestedRows, getStructureName(), session); } row.put(this.getField(), fieldValue); } protected abstract AbstractRecord buildCompositeRow(Object attributeValue, AbstractSession session, AbstractRecord record, WriteType writeType); /** * INTERNAL: * Get the attribute value from the object and add the changed * values to the specified database row. 
*/ @Override public void writeFromObjectIntoRowForUpdate(WriteObjectQuery writeQuery, AbstractRecord row) throws DescriptorException { AbstractSession session = writeQuery.getSession(); //Helper.toDo("bjv: need to figure out how to handle read-only elements..."); if (session.isClassReadOnly(this.getReferenceClass())) { return; } if (session.isUnitOfWork()) { if (this.compareObjects(writeQuery.getObject(), writeQuery.getBackupClone(), session)) { return;// nothing has changed - don't put anything in the row } } this.writeFromObjectIntoRow(writeQuery.getObject(), row, session, WriteType.UPDATE); } /** * INTERNAL: * Get the attribute value from the object and add the appropriate * values to the specified database row. */ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord row, AbstractSession session, WriteType writeType) throws DescriptorException { Object object = ((ObjectChangeSet)changeRecord.getOwner()).getUnitOfWorkClone(); this.writeFromObjectIntoRow(object, row, session, writeType); } /** * INTERNAL: * Write fields needed for insert into the template with null values. */ @Override public void writeInsertFieldsIntoRow(AbstractRecord record, AbstractSession session) { if (this.isReadOnly()) { return; } record.put(this.getField(), null); } @Override public boolean isCollectionMapping() { return true; } /** * INTERNAL: * Build and return the change record that results * from comparing the two direct collection attributes. */ public ChangeRecord compareForChange(Object clone, Object backup, ObjectChangeSet owner, AbstractSession session) { return (new ArrayCollectionMappingHelper(this)).compareForChange(clone, backup, owner, session); } /** * INTERNAL: * Compare the attributes belonging to this mapping for the objects. 
*/ public boolean compareObjects(Object object1, Object object2, AbstractSession session) { return (new ArrayCollectionMappingHelper(this)).compareObjects(object1, object2, session); } /** * INTERNAL: * Merge changes from the source to the target object. */ public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { (new ArrayCollectionMappingHelper(this)).mergeChangesIntoObject(target, changeRecord, source, mergeManager, targetSession); } /** * INTERNAL: * Merge changes from the source to the target object. * Simply replace the entire target collection. */ public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) { (new ArrayCollectionMappingHelper(this)).mergeIntoObject(target, isTargetUnInitialized, source, mergeManager, targetSession); } /** * ADVANCED: * This method is used to have an object add to a collection once the changeSet is applied * The referenceKey parameter should only be used for direct Maps. */ public void simpleAddToCollectionChangeRecord(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) { (new ArrayCollectionMappingHelper(this)).simpleAddToCollectionChangeRecord(referenceKey, changeSetToAdd, changeSet, session); } /** * ADVANCED: * This method is used to have an object removed from a collection once the changeSet is applied * The referenceKey parameter should only be used for direct Maps. */ public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object changeSetToRemove, ObjectChangeSet changeSet, AbstractSession session) { (new ArrayCollectionMappingHelper(this)).simpleRemoveFromCollectionChangeRecord(referenceKey, changeSetToRemove, changeSet, session); } /** * INTERNAL * Called when a DatabaseMapping is used to map the key in a collection. Returns the key. 
*/ public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){ return valueFromRow(dbRow, null, query, parentCacheKey, query.getExecutionSession(), isTargetProtected, null); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/MapComponentMapping.java0000664000000000000000000000426612216173130027156 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * tware - initial API check-in for MappedKeyMapContainerPolicy * 14/05/2012-2.4 Guy Pelletier * - 376603: Provide for table per tenant support for multitenant applications ******************************************************************************/ package org.eclipse.persistence.mappings.foundation; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.queries.ObjectBuildingQuery; /** * A MapComponentMapping is any mapping that can be used as the key or the value * in a mapping that uses a MappedKeyMapContainerPolicy. This interface is generally implemented * by mappings that provide the value in a mapping to a Map. 
Mappings that provide the key generally * implement sub-interface MapKeyMapping * * @see MappedKeyContainerPolicy * @see MapKeyMapping * @see DirectCollectionMapping * @see AggregateCollectionMapping * @see OneToManyMapping * @see UnidirectionalOneToManyMapping * @see ManyToManyMapping * @author tware * */ public interface MapComponentMapping { /** * INTERNAL * Called when a DatabaseMapping is used to map the key in a collection. Returns the key. */ public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected); /** * INTERNAL * Called when cloning the container policy. */ public Object clone(); } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractDirectMapping.java0000664000000000000000000015162412216173130027455 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 *     11/13/2009-2.0 mobrien - 294765: MapKey keyType DirectToField processing
 *     should return attributeClassification class in getMapKeyTargetType when
 *     accessor.attributeField is null in the absence of a MapKey annotation
 ******************************************************************************/
package org.eclipse.persistence.mappings.foundation;

import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.util.*;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.exceptions.*;
import org.eclipse.persistence.expressions.*;
import org.eclipse.persistence.internal.databaseaccess.DatabaseAccessor;
import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform;
import org.eclipse.persistence.internal.descriptors.*;
import org.eclipse.persistence.internal.expressions.SQLSelectStatement;
import org.eclipse.persistence.internal.helper.*;
import org.eclipse.persistence.internal.identitymaps.CacheKey;
import org.eclipse.persistence.internal.queries.ContainerPolicy;
import org.eclipse.persistence.internal.queries.JoinedAttributeManager;
import org.eclipse.persistence.internal.queries.MappedKeyMapContainerPolicy;
import org.eclipse.persistence.internal.sessions.*;
import org.eclipse.persistence.mappings.converters.*;
import org.eclipse.persistence.mappings.querykeys.DirectQueryKey;
import org.eclipse.persistence.mappings.querykeys.QueryKey;
import org.eclipse.persistence.queries.*;
import org.eclipse.persistence.sessions.remote.*;
import org.eclipse.persistence.sessions.CopyGroup;
import org.eclipse.persistence.sessions.Project;
import org.eclipse.persistence.sessions.Session;
import org.eclipse.persistence.internal.security.PrivilegedAccessHelper;
import org.eclipse.persistence.internal.security.PrivilegedClassForName;

/**
 * Purpose: Maps an attribute to the corresponding database field type.
 * The list of field types that are supported by EclipseLink's direct to field mapping
 * is dependent on the relational database being used.
 * A converter can be used to convert between the object and data type if they do not match.
 *
 * @see Converter
 * @see ObjectTypeConverter
 * @see TypeConversionConverter
 * @see SerializedObjectConverter
 * @see ClassInstanceConverter
 *
 * @author Sati
 * @since TopLink/Java 1.0
 */
public abstract class AbstractDirectMapping extends AbstractColumnMapping implements MapKeyMapping {

    /** To specify the conversion type */
    protected transient Class attributeClassification;

    // Lazily derived from attributeClassification; see getAttributeClassificationName().
    protected String attributeClassificationName;

    /** PERF: Also store object class of attribute in case of primitive. */
    protected transient Class attributeObjectClassification;

    /** Support specification of the value to use for null. */
    protected transient Object nullValue;

    protected DatabaseTable keyTableForMapKey = null;

    // Deferred class name, resolved in convertClassNamesToClasses() at deploy time.
    protected String fieldClassificationClassName = null;

    /** PERF: Avoid default null value conversion check if not default null value set in conversion manager. */
    protected boolean bypassDefaultNullValueCheck;

    /**
     * PERF: Indicates if this mapping's attribute is a simple atomic value and cannot be modified, only replaced.
     * This is a tri-state to allow user to set to true or false, as default is false but
     * some data-types such as Calendar or byte[] or converter types may be desired to be used as mutable.
     */
    protected Boolean isMutable;

    /**
     * Default constructor.
     */
    public AbstractDirectMapping() {
        super();
    }

    /**
     * INTERNAL:
     * Used when initializing queries for mappings that use a Map.
     * Called when the selection query is being initialized to add the fields for the map key to the query.
*/ public void addAdditionalFieldsToQuery(ReadQuery selectionQuery, Expression baseExpression){ if (selectionQuery.isObjectLevelReadQuery()){ ((ObjectLevelReadQuery)selectionQuery).addAdditionalField(baseExpression.getField(getField())); } else if (selectionQuery.isDataReadQuery()){ ((SQLSelectStatement)((DataReadQuery)selectionQuery).getSQLStatement()).addField(baseExpression.getField(getField())); } } /** * INTERNAL: * Used when initializing queries for mappings that use a Map * Called when the insert query is being initialized to ensure the fields for the map key are in the insert query. */ public void addFieldsForMapKey(AbstractRecord joinRow) { if (!isReadOnly()){ if (isUpdatable()){ joinRow.put(getField(), null); } } } /** * INTERNAL: * For mappings used as MapKeys in MappedKeyContainerPolicy. Add the target of this mapping to the deleted * objects list if necessary * * This method is used for removal of private owned relationships * DirectMappings are dealt with in their parent delete, so this is a no-op. */ public void addKeyToDeletedObjectsList(Object object, Map deletedObjects) { } /** * PUBLIC: * Return true if the attribute for this mapping is a simple atomic value that cannot be modified, * only replaced. * This is false by default unless a mutable converter is used such as the SerializedObjectConverter. * This can be set to false in this case, or if a Calendar or byte[] is desired to be used as a mutable value it can be set to true. */ public boolean isMutable() { if (isMutable == null) { return false; } return isMutable.booleanValue(); } /** * PUBLIC: * Return true if the attribute for this mapping is a simple atomic value that cannot be modified, * only replaced. * This is false by default unless a mutable converter is used such as the SerializedObjectConverter. * This can be set to false in this case, or if a Calendar or byte[] is desired to be used as a mutable value it can be set to true. 
*/ public void setIsMutable(boolean isMutable) { if (isMutable == true) { this.isMutable = Boolean.TRUE; } else { this.isMutable = Boolean.FALSE; } } /** * INTERNAL: * Clone the attribute from the clone and assign it to the backup. */ @Override public void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork) { buildClone(clone, null, backup, null, unitOfWork); } /** * INTERNAL: * Directly build a change record without comparison */ @Override public ChangeRecord buildChangeRecord(Object clone, ObjectChangeSet owner, AbstractSession session) { return internalBuildChangeRecord(getAttributeValueFromObject(clone), null, owner); } /** * INTERNAL: * Clone the attribute from the original and assign it to the clone. */ @Override public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) { buildCloneValue(original, clone, cloningSession); } /** * INTERNAL: * Extract value from the row and set the attribute to this value in the * working copy clone. * In order to bypass the shared cache when in transaction a UnitOfWork must * be able to populate working copies directly from the row. */ @Override public void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) { // Even though the correct value may exist on the original, we can't // make that assumption. It is easy to just build it again from the // row even if copy policy already copied it. // That optimization is lost. Object attributeValue = valueFromRow(databaseRow, joinManager, sourceQuery, sharedCacheKey, executionSession, true, null); setAttributeValueInObject(clone, attributeValue); } /** * INTERNAL: * Clone the attribute from the original and assign it to the clone. 
     * If mutability is configured to be true, clone the attribute if it is an instance of
     * byte[], java.util.Calendar or java.util.Date (or their subclasses).
     */
    public void buildCloneValue(Object original, Object clone, AbstractSession session) {
        Object attributeValue = getAttributeValueFromObject(original);
        attributeValue = buildCloneValue(attributeValue, session);
        setAttributeValueInObject(clone, attributeValue);
    }

    /**
     * INTERNAL:
     * Clone the actual value represented by this mapping. Do set the cloned value into the object.
     * For immutable attributes (the default) the value is returned as-is; mutable values are
     * deep-copied by type so the clone cannot alias the original's state.
     */
    protected Object buildCloneValue(Object attributeValue, AbstractSession session) {
        Object newAttributeValue = attributeValue;
        if (isMutable() && attributeValue != null) {
            // EL Bug 252047 - Mutable attributes are not cloned when isMutable is enabled on a Direct Mapping
            if (attributeValue instanceof byte[]) {
                int length = ((byte[]) attributeValue).length;
                byte[] arrayCopy = new byte[length];
                System.arraycopy(attributeValue, 0, arrayCopy, 0, length);
                newAttributeValue = arrayCopy;
            } else if (attributeValue instanceof Byte[]) {
                int length = ((Byte[]) attributeValue).length;
                Byte[] arrayCopy = new Byte[length];
                System.arraycopy(attributeValue, 0, arrayCopy, 0, length);
                newAttributeValue = arrayCopy;
            } else if (attributeValue instanceof char[]) {
                int length = ((char[]) attributeValue).length;
                char[] arrayCopy = new char[length];
                System.arraycopy(attributeValue, 0, arrayCopy, 0, length);
                newAttributeValue = arrayCopy;
            } else if (attributeValue instanceof Character[]) {
                int length = ((Character[]) attributeValue).length;
                Character[] arrayCopy = new Character[length];
                System.arraycopy(attributeValue, 0, arrayCopy, 0, length);
                newAttributeValue = arrayCopy;
            } else if (attributeValue instanceof Date) {
                newAttributeValue = ((Date)attributeValue).clone();
            } else if (attributeValue instanceof Calendar) {
                newAttributeValue = ((Calendar)attributeValue).clone();
            } else {
                // Unknown mutable type: round-trip through the field/object conversion
                // (e.g. a mutable converter such as SerializedObjectConverter) to copy it.
                newAttributeValue = getObjectValue(getFieldValue(attributeValue, session), session);
            }
        }
        return newAttributeValue;
    }

    /**
     * INTERNAL:
     * Copy of the attribute of the object.
     * This is NOT used for unit of work but for templatizing an object.
     */
    @Override
    public void buildCopy(Object copy, Object original, CopyGroup group) {
        buildCloneValue(original, copy, group.getSession());
    }

    /**
     * Build a clone of the given element in a unitOfWork.
     */
    public Object buildElementClone(Object attributeValue, Object parent, CacheKey cacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isExisting, boolean isFromSharedCache){
        return buildCloneValue(attributeValue, cloningSession);
    }

    /**
     * INTERNAL:
     * In case Query By Example is used, this method builds and returns an expression that
     * corresponds to a single attribute and it's value for a directToField mapping.
     */
    @Override
    public Expression buildExpression(Object queryObject, QueryByExamplePolicy policy, Expression expressionBuilder, Map processedObjects, AbstractSession session) {
        String attributeName = this.getAttributeName();
        Object attributeValue = this.getAttributeValueFromObject(queryObject);
        if (!policy.shouldIncludeInQuery(queryObject.getClass(), attributeName, attributeValue)) {
            //the attribute name and value pair is not to be included in the query.
            return null;
        }
        Expression expression = expressionBuilder.get(attributeName);
        if (attributeValue == null) {
            expression = policy.completeExpressionForNull(expression);
        } else {
            expression = policy.completeExpression(expression, attributeValue, attributeValue.getClass());
        }
        return expression;
    }

    /**
     * INTERNAL:
     * Certain key mappings favor different types of selection query. Return the appropriate
     * type of selectionQuery.
     */
    public ReadQuery buildSelectionQueryForDirectCollectionKeyMapping(ContainerPolicy containerPolicy){
        // Direct key mappings select raw data, so a DataReadQuery (not an object query) is built.
        DataReadQuery query = new DataReadQuery();
        query.setSQLStatement(new SQLSelectStatement());
        query.setContainerPolicy(containerPolicy);
        return query;
    }

    /**
     * INTERNAL:
     * Cascade discover and persist new objects during commit to the map key.
     */
    public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, boolean getAttributeValueFromObject, Set cascadeErrors){
        //objects referenced by this mapping are not registered as they have
        // no identity, this is a no-op.
    }

    /**
     * INTERNAL:
     * Cascade perform delete through mappings that require the cascade.
     */
    public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject) {
        //objects referenced by this mapping are not registered as they have
        // no identity, this is a no-op.
    }

    /**
     * INTERNAL:
     * Cascade perform delete through mappings that require the cascade.
     */
    public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        //objects referenced by this mapping are not registered as they have
        // no identity, this is a no-op.
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade.
     */
    public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject) {
        //objects referenced by this mapping are not registered as they have
        // no identity, this is a no-op.
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade.
     */
    public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        //objects referenced by this mapping are not registered as they have
        // no identity, this is a no-op.
    }

    /**
     * INTERNAL:
     * The mapping clones itself to create deep copy.
     */
    @Override
    public Object clone() {
        AbstractDirectMapping clone = (AbstractDirectMapping)super.clone();
        // Field must be cloned so aggregates do not share fields.
        clone.setField(getField().clone());
        return clone;
    }

    /**
     * INTERNAL:
     * Compare the clone and backup clone values and return a change record if the value changed.
     */
    @Override
    public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) {
        // same code as write from object into row for update
        if (owner.isNew()) {
            return internalBuildChangeRecord(getAttributeValueFromObject(clone), null, owner);
        } else if (!compareObjects(backUp, clone, session)) {
            Object oldValue = null;
            if (backUp != null && clone != backUp) {
                oldValue = getAttributeValueFromObject(backUp);
            }
            return internalBuildChangeRecord(getAttributeValueFromObject(clone), oldValue, owner);
        }
        return null;
    }

    /**
     * INTERNAL:
     * For mappings used as MapKeys in MappedKeyContainerPolicy, Delete the passed object if necessary.
     *
     * This method is used for removal of private owned relationships.
     * DirectMappings are dealt with in their parent delete, so this is a no-op.
     */
    public void deleteMapKey(Object objectDeleted, AbstractSession session){
    }

    /**
     * INTERNAL:
     * Compare the attributes belonging to this mapping for the objects.
     */
    @Override
    public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
        Object firstValue = getAttributeValueFromObject(firstObject);
        Object secondValue = getAttributeValueFromObject(secondObject);
        return compareObjectValues(firstValue, secondValue, session);
    }

    /**
     * INTERNAL:
     * Compare the attribute values.
     * Tries cheap identity/equals checks on the object values first, then converts both
     * sides to field values and compares again (including array contents).
     */
    protected boolean compareObjectValues(Object firstValue, Object secondValue, AbstractSession session) {
        // PERF: Check identity before conversion.
        if (firstValue == secondValue) {
            return true;
        }
        if ((firstValue != null) && (secondValue != null)) {
            // PERF: Check equals first, as normally no change.
            // Also for serialization objects bytes may not be consistent, but equals may work (HashMap).
            if (firstValue.equals(secondValue)) {
                return true;
            }
        }
        // CR2114 - following two lines modified; getFieldValue() needs class as an argument
        firstValue = getFieldValue(firstValue, session);
        secondValue = getFieldValue(secondValue, session);
        // PERF: Check identity/nulls before special type comparison.
        if (firstValue == secondValue) {
            return true;
        }
        if ((firstValue == null) || (secondValue == null)) {
            return false;
        }
        // PERF: Check equals first, as normally no change.
        if (firstValue.equals(secondValue)) {
            return true;
        }
        return Helper.comparePotentialArrays(firstValue, secondValue);
    }

    /**
     * INTERNAL:
     * Convert all the class-name-based settings in this mapping to actual class-based settings.
     * This method is implemented by subclasses as necessary.
     * Class loading goes through AccessController.doPrivileged when a security manager requires it.
     */
    @Override
    public void convertClassNamesToClasses(ClassLoader classLoader){
        super.convertClassNamesToClasses(classLoader);
        if (getAttributeClassificationName() != null) {
            Class attributeClass = null;
            try{
                if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
                    try {
                        attributeClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(getAttributeClassificationName(), true, classLoader));
                    } catch (PrivilegedActionException exception) {
                        throw ValidationException.classNotFoundWhileConvertingClassNames(getAttributeClassificationName(), exception.getException());
                    }
                } else {
                    attributeClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(getAttributeClassificationName(), true, classLoader);
                }
            } catch (ClassNotFoundException exc){
                throw ValidationException.classNotFoundWhileConvertingClassNames(getAttributeClassificationName(), exc);
            }
            setAttributeClassification(attributeClass);
        }
        if (fieldClassificationClassName != null){
            Class fieldClassification = null;
            try {
                if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
                    try {
                        fieldClassification = (Class) AccessController.doPrivileged(new PrivilegedClassForName(fieldClassificationClassName, true, classLoader));
                    } catch (PrivilegedActionException exception) {
                        throw ValidationException.classNotFoundWhileConvertingClassNames(fieldClassificationClassName, exception.getException());
                    }
                } else {
                    fieldClassification = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(fieldClassificationClassName, true, classLoader);
                }
            } catch (ClassNotFoundException exc) {
                throw ValidationException.classNotFoundWhileConvertingClassNames(fieldClassificationClassName, exc);
            } catch (Exception e) {
                // Catches IllegalAccessException and InstantiationException
                throw ValidationException.classNotFoundWhileConvertingClassNames(fieldClassificationClassName, e);
            }
            setFieldClassification(fieldClassification);
        }
    }

    /**
     * INTERNAL:
     * Creates the Array of simple types used to recreate this map.
     */
    public Object createSerializableMapKeyInfo(Object key, AbstractSession session){
        return key; // DirectToFields are already simple types.
    }

    /**
     * INTERNAL:
     * Create an instance of the Key object from the key information extracted from the map.
     * This may return the value directly in case of a simple key or will be used as the FK to load a related entity.
     */
    public List createMapComponentsFromSerializableKeyInfo(Object[] keyInfo, AbstractSession session){
        return Arrays.asList(keyInfo); // DirectToFields are already simple types.
    }

    /**
     * INTERNAL:
     * Create an instance of the Key object from the key information extracted from the map.
     * This key object may be a shallow stub of the actual object if the key is an Entity type.
     */
    public Object createStubbedMapComponentFromSerializableKeyInfo(Object keyInfo, AbstractSession session){
        return keyInfo;
    }

    /**
     * INTERNAL
     * Called when a DatabaseMapping is used to map the key in a collection. Returns the key.
     */
    public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected) {
        // Read the raw field value then apply null-value/converter translation.
        Object key = dbRow.get(getField());
        key = getObjectValue(key, session);
        return key;
    }

    /**
     * INTERNAL
     * Called when a DatabaseMapping is used to map the key in a collection and a join query is executed. Returns the key.
     */
    public Object createMapComponentFromJoinedRow(AbstractRecord dbRow, JoinedAttributeManager joinManger, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected) {
        // Joined rows need no special handling for direct keys; delegate to the plain row path.
        return createMapComponentFromRow(dbRow, query, parentCacheKey, session, isTargetProtected);
    }

    /**
     * INTERNAL:
     * Create a query key that links to the map key.
     */
    public QueryKey createQueryKeyForMapKey() {
        DirectQueryKey queryKey = new DirectQueryKey();
        queryKey.setField(getField());
        return queryKey;
    }

    /**
     * INTERNAL:
     * Extract the fields for the Map key from the object to use in a query.
     */
    public Map extractIdentityFieldsForQuery(Object object, AbstractSession session){
        Map fields = new HashMap();
        Object key = object;
        // Apply the converter (if any) so the query compares data values, not object values.
        if (getConverter() != null){
            key = getConverter().convertObjectValueToDataValue(key , session);
        }
        fields.put(getField(), key);
        return fields;
    }

    /**
     * INTERNAL:
     * Return any tables that will be required when this mapping is used as part of a join query.
     */
    public List getAdditionalTablesForJoinQuery() {
        List tables = new ArrayList(1);
        tables.add(getField().getTable());
        return tables;
    }

    /**
     * PUBLIC:
     * Some databases do not properly support all of the base data types. For these databases,
     * the base data type must be explicitly specified in the mapping to tell EclipseLink to force
     * the instance variable value to that data type.
     */
    public Class getAttributeClassification() {
        return attributeClassification;
    }

    /**
     * INTERNAL:
     * Return the class name of the attribute type.
     * This is only used by the MW.
     */
    public String getAttributeClassificationName() {
        // Lazily derive the name from the resolved class when only the class was set.
        if ((attributeClassificationName == null) && (attributeClassification != null)) {
            attributeClassificationName = attributeClassification.getName();
        }
        return attributeClassificationName;
    }

    /**
     * INTERNAL:
     * Allows for subclasses to convert the attribute value.
     * Translates the configured null-value, applies the user converter (or the platform
     * conversion to the attribute classification) and returns the resulting object value.
     */
    public Object getObjectValue(Object fieldValue, Session session) {
        // PERF: Direct variable access.
        Object attributeValue = fieldValue;
        if ((fieldValue == null) && (this.nullValue != null)) {// Translate default null value
            return this.nullValue;
        }
        // Allow for user defined conversion to the object value.
        if (this.converter != null) {
            attributeValue = this.converter.convertDataValueToObjectValue(attributeValue, session);
        } else {
            // PERF: Avoid conversion check when not required.
            if ((attributeValue == null) || (attributeValue.getClass() != this.attributeObjectClassification)) {
                if ((attributeValue != null) || !this.bypassDefaultNullValueCheck) {
                    try {
                        attributeValue = session.getDatasourcePlatform().convertObject(attributeValue, this.attributeClassification);
                    } catch (ConversionException e) {
                        throw ConversionException.couldNotBeConverted(this, getDescriptor(), e);
                    }
                }
            }
        }
        if (attributeValue == null) {// Translate default null value, conversion may have produced null.
            attributeValue = this.nullValue;
        }
        return attributeValue;
    }

    /**
     * INTERNAL:
     * Same as getObjectValue method, but without checking fieldValue's class.
     * Used in case the fieldValue class is already known to be the same as attributeClassification.
     */
    public Object getObjectValueWithoutClassCheck(Object fieldValue, Session session) {
        if ((fieldValue == null) && (this.nullValue != null)) {// Translate default null value
            return this.nullValue;
        }
        // PERF: Direct variable access.
        Object attributeValue = fieldValue;
        // Allow for user defined conversion to the object value.
        if (this.converter != null) {
            attributeValue = this.converter.convertDataValueToObjectValue(attributeValue, session);
        } else {
            // PERF: Avoid conversion check when not required.
            if (attributeValue == null) {
                if (!this.bypassDefaultNullValueCheck) {
                    try {
                        attributeValue = session.getDatasourcePlatform().convertObject(null, this.attributeClassification);
                    } catch (ConversionException e) {
                        throw ConversionException.couldNotBeConverted(this, getDescriptor(), e);
                    }
                }
            }
        }
        if (attributeValue == null) {// Translate default null value, conversion may have produced null.
            attributeValue = this.nullValue;
        }
        return attributeValue;
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isAbstractDirectMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Get the descriptor for this mapping.
     * This method is potentially called when this mapping is used as a map key and
     * will return null since direct mappings do not have reference descriptors.
     */
    public ClassDescriptor getReferenceDescriptor(){
        return null;
    }

    /**
     * INTERNAL:
     * Return the classification for the field contained in the mapping.
     * This is used to convert the row value to a consistent Java value.
     */
    public Class getFieldClassification(DatabaseField fieldToClassify) {
        // PERF: This method is a major performance code point,
        // so has been micro optimized and uses direct variable access.
        if (fieldToClassify.type != null) {
            return fieldToClassify.type;
        } else {
            if (hasConverter()) {
                // A converter may map to any data type; no classification can be inferred.
                return null;
            } else {
                // PERF: Ensure the object type is used for primitives.
                return Helper.getObjectClass(this.attributeClassification);
            }
        }
    }

    /**
     * ADVANCED:
     * Return the class type of the field value.
     * This can be used if field value differs from the object value,
     * has specific typing requirements such as usage of java.sql.Blob or NChar.
     */
    public Class getFieldClassification() {
        if (getField() == null) {
            return null;
        }
        return getField().getType();
    }

    /**
     * ADVANCED:
     * Set the class type of the field value.
     * This can be used if field value differs from the object value,
     * has specific typing requirements such as usage of java.sql.Blob or NChar.
     * This must be called after the field name has been set.
     */
    public void setFieldClassification(Class fieldType) {
        getField().setType(fieldType);
    }

    /**
     * INTERNAL:
     * Set the name of the class that will be used for setFieldClassification at deploy time.
     * Used internally by JPA deployment.
     *
     * @see #setFieldClassification(Class)
     * @param className
     */
    public void setFieldClassificationClassName(String className){
        this.fieldClassificationClassName = className;
    }

    /**
     * ADVANCED:
     * Set the JDBC type of the field value.
     * This can be used if field type does not correspond directly to a Java class type,
     * such as MONEY.
     * This is used for binding.
     */
    public void setFieldType(int jdbcType) {
        getField().setSqlType(jdbcType);
    }

    /**
     * PUBLIC:
     * Name of the field this mapping represents.
     */
    public String getFieldName() {
        return getField().getQualifiedName();
    }

    /**
     * INTERNAL:
     * Convert the attribute value to a field value.
     * Process any converter if defined, and check for null values.
     */
    public Object getFieldValue(Object attributeValue, AbstractSession session) {
        // PERF: This method is a major performance code point,
        // so has been micro optimized and uses direct variable access.
        Object fieldValue = attributeValue;
        // An attribute equal to the configured null-value is written as database NULL.
        if ((this.nullValue != null) && (this.nullValue.equals(fieldValue))) {
            return null;
        }
        // Allow for user defined conversion to the object value.
        if (this.converter != null) {
            fieldValue = this.converter.convertObjectValueToDataValue(fieldValue, session);
        }
        Class fieldClassification = this.field.type;
        if (fieldClassification == null) {
            fieldClassification = getFieldClassification(this.field);
        }
        // PERF: Avoid conversion if not required.
        // EclipseLink bug 240407 - nulls not translated when writing to database
        if ((fieldValue == null) || (fieldClassification != fieldValue.getClass())) {
            if ((fieldValue != null) || !this.bypassDefaultNullValueCheck) {
                try {
                    fieldValue = session.getPlatform(this.descriptor.getJavaClass()).convertObject(fieldValue, fieldClassification);
                } catch (ConversionException exception) {
                    throw ConversionException.couldNotBeConverted(this, this.descriptor, exception);
                }
            }
        }
        return fieldValue;
    }

    /**
     * INTERNAL:
     * Return a Map of any foreign keys defined within the the MapKey.
     */
    public Map getForeignKeyFieldsForMapKey(){
        return null;
    }

    /**
     * INTERNAL:
     * Return the fields that make up the identity of the mapped object. For mappings with
     * a primary key, it will be the set of fields in the primary key. For mappings without
     * a primary key it will likely be all the fields.
     */
    public List getIdentityFieldsForMapKey(){
        return getAllFieldsForMapKey();
    }

    /**
     * INTERNAL:
     * Get all the fields for the map key.
     * NOTE: returns a Vector for historical API compatibility.
     */
    public List getAllFieldsForMapKey(){
        Vector fields = new Vector(1);
        fields.add(getField());
        return fields;
    }

    /**
     * INTERNAL:
     * Return the query that is used when this mapping is part of a joined relationship.
     * This method is used when this mapping is used to map the key in a Map.
     */
    public ObjectLevelReadQuery getNestedJoinQuery(JoinedAttributeManager joinManager, ObjectLevelReadQuery query, AbstractSession session){
        return null;
    }

    /**
     * PUBLIC:
     * Allow for the value used for null to be specified.
     * This can be used to convert database null values to application specific values, when null values
     * are not allowed by the application (such as in primitives).
     * Note: the default value for NULL is used on reads, writes, and query SQL generation.
     */
    public Object getNullValue() {
        return nullValue;
    }

    /**
     * INTERNAL:
     * Return the selection criteria necessary to select the target object when this mapping
     * is a map key.
     * DirectMappings do not need any additional selection criteria when they are map keys.
     */
    public Expression getAdditionalSelectionCriteriaForMapKey(){
        return null;
    }

    /**
     * INTERNAL:
     * If required, get the targetVersion of the source object from the merge manager.
     * Used with MapKeyContainerPolicy to abstract getting the target version of a source key.
     * Direct values have no identity, so the object itself is returned unchanged.
     */
    public Object getTargetVersionOfSourceObject(Object object, Object parent, MergeManager mergeManager, AbstractSession targetSession){
        return object;
    }

    /**
     * INTERNAL:
     * Return the class this key mapping maps or the descriptor for it.
     * Falls back from the accessor's attribute class, to the attribute
     * classification, to the field's type.
     * @return the Java class of the map key value
     */
    public Class getMapKeyTargetType() {
        Class aClass = getAttributeAccessor().getAttributeClass();
        // 294765: check the attributeClassification when the MapKey annotation is not specified
        if (null == aClass) {
            aClass = getAttributeClassification();
        }
        if (null == aClass) {
            aClass = getField().getType();
        }
        return aClass;
    }

    /**
     * INTERNAL:
     * Return the weight of the mapping, used to sort mappings to ensure that
     * DirectToField Mappings get merged first.
     */
    @Override
    public Integer getWeight() {
        return this.weight;
    }

    /**
     * INTERNAL:
     * Once descriptors are serialized to the remote session, all its mappings and reference descriptors are traversed. Usually
     * mappings are initialized and serialized reference descriptors are replaced with local descriptors if they already exist on the
     * remote session.
     */
    @Override
    public void remoteInitialization(DistributedSession session) {
        if (!isRemotelyInitialized()) {
            super.remoteInitialization(session);
            if (this.attributeClassification == null) {
                this.attributeClassification = getAttributeAccessor().getAttributeClass();
            }
            // Cache the boxed (object) form so primitive attributes compare correctly.
            this.attributeObjectClassification = Helper.getObjectClass(this.attributeClassification);
        }
    }

    /**
     * INTERNAL:
     * Initialize the attribute classification, mutability, and the
     * default-null-value bypass flag.
     */
    @Override
    public void preInitialize(AbstractSession session) throws DescriptorException {
        super.preInitialize(session);
        // Allow the attribute class to be set by the user.
        if (this.attributeClassification == null) {
            this.attributeClassification = getAttributeAccessor().getAttributeClass();
        }
        this.attributeObjectClassification = Helper.getObjectClass(this.attributeClassification);

        // Initialize isMutable if not specified, default is false (assumes not mutable).
        if (this.isMutable == null) {
            if (hasConverter()) {
                setIsMutable(getConverter().isMutable());
            } else {
                setIsMutable(false);
            }
            // If mapping a temporal type, use the project mutable default.
            if ((getAttributeClassification() != null)
                    && (ClassConstants.UTILDATE.isAssignableFrom(getAttributeClassification())
                        || ClassConstants.CALENDAR.isAssignableFrom(getAttributeClassification()))) {
                setIsMutable(session.getProject().getDefaultTemporalMutable());
            }
        }

        // Null conversion can be skipped for non-primitive attributes with no
        // platform-registered default null value.
        Map nullValues = session.getPlatform(this.descriptor.getJavaClass()).getConversionManager().getDefaultNullValues();
        bypassDefaultNullValueCheck = (!this.attributeClassification.isPrimitive())
                && ((nullValues == null) || (!nullValues.containsKey(this.attributeClassification)));
    }

    /**
     * INTERNAL:
     * The mapping is initialized with the given session.
     * This mapping is fully initialized after this.
     */
    @Override
    public void initialize(AbstractSession session) throws DescriptorException {
        super.initialize(session);

        if (getField() == null) {
            session.getIntegrityChecker().handleError(DescriptorException.fieldNameNotSetInMapping(this));
        }

        // Before potentially swapping out the field with an already built one,
        // set the JPA insertable and updatable flags based on the settings from
        // this mappings field. This must be done now. The reason for this code
        // is to cover the case where multiple mappings map to the same field.
        // One of those mappings must be write only, therefore, depending on the
        // initialization order we do not want to set the writable mapping as
        // non insertable and non updatable.
        isInsertable = getField().isInsertable();
        isUpdatable = getField().isUpdatable();

        if (keyTableForMapKey == null){
            setField(getDescriptor().buildField(getField()));
        } else {
            setField(getDescriptor().buildField(getField(), keyTableForMapKey));
        }
        setFields(collectFields());

        if (hasConverter()) {
            getConverter().initialize(this, session);
        }

        // Must unwrap Struct types on WLS.
        if (getField().getSqlType() == java.sql.Types.STRUCT) {
            getDescriptor().setIsNativeConnectionRequired(true);
        }
    }

    /**
     * INTERNAL:
     * Build a change record holding the old and new attribute values.
     */
    public ChangeRecord internalBuildChangeRecord(Object newValue, Object oldValue, ObjectChangeSet owner) {
        DirectToFieldChangeRecord changeRecord = new DirectToFieldChangeRecord(owner);
        changeRecord.setAttribute(getAttributeName());
        changeRecord.setMapping(this);
        changeRecord.setNewValue(newValue);
        changeRecord.setOldValue(oldValue);
        return changeRecord;
    }

    /**
     * INTERNAL:
     * Identifies this mapping as a direct-to-field mapping.
     */
    @Override
    public boolean isDirectToFieldMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Called when iterating through descriptors to handle iteration on this mapping when it is used as a MapKey.
     */
    public void iterateOnMapKey(DescriptorIterator iterator, Object element){
        if (iterator.shouldIterateOnPrimitives()) {
            iterator.iteratePrimitiveForMapping(element, this);
        }
    }

    /**
     * INTERNAL:
     * Merge changes from the source to the target object using the change record's new value.
     */
    @Override
    public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) {
        setAttributeValueInObject(target, buildCloneValue(((DirectToFieldChangeRecord)changeRecord).getNewValue(), mergeManager.getSession()));
    }

    /**
     * INTERNAL:
     * Merge changes from the source to the target object.
     * This merge is only called when a changeSet for the target
     * does not exist or the target is uninitialized.
     */
    @Override
    public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) {
        // If merging into the unit of work, must only merge and raise the event if the value changed.
        if ((mergeManager.shouldMergeCloneIntoWorkingCopy() || mergeManager.shouldMergeCloneWithReferencesIntoWorkingCopy())
                && !mergeManager.isForRefresh()
                && this.descriptor.getObjectChangePolicy().isObjectChangeTrackingPolicy()) {
            // if it didn't change then there will be no event
            Object attributeValue = getAttributeValueFromObject(source);
            Object targetAttribute = getAttributeValueFromObject(target);
            if (!compareObjectValues(attributeValue, targetAttribute, mergeManager.getSession())) {
                setAttributeValueInObject(target, buildCloneValue(attributeValue, mergeManager.getSession()));
                //set the value first, if the owner is new ( or aggregate) the change set may be created directly
                //from the target.
                this.descriptor.getObjectChangePolicy().raiseInternalPropertyChangeEvent(target, getAttributeName(), targetAttribute, attributeValue);
            }
        } else {
            setAttributeValueInObject(target, buildCloneValue(getAttributeValueFromObject(source), mergeManager.getSession()));
        }
    }

    /**
     * INTERNAL:
     * Make any mapping changes necessary to use the mapping as a map key, prior to initializing the mapping.
     */
    public void preinitializeMapKey(DatabaseTable table) throws DescriptorException {
        this.keyTableForMapKey = table;
    }

    /**
     * INTERNAL:
     * Make any mapping changes necessary to use the mapping as a map key, after initializing the mapping.
     */
    public void postInitializeMapKey(MappedKeyMapContainerPolicy policy) {
        if (getField().getType() == null) {
            getField().setType(getFieldClassification(getField()));
        }
    }

    /**
     * INTERNAL:
     * Return whether this mapping requires extra queries to update the rows if it is
     * used as a key in a map. This will typically be true if there are any parts to this mapping
     * that are not read-only.
     */
    public boolean requiresDataModificationEventsForMapKey(){
        return !isReadOnly() && isUpdatable();
    }

    /**
     * PUBLIC:
     * Some databases do not properly support all of the base data types. For these databases,
     * the base data type must be explicitly specified in the mapping to tell EclipseLink to force
     * the instance variable value to that data type.
     */
    public void setAttributeClassification(Class attributeClassification) {
        this.attributeClassification = attributeClassification;
    }

    /**
     * INTERNAL:
     * Set the name of the class for MW (presumably Mapping Workbench — confirm) usage;
     * the name is resolved to a Class during deployment.
     */
    public void setAttributeClassificationName(String attributeClassificationName) {
        this.attributeClassificationName = attributeClassificationName;
    }

    /**
     * PUBLIC:
     * Allow for the value used for null to be specified.
     * This can be used to convert database null values to application specific values, when null values
     * are not allowed by the application (such as in primitives).
     * Note: the default value for NULL is used on reads, writes, and query SQL generation.
     */
    public void setNullValue(Object nullValue) {
        this.nullValue = nullValue;
    }

    /**
     * INTERNAL:
     * Debug string of the form ClassName[attribute-->field].
     */
    @Override
    public String toString() {
        return getClass().getName() + "[" + getAttributeName() + "-->" + getField() + "]";
    }

    /**
     * INTERNAL:
     * Either create a new change record or update with the new value. This is used
     * by attribute change tracking.
     */
    @Override
    public void updateChangeRecord(Object clone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) {
        DirectToFieldChangeRecord changeRecord = (DirectToFieldChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName());
        if (changeRecord == null) {
            objectChangeSet.addChange(internalBuildChangeRecord(newValue, oldValue, objectChangeSet));
        } else {
            changeRecord.setNewValue(newValue);
        }
    }

    /**
     * INTERNAL:
     * Return if this mapping supports change tracking.
     * Mutable values can change in place, so they cannot be change-tracked.
     */
    @Override
    public boolean isChangeTrackingSupported(Project project) {
        return !isMutable();
    }

    /**
     * INTERNAL:
     * Return if this mapping requires its attribute value to be cloned.
     */
    @Override
    public boolean isCloningRequired() {
        return isMutable() || getDescriptor().getCopyPolicy().buildsNewInstance();
    }

    /**
     * INTERNAL:
     * Allow the key mapping to unwrap the object.
     * Direct values need no unwrapping.
     */
    public Object unwrapKey(Object key, AbstractSession session){
        return key;
    }

    /**
     * INTERNAL:
     * Allow for subclasses to perform validation.
     * A direct mapping must have a field name.
     */
    @Override
    public void validateBeforeInitialization(AbstractSession session) throws DescriptorException {
        if ((getFieldName() == null) || (getFieldName().length() == 0)) {
            session.getIntegrityChecker().handleError(DescriptorException.noFieldNameForMapping(this));
        }
    }

    /**
     * INTERNAL:
     * Allow the key mapping to wrap the object.
     * Direct values need no wrapping.
     */
    public Object wrapKey(Object key, AbstractSession session){
        return key;
    }

    /**
     * INTERNAL:
     * Get the value from the object for this mapping, converted to the field value.
     */
    @Override
    public Object valueFromObject(Object object, DatabaseField field, AbstractSession session) throws DescriptorException {
        return getFieldValue(getAttributeValueFromObject(object), session);
    }

    /**
     * INTERNAL:
     * Builds a shallow original object. Only direct attributes and primary
     * keys are populated. In this way the minimum original required for
     * instantiating a working copy clone can be built without placing it in
     * the shared cache (no concern over cycles).
     * @param databaseRow later the input to buildCloneFromRow
     */
    @Override
    public void buildShallowOriginalFromRow(AbstractRecord databaseRow, Object original, JoinedAttributeManager joinManager, ObjectBuildingQuery query, AbstractSession executionSession) {
        readFromRowIntoObject(databaseRow, null, original, null, query, executionSession, true);
    }

    /**
     * INTERNAL:
     * Return the mapping's attribute value from the row.
     * The execution session is passed for the case of building a UnitOfWork clone
     * directly from a row; the session set in the query will not know which platform to use
     * for converting the value. Allows the correct session to be passed in.
     */
    @Override
    public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery query, CacheKey cacheKey, AbstractSession executionSession, boolean isTargetProtected, Boolean[] wasCacheUsed) {
        // For protected isolation, prefer the cached object's attribute over re-reading the row.
        if (this.descriptor.getCachePolicy().isProtectedIsolation()) {
            if (this.isCacheable && isTargetProtected && cacheKey != null) {
                Object cached = cacheKey.getObject();
                if (cached != null) {
                    if (wasCacheUsed != null){
                        wasCacheUsed[0] = Boolean.TRUE;
                    }
                    Object attributeValue = getAttributeValueFromObject(cached);
                    return buildCloneValue(attributeValue, executionSession);
                }
            }
        }
        // Serialized object policy: read directly from the deserialized object.
        if (row.hasSopObject()) {
            return getAttributeValueFromObject(row.getSopObject());
        }
        // PERF: Direct variable access.
        Object fieldValue = row.get(getField());
        Object attributeValue = getObjectValue(fieldValue, executionSession);
        return attributeValue;
    }

    /**
     * INTERNAL:
     * Returns the value for the mapping directly from the result-set.
     * PERF: Used for optimized object building; common classifications
     * (String/Long/Integer) use typed ResultSet getters to avoid generic
     * object extraction.
     */
    @Override
    public Object valueFromResultSet(ResultSet resultSet, ObjectBuildingQuery query, AbstractSession session, DatabaseAccessor accessor, ResultSetMetaData metaData, int columnNumber, DatabasePlatform platform) throws SQLException {
        if (this.attributeObjectClassification == ClassConstants.STRING) {
            return getObjectValueWithoutClassCheck(resultSet.getString(columnNumber), session);
        } else if (this.attributeObjectClassification == ClassConstants.LONG) {
            return getObjectValueWithoutClassCheck(resultSet.getLong(columnNumber), session);
        } else if (this.attributeObjectClassification == ClassConstants.INTEGER) {
            return getObjectValueWithoutClassCheck(resultSet.getInt(columnNumber), session);
        }
        Object fieldValue = accessor.getObject(resultSet, getField(), metaData, columnNumber, platform, true, session);
        return getObjectValue(fieldValue, session);
    }

    // Subclasses define how the converted field value is placed into the row.
    protected abstract void writeValueIntoRow(AbstractRecord row, DatabaseField field, Object value);

    /**
     * INTERNAL:
     * Get a value from the object and set that in the respective field of the row.
     * Validation preventing primary key updates is implemented here.
     */
    @Override
    public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord row, AbstractSession session, WriteType writeType) {
        if (isReadOnly() || (writeType.equals(WriteType.INSERT) && ! isInsertable()) || (writeType.equals(WriteType.UPDATE) && ! isUpdatable())) {
            return;
        }

        if (this.isPrimaryKeyMapping && !changeRecord.getOwner().isNew()) {
            throw ValidationException.primaryKeyUpdateDisallowed(changeRecord.getOwner().getClassName(), changeRecord.getAttribute());
        }

        Object attributeValue = ((DirectToFieldChangeRecord)changeRecord).getNewValue();
        Object fieldValue = getFieldValue(attributeValue, session);
        // EL Bug 319759 - if a field is null, then the update call cache should not be used
        if (fieldValue == null) {
            row.setNullValueInFields(true);
        }
        row.add(getField(), fieldValue);
    }

    /**
     * INTERNAL:
     * Get a value from the object and set that in the respective field of the row.
     */
    @Override
    public void writeFromObjectIntoRow(Object object, AbstractRecord row, AbstractSession session, WriteType writeType) {
        if (isReadOnly() || (writeType.equals(WriteType.INSERT) && ! isInsertable()) || (writeType.equals(WriteType.UPDATE) && ! isUpdatable())) {
            return;
        }
        Object attributeValue = getAttributeValueFromObject(object);
        Object fieldValue = getFieldValue(attributeValue, session);
        // EL Bug 319759 - if a field is null, then the update call cache should not be used
        if (fieldValue == null) {
            row.setNullValueInFields(true);
        }
        writeValueIntoRow(row, getField(), fieldValue);
    }

    /**
     * INTERNAL:
     * Write the attribute value from the object to the row for update.
     * Skips the write when the unit-of-work backup clone shows no change.
     */
    @Override
    public void writeFromObjectIntoRowForUpdate(WriteObjectQuery query, AbstractRecord databaseRow) {
        if (query.getSession().isUnitOfWork()) {
            if (compareObjects(query.getBackupClone(), query.getObject(), query.getSession())) {
                return;
            }
        }

        super.writeFromObjectIntoRowForUpdate(query, databaseRow);
    }

    /**
     * INTERNAL:
     * Write fields needed for insert into the template row with null values.
     */
    @Override
    public void writeInsertFieldsIntoRow(AbstractRecord databaseRow, AbstractSession session) {
        if (isInsertable() && ! isReadOnly()) {
            databaseRow.add(getField(), null);
        }
    }

    /**
     * INTERNAL:
     * Write fields needed for update into the template row with null values.
* By default inserted fields are used. */ @Override public void writeUpdateFieldsIntoRow(AbstractRecord databaseRow, AbstractSession session) { if (isUpdatable() && ! isReadOnly()) { databaseRow.add(getField(), null); } } } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractTransformationMapping.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractTransformationMapping.jav0000664000000000000000000017416112216173130031111 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 08/23/2010-2.2 Michael O'Brien * - 323043: application.xml module ordering may cause weaving not to occur causing an NPE. * warn if expected "_persistence_*_vh" method not found * instead of throwing NPE during deploy validation. 
******************************************************************************/ package org.eclipse.persistence.mappings.foundation; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform; import org.eclipse.persistence.internal.databaseaccess.FieldTypeDefinition; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.indirection.*; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.internal.sessions.remote.*; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.sessions.DatabaseRecord; import org.eclipse.persistence.mappings.Association; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.transformers.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.sessions.CopyGroup; import org.eclipse.persistence.sessions.Project; /** *

Purpose: A transformation mapping is used for a specialized translation between how
 * a value is represented in Java and its representation on the database. Transformation mappings
 * should only be used when other mappings are inadequate.
 *
 * @author Sati
 * @since TOPLink/Java 1.0
 */
public abstract class AbstractTransformationMapping extends DatabaseMapping {

    /** Name of the class which implements AttributeTransformer to be used to retrieve the attribute value */
    protected String attributeTransformerClassName;

    /** attributeTransformerClassName is converted to an instance of AttributeTransformer */
    protected AttributeTransformer attributeTransformer;

    /** Stores field name and the class name of a FieldTransformer in a vector to preserve order */
    protected List fieldTransformations;

    /** The TransformerClassNames are converted into instances of FieldTransformer */
    protected List fieldToTransformers;

    /** PERF: Indicates if this mapping's attribute is a simple value which cannot be modified, only replaced. */
    protected boolean isMutable;

    /** Implements indirection behaviour */
    protected IndirectionPolicy indirectionPolicy;

    /**
     * PUBLIC:
     * Default constructor.
     * Defaults to mutable, no indirection, and the transformation merge weight.
     */
    public AbstractTransformationMapping() {
        fieldTransformations = new ArrayList();
        fieldToTransformers = new ArrayList();
        setIsMutable(true);
        dontUseIndirection();
        this.setWeight(WEIGHT_TRANSFORM);
    }

    /**
     * PUBLIC:
     * Add the field and the name of the method
     * that returns the value to be placed in said field
     * when the object is written to the database.
     * The method may take zero arguments, or it may
     * take a single argument of type
     * org.eclipse.persistence.sessions.Session.
     */
    public void addFieldTransformation(DatabaseField field, String methodName) {
        MethodBasedFieldTransformation transformation = new MethodBasedFieldTransformation();
        transformation.setField(field);
        transformation.setMethodName(methodName);
        getFieldTransformations().add(transformation);
    }

    /**
     * PUBLIC:
     * Add the name of the field and the name of the method
     * that returns the value to be placed in said field
     * when the object is written to the database.
     * The method may take zero arguments, or it may
     * take a single argument of type
     * org.eclipse.persistence.sessions.Session.
     */
    public void addFieldTransformation(String fieldName, String methodName) {
        addFieldTransformation(new DatabaseField(fieldName), methodName);
    }

    /**
     * INTERNAL:
     * Add the name of a field and the name of a class which implements
     * the FieldTransformer interface. When the object is written, the transform
     * method will be called on the FieldTransformer to acquire the value to put
     * in the field.
     */
    public void addFieldTransformerClassName(String fieldName, String className) {
        addFieldTransformerClassName(new DatabaseField(fieldName), className);
    }

    /**
     * INTERNAL:
     * Add the name of a field and the name of a class which implements
     * the FieldTransformer interface. When the object is written, the transform
     * method will be called on the FieldTransformer to acquire the value to put
     * in the field.
     */
    public void addFieldTransformerClassName(DatabaseField field, String className) {
        TransformerBasedFieldTransformation transformation = new TransformerBasedFieldTransformation();
        transformation.setField(field);
        transformation.setTransformerClassName(className);
        getFieldTransformations().add(transformation);
    }

    /**
     * PUBLIC:
     * Add the name of the field and the transformer
     * that returns the value to be placed in the field
     * when the object is written to the database.
     */
    public void addFieldTransformer(String fieldName, FieldTransformer transformer) {
        this.addFieldTransformer(new DatabaseField(fieldName), transformer);
    }

    /**
     * PUBLIC:
     * Add the field and the transformer
     * that returns the value to be placed in the field
     * when the object is written to the database.
     */
    public void addFieldTransformer(DatabaseField field, FieldTransformer transformer) {
        TransformerBasedFieldTransformation transformation = new TransformerBasedFieldTransformation(transformer);
        transformation.setField(field);
        getFieldTransformations().add(transformation);
    }

    /**
     * INTERNAL:
     * The referenced object is checked to see if it is instantiated or not
     * (via the indirection policy).
     */
    protected boolean areObjectsToBeProcessedInstantiated(Object object) {
        return this.indirectionPolicy.objectIsInstantiated(getAttributeValueFromObject(object));
    }

    /**
     * INTERNAL:
     * Clone the attribute from the clone and assign it to the backup.
     */
    @Override
    public void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork) {
        // If mapping is a no-attribute transformation mapping, do nothing
        if (isWriteOnly()) {
            return;
        }
        Object attributeValue = getAttributeValueFromObject(clone);
        Object clonedAttributeValue = this.indirectionPolicy.backupCloneAttribute(attributeValue, clone, backup, unitOfWork);
        setAttributeValueInObject(backup, clonedAttributeValue);
    }

    /**
     * INTERNAL:
     * Build a phantom row that contains only the fields
     * for the mapping, populated with the values generated by
     * invoking the field methods on the specified object.
     */
    protected AbstractRecord buildPhantomRowFrom(Object domainObject, AbstractSession session) {
        AbstractRecord row = new DatabaseRecord(this.fieldToTransformers.size());
        // Each pair is [DatabaseField, FieldTransformer].
        for (Object[] pair : this.fieldToTransformers) {
            DatabaseField field = (DatabaseField)pair[0];
            FieldTransformer transformer = (FieldTransformer)pair[1];
            Object fieldValue = this.invokeFieldTransformer(field, transformer, domainObject, session);
            row.put(field, fieldValue);
        }
        return row;
    }

    /**
     * INTERNAL:
     * Builds a shallow original object. Only direct attributes and primary
     * keys are populated. In this way the minimum original required for
     * instantiating a working copy clone can be built without placing it in
     * the shared cache (no concern over cycles).
     * @param original later the input to buildCloneFromRow
     */
    @Override
    public void buildShallowOriginalFromRow(AbstractRecord record, Object original, JoinedAttributeManager joinManager, ObjectBuildingQuery query, AbstractSession executionSession) {
        // In this case we know it is a primary key mapping, so hope that it
        // is essentially a direct mapping.  If it is a 1-1 with a
        // no-indirection pointer back to original, then will get a stack
        // overflow.
        // Only solution to this is to trigger the transformation using the root
        // session.
        UnitOfWorkImpl unitOfWork = (UnitOfWorkImpl)query.getSession();
        query.setSession(unitOfWork.getParent());
        try {
            readFromRowIntoObject(record, joinManager, original, null, query, executionSession, false);
        } finally {
            // Always restore the original session on the query.
            query.setSession(unitOfWork);
        }
    }

    /**
     * INTERNAL:
     * Used during building the backup shallow copy to copy the vector without re-registering the target objects.
     * For 1-1 or ref the reference is from the clone so it is already registered.
     */
    @Override
    public Object buildBackupCloneForPartObject(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) {
        return buildCloneForPartObject(attributeValue, clone, null, backup, unitOfWork, null, true, true);
    }

    /**
     * INTERNAL:
     * Clone the attribute from the original and assign it to the clone.
     */
    @Override
    public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) {
        // If mapping is a no-attribute transformation mapping, do nothing
        if (isWriteOnly()) {
            return;
        }
        Object attributeValue = getAttributeValueFromObject(original);
        Object clonedAttributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, original, cacheKey, clone, refreshCascade, cloningSession, false);// building clone from an original not a row.
        setAttributeValueInObject(clone, clonedAttributeValue);
    }

    /**
     * INTERNAL:
     * Extract value from the row and set the attribute to this value in the
     * working copy clone.
     * In order to bypass the shared cache when in transaction a UnitOfWork must
     * be able to populate working copies directly from the row.
     */
    @Override
    public void buildCloneFromRow(AbstractRecord record, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) {
        // If mapping is a no-attribute transformation mapping, do nothing
        if (isWriteOnly()) {
            return;
        }

        // This will set the value in the clone automatically.
        Object attributeValue = readFromRowIntoObject(record, joinManager, clone, sharedCacheKey, sourceQuery, executionSession, true);
        if (usesIndirection()) {
            boolean wasCacheUsed = this.isCacheable && sharedCacheKey != null
                    && this.descriptor.getCachePolicy().isProtectedIsolation()
                    && sharedCacheKey.getObject() != null;
            //it would be better if wasCacheUsed could be calculated within readFromRowIntoObject but that would require changing the signature of all mappings just for
            //transformation mapping.
            if (!wasCacheUsed){
                //if the cache was used then the attribute has already been cloned by readFromRowIntoObject
                attributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, null,// no original
                        null, clone, null, unitOfWork, true);// build clone directly from row.
            }
            setAttributeValueInObject(clone, attributeValue);
        }
    }

    /**
     * INTERNAL:
     * Required for cloning, the part must be cloned.
     * Ignore the attribute value, go right to the object itself.
     */
    public Object buildCloneForPartObject(Object attributeValue, Object original, CacheKey cacheKey, Object clone, AbstractSession cloningSession, Integer refreshCascade, boolean isExisting, boolean isFromSharedCache) {
        // Read-only or immutable values can be shared rather than re-derived.
        if (isReadOnly() || !isMutable()) {
            return attributeValue;
        }
        AbstractRecord row = buildPhantomRowFrom(original, cloningSession);
        return invokeAttributeTransformer(row, clone, cloningSession);
    }

    /**
     * INTERNAL:
     * Copy the attribute of the object.
     * This is NOT used for unit of work but for templatizing an object.
     */
    @Override
    public void buildCopy(Object copy, Object original, CopyGroup group) {
        // If mapping is a no-attribute transformation mapping, do nothing
        if (isWriteOnly()) {
            return;
        }
        Object clonedAttributeValue;
        // If the mapping is read-only, a direct pass through of the value will be performed.
        // This is done because the method invocation is not possible as the row will be
        // empty and we have no way to clone the value.
        // Since the value cannot change anyway we just pass it through.
if (isReadOnly() || !isMutable()) { clonedAttributeValue = getRealAttributeValueFromObject(original, group.getSession()); } else { AbstractRecord row = buildPhantomRowFrom(original, group.getSession()); clonedAttributeValue = invokeAttributeTransformer(row, copy, group.getSession()); } this.indirectionPolicy.reset(copy); setRealAttributeValueInObject(copy, clonedAttributeValue); } /** * INTERNAL: * Cascade perform delete through mappings that require the cascade */ @Override public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){ //objects referenced by this mapping are not registered as they have // no identity, this is a no-op. } /** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade */ @Override public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){ //Objects referenced through transformation mappings are not registered as // they have no identity, this is a no-op. } /** * INTERNAL: * The mapping clones itself to create deep copy. */ @Override public Object clone() { AbstractTransformationMapping clone = (AbstractTransformationMapping)super.clone(); clone.setFieldToTransformers(new ArrayList(this.fieldToTransformers.size())); for (Object[] pair : this.fieldToTransformers) { Object[] transformation = new Object[2]; transformation[0] = pair[0]; transformation[1] = pair[1]; clone.getFieldToTransformers().add(transformation); } clone.setIndirectionPolicy((IndirectionPolicy)indirectionPolicy.clone()); return clone; } /** * INTERNAL: * Return all the fields with this mapping. */ @Override protected Vector collectFields() { Vector databaseFields = new Vector(this.fieldToTransformers.size()); for (Object[] pair : this.fieldToTransformers) { databaseFields.add(pair[0]); } return databaseFields; } /** * INTERNAL: * Compare the attributes belonging to this mapping for the objects. 
     */
    @Override
    public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) {
        if (isReadOnly() || isWriteOnly()) {
            return null;
        }
        Object cloneAttribute = getAttributeValueFromObject(clone);
        Object backUpAttribute = null;
        // An uninstantiated value holder cannot have been read, and therefore cannot have changed.
        if ((cloneAttribute != null) && (!this.indirectionPolicy.objectIsInstantiated(cloneAttribute))) {
            return null;
        }
        boolean difference = false;
        Object backupValue = null;
        if (owner.isNew()) {
            // New objects always produce a change record.
            difference = true;
        } else {
            if (backUp != null) {
                backUpAttribute = getAttributeValueFromObject(backUp);
                backupValue = this.indirectionPolicy.getRealAttributeValueFromObject(backUp, backUpAttribute);
            }
            boolean backUpIsInstantiated = ((backUpAttribute == null) || (this.indirectionPolicy.objectIsInstantiated(backUpAttribute)));
            Object cloneValue = this.indirectionPolicy.getRealAttributeValueFromObject(clone, cloneAttribute);
            // PERF: cheap attribute-level comparison first; fall back to per-field comparison below.
            if (backUpIsInstantiated) {
                if (cloneValue == backupValue) {
                    return null;
                }
                if (((cloneValue != null && (backupValue != null)) && cloneValue.equals(backupValue))) {
                    return null;
                }
            }
            // Compare each transformed field value of clone vs. backup.
            for (Object[] pair : this.fieldToTransformers) {
                DatabaseField field = (DatabaseField)pair[0];
                FieldTransformer transformer = (FieldTransformer)pair[1];
                Object cloneFieldValue = null;
                Object backUpFieldValue = null;
                if (clone != null) {
                    cloneFieldValue = invokeFieldTransformer(field, transformer, clone, session);
                }
                if ((backUpIsInstantiated) && (backUp != null)) {
                    backUpFieldValue = invokeFieldTransformer(field, transformer, backUp, session);
                }
                if (cloneFieldValue == backUpFieldValue) {
                    continue; // skip this iteration, go to the next one
                }
                if ((cloneFieldValue == null) || (backUpFieldValue == null)) {
                    difference = true;
                    break; // There is a difference.
                }
                if (cloneFieldValue.equals(backUpFieldValue)) {
                    continue; // skip this iteration, go to the next one
                }
                if (Helper.comparePotentialArrays(cloneFieldValue, backUpFieldValue)) {
                    continue; // skip this iteration, go to the next one
                }
                difference = true;
                break; // There is a difference.
            }
        }
        if (difference) {
            return internalBuildChangeRecord(clone, backupValue, owner, session);
        }
        return null;
    }

    /**
     * INTERNAL:
     * Directly build a change record without comparison
     */
    @Override
    public ChangeRecord buildChangeRecord(Object clone, ObjectChangeSet owner, AbstractSession session) {
        return internalBuildChangeRecord(clone, null, owner, session);
    }

    /**
     * INTERNAL:
     * Build a change record.
     */
    public ChangeRecord internalBuildChangeRecord(Object clone, Object oldValue, ObjectChangeSet owner, AbstractSession session) {
        TransformationMappingChangeRecord changeRecord = new TransformationMappingChangeRecord(owner);
        changeRecord.setRow(buildPhantomRowFrom(clone, session));
        changeRecord.setAttribute(getAttributeName());
        changeRecord.setMapping(this);
        changeRecord.setOldValue(oldValue);
        return changeRecord;
    }

    /**
     * INTERNAL:
     * Compare the attributes belonging to this mapping for the objects.
     */
    @Override
    public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
        if (!isWriteOnly()) {
            // PERF: Checks if attribute values are equal first before apply field translation.
            Object firstValue = getRealAttributeValueFromObject(firstObject, session);
            Object secondValue = getRealAttributeValueFromObject(secondObject, session);
            if (firstValue == secondValue) {
                return true;
            }
            if ((firstValue == null) || (secondValue == null)) {
                return false;
            }
            if (firstValue.equals(secondValue)) {
                return true;
            }
        }
        // Attribute values differ (or mapping is write-only): compare the transformed field values.
        for (Object[] pair : this.fieldToTransformers) {
            DatabaseField field = (DatabaseField)pair[0];
            FieldTransformer transformer = (FieldTransformer)pair[1];
            Object firstFieldValue = invokeFieldTransformer(field, transformer, firstObject, session);
            Object secondFieldValue = invokeFieldTransformer(field, transformer, secondObject, session);
            if (firstFieldValue == secondFieldValue) {
                continue; // skip this iteration, go to the next one
            }
            if ((firstFieldValue == null) || (secondFieldValue == null)) {
                return false;
            }
            if (!firstFieldValue.equals(secondFieldValue)) {
                // equals failed; arrays compare by identity, so also try an element-wise array comparison.
                if (!Helper.comparePotentialArrays(firstFieldValue, secondFieldValue)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * INTERNAL:
     * Convert all the class-name-based settings in this mapping to actual class-based
     * settings
     * @param classLoader
     */
    @Override
    public void convertClassNamesToClasses(ClassLoader classLoader){
        super.convertClassNamesToClasses(classLoader);
        if (attributeTransformerClassName != null) {
            Class attributeTransformerClass = null;
            try {
                if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) {
                    try {
                        attributeTransformerClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(attributeTransformerClassName, true, classLoader));
                    } catch (PrivilegedActionException exception) {
                        throw ValidationException.classNotFoundWhileConvertingClassNames(attributeTransformerClassName, exception.getException());
                    }
                } else {
                    attributeTransformerClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(attributeTransformerClassName, true, classLoader);
                }
            } catch (ClassNotFoundException exc){
                throw ValidationException.classNotFoundWhileConvertingClassNames(attributeTransformerClassName, exc);
            }
            this.setAttributeTransformerClass(attributeTransformerClass);
        }
        for (FieldTransformation transformation : getFieldTransformations()) {
            if (transformation instanceof TransformerBasedFieldTransformation) {
                TransformerBasedFieldTransformation transformer = (TransformerBasedFieldTransformation)transformation;
                String transformerClassName = transformer.getTransformerClassName();
                if (transformerClassName == null) {
                    // NOTE(review): this returns (skipping all remaining transformations) rather than
                    // continuing to the next one — verify this early exit is intended.
                    return;
                }
                Class transformerClass = null;
                try {
                    if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
                        try {
                            transformerClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(transformerClassName, true, classLoader));
                        } catch (PrivilegedActionException exception) {
                            throw ValidationException.classNotFoundWhileConvertingClassNames(transformerClassName, exception.getException());
                        }
                    } else {
                        transformerClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(transformerClassName, true, classLoader);
                    }
                } catch (ClassNotFoundException exc){
                    throw ValidationException.classNotFoundWhileConvertingClassNames(transformerClassName, exc);
                }
                transformer.setTransformerClass(transformerClass);
            }
        }
    }

    /**
     * INTERNAL:
     * Builder the unit of work value holder.
     * Ignore the original object.
     * @param buildDirectlyFromRow indicates that we are building the clone directly
     * from a row as opposed to building the original from the row, putting it in
     * the shared cache, and then cloning the original.
     */
    @Override
    public DatabaseValueHolder createCloneValueHolder(ValueHolderInterface attributeValue, Object original, Object clone, AbstractRecord row, AbstractSession cloningSession, boolean buildDirectlyFromRow) {
        return cloningSession.createCloneTransformationValueHolder(attributeValue, original, clone, this);
    }

    /**
     * PUBLIC:
     * Indirection means that a ValueHolder will be put in-between the attribute and the real object.
     * This defaults to false and only required for transformations that perform database access.
     */
    public void dontUseIndirection() {
        setIndirectionPolicy(new NoIndirectionPolicy());
    }

    /**
     * INTERNAL:
     * An object has been serialized from the server to the client.
     * Replace the transient attributes of the remote value holders
     * with client-side objects.
     */
    @Override
    public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) {
        this.indirectionPolicy.fixObjectReferences(object, objectDescriptors, processedObjects, query, session);
    }

    /**
     * INTERNAL:
     * The attributeTransformer stores an instance of the class which implements
     * AttributeTransformer.
     */
    public AttributeTransformer getAttributeTransformer() {
        return attributeTransformer;
    }

    /**
     * PUBLIC:
     * Return the attribute transformation method name.
     * Returns null unless a method-based transformer is configured.
     */
    public String getAttributeMethodName() {
        if (this.attributeTransformer instanceof MethodBasedAttributeTransformer) {
            return ((MethodBasedAttributeTransformer)this.attributeTransformer).getMethodName();
        }
        return null;
    }

    /**
     * INTERNAL:
     * Return the attribute transformer's class.
     * This is used to map to XML.
     * Method-based transformers are represented by their method name instead, so null is returned for them.
     */
    public Class getAttributeTransformerClass() {
        if ((this.attributeTransformer == null) || (this.attributeTransformer instanceof MethodBasedAttributeTransformer)) {
            return null;
        }
        return this.attributeTransformer.getClass();
    }

    /**
     * INTERNAL:
     * Set the attribute transformer's class.
     * This is used to map from XML.
     */
    public void setAttributeTransformerClass(Class attributeTransformerClass) {
        if (attributeTransformerClass == null) {
            return;
        }
        try {
            // Instantiate via the privileged helper when a security manager requires it.
            Object instance = null;
            if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) {
                try {
                    instance = AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(attributeTransformerClass));
                } catch (PrivilegedActionException ex) {
                    throw (Exception)ex.getCause();
                }
            } else {
                instance = PrivilegedAccessHelper.newInstanceFromClass(attributeTransformerClass);
            }
            setAttributeTransformer((AttributeTransformer)instance);
        } catch (Exception exception) {
            throw DescriptorException.attributeTransformerClassInvalid(attributeTransformerClass.getName(), this, exception);
        }
    }

    /**
     * INTERNAL:
     * Return the attribute transformer class name
     */
    public String getAttributeTransformerClassName() {
        return attributeTransformerClassName;
    }

    /**
     * INTERNAL:
     * Check for write-only, one-way transformation.
     */
    public Object getAttributeValueFromObject(Object object) throws DescriptorException {
        if (isWriteOnly()) {
            return null;
        }
        Object attributeValue = super.getAttributeValueFromObject(object);
        return this.indirectionPolicy.validateAttributeOfInstantiatedObject(attributeValue);
    }

    /**
     * INTERNAL:
     * Returns a Vector which stores fieldnames and the respective method/transformer names.
     */
    public List getFieldTransformations() {
        return fieldTransformations;
    }

    /**
     * INTERNAL:
     * @return a vector which stores fields and their respective transformers.
     */
    public List getFieldToTransformers() {
        return fieldToTransformers;
    }

    /**
     * INTERNAL:
     * Return the mapping's indirection policy.
     */
    public IndirectionPolicy getIndirectionPolicy() {
        return indirectionPolicy;
    }

    /**
     * INTERNAL:
     * Returns the real attribute value from the reference object's attribute value.
     * If the attribute is using indirection the value of the value-holder is returned.
     * If the value holder is not instantiated then it is instantiated.
     */
    @Override
    public Object getRealAttributeValueFromAttribute(Object attributeValue, Object object, AbstractSession session) {
        return this.indirectionPolicy.getRealAttributeValueFromObject(object, attributeValue);
    }

    /**
     * INTERNAL:
     * Trigger the instantiation of the attribute if lazy.
     */
    @Override
    public void instantiateAttribute(Object object, AbstractSession session) {
        this.indirectionPolicy.instantiateObject(object, getAttributeValueFromObject(object));
    }

    /**
     * INTERNAL:
     * Extract and return the appropriate value from the
     * specified remote value holder.
     */
    @Override
    public Object getValueFromRemoteValueHolder(RemoteValueHolder remoteValueHolder) {
        return this.indirectionPolicy.getValueFromRemoteValueHolder(remoteValueHolder);
    }

    /**
     * INTERNAL:
     * The mapping is initialized with the given session.
     */
    @Override
    public void initialize(AbstractSession session) throws DescriptorException {
        super.initialize(session);
        initializeAttributeTransformer(session);
        initializeFieldToTransformers(session);
        setFields(collectFields());
        this.indirectionPolicy.initialize();
        if (usesIndirection()) {
            // The row must retain these fields so the deferred transformation can run when triggered.
            for (DatabaseField field : this.fields) {
                field.setKeepInRow(true);
            }
        }
    }

    /**
     * INTERNAL:
     * Convert the attribute transformer class name into an AttributeTransformer
     * If the old-style method name in set, then use a MethodBasedAttributeTRansformer
     */
    protected void initializeAttributeTransformer(AbstractSession databaseSession) throws DescriptorException {
        if (isWriteOnly()) {
            return;
        }
        this.attributeTransformer.initialize(this);
    }

    /**
     * INTERNAL:
     * Required for reverse compatibility and test cases:
     * @return a hash table containing the fieldName and their respective method names
     */
    public Hashtable getFieldNameToMethodNames() {
        Hashtable table = new Hashtable(getFieldTransformations().size());
        Iterator transformations = getFieldTransformations().iterator();
        while (transformations.hasNext()) {
            FieldTransformation transformation = (FieldTransformation)transformations.next();
            // Only method-based transformations carry a method name; transformer-based ones are skipped.
            if (transformation instanceof MethodBasedFieldTransformation) {
                table.put(transformation.getField().getQualifiedName(), ((MethodBasedFieldTransformation)transformation).getMethodName());
            }
        }
        return table;
    }

    /**
     * INTERNAL:
     * Convert the field names and their corresponding method names to
     * DatabaseFields and Methods.
     */
    protected void initializeFieldToTransformers(AbstractSession session) throws DescriptorException {
        // Re-resolve already-registered pairs against the descriptor and initialize their transformers.
        for (Object[] pair : this.fieldToTransformers) {
            pair[0] = getDescriptor().buildField(((DatabaseField)pair[0]));
            ((FieldTransformer)pair[1]).initialize(this);
        }
        // Build [field, transformer] pairs from the declared field transformations.
        for (FieldTransformation transformation : getFieldTransformations()) {
            DatabaseField field = getDescriptor().buildField(transformation.getField());
            String transformerClassName = "MethodBasedFieldTransformer";
            FieldTransformer transformer = null;
            try {
                transformer = transformation.buildTransformer();
            } catch (ConversionException ex) {
                if (transformation instanceof TransformerBasedFieldTransformation) {
                    transformerClassName = ((TransformerBasedFieldTransformation)transformation).getTransformerClassName();
                }
                throw DescriptorException.fieldTransformerClassNotFound(transformerClassName, this, ex);
            } catch (Exception ex) {
                if (transformation instanceof TransformerBasedFieldTransformation) {
                    transformerClassName = ((TransformerBasedFieldTransformation)transformation).getTransformerClassName();
                }
                throw DescriptorException.fieldTransformerClassInvalid(transformerClassName, this, ex);
            }
            transformer.initialize(this);
            // Attempt to ensure a type is set on the field.
            if (field.getType() == null) {
                if (transformer instanceof MethodBasedFieldTransformer) {
                    field.setType(((MethodBasedFieldTransformer)transformer).getFieldType());
                } else if (field.getColumnDefinition() != null) {
                    // Search for the type for this field definition.
                    if (session.getDatasourcePlatform() instanceof DatabasePlatform) {
                        Iterator iterator = session.getPlatform().getFieldTypes().entrySet().iterator();
                        while (iterator.hasNext()) {
                            Map.Entry entry = (Map.Entry)iterator.next();
                            if (((FieldTypeDefinition)entry.getValue()).getName().equals(field.getColumnDefinition())) {
                                field.setType((Class)entry.getKey());
                                break;
                            }
                        }
                    }
                }
            }
            Object[] fieldToTransformer = new Object[2];
            fieldToTransformer[0] = field;
            fieldToTransformer[1] = transformer;
            this.fieldToTransformers.add(fieldToTransformer);
        }
    }

    /**
     * INTERNAL:
     * Invoke the buildAttributeValue method on the AttributeTransformer
     */
    public Object invokeAttributeTransformer(AbstractRecord record, Object domainObject, AbstractSession session) throws DescriptorException {
        return this.attributeTransformer.buildAttributeValue(record, domainObject, session);
    }

    /**
     * INTERNAL:
     * Invoke the buildFieldValue on the appropriate FieldTransformer
     */
    protected Object invokeFieldTransformer(DatabaseField field, FieldTransformer transformer, Object domainObject, AbstractSession session) throws DescriptorException {
        return transformer.buildFieldValue(domainObject, field.getName(), session);
    }

    // Look up the transformer registered for this field and invoke it; returns null if no pair matches.
    protected Object invokeFieldTransformer(DatabaseField field, Object domainObject, AbstractSession session) {
        for (Object[] pair : this.fieldToTransformers) {
            if (field.equals(pair[0])) {
                return invokeFieldTransformer(field, (FieldTransformer)pair[1], domainObject, session);
            }
        }
        return null;
    }

    /**
     * PUBLIC:
     * Return true if the attribute for this mapping is not a simple atomic value that cannot be modified,
     * only replaced.
     * This is true by default for non-primitives, but can be set to false to avoid cloning
     * and change comparison in the unit of work.
    public boolean isMutable() {
        return isMutable;
    }

    /**
     * INTERNAL:
     * Return true if read-only is explicitly set to true;
     * otherwise return whether the transformation has no fields
     * (no fields = read-only)
     */
    @Override
    public boolean isReadOnly() {
        if (super.isReadOnly()) {
            return true;
        } else {
            return getFieldTransformations().isEmpty() && this.fieldToTransformers.isEmpty();
        }
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isTransformationMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Return if the transformation has no attribute, is write only.
     */
    @Override
    public boolean isWriteOnly() {
        return (getAttributeName() == null) && ((this.attributeTransformer == null) && (this.attributeTransformerClassName == null));
    }

    /**
     * INTERNAL:
     * Perform the iteration operation on the iterator's current object's attributes.
     * Only require if primitives are desired.
     */
    @Override
    public void iterate(DescriptorIterator iterator) {
        Object attributeValue = getAttributeValueFromObject(iterator.getVisitedParent());
        this.indirectionPolicy.iterateOnAttributeValue(iterator, attributeValue);
    }

    /**
     * INTERNAL:
     * Iterate on the attribute value.
     * The value holder has already been processed.
     */
    @Override
    public void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue) {
        iterator.iteratePrimitiveForMapping(realAttributeValue, this);
    }

    /**
     * INTERNAL:
     * Merge changes from the source to the target object. Which is the original from the parent UnitOfWork
     */
    @Override
    public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) {
        if (isWriteOnly()) {
            return;
        }
        // PERF: If not mutable then just set the value from the source.
        if (!isMutable() && (source != null)) {
            setRealAttributeValueInObject(target, getRealAttributeValueFromObject(source, mergeManager.getSession()));
            return;
        }
        // Rebuild the attribute from the row captured in the change record.
        AbstractRecord record = (AbstractRecord)((TransformationMappingChangeRecord)changeRecord).getRecord();
        Object attributeValue = invokeAttributeTransformer(record, target, targetSession);
        setRealAttributeValueInObject(target, attributeValue);
    }

    /**
     * INTERNAL:
     * Merge changes from the source to the target object.
     */
    @Override
    public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) {
        if (isWriteOnly()) {
            return;
        }
        // do refresh check first as I may need to reset remote value holder
        if (mergeManager.shouldRefreshRemoteObject() && usesIndirection()) {
            this.indirectionPolicy.mergeRemoteValueHolder(target, source, mergeManager);
            return;
        }
        if (mergeManager.isForRefresh()) {
            if (!areObjectsToBeProcessedInstantiated(target)) {
                // This will occur when the clone's value has not been instantiated yet and we do not need
                // the refresh that attribute
                return;
            }
        } else if (!areObjectsToBeProcessedInstantiated(source)) {
            // I am merging from a clone into an original. No need to do merge if the attribute was never
            // modified
            return;
        }
        if (isTargetUnInitialized) {
            // This will happen if the target object was removed from the cache before the commit was attempted
            if (mergeManager.shouldMergeWorkingCopyIntoOriginal() && (!areObjectsToBeProcessedInstantiated(source))) {
                setAttributeValueInObject(target, this.indirectionPolicy.getOriginalIndirectionObject(getAttributeValueFromObject(source), targetSession));
                return;
            }
        }
        if (isReadOnly()) {
            // if it is read only then we do not have any fields specified for the
            // transformer, without fields we can not build the row, so just copy
            // over the value alternatively we could build the entire row for the object.
            setRealAttributeValueInObject(target, getRealAttributeValueFromObject(source, mergeManager.getSession()));
            return;
        }
        if (!isMutable()) {
            Object attribute = getRealAttributeValueFromObject(source, mergeManager.getSession());
            if (this.descriptor.getObjectChangePolicy().isObjectChangeTrackingPolicy()) {
                // Object level or attribute level so lets see if we need to raise the event?
                Object targetAttribute = getRealAttributeValueFromObject(target, mergeManager.getSession());
                if ((mergeManager.shouldMergeCloneIntoWorkingCopy() || mergeManager.shouldMergeCloneWithReferencesIntoWorkingCopy()) && !mergeManager.isForRefresh()
                        && (((targetAttribute == null) && (attribute != null)) || ((targetAttribute != null) && ((attribute == null) || ((!targetAttribute.equals(attribute)) && (!Helper.comparePotentialArrays(targetAttribute, attribute))))))) {
                    this.descriptor.getObjectChangePolicy().raiseInternalPropertyChangeEvent(target, getAttributeName(), targetAttribute, attribute);
                }
            }
            setRealAttributeValueInObject(target, attribute);
            return;
        }
        // This dumps the attribute into the row and back.
        AbstractRecord row = buildPhantomRowFrom(source, mergeManager.getSession());
        Object attributeValue = invokeAttributeTransformer(row, source, mergeManager.getSession());
        AbstractRecord targetRow = buildPhantomRowFrom(target, mergeManager.getSession());
        setRealAttributeValueInObject(target, attributeValue);
        //set the change after the set on the object as this mapping uses the object to build the change record.
        if (this.descriptor.getObjectChangePolicy().isObjectChangeTrackingPolicy()) {
            for (Enumeration keys = targetRow.keys(); keys.hasMoreElements(); ){
                Object field = keys.nextElement();
                // NOTE(review): row.get(field) could be null if a transformed field value is null,
                // which would throw NPE on the equals call below — verify phantom rows never hold nulls.
                if ((mergeManager.shouldMergeCloneIntoWorkingCopy() || mergeManager.shouldMergeCloneWithReferencesIntoWorkingCopy()) && (!row.get(field).equals(targetRow.get(field)))) {
                    this.descriptor.getObjectChangePolicy().raiseInternalPropertyChangeEvent(target, getAttributeName(), invokeAttributeTransformer(targetRow, source, mergeManager.getSession()), attributeValue);
                    break;
                }
            }
        }
    }

    /**
     * INTERNAL:
     * Allow for initialization of properties and validation.
     */
    @Override
    public void preInitialize(AbstractSession session) throws DescriptorException {
        if (isWriteOnly()) {
            return;// Allow for one-way transformations.
        }
        super.preInitialize(session);
        // PERF: Also auto-set mutable to false is the attribute type is a primitive.
        // This means it is not necessary to clone the value (through double transformation).
        if ((getAttributeClassification() != null) && (getAttributeClassification().isPrimitive() || Helper.isPrimitiveWrapper(getAttributeClassification()) || getAttributeClassification().equals(ClassConstants.STRING) || getAttributeClassification().equals(ClassConstants.BIGDECIMAL) || getAttributeClassification().equals(ClassConstants.NUMBER))) {
            setIsMutable(false);
        }
    }

    /**
     * INTERNAL:
     * Extracts value from return row and set the attribute to the value in the object.
     * Return row is merged into object after execution of insert or update call
     * according to ReturningPolicy.
     */
    public Object readFromReturnRowIntoObject(AbstractRecord row, Object object, ReadObjectQuery query, Collection handledMappings, ObjectChangeSet changeSet) throws DatabaseException {
        int size = this.fields.size();
        AbstractRecord transformationRow = new DatabaseRecord(size);
        for (int i = 0; i < size; i++) {
            DatabaseField field = this.fields.get(i);
            Object value;
            if (row.containsKey(field)) {
                value = row.get(field);
            } else {
                // Field not present in the returned row: fall back to the current object value.
                value = valueFromObject(object, field, query.getSession());
            }
            transformationRow.add(field, value);
        }
        if(changeSet != null && (!changeSet.isNew() || (query.getDescriptor() != null && query.getDescriptor().shouldUseFullChangeSetsForNewObjects()))) {
            TransformationMappingChangeRecord record = (TransformationMappingChangeRecord)changeSet.getChangesForAttributeNamed(attributeName);
            if (record == null) {
                record = new TransformationMappingChangeRecord(changeSet);
                record.setAttribute(attributeName);
                record.setMapping(this);
                record.setOldValue(getAttributeValueFromObject(object));
                changeSet.addChange(record);
            }
            record.setRow(transformationRow);
        }
        Object attributeValue = readFromRowIntoObject(transformationRow, null, object, null, query, query.getSession(), true);
        if (handledMappings != null) {
            handledMappings.add(this);
        }
        return attributeValue;
    }

    /**
     * INTERNAL:
     * Extract value from the row and set the attribute to the value in the object.
     */
    @Override
    public Object readFromRowIntoObject(AbstractRecord row, JoinedAttributeManager joinManager, Object object, CacheKey parentCacheKey, ObjectBuildingQuery query, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException {
        if (isWriteOnly()) {
            return null;
        }
        if (this.descriptor.getCachePolicy().isProtectedIsolation()) {
            // For cacheable protected mappings, clone the attribute from the shared-cache object instead.
            if (this.isCacheable && isTargetProtected && parentCacheKey != null) {
                Object cached = parentCacheKey.getObject();
                if (cached != null) {
                    Object attributeValue = getAttributeValueFromObject(cached);
                    Integer refreshCascade = null;
                    if (query != null && query.shouldRefreshIdentityMapResult()){
                        refreshCascade = query.getCascadePolicy();
                    }
                    return this.indirectionPolicy.cloneAttribute(attributeValue, cached, parentCacheKey, object, refreshCascade, executionSession, false);
                }
                return null;
            }
        }
        if (row != null && row.hasSopObject()) {
            return getAttributeValueFromObject(row.getSopObject());
        }
        Object attributeValue = this.indirectionPolicy.valueFromMethod(object, row, query.getSession());
        Object oldAttribute = null;
        if (executionSession.isUnitOfWork() && query.shouldRefreshIdentityMapResult()){
            oldAttribute = this.getAttributeValueFromObject(object);
        }
        try {
            this.attributeAccessor.setAttributeValueInObject(object, attributeValue);
        } catch (DescriptorException exception) {
            exception.setMapping(this);
            throw exception;
        }
        if (executionSession.isUnitOfWork() && query.shouldRefreshIdentityMapResult()){
            // On refresh, keep the new value instantiated if the previous one had been used or changed.
            if (this.indirectionPolicy.objectIsInstantiatedOrChanged(oldAttribute)){
                this.indirectionPolicy.instantiateObject(object, attributeValue);
            }
        }
        return attributeValue;
    }

    /**
     * INTERNAL:
     * Needed for backwards compatibility
     */
    public Vector getFieldNameToMethodNameAssociations() {
        Vector associations = new Vector();
        for (Iterator source = getFieldTransformations().iterator(); source.hasNext();) {
            FieldTransformation tf = (FieldTransformation)source.next();
            if (tf instanceof MethodBasedFieldTransformation) {
                Association ass = new Association();
                ass.setKey(tf.getField().getQualifiedName());
                ass.setValue(((MethodBasedFieldTransformation)tf).getMethodName());
                associations.addElement(ass);
            }
        }
        return associations;
    }

    /**
     * INTERNAL:
     * needed for backwards compatibility
     */
    public void setFieldNameToMethodNameAssociations(Vector associations) {
        setFieldTransformations(org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(associations.size()));
        for (Iterator source = associations.iterator(); source.hasNext();) {
            Association ass = (Association)source.next();
            MethodBasedFieldTransformation tf = new MethodBasedFieldTransformation();
            tf.setField(new DatabaseField((String)ass.getKey()));
            tf.setMethodName((String)ass.getValue());
            getFieldTransformations().add(tf);
        }
    }

    /**
     * INTERNAL:
     * Once descriptors are serialized to the remote session. All its mappings and reference descriptors are traversed. Usually
     * mappings are initialized and serialized reference descriptors are replaced with local descriptors if they already exist on the
     * remote session.
     */
    @Override
    public void remoteInitialization(DistributedSession session) {
        setFieldToTransformers(new Vector());
        // Remote mappings is initialized here again because while serializing only the uninitialized data is passed
        // as the initialized data is not serializable.
        if (!isWriteOnly()) {
            super.remoteInitialization(session);
            initializeAttributeTransformer(session);
        }
        initializeFieldToTransformers(session);
    }

    /**
     * PUBLIC:
     * Set the AttributeTransformer, this transformer will be used to extract the value for the
     * object's attribute from the database row.
*/ public void setAttributeTransformer(AttributeTransformer transformer) { attributeTransformer = transformer; if ((transformer != null) && !(transformer instanceof MethodBasedAttributeTransformer)) { attributeTransformerClassName = transformer.getClass().getName(); } } /** * INTERNAL: * Set the Attribute Transformer Class Name * @param className */ public void setAttributeTransformerClassName(String className) { attributeTransformerClassName = className; } /** * PUBLIC: * To set the attribute method name. The method is invoked internally by TopLink * to retrieve the value to store in the domain object. The method receives Record * as its parameter and optionally Session, and should extract the value from the * record to set into the object, but should not set the value on the object, only return it. */ public void setAttributeTransformation(String methodName) { if ((methodName != null) && (methodName != "")) { setAttributeTransformer(new MethodBasedAttributeTransformer(methodName)); } else { setAttributeTransformer(null); } } /** * INTERNAL: * Check for write-only, one-way transformations. */ public void setAttributeValueInObject(Object object, Object value) { if (isWriteOnly()) { return; } super.setAttributeValueInObject(object, value); } /** * PUBLIC: * Set if the value of the attribute is atomic or a complex mutable object and can be modified without replacing the entire object. * This defaults to true for non-primitives, but can be set to false to optimize object cloning and change comparison. */ public void setIsMutable(boolean mutable) { this.isMutable = mutable; } /** * INTERNAL: * Set the value of the attribute mapped by this mapping, * placing it inside a value holder if necessary. * If the value holder is not instantiated then it is instantiated. * Check for write-only, one-way transformations. 
     */
    @Override
    public void setRealAttributeValueInObject(Object object, Object value) throws DescriptorException {
        if (isWriteOnly()) {
            return;
        }
        this.indirectionPolicy.setRealAttributeValueInObject(object, value);
    }

    /**
     * INTERNAL:
     * Set the field to method name associations.
     */
    public void setFieldTransformations(List fieldTransformations) {
        this.fieldTransformations = fieldTransformations;
    }

    protected void setFieldToTransformers(List fieldToTransformers) {
        this.fieldToTransformers = fieldToTransformers;
    }

    /**
     * ADVANCED:
     * Set the indirection policy.
     */
    public void setIndirectionPolicy(IndirectionPolicy indirectionPolicy) {
        this.indirectionPolicy = indirectionPolicy;
        // Keep the back-reference so the policy can call back into this mapping.
        indirectionPolicy.setMapping(this);
    }

    /**
     * INTERNAL:
     * Will be used by Gromit. For details see usesIndirection().
     * @see #useBasicIndirection()
     * @see #dontUseIndirection()
     */
    public void setUsesIndirection(boolean usesIndirection) {
        if (usesIndirection) {
            useBasicIndirection();
        } else {
            dontUseIndirection();
        }
    }

    /**
     * INTERNAL:
     * Either create a new change record or update the change record with the new value.
     * This is used by attribute change tracking.
     */
    @Override
    public void updateChangeRecord(Object clone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) {
        TransformationMappingChangeRecord changeRecord = (TransformationMappingChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName());
        // Build a throw-away instance carrying the new value so a phantom row can be produced from it.
        Object updatedObject = descriptor.getInstantiationPolicy().buildNewInstance();
        this.setAttributeValueInObject(updatedObject, newValue);
        if (!isWriteOnly()) {
            if (changeRecord == null) {
                objectChangeSet.addChange(internalBuildChangeRecord(updatedObject, oldValue, objectChangeSet, uow));
            } else {
                changeRecord.setRow(this.buildPhantomRowFrom(updatedObject, uow));
            }
        }
    }

    /**
     * INTERNAL:
     * Return if this mapping supports change tracking.
     * Only immutable transformations can be change tracked.
     */
    @Override
    public boolean isChangeTrackingSupported(Project project) {
        return !isMutable();
    }

    /**
     * INTERNAL:
     * Return whether the specified object is instantiated.
     */
    @Override
    public boolean isAttributeValueFromObjectInstantiated(Object object) {
        return this.indirectionPolicy.objectIsInstantiated(object);
    }

    /**
     * PUBLIC:
     * Indirection means that a ValueHolder will be put in-between the attribute and the real object.
     * This defaults to false and only required for transformations that perform database access.
     */
    public void useBasicIndirection() {
        setIndirectionPolicy(new BasicIndirectionPolicy());
    }

    /**
     * PUBLIC:
     * Indirection means that a IndirectContainer (wrapping a ValueHolder) will be put in-between
     * the attribute and the real object.
     * This allows for the reading of the target from the database to be delayed until accessed.
     * This defaults to true and is strongly suggested as it give a huge performance gain.
     */
    public void useContainerIndirection(Class containerClass) {
        ContainerIndirectionPolicy policy = new ContainerIndirectionPolicy();
        policy.setContainerClass(containerClass);
        setIndirectionPolicy(policy);
    }

    /**
     * PUBLIC:
     * Indirection means that a ValueHolder will be put in-between the attribute and the real object.
     * This defaults to false and only required for transformations that perform database access.
     * @see #useBasicIndirection()
     */
    public void useIndirection() {
        useBasicIndirection();
    }

    /**
     * PUBLIC:
     * Indirection means that a ValueHolder will be put in-between the attribute and the real object.
     * This defaults to false and only required for transformations that perform database access.
     * @see org.eclipse.persistence.mappings.IndirectionPolicy
     */
    public boolean usesIndirection() {
        return this.indirectionPolicy.usesIndirection();
    }

    /**
     * INTERNAL:
     * Validate mapping declaration
     */
    @Override
    public void validateBeforeInitialization(AbstractSession session) throws DescriptorException {
        super.validateBeforeInitialization(session);
        if (isWriteOnly()) {
            return;
        }
        if ((this.attributeTransformer == null) && (this.attributeTransformerClassName == null)) {
            session.getIntegrityChecker().handleError(DescriptorException.noAttributeTransformationMethod(this));
        }
        if (getAttributeAccessor() instanceof InstanceVariableAttributeAccessor) {
            Class attributeType = ((InstanceVariableAttributeAccessor)getAttributeAccessor()).getAttributeType();
            this.indirectionPolicy.validateDeclaredAttributeType(attributeType, session.getIntegrityChecker());
        } else if (getAttributeAccessor().isMethodAttributeAccessor()) {
            // 323403
            Class returnType = ((MethodAttributeAccessor)getAttributeAccessor()).getGetMethodReturnType();
            this.indirectionPolicy.validateGetMethodReturnType(returnType, session.getIntegrityChecker());
            Class parameterType = ((MethodAttributeAccessor)getAttributeAccessor()).getSetMethodParameterType();
            this.indirectionPolicy.validateSetMethodParameterType(parameterType, session.getIntegrityChecker());
        }
    }

    /**
     * INTERNAL:
     * Get a value from the object and set that in the respective field of the row.
     */
    @Override
    public Object valueFromObject(Object object, DatabaseField field, AbstractSession session) {
        return invokeFieldTransformer(field, object, session);
    }

    /**
     * INTERNAL:
     * Get a value from the object and set that in the respective field of the row.
*/ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord row, AbstractSession session, WriteType writeType) { if (isReadOnly()) { return; } for (Object[] pair : this.fieldToTransformers) { DatabaseField field = (DatabaseField)pair[0]; FieldTransformer transformer = (FieldTransformer)pair[1]; Object fieldValue = invokeFieldTransformer(field, transformer, object, session); row.put(field, fieldValue); } } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. */ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord row, AbstractSession session, WriteType writeType) { if (isReadOnly()) { return; } for (Object[] pair : this.fieldToTransformers) { DatabaseField field = (DatabaseField)pair[0]; Object fieldValue = ((TransformationMappingChangeRecord)changeRecord).getRecord().get(field); row.put(field, fieldValue); } } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. * But before that check if the reference object is instantiated or not. */ @Override public void writeFromObjectIntoRowForUpdate(WriteObjectQuery query, AbstractRecord record) { if (!areObjectsToBeProcessedInstantiated(query.getObject())) { return; } if (query.getSession().isUnitOfWork()) { if (compareObjects(query.getBackupClone(), query.getObject(), query.getSession())) { return; } } writeFromObjectIntoRow(query.getObject(), record, query.getSession(), WriteType.UPDATE); } /** * INTERNAL: * Write fields needed for insert into the template for with null values. 
*/ @Override public void writeInsertFieldsIntoRow(AbstractRecord record, AbstractSession session) { if (isReadOnly()) { return; } for (Object[] pair : this.fieldToTransformers) { DatabaseField field = (DatabaseField)pair[0]; record.put(field, null); } } } ././@LongLink0000000000000000000000000000014700000000000011567 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractCompositeObjectMapping.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/foundation/AbstractCompositeObjectMapping.ja0000664000000000000000000003565612216173130031013 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
*
* Contributors:
*     Oracle - initial API and implementation from Oracle TopLink
*
*     30/05/2012-2.4 Guy Pelletier
*       - 354678: Temp classloader is still being used during metadata processing
******************************************************************************/
package org.eclipse.persistence.mappings.foundation;

import java.util.*;

import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.exceptions.*;
import org.eclipse.persistence.internal.descriptors.*;
import org.eclipse.persistence.internal.helper.*;
import org.eclipse.persistence.internal.identitymaps.CacheKey;
import org.eclipse.persistence.internal.queries.JoinedAttributeManager;
import org.eclipse.persistence.internal.sessions.*;
import org.eclipse.persistence.mappings.*;
import org.eclipse.persistence.mappings.converters.Converter;
import org.eclipse.persistence.queries.*;

/**
 * Chunks of data from non-relational data sources can have
 * embedded component objects. These can be
 * mapped using this mapping. The format of the embedded
 * data is determined by the reference descriptor.
 *
 * @author Big Country
 * @since TOPLink/Java 3.0
 */
public abstract class AbstractCompositeObjectMapping extends AggregateMapping {

    /** The aggregate object is stored in a single field. */
    protected DatabaseField field;

    /** Allows user defined conversion between the object attribute value and the database value. */
    protected Converter converter;

    /**
     * Default constructor.
     */
    public AbstractCompositeObjectMapping() {
        super();
    }

    /**
     * INTERNAL:
     * Cascade perform delete through mappings that require the cascade
     */
    public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        // Objects referenced by this mapping are not registered as they have
        // no identity, however mappings from the referenced object may need cascading.
        Object objectReferenced = getRealAttributeValueFromObject(object, uow);
        if (objectReferenced == null) {
            return;
        }
        // Guard against cycles: mark the aggregate visited before descending into it.
        if (!visitedObjects.containsKey(objectReferenced)) {
            visitedObjects.put(objectReferenced, objectReferenced);
            ObjectBuilder builder = getReferenceDescriptor(objectReferenced.getClass(), uow).getObjectBuilder();
            builder.cascadePerformRemove(objectReferenced, uow, visitedObjects);
        }
    }

    /**
     * INTERNAL:
     * Cascade discover and persist new objects during commit.
     */
    public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) {
        Object objectReferenced = getRealAttributeValueFromObject(object, uow);
        if (objectReferenced != null) {
            // NOTE(review): unlike the sibling cascade methods, this does not consult
            // visitedObjects before descending - confirm cycles cannot occur on this path.
            ObjectBuilder builder = getReferenceDescriptor(objectReferenced.getClass(), uow).getObjectBuilder();
            builder.cascadeRegisterNewForCreate(objectReferenced, uow, visitedObjects);
        }
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade
     */
    public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        // Aggregate objects are not registered but their mappings should be.
        Object objectReferenced = getRealAttributeValueFromObject(object, uow);
        if (objectReferenced == null) {
            return;
        }
        if (!visitedObjects.containsKey(objectReferenced)) {
            visitedObjects.put(objectReferenced, objectReferenced);
            ObjectBuilder builder = getReferenceDescriptor(objectReferenced.getClass(), uow).getObjectBuilder();
            builder.cascadeRegisterNewForCreate(objectReferenced, uow, visitedObjects);
        }
    }

    /**
     * Return the fields mapped by the mapping.
     */
    protected Vector collectFields() {
        // The entire aggregate is stored in exactly one field.
        Vector fields = new Vector(1);
        fields.addElement(this.getField());
        return fields;
    }

    /**
     * PUBLIC:
     * Return the converter on the mapping.
     * A converter can be used to convert between the object's value and database value of the attribute.
     */
    public Converter getConverter() {
        return converter;
    }

    /**
     * INTERNAL:
     * The aggregate object is held in a single field.
     */
    public DatabaseField getField() {
        return field;
    }

    /**
     * PUBLIC:
     * Indicates if there is a converter on the mapping.
     */
    public boolean hasConverter() {
        return getConverter() != null;
    }

    /**
     * INTERNAL:
     */
    public boolean isAbstractCompositeObjectMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Initialize the mapping.
     */
    public void initialize(AbstractSession session) throws DescriptorException {
        super.initialize(session);
        if (getField() == null) {
            throw DescriptorException.fieldNameNotSetInMapping(this);
        }
        // Resolve the field against the owning descriptor's tables.
        setField(getDescriptor().buildField(getField()));
        setFields(collectFields());
        // Initialize the converter - if necessary.
        if (hasConverter()) {
            getConverter().initialize(this, session);
        }
    }

    /**
     * INTERNAL:
     * Set the value of the attribute mapped by this mapping.
     */
    public void setAttributeValueInObject(Object object, Object value) throws DescriptorException {
        // PERF: Direct variable access.
        try {
            this.attributeAccessor.setAttributeValueInObject(object, value);
        } catch (DescriptorException exception) {
            // Attach this mapping for a more informative error message.
            exception.setMapping(this);
            throw exception;
        }
    }

    /**
     * PUBLIC:
     * Set the converter on the mapping.
     * A converter can be used to convert between the object's value and database value of the attribute.
     */
    public void setConverter(Converter converter) {
        this.converter = converter;
    }

    /**
     * The aggregate object is held in a single field.
*/ public void setField(DatabaseField field) { this.field = field; } /** * INTERNAL: * Extract and return value of the field from the object */ public Object valueFromObject(Object object, DatabaseField field, AbstractSession session) throws DescriptorException { Object attributeValue = this.getAttributeValueFromObject(object); if(this.getConverter() != null) { this.getConverter().convertObjectValueToDataValue(attributeValue, session); } if (attributeValue == null) { return null; } else { return this.getObjectBuilder(attributeValue, session).extractValueFromObjectForField(attributeValue, field, session); } } /** * INTERNAL: * Extract and return the aggregate object from * the specified row. */ public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey cacheKey, AbstractSession executionSession, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException { if (this.descriptor.getCachePolicy().isProtectedIsolation()) { if (this.isCacheable && isTargetProtected && cacheKey != null) { //cachekey will be null when isolating to uow //used cached collection Object result = null; Object cached = cacheKey.getObject(); if (cached != null) { if (wasCacheUsed != null){ wasCacheUsed[0] = Boolean.TRUE; } Object attributeValue = this.getAttributeValueFromObject(cached); Integer refreshCascade = null; if (sourceQuery != null && sourceQuery.isObjectBuildingQuery() && sourceQuery.shouldRefreshIdentityMapResult()){ refreshCascade = sourceQuery.getCascadePolicy(); } //get the clone root. 
return buildClonePart(cached, executionSession.getIdentityMapAccessor().getFromIdentityMap(cacheKey.getKey(), referenceClass), cacheKey, attributeValue, refreshCascade, executionSession); } return result; } else if (!this.isCacheable && !isTargetProtected && (cacheKey != null)) { return null; } } if (row.hasSopObject()) { return getAttributeValueFromObject(row.getSopObject()); } Object fieldValue = row.get(this.field); // BUG#2667762 there could be whitespace in the row instead of null if ((fieldValue == null) || (fieldValue instanceof String)) { return null; } // pretty sure we can ignore inheritance here: AbstractRecord nestedRow = this.referenceDescriptor.buildNestedRowFromFieldValue(fieldValue); ClassDescriptor descriptor = this.referenceDescriptor; if (descriptor.hasInheritance()) { Class nestedElementClass = descriptor.getInheritancePolicy().classFromRow(nestedRow, executionSession); descriptor = getReferenceDescriptor(nestedElementClass, executionSession); } ObjectBuilder objectBuilder = descriptor.getObjectBuilder(); Object toReturn = buildCompositeObject(objectBuilder, nestedRow, sourceQuery, cacheKey, joinManager, executionSession); if (this.converter != null) { toReturn = this.converter.convertDataValueToObjectValue(toReturn, executionSession); } return toReturn; } /** * INTERNAL: * Builds a shallow original object. Only direct attributes and primary * keys are populated. In this way the minimum original required for * instantiating a working copy clone can be built without placing it in * the shared cache (no concern over cycles). 
*/
public void buildShallowOriginalFromRow(AbstractRecord row, Object original, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) {
    Object fieldValue = row.get(this.getField());
    // BUG#2667762 there could be whitespace in the row instead of null
    if ((fieldValue == null) || (fieldValue instanceof String)) {
        return;
    }
    // pretty sure we can ignore inheritance here:
    AbstractRecord nestedRow = this.getReferenceDescriptor().buildNestedRowFromFieldValue(fieldValue);
    ClassDescriptor descriptor = this.getReferenceDescriptor();
    if (descriptor.hasInheritance()) {
        Class nestedElementClass = descriptor.getInheritancePolicy().classFromRow(nestedRow, executionSession);
        descriptor = this.getReferenceDescriptor(nestedElementClass, executionSession);
    }
    ObjectBuilder objectBuilder = descriptor.getObjectBuilder();
    // Instead of calling buildCompositeObject, which calls either objectBuilder.
    // buildObject or buildNewInstance and buildAttributesIntoObject, do the
    // following always. Since shallow original no concern over cycles or caching.
    Object element = objectBuilder.buildNewInstance();
    objectBuilder.buildAttributesIntoShallowObject(element, nestedRow, sourceQuery);
    setAttributeValueInObject(original, element);
}

// Subclasses build the aggregate object from the nested row (either via
// objectBuilder.buildObject, or buildNewInstance plus buildAttributesIntoObject).
protected abstract Object buildCompositeObject(ObjectBuilder objectBuilder, AbstractRecord nestedRow, ObjectBuildingQuery query, CacheKey parentCacheKey, JoinedAttributeManager joinManger, AbstractSession targetSession);

/**
 * INTERNAL:
 * Build the value for the database field and put it in the
 * specified database row.
*/ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord record, AbstractSession session, WriteType writeType) throws DescriptorException { if (this.isReadOnly()) { return; } Object attributeValue = this.getAttributeValueFromObject(object); if(getConverter() != null) { getConverter().convertObjectValueToDataValue(attributeValue, session); } if (attributeValue == null) { record.put(this.getField(), null); } else { Object fieldValue = buildCompositeRow(attributeValue, session, record, writeType); record.put(this.getField(), fieldValue); } } protected abstract Object buildCompositeRow(Object attributeValue, AbstractSession session, AbstractRecord record, WriteType writeType); /** * INTERNAL: * If it has changed, build the value for the database field and put it in the * specified database row. * If any part of the aggregate object has changed, the entire object is * written to the database row (i.e. partial updates are not supported). */ @Override public void writeFromObjectIntoRowForUpdate(WriteObjectQuery query, AbstractRecord row) throws DescriptorException { if (query.getSession().isUnitOfWork()) { if (this.compareObjects(query.getObject(), query.getBackupClone(), query.getSession())) { return;// nothing has changed } } this.writeFromObjectIntoRow(query.getObject(), row, query.getSession(), WriteType.UPDATE); } /** * INTERNAL: * Get the attribute value from the object and add the appropriate * values to the specified database row. */ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord row, AbstractSession session, WriteType writeType) throws DescriptorException { Object object = ((ObjectChangeSet)changeRecord.getOwner()).getUnitOfWorkClone(); this.writeFromObjectIntoRow(object, row, session, writeType); } /** * INTERNAL: * Write fields needed for insert into the template for with null values. 
*/ public void writeInsertFieldsIntoRow(AbstractRecord record, AbstractSession session) { if (this.isReadOnly()) { return; } record.put(this.getField(), null); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/OneToManyMapping.java0000664000000000000000000020213612216173130024255 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 07/19/2011-2.2.1 Guy Pelletier * - 338812: ManyToMany mapping in aggregate object violate integrity constraint on deletion ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.*; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.DatabaseRecord; import org.eclipse.persistence.internal.descriptors.CascadeLockingPolicy; import org.eclipse.persistence.internal.expressions.SQLUpdateStatement; import org.eclipse.persistence.mappings.foundation.MapComponentMapping; /** *

Purpose: This mapping is used to represent the
typical RDBMS relationship between a single
source object and collection of target objects; where,
on the database, the target objects have references
(foreign keys) to the source object.
*
* @author Sati
* @since TOPLink/Java 1.0
*/
public class OneToManyMapping extends CollectionMapping implements RelationalMapping, MapComponentMapping {

    /** Used for data modification events. */
    protected static final String PostInsert = "postInsert";
    protected static final String ObjectRemoved = "objectRemoved";
    protected static final String ObjectAdded = "objectAdded";

    /** The target foreign key fields that reference the sourceKeyFields. */
    protected Vector targetForeignKeyFields;

    /** The (typically primary) source key fields that are referenced by the targetForeignKeyFields. */
    protected Vector sourceKeyFields;

    /** This maps the target foreign key fields to the corresponding (primary) source key fields. */
    protected transient Map targetForeignKeysToSourceKeys;

    /** This maps the (primary) source key fields to the corresponding target foreign key fields. */
    protected transient Map sourceKeysToTargetForeignKeys;

    /**
     * All targetForeignKeyFields should have the same table.
     * Used only in case data modification events required.
     */
    protected transient DatabaseTable targetForeignKeyTable;

    /**
     * Primary keys of targetForeignKeyTable:
     * the same as referenceDescriptor().getPrimaryKeyFields() in case the table is default table of reference descriptor;
     * otherwise contains secondary table's primary key fields in the same order as default table primary keys mapped to them.
     * Used only in case data modification events required.
     */
    protected transient List targetPrimaryKeyFields;

    /**
     * Query used to update a single target row setting its foreign key to point to the source.
     * Run once for each target added to the source.
     * Example:
     *   for Employee with managedEmployees attribute mapped with UnidirectionalOneToMany
     *   the query looks like:
     *   UPDATE EMPLOYEE SET MANAGER_ID = 1 WHERE (EMP_ID = 2)
     *   where 1 is id of the source, and 2 is the id of the target to be added.
     * Used only in case data modification events required.
     */
    protected DataModifyQuery addTargetQuery;
    // True when the user supplied a custom addTargetQuery; the default statement is then not generated.
    protected boolean hasCustomAddTargetQuery;

    /**
     * Query used to update a single target row changing its foreign key value from the one pointing to the source to null.
     * Run once for each target removed from the source.
     * Example:
     *   for Employee with managedEmployees attribute mapped with UnidirectionalOneToMany
     *   the query looks like:
     *   UPDATE EMPLOYEE SET MANAGER_ID = null WHERE ((MANAGER_ID = 1) AND (EMP_ID = 2))
     *   where 1 is id of the source, and 2 is the id of the target to be removed.
     * Used only in case data modification events required.
     */
    protected DataModifyQuery removeTargetQuery;
    protected boolean hasCustomRemoveTargetQuery;

    /**
     * Query used to update all target rows changing target foreign key value from the one pointing to the source to null.
     * Run before the source object is deleted.
     * Example:
     *   for Employee with managedEmployees attribute mapped with UnidirectionalOneToMany
     *   the query looks like:
     *   UPDATE EMPLOYEE SET MANAGER_ID = null WHERE (MANAGER_ID = 1)
     *   where 1 is id of the source to be deleted.
     * Used only in case data modification events required.
     */
    protected DataModifyQuery removeAllTargetsQuery;
    protected boolean hasCustomRemoveAllTargetsQuery;

    /**
     * PUBLIC:
     * Default constructor.
     */
    public OneToManyMapping() {
        super();
        this.targetForeignKeysToSourceKeys = new HashMap(2);
        this.sourceKeysToTargetForeignKeys = new HashMap(2);
        this.sourceKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
        this.targetForeignKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
        this.deleteAllQuery = new DeleteAllQuery();
        this.removeTargetQuery = new DataModifyQuery();
        this.removeAllTargetsQuery = new DataModifyQuery();
        this.isListOrderFieldSupported = true;
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isRelationalMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Add the associated fields to the appropriate collections.
     */
    @Override
    public void addTargetForeignKeyField(DatabaseField targetForeignKeyField, DatabaseField sourceKeyField) {
        getTargetForeignKeyFields().addElement(targetForeignKeyField);
        getSourceKeyFields().addElement(sourceKeyField);
    }

    /**
     * PUBLIC:
     * Define the target foreign key relationship in the one-to-many mapping.
     * This method is used for composite target foreign key relationships.
     * That is, the target object's table has multiple foreign key fields
     * that are references to
     * the source object's (typically primary) key fields.
     * Both the target foreign key field name and the corresponding
     * source primary key field name must be specified.
     * Because the target object's table must store a foreign key to the source table,
     * the target object must map that foreign key, this is normally done through a
     * one-to-one mapping back-reference. Other options include:
     * <ul>
     * <li>use a DirectToFieldMapping and maintain the
     *     foreign key fields directly in the target</li>
     * <li>use a ManyToManyMapping</li>
     * <li>use an AggregateCollectionMapping</li>
     * </ul>
     * @see DirectToFieldMapping
     * @see ManyToManyMapping
     * @see AggregateCollectionMapping
     */
    public void addTargetForeignKeyFieldName(String targetForeignKeyFieldName, String sourceKeyFieldName) {
        addTargetForeignKeyField(new DatabaseField(targetForeignKeyFieldName), new DatabaseField(sourceKeyFieldName));
    }

    /**
     * INTERNAL:
     * Verifies listOrderField's table: it must be the same table that contains all target foreign keys.
     * Precondition: listOrderField != null.
     */
    protected void buildListOrderField() {
        if (this.listOrderField.hasTableName()) {
            if (!this.targetForeignKeyTable.equals(this.listOrderField.getTable())) {
                throw DescriptorException.listOrderFieldTableIsWrong(this.getDescriptor(), this, this.listOrderField.getTable(), this.targetForeignKeyTable);
            }
        } else {
            // No explicit table: default to the table holding the target foreign keys.
            listOrderField.setTable(this.targetForeignKeyTable);
        }
        this.listOrderField = this.getReferenceDescriptor().buildField(this.listOrderField, this.targetForeignKeyTable);
    }

    /**
     * The selection criteria are created with target foreign keys and source "primary" keys.
     * These criteria are then used to read the target records from the table.
     * These criteria are also used as the default "delete all" criteria.
     *
     * CR#3922 - This method is almost the same as buildSelectionCriteria() the difference
     * is that TargetForeignKeysToSourceKeys contains more information after login then SourceKeyFields
     * contains before login.
*/ protected Expression buildDefaultSelectionCriteriaAndAddFieldsToQuery() { Expression selectionCriteria = null; Expression builder = new ExpressionBuilder(); for (Iterator keys = getTargetForeignKeysToSourceKeys().keySet().iterator(); keys.hasNext();) { DatabaseField targetForeignKey = (DatabaseField)keys.next(); DatabaseField sourceKey = getTargetForeignKeysToSourceKeys().get(targetForeignKey); Expression partialSelectionCriteria = builder.getField(targetForeignKey).equal(builder.getParameter(sourceKey)); selectionCriteria = partialSelectionCriteria.and(selectionCriteria); } getContainerPolicy().addAdditionalFieldsToQuery(getSelectionQuery(), builder); return selectionCriteria; } /** * This method would allow customers to get the potential selection criteria for a mapping * prior to initialization. This would allow them to more easily create an amendment method * that would amend the SQL for the join. * * CR#3922 - This method is almost the same as buildDefaultSelectionCriteria() the difference * is that TargetForeignKeysToSourceKeys contains more information after login then SourceKeyFields * contains before login. 
*/
public Expression buildSelectionCriteria() {
    //CR3922
    Expression selectionCriteria = null;
    Expression builder = new ExpressionBuilder();
    // Walk the two parallel vectors together: FK field i pairs with source key field i.
    Enumeration sourceKeys = getSourceKeyFields().elements();
    for (Enumeration targetForeignKeys = getTargetForeignKeyFields().elements(); targetForeignKeys.hasMoreElements();) {
        DatabaseField targetForeignKey = (DatabaseField)targetForeignKeys.nextElement();
        DatabaseField sourceKey = (DatabaseField)sourceKeys.nextElement();
        Expression partialSelectionCriteria = builder.getField(targetForeignKey).equal(builder.getParameter(sourceKey));
        selectionCriteria = partialSelectionCriteria.and(selectionCriteria);
    }
    return selectionCriteria;
}

/**
 * INTERNAL:
 * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings
 * the FK field values will be used to re-issue the query when cloning the shared cache entity
 */
@Override
public void collectQueryParameters(Set cacheFields) {
    for (DatabaseField field : getSourceKeyFields()) {
        cacheFields.add(field);
    }
}

/**
 * INTERNAL:
 * Clone the appropriate attributes.
 */
@Override
public Object clone() {
    OneToManyMapping clone = (OneToManyMapping)super.clone();
    // Deep-copy the lookup map and the modify queries so the clone is independent.
    clone.setTargetForeignKeysToSourceKeys(new HashMap(getTargetForeignKeysToSourceKeys()));
    if (addTargetQuery != null) {
        clone.addTargetQuery = (DataModifyQuery) this.addTargetQuery.clone();
    }
    clone.removeTargetQuery = (DataModifyQuery) this.removeTargetQuery.clone();
    clone.removeAllTargetsQuery = (DataModifyQuery) this.removeAllTargetsQuery.clone();
    return clone;
}

/**
 * INTERNAL
 * Called when a DatabaseMapping is used to map the key in a collection. Returns the key.
 */
public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected) {
    return session.executeQuery(getSelectionQuery(), dbRow);
}

/**
 * Delete all the reference objects with a single query.
*/
protected void deleteAll(DeleteObjectQuery query, AbstractSession session) throws DatabaseException {
    Object attribute = getAttributeValueFromObject(query.getObject());
    if (usesIndirection()) {
        if (!this.indirectionPolicy.objectIsInstantiated(attribute)) {
            // An empty Vector indicates to DeleteAllQuery that no objects should be removed from cache
            ((DeleteAllQuery)this.deleteAllQuery).executeDeleteAll(session.getSessionForClass(this.referenceClass), query.getTranslationRow(), new Vector(0));
            return;
        }
    }
    Object referenceObjects = getRealCollectionAttributeValueFromObject(query.getObject(), session);
    // PERF: Avoid delete if empty.
    if (session.isUnitOfWork() && this.containerPolicy.isEmpty(referenceObjects)) {
        return;
    }
    ((DeleteAllQuery)this.deleteAllQuery).executeDeleteAll(session.getSessionForClass(getReferenceClass()), query.getTranslationRow(), this.containerPolicy.vectorFor(referenceObjects, session));
}

/**
 * This method will make sure that all the records privately owned by this mapping are
 * actually removed. If such records are found then those are all read and removed one
 * by one along with their privately owned parts.
 */
protected void deleteReferenceObjectsLeftOnDatabase(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException {
    // Read back any rows still referencing the deleted source, then delete them
    // individually so their own privately-owned parts cascade.
    Object objects = readPrivateOwnedForObject(query);
    // Delete all these object one by one.
    ContainerPolicy cp = getContainerPolicy();
    for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) {
        query.getSession().deleteObject(cp.next(iter, query.getSession()));
    }
}

/**
 * INTERNAL:
 * Extract the source primary key value from the target row.
 * Used for batch reading, most following same order and fields as in the mapping.
*/ @Override protected Object extractKeyFromTargetRow(AbstractRecord row, AbstractSession session) { int size = this.sourceKeyFields.size(); Object[] key = new Object[size]; ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager(); for (int index = 0; index < size; index++) { DatabaseField targetField = this.targetForeignKeyFields.get(index); DatabaseField sourceField = this.sourceKeyFields.get(index); Object value = row.get(targetField); // Must ensure the classification gets a cache hit. try { value = conversionManager.convertObject(value, sourceField.getType()); } catch (ConversionException e) { throw ConversionException.couldNotBeConverted(this, getDescriptor(), e); } key[index] = value; } return new CacheId(key); } /** * Extract the key field values from the specified row. * Used for batch reading. Keep the fields in the same order * as in the targetForeignKeysToSourceKeys map. */ @Override protected Object extractBatchKeyFromRow(AbstractRecord row, AbstractSession session) { int size = this.sourceKeyFields.size(); Object[] key = new Object[size]; ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager(); for (int index = 0; index < size; index++) { DatabaseField sourceField = this.sourceKeyFields.get(index); Object value = row.get(sourceField); // Must ensure the classification to get a cache hit. try { value = conversionManager.convertObject(value, sourceField.getType()); } catch (ConversionException exception) { throw ConversionException.couldNotBeConverted(this, this.descriptor, exception); } key[index] = value; } return new CacheId(key); } /** * Overrides CollectionMappig because this mapping requires a DeleteAllQuery instead of a ModifyQuery. 
*/ protected ModifyQuery getDeleteAllQuery() { if (deleteAllQuery == null) { deleteAllQuery = new DeleteAllQuery();//this is casted to a DeleteAllQuery } return deleteAllQuery; } /** * INTERNAL: * Return source key fields for translation by an AggregateObjectMapping */ @Override public Collection getFieldsForTranslationInAggregate() { return getSourceKeyFields(); } /** * PUBLIC: * Return the source key field names associated with the mapping. * These are in-order with the targetForeignKeyFieldNames. */ public Vector getSourceKeyFieldNames() { Vector fieldNames = new Vector(getSourceKeyFields().size()); for (Enumeration fieldsEnum = getSourceKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return the source key fields. */ public Vector getSourceKeyFields() { return sourceKeyFields; } /** * INTERNAL: * Return the source/target key fields. */ public Map getSourceKeysToTargetForeignKeys() { if (sourceKeysToTargetForeignKeys == null) { sourceKeysToTargetForeignKeys = new HashMap(2); } return sourceKeysToTargetForeignKeys; } /** * INTERNAL: * Primary keys of targetForeignKeyTable. */ public List getTargetPrimaryKeyFields() { return this.targetPrimaryKeyFields; } /** * INTERNAL: * Return the target foreign key field names associated with the mapping. * These are in-order with the targetForeignKeyFieldNames. */ public Vector getTargetForeignKeyFieldNames() { Vector fieldNames = new Vector(getTargetForeignKeyFields().size()); for (Enumeration fieldsEnum = getTargetForeignKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return the target foreign key fields. */ public Vector getTargetForeignKeyFields() { return targetForeignKeyFields; } /** * INTERNAL: * Return the target/source key fields. 
*/
public Map getTargetForeignKeysToSourceKeys() {
    // Lazily created; maps each target foreign key field to its source (primary) key field.
    if (targetForeignKeysToSourceKeys == null) {
        targetForeignKeysToSourceKeys = new HashMap(2);
    }
    return targetForeignKeysToSourceKeys;
}

/**
 * INTERNAL:
 * Maintain for backward compatibility.
 * This is 'public' so StoredProcedureGenerator
 * does not have to use the custom query expressions.
 */
public Map getTargetForeignKeyToSourceKeys() {
    return getTargetForeignKeysToSourceKeys();
}

/**
 * INTERNAL:
 * Return whether the mapping has any inverse constraint dependencies,
 * such as foreign keys and join tables.
 */
@Override
public boolean hasInverseConstraintDependency() {
    return true;
}

/**
 * INTERNAL:
 * Initialize the mapping: container policy, selection criteria, the delete-all
 * query and (when data-modification events are required) the add/remove target
 * queries. Must run against the session that owns the reference descriptor.
 */
@Override
public void initialize(AbstractSession session) throws DescriptorException {
    if (session.hasBroker()) {
        if (getReferenceClass() == null) {
            throw DescriptorException.referenceClassNotSpecified(this);
        }
        // substitute session that owns the mapping for the session that owns reference descriptor.
        session = session.getBroker().getSessionForClass(getReferenceClass());
    }
    super.initialize(session);
    getContainerPolicy().initialize(session, getReferenceDescriptor().getDefaultTable());
    if (shouldInitializeSelectionCriteria()) {
        setSelectionCriteria(buildDefaultSelectionCriteriaAndAddFieldsToQuery());
    }
    initializeDeleteAllQuery(session);
    // The add/remove target queries are only needed when writes to the target
    // rows happen outside the target objects' own insert/update (e.g. list order).
    if (requiresDataModificationEvents() || getContainerPolicy().requiresDataModificationEvents()) {
        initializeAddTargetQuery(session);
        initializeRemoveTargetQuery(session);
        initializeRemoveAllTargetsQuery(session);
    }
    if (getReferenceDescriptor().hasTablePerClassPolicy()) {
        // This will do nothing if we have already prepared for this
        // source mapping or if the source mapping does not require
        // any special prepare logic.
        getReferenceDescriptor().getTablePerClassPolicy().prepareChildrenSelectionQuery(this, session);
    }
    // Check if any foreign keys reference a secondary table.
    if (getDescriptor().getTables().size() > 1) {
        DatabaseTable firstTable = getDescriptor().getTables().get(0);
        for (DatabaseField field : getSourceKeyFields()) {
            if (!field.getTable().equals(firstTable)) {
                getDescriptor().setHasMultipleTableConstraintDependecy(true);
            }
        }
    }
}

/**
 * INTERNAL:
 * Initialize addTargetQuery.
 * Builds an UPDATE statement on the target table keyed by the target primary
 * key, unless a custom query was supplied via setCustomAddTargetQuery.
 */
protected void initializeAddTargetQuery(AbstractSession session) {
    AbstractRecord modifyRow = createModifyRowForAddTargetQuery();
    // Nothing to write back (no map key fields and no list order field).
    if(modifyRow.isEmpty()) {
        return;
    }
    if (!hasCustomAddTargetQuery){
        addTargetQuery = new DataModifyQuery();
    }
    if (!addTargetQuery.hasSessionName()) {
        addTargetQuery.setSessionName(session.getName());
    }
    if (hasCustomAddTargetQuery) {
        return;
    }
    // all fields in modifyRow must have the same table
    DatabaseTable table = (modifyRow.getFields().get(0)).getTable();
    // Build where clause expression.
    Expression whereClause = null;
    Expression builder = new ExpressionBuilder();
    int size = targetPrimaryKeyFields.size();
    for (int index = 0; index < size; index++) {
        DatabaseField targetPrimaryKey = targetPrimaryKeyFields.get(index);
        Expression expression = builder.getField(targetPrimaryKey).equal(builder.getParameter(targetPrimaryKey));
        whereClause = expression.and(whereClause);
    }
    SQLUpdateStatement statement = new SQLUpdateStatement();
    statement.setTable(table);
    statement.setWhereClause(whereClause);
    statement.setModifyRow(modifyRow);
    addTargetQuery.setSQLStatement(statement);
}

/**
 * INTERNAL:
 * Build the modify row for the addTargetQuery: the map key fields (if any)
 * plus the list order field (if any).
 */
protected AbstractRecord createModifyRowForAddTargetQuery() {
    AbstractRecord modifyRow = new DatabaseRecord();
    containerPolicy.addFieldsForMapKey(modifyRow);
    if(listOrderField != null) {
        modifyRow.add(listOrderField, null);
    }
    return modifyRow;
}

/**
 * INTERNAL:
 * Initialize changeOrderTargetQuery.
*/ protected void initializeChangeOrderTargetQuery(AbstractSession session) { boolean hasChangeOrderTargetQuery = changeOrderTargetQuery != null; if(!hasChangeOrderTargetQuery) { changeOrderTargetQuery = new DataModifyQuery(); } changeOrderTargetQuery = new DataModifyQuery(); if (!changeOrderTargetQuery.hasSessionName()) { changeOrderTargetQuery.setSessionName(session.getName()); } if (hasChangeOrderTargetQuery) { return; } DatabaseTable table = this.listOrderField.getTable(); // Build where clause expression. Expression whereClause = null; Expression builder = new ExpressionBuilder(); int size = targetPrimaryKeyFields.size(); for (int index = 0; index < size; index++) { DatabaseField targetPrimaryKey = targetPrimaryKeyFields.get(index); Expression expression = builder.getField(targetPrimaryKey).equal(builder.getParameter(targetPrimaryKey)); whereClause = expression.and(whereClause); } AbstractRecord modifyRow = new DatabaseRecord(); modifyRow.add(this.listOrderField, null); SQLUpdateStatement statement = new SQLUpdateStatement(); statement.setTable(table); statement.setWhereClause(whereClause); statement.setModifyRow(modifyRow); changeOrderTargetQuery.setSQLStatement(statement); } /** * Initialize the delete all query. * This query is used to delete the collection of objects from the * database. 
*/ protected void initializeDeleteAllQuery(AbstractSession session) { ((DeleteAllQuery)getDeleteAllQuery()).setReferenceClass(getReferenceClass()); getDeleteAllQuery().setName(getAttributeName()); ((DeleteAllQuery)getDeleteAllQuery()).setIsInMemoryOnly(isCascadeOnDeleteSetOnDatabase()); if (!hasCustomDeleteAllQuery()) { // the selection criteria are re-used by the delete all query if (getSelectionCriteria() == null) { getDeleteAllQuery().setSelectionCriteria(buildDefaultSelectionCriteriaAndAddFieldsToQuery()); } else { getDeleteAllQuery().setSelectionCriteria(getSelectionCriteria()); } } if (!getDeleteAllQuery().hasSessionName()) { getDeleteAllQuery().setSessionName(session.getName()); } if (getDeleteAllQuery().getPartitioningPolicy() == null) { getDeleteAllQuery().setPartitioningPolicy(getPartitioningPolicy()); } } /** * INTERNAL: * Initialize targetForeignKeyTable and initializeTargetPrimaryKeyFields. * This method should be called after initializeTargetForeignKeysToSourceKeys method, * which creates targetForeignKeyFields (guaranteed to be not empty in case * requiresDataModificationEvents method returns true - the only case for the method to be called). */ protected void initializeTargetPrimaryKeyFields() { // all target foreign key fields must have the same table. int size = getTargetForeignKeyFields().size(); HashSet tables = new HashSet(); for(int i=0; i < size; i++) { tables.add(getTargetForeignKeyFields().get(i).getTable()); } if(tables.size() == 1) { this.targetForeignKeyTable = getTargetForeignKeyFields().get(0).getTable(); } else { // multiple foreign key tables - throw exception. 
throw DescriptorException.multipleTargetForeignKeyTables(this.getDescriptor(), this, tables); } List defaultTablePrimaryKeyFields = getReferenceDescriptor().getPrimaryKeyFields(); if(this.targetForeignKeyTable.equals(getReferenceDescriptor().getDefaultTable())) { this.targetPrimaryKeyFields = defaultTablePrimaryKeyFields; } else { int sizePk = defaultTablePrimaryKeyFields.size(); this.targetPrimaryKeyFields = new ArrayList(); for(int i=0; i < sizePk; i++) { this.targetPrimaryKeyFields.add(null); } Map map = getReferenceDescriptor().getAdditionalTablePrimaryKeyFields().get(this.targetForeignKeyTable); Iterator> it = map.entrySet().iterator(); while(it.hasNext()) { Map.Entry entry = it.next(); DatabaseField sourceField = entry.getKey(); DatabaseField targetField = entry.getValue(); DatabaseField additionalTableField; DatabaseField defaultTableField; if(sourceField.getTable().equals(this.targetForeignKeyTable)) { additionalTableField = sourceField; defaultTableField = targetField; } else { defaultTableField = sourceField; additionalTableField = targetField; } int index = defaultTablePrimaryKeyFields.indexOf(defaultTableField); getReferenceDescriptor().buildField(additionalTableField, this.targetForeignKeyTable); this.targetPrimaryKeyFields.set(index, additionalTableField); } } } /** * INTERNAL: * Initialize removeTargetQuery. */ protected void initializeRemoveTargetQuery(AbstractSession session) { if (!removeTargetQuery.hasSessionName()) { removeTargetQuery.setSessionName(session.getName()); } if (hasCustomRemoveTargetQuery) { return; } // All targetForeignKeys should have the same table DatabaseTable table = targetForeignKeyFields.get(0).getTable(); // Build where clause expression. 
Expression whereClause = null; Expression builder = new ExpressionBuilder(); int size = targetPrimaryKeyFields.size(); for (int index = 0; index < size; index++) { DatabaseField targetPrimaryKey = targetPrimaryKeyFields.get(index); Expression expression = builder.getField(targetPrimaryKey).equal(builder.getParameter(targetPrimaryKey)); whereClause = expression.and(whereClause); } AbstractRecord modifyRow = new DatabaseRecord(); if(shouldRemoveTargetQueryModifyTargetForeignKey()) { size = targetForeignKeyFields.size(); for (int index = 0; index < size; index++) { DatabaseField targetForeignKey = targetForeignKeyFields.get(index); modifyRow.put(targetForeignKey, null); Expression expression = builder.getField(targetForeignKey).equal(builder.getParameter(targetForeignKey)); whereClause = expression.and(whereClause); } } if(listOrderField != null) { modifyRow.add(listOrderField, null); } SQLUpdateStatement statement = new SQLUpdateStatement(); statement.setTable(table); statement.setWhereClause(whereClause); statement.setModifyRow(modifyRow); removeTargetQuery.setSQLStatement(statement); } /** * Initialize and set the descriptor for the referenced class in this mapping. * Added here initialization of target foreign keys and target primary keys so that they are ready when * CollectionMapping.initialize initializes listOrderField. 
*/ protected void initializeReferenceDescriptor(AbstractSession session) throws DescriptorException { super.initializeReferenceDescriptor(session); if (!isSourceKeySpecified()) { // sourceKeyFields will be empty when #setTargetForeignKeyFieldName() is used setSourceKeyFields(org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(getDescriptor().getPrimaryKeyFields())); } initializeTargetForeignKeysToSourceKeys(); if (usesIndirection()) { for (DatabaseField field : getSourceKeyFields()) { field.setKeepInRow(true); } } if(requiresDataModificationEvents() || getContainerPolicy().requiresDataModificationEvents()) { initializeTargetPrimaryKeyFields(); } } /** * INTERNAL: * Initialize removeAllTargetsQuery. */ protected void initializeRemoveAllTargetsQuery(AbstractSession session) { if (!removeAllTargetsQuery.hasSessionName()) { removeAllTargetsQuery.setSessionName(session.getName()); } if (hasCustomRemoveAllTargetsQuery) { return; } // All targetForeignKeys should have the same table DatabaseTable table = targetForeignKeyFields.get(0).getTable(); // Build where clause expression. 
Expression whereClause = null; Expression builder = new ExpressionBuilder(); AbstractRecord modifyRow = new DatabaseRecord(); int size = targetForeignKeyFields.size(); for (int index = 0; index < size; index++) { DatabaseField targetForeignKey = targetForeignKeyFields.get(index); if(shouldRemoveTargetQueryModifyTargetForeignKey()) { modifyRow.put(targetForeignKey, null); } Expression expression = builder.getField(targetForeignKey).equal(builder.getParameter(targetForeignKey)); whereClause = expression.and(whereClause); } if(this.listOrderField != null) { // targetForeignKeys and listOrderField should have the same table modifyRow.add(this.listOrderField, null); } SQLUpdateStatement statement = new SQLUpdateStatement(); statement.setTable(table); statement.setWhereClause(whereClause); statement.setModifyRow(modifyRow); removeAllTargetsQuery.setSQLStatement(statement); } /** * Verify, munge, and hash the target foreign keys and source keys. */ protected void initializeTargetForeignKeysToSourceKeys() throws DescriptorException { if (getTargetForeignKeyFields().isEmpty()) { if (shouldInitializeSelectionCriteria() || requiresDataModificationEvents() || getContainerPolicy().requiresDataModificationEvents()) { throw DescriptorException.noTargetForeignKeysSpecified(this); } else { // if they have specified selection criteria, the keys do not need to be specified return; } } if (getTargetForeignKeyFields().size() != getSourceKeyFields().size()) { throw DescriptorException.targetForeignKeysSizeMismatch(this); } for (int index = 0; index < getTargetForeignKeyFields().size(); index++) { DatabaseField field = getReferenceDescriptor().buildField(getTargetForeignKeyFields().get(index)); getTargetForeignKeyFields().set(index, field); } for (int index = 0; index < getSourceKeyFields().size(); index++) { DatabaseField field = getDescriptor().buildField(getSourceKeyFields().get(index)); getSourceKeyFields().set(index, field); } Iterator targetForeignKeys = 
getTargetForeignKeyFields().iterator(); Iterator sourceKeys = getSourceKeyFields().iterator(); while (targetForeignKeys.hasNext()) { DatabaseField targetForeignKey = targetForeignKeys.next(); DatabaseField sourcePrimaryKey = sourceKeys.next(); getTargetForeignKeysToSourceKeys().put(targetForeignKey, sourcePrimaryKey); getSourceKeysToTargetForeignKeys().put(sourcePrimaryKey, targetForeignKey); } } /** * INTERNAL: */ @Override public boolean isOneToManyMapping() { return true; } /** * Return whether the source key is specified. * It will be empty when #setTargetForeignKeyFieldName(String) is used. */ protected boolean isSourceKeySpecified() { return !getSourceKeyFields().isEmpty(); } /** * INTERNAL: * An object was added to the collection during an update, insert it if private. */ @Override protected void objectAddedDuringUpdate(ObjectLevelModifyQuery query, Object objectAdded, ObjectChangeSet changeSet, Map extraData) throws DatabaseException, OptimisticLockException { // First insert/update object. super.objectAddedDuringUpdate(query, objectAdded, changeSet, extraData); if (requiresDataModificationEvents() || containerPolicy.requiresDataModificationEvents()){ // In the uow data queries are cached until the end of the commit. if (query.shouldCascadeOnlyDependentParts()) { // Hey I might actually want to use an inner class here... ok array for now. Object[] event = new Object[4]; event[0] = ObjectAdded; event[1] = query; event[2] = objectAdded; event[3] = extraData; query.getSession().getCommitManager().addDataModificationEvent(this, event); } else { updateTargetForeignKeyPostUpdateSource_ObjectAdded(query, objectAdded, extraData); } } } /** * INTERNAL: * An object was removed to the collection during an update, delete it if private. 
*/ @Override protected void objectRemovedDuringUpdate(ObjectLevelModifyQuery query, Object objectDeleted, Map extraData) throws DatabaseException, OptimisticLockException { if(!isPrivateOwned()) { if (requiresDataModificationEvents() || containerPolicy.requiresDataModificationEvents()){ // In the uow data queries are cached until the end of the commit. if (query.shouldCascadeOnlyDependentParts()) { // Hey I might actually want to use an inner class here... ok array for now. Object[] event = new Object[3]; event[0] = ObjectRemoved; event[1] = query; event[2] = objectDeleted; query.getSession().getCommitManager().addDataModificationEvent(this, event); } else { updateTargetForeignKeyPostUpdateSource_ObjectRemoved(query, objectDeleted); } } } // Delete object after join entry is delete if private. super.objectRemovedDuringUpdate(query, objectDeleted, extraData); } /** * INTERNAL: * Perform the commit event. * This is used in the uow to delay data modifications. */ @Override public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException { // Hey I might actually want to use an inner class here... ok array for now. if (event[0] == PostInsert) { updateTargetRowPostInsertSource((WriteObjectQuery)event[1]); } else if (event[0] == ObjectRemoved) { updateTargetForeignKeyPostUpdateSource_ObjectRemoved((WriteObjectQuery)event[1], event[2]); } else if (event[0] == ObjectAdded) { updateTargetForeignKeyPostUpdateSource_ObjectAdded((WriteObjectQuery)event[1], event[2], (Map)event[3]); } else { throw DescriptorException.invalidDataModificationEventCode(event[0], this); } } /** * INTERNAL: * Insert the reference objects. 
*/
@Override
public void postInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
    if (isReadOnly()) {
        return;
    }
    if (shouldObjectModifyCascadeToParts(query) && !query.shouldCascadeOnlyDependentParts()) {
        Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
        // insert each object one by one
        ContainerPolicy cp = getContainerPolicy();
        for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) {
            Object wrappedObject = cp.nextEntry(iter, query.getSession());
            Object object = cp.unwrapIteratorResult(wrappedObject);
            if (isPrivateOwned()) {
                // no need to set changeSet as insert is a straight copy
                InsertObjectQuery insertQuery = new InsertObjectQuery();
                insertQuery.setIsExecutionClone(true);
                insertQuery.setObject(object);
                insertQuery.setCascadePolicy(query.getCascadePolicy());
                query.getSession().executeQuery(insertQuery);
            } else {
                // This will happen in a cascaded query.
                // This is done only for persistence by reachability and is not required if the targets are in the queue anyway
                // Avoid cycles by checking commit manager, this is allowed because there is no dependency.
                if (!query.getSession().getCommitManager().isCommitInPreModify(object)) {
                    WriteObjectQuery writeQuery = new WriteObjectQuery();
                    writeQuery.setIsExecutionClone(true);
                    writeQuery.setObject(object);
                    writeQuery.setCascadePolicy(query.getCascadePolicy());
                    query.getSession().executeQuery(writeQuery);
                }
            }
            cp.propogatePostInsert(query, wrappedObject);
        }
    }
    // Write back target-row data (map keys / list order) that only this mapping knows about.
    if (requiresDataModificationEvents() || getContainerPolicy().requiresDataModificationEvents()){
        // only cascade dependents in UOW
        if (query.shouldCascadeOnlyDependentParts()) {
            if (!isReadOnly() && (requiresDataModificationEvents() || containerPolicy.shouldUpdateForeignKeysPostInsert())) {
                // Hey I might actually want to use an inner class here... ok array for now.
                Object[] event = new Object[2];
                event[0] = PostInsert;
                event[1] = query;
                query.getSession().getCommitManager().addDataModificationEvent(this, event);
            }
        } else {
            if (!isReadOnly() && (requiresDataModificationEvents() || containerPolicy.shouldUpdateForeignKeysPostInsert())){
                updateTargetRowPostInsertSource(query);
            }
        }
    }
}

/**
 * INTERNAL:
 * Update the reference objects.
 */
@Override
public void postUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
    if (this.isReadOnly) {
        return;
    }
    if (!requiresDataModificationEvents() && !shouldObjectModifyCascadeToParts(query)){
        return;
    }
    // if the target objects are not instantiated, they could not have been changed....
    if (!isAttributeValueInstantiatedOrChanged(query.getObject())) {
        return;
    }
    if (query.getObjectChangeSet() != null) {
        // UnitOfWork
        writeChanges(query.getObjectChangeSet(), query);
    } else {
        // OLD COMMIT
        compareObjectsAndWrite(query);
    }
}

/**
 * INTERNAL:
 * Return the selection criteria used to IN batch fetching.
 */
@Override
protected Expression buildBatchCriteria(ExpressionBuilder builder, ObjectLevelReadQuery query) {
    int size = this.targetForeignKeyFields.size();
    if (size > 1) {
        // Support composite keys using nested IN.
        List fields = new ArrayList(size);
        for (DatabaseField targetForeignKeyField : this.targetForeignKeyFields) {
            fields.add(builder.getField(targetForeignKeyField));
        }
        return query.getSession().getPlatform().buildBatchCriteriaForComplexId(builder, fields);
    } else {
        return query.getSession().getPlatform().buildBatchCriteria(builder, builder.getField(this.targetForeignKeyFields.get(0)));
    }
}

/**
 * INTERNAL:
 * Delete the reference objects.
*/
@Override
public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException {
    if (!shouldObjectModifyCascadeToParts(query)) {
        // Not cascading - still clear the list order column if one exists.
        if (this.listOrderField != null) {
            updateTargetRowPreDeleteSource(query);
        }
        return;
    }
    AbstractSession session = query.getSession();
    // If privately-owned parts have their privately-owned sub-parts, delete them one by one;
    // else delete everything in one shot.
    if (mustDeleteReferenceObjectsOneByOne()) {
        Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), session);
        ContainerPolicy cp = getContainerPolicy();
        if (this.isCascadeOnDeleteSetOnDatabase && session.isUnitOfWork()) {
            // Database cascades the delete: just record the targets as cascade-deleted.
            for (Object iterator = cp.iteratorFor(objects); cp.hasNext(iterator);) {
                Object wrappedObject = cp.nextEntry(iterator, session);
                Object object = cp.unwrapIteratorResult(wrappedObject);
                ((UnitOfWorkImpl)session).getCascadeDeleteObjects().add(object);
            }
        }
        int cascade = query.getCascadePolicy();
        for (Object iterator = cp.iteratorFor(objects); cp.hasNext(iterator);) {
            Object wrappedObject = cp.nextEntry(iterator, session);
            Object object = cp.unwrapIteratorResult(wrappedObject);
            // PERF: Avoid query execution if already deleted.
            if (!session.getCommitManager().isCommitCompletedInPostOrIgnore(object) || this.containerPolicy.propagatesEventsToCollection()) {
                if (session.isUnitOfWork() && ((UnitOfWorkImpl)session).isObjectNew(object) ){
                    // A new object that is also being removed never reached the database.
                    session.getCommitManager().markIgnoreCommit(object);
                } else {
                    DeleteObjectQuery deleteQuery = new DeleteObjectQuery();
                    deleteQuery.setIsExecutionClone(true);
                    deleteQuery.setObject(object);
                    deleteQuery.setCascadePolicy(cascade);
                    session.executeQuery(deleteQuery);
                    this.containerPolicy.propogatePreDelete(deleteQuery, wrappedObject);
                }
            }
        }
        if (!session.isUnitOfWork()) {
            // This deletes any objects on the database, as the collection in memory may have been changed.
            // This is not required for unit of work, as the update would have already deleted these objects,
            // and the backup copy will include the same objects causing double deletes.
            deleteReferenceObjectsLeftOnDatabase(query);
        }
    } else {
        deleteAll(query, session);
    }
}

/**
 * Prepare a cascade locking policy.
 */
@Override
public void prepareCascadeLockingPolicy() {
    CascadeLockingPolicy policy = new CascadeLockingPolicy(getDescriptor(), getReferenceDescriptor());
    policy.setQueryKeyFields(getSourceKeysToTargetForeignKeys());
    getReferenceDescriptor().addCascadeLockingPolicy(policy);
}

/**
 * INTERNAL:
 * Returns whether this mapping uses data modification events to complete its writes
 * @see UnidirectionalOneToManyMapping
 */
public boolean requiresDataModificationEvents(){
    // Only an ordered list requires extra target-row writes from this mapping.
    return this.listOrderField != null;
}

/**
 * PUBLIC:
 * The default add target query for mapping can be overridden by specifying the new query.
 * This query must set new value to target foreign key.
 */
public void setCustomAddTargetQuery(DataModifyQuery query) {
    addTargetQuery = query;
    hasCustomAddTargetQuery = true;
}

/**
 * PUBLIC:
 */
public void setAddTargetSQLString(String sqlString) {
    DataModifyQuery query = new DataModifyQuery();
    query.setSQLString(sqlString);
    setCustomAddTargetQuery(query);
}

/**
 * PUBLIC:
 * The default remove target query for mapping can be overridden by specifying the new query.
 * In case target foreign key references the source, this query must set target foreign key to null.
 */
public void setCustomRemoveTargetQuery(DataModifyQuery query) {
    removeTargetQuery = query;
    hasCustomRemoveTargetQuery = true;
}

/**
 * PUBLIC:
 * The default remove all targets query for mapping can be overridden by specifying the new query.
 * This query must set all target foreign keys that reference the source to null.
*/
public void setCustomRemoveAllTargetsQuery(DataModifyQuery query) {
    this.removeAllTargetsQuery = query;
    this.hasCustomRemoveAllTargetsQuery = true;
}

/**
 * PUBLIC:
 * Set the SQL string used by the mapping to delete the target objects.
 * This allows the developer to override the SQL
 * generated by TopLink with a custom SQL statement or procedure call.
 * The arguments are
 * translated from the fields of the source row, by replacing the field names
 * marked by '#' with the values for those fields at execution time.
 * A one-to-many mapping will only use this delete all optimization if the target objects
 * can be deleted in a single SQL call. This is possible when the target objects
 * are in a single table, do not using locking, do not contain other privately-owned
 * parts, do not read subclasses, etc.
 * <p>
 * Example: "delete from PHONE where OWNER_ID = #EMPLOYEE_ID"
 */
@Override
public void setDeleteAllSQLString(String sqlString) {
    DeleteAllQuery deleteAllQuery = new DeleteAllQuery();
    deleteAllQuery.setSQLString(sqlString);
    setCustomDeleteAllQuery(deleteAllQuery);
}

/**
 * PUBLIC:
 * Set the name of the session to execute the mapping's queries under.
 * This can be used by the session broker to override the default session
 * to be used for the target class.
 */
@Override
public void setSessionName(String name) {
    super.setSessionName(name);
    if (this.addTargetQuery != null) {
        this.addTargetQuery.setSessionName(name);
    }
    this.removeTargetQuery.setSessionName(name);
    this.removeAllTargetsQuery.setSessionName(name);
}

/**
 * INTERNAL:
 * Set the source key field names associated with the mapping.
 * These must be in-order with the targetForeignKeyFieldNames.
 */
public void setSourceKeyFieldNames(Vector fieldNames) {
    Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
    for (Object fieldName : fieldNames) {
        fields.addElement(new DatabaseField((String)fieldName));
    }
    setSourceKeyFields(fields);
}

/**
 * INTERNAL:
 * Set the source key fields.
 */
public void setSourceKeyFields(Vector sourceKeyFields) {
    this.sourceKeyFields = sourceKeyFields;
}

/**
 * PUBLIC:
 * Define the target foreign key relationship in the one-to-many mapping.
 * This method can be used when the foreign and primary keys
 * have only a single field each.
 * (Use #addTargetForeignKeyFieldName(String, String)
 * for "composite" keys.)
 * Only the target foreign key field name is specified and the source
 * (primary) key field is
 * assumed to be the primary key of the source object.
 * Because the target object's table must store a foreign key to the source table,
 * the target object must map that foreign key, this is normally done through a
 * one-to-one mapping back-reference. Other options include:
 * <ul>
 * <li>use a DirectToFieldMapping and maintain the foreign key fields directly in the target</li>
 * <li>use a ManyToManyMapping</li>
 * <li>use an AggregateCollectionMapping</li>
 * </ul>
 * @see DirectToFieldMapping
 * @see ManyToManyMapping
 * @see AggregateCollectionMapping
 */
public void setTargetForeignKeyFieldName(String targetForeignKeyFieldName) {
    getTargetForeignKeyFields().addElement(new DatabaseField(targetForeignKeyFieldName));
}

/**
 * PUBLIC:
 * Define the target foreign key relationship in the one-to-many mapping.
 * This method is used for composite target foreign key relationships.
 * That is, the target object's table has multiple foreign key fields to
 * the source object's (typically primary) key fields.
 * Both the target foreign key field names and the corresponding source primary
 * key field names must be specified.
 */
public void setTargetForeignKeyFieldNames(String[] targetForeignKeyFieldNames, String[] sourceKeyFieldNames) {
    if (targetForeignKeyFieldNames.length != sourceKeyFieldNames.length) {
        throw DescriptorException.targetForeignKeysSizeMismatch(this);
    }
    for (int position = 0; position < targetForeignKeyFieldNames.length; position++) {
        addTargetForeignKeyFieldName(targetForeignKeyFieldNames[position], sourceKeyFieldNames[position]);
    }
}

/**
 * INTERNAL:
 * Set the target key field names associated with the mapping.
 * These must be in-order with the sourceKeyFieldNames.
 */
public void setTargetForeignKeyFieldNames(Vector fieldNames) {
    Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
    for (Object fieldName : fieldNames) {
        fields.addElement(new DatabaseField((String)fieldName));
    }
    setTargetForeignKeyFields(fields);
}

/**
 * INTERNAL:
 * Set the target foreign key fields.
 */
public void setTargetForeignKeyFields(Vector targetForeignKeyFields) {
    this.targetForeignKeyFields = targetForeignKeyFields;
}

/**
 * INTERNAL:
 * Set the target/source key fields.
*/ protected void setTargetForeignKeysToSourceKeys(Map targetForeignKeysToSourceKeys) { this.targetForeignKeysToSourceKeys = targetForeignKeysToSourceKeys; } /** * Return whether any process leading to object modification * should also affect its parts. * Used by write, insert, update, and delete. */ @Override protected boolean shouldObjectModifyCascadeToParts(ObjectLevelModifyQuery query) { if (isReadOnly()) { return false; } if (isPrivateOwned()) { return true; } if (containerPolicy.isMappedKeyMapPolicy() && containerPolicy.requiresDataModificationEvents()){ return true; } return query.shouldCascadeAllParts(); } /** * INTERNAL * If it's not a map then target foreign key has been already modified (set to null). */ protected boolean shouldRemoveTargetQueryModifyTargetForeignKey() { return containerPolicy.isMapPolicy(); } /** * INTERNAL * Return true if this mapping supports cascaded version optimistic locking. */ @Override public boolean isCascadedLockingSupported() { return true; } /** * INTERNAL: * Return if this mapping support joining. */ @Override public boolean isJoiningSupported() { return true; } /** * INTERNAL: * Update target foreign keys after a new source was inserted. This follows following steps. */ public void updateTargetRowPostInsertSource(WriteObjectQuery query) throws DatabaseException { if (isReadOnly() || addTargetQuery == null) { return; } ContainerPolicy cp = getContainerPolicy(); Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); if (cp.isEmpty(objects)) { return; } prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession()); AbstractRecord keyRow = buildKeyRowForTargetUpdate(query); // Extract target field and its value. 
Construct insert statement and execute it int size = targetPrimaryKeyFields.size(); int objectIndex = 0; for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) { AbstractRecord databaseRow = new DatabaseRecord(); databaseRow.mergeFrom(keyRow); Object wrappedObject = cp.nextEntry(iter, query.getSession()); Object object = cp.unwrapIteratorResult(wrappedObject); for(int index = 0; index < size; index++) { DatabaseField targetPrimaryKey = targetPrimaryKeyFields.get(index); Object targetKeyValue = getReferenceDescriptor().getObjectBuilder().extractValueFromObjectForField(object, targetPrimaryKey, query.getSession()); databaseRow.put(targetPrimaryKey, targetKeyValue); } ContainerPolicy.copyMapDataToRow(cp.getKeyMappingDataForWriteQuery(wrappedObject, query.getSession()), databaseRow); if(listOrderField != null) { databaseRow.put(listOrderField, objectIndex++); } query.getSession().executeQuery(addTargetQuery, databaseRow); } } protected AbstractRecord buildKeyRowForTargetUpdate(ObjectLevelModifyQuery query){ return new DatabaseRecord(); } /** * INTERNAL: * Update target foreign key after a target object was added to the source. This follows following steps. *

- Extract primary key and its value from the source object. *

- Extract target key and its value from the target object. *

- Construct an update statement with above fields and values for target table. *

- execute the statement. */ public void updateTargetForeignKeyPostUpdateSource_ObjectAdded(ObjectLevelModifyQuery query, Object objectAdded, Map extraData) throws DatabaseException { if (isReadOnly() || addTargetQuery == null) { return; } ContainerPolicy cp = getContainerPolicy(); prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession()); AbstractRecord databaseRow = buildKeyRowForTargetUpdate(query); // Extract target field and its value. Construct insert statement and execute it int size = targetPrimaryKeyFields.size(); for (int index = 0; index < size; index++) { DatabaseField targetPrimaryKey = targetPrimaryKeyFields.get(index); Object targetKeyValue = getReferenceDescriptor().getObjectBuilder().extractValueFromObjectForField(cp.unwrapIteratorResult(objectAdded), targetPrimaryKey, query.getSession()); databaseRow.put(targetPrimaryKey, targetKeyValue); } ContainerPolicy.copyMapDataToRow(cp.getKeyMappingDataForWriteQuery(objectAdded, query.getSession()), databaseRow); if(listOrderField != null && extraData != null) { databaseRow.put(listOrderField, extraData.get(listOrderField)); } query.getSession().executeQuery(addTargetQuery, databaseRow); } /** * INTERNAL: * Update target foreign key after a target object was removed from the source. This follows following steps. *

- Extract primary key and its value from the source object. *

- Extract target key and its value from the target object. *

- Construct an update statement with above fields and values for target table. *

- execute the statement. */ public void updateTargetForeignKeyPostUpdateSource_ObjectRemoved(ObjectLevelModifyQuery query, Object objectRemoved) throws DatabaseException { if (this.isReadOnly) { return; } AbstractSession session = query.getSession(); prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), session); AbstractRecord translationRow = new DatabaseRecord(); // Extract primary key and value from the source (use translation row). int size = this.sourceKeyFields.size(); AbstractRecord modifyRow = new DatabaseRecord(size); for (int index = 0; index < size; index++) { DatabaseField sourceKey = this.sourceKeyFields.get(index); DatabaseField targetForeignKey = this.targetForeignKeyFields.get(index); Object sourceKeyValue = query.getTranslationRow().get(sourceKey); translationRow.add(targetForeignKey, sourceKeyValue); // Need to set this value to null in the modify row. modifyRow.add(targetForeignKey, null); } if(listOrderField != null) { modifyRow.add(listOrderField, null); } ContainerPolicy cp = getContainerPolicy(); // Extract target field and its value from the object. size = targetPrimaryKeyFields.size(); for (int index = 0; index < size; index++) { DatabaseField targetPrimaryKey = targetPrimaryKeyFields.get(index); Object targetKeyValue = getReferenceDescriptor().getObjectBuilder().extractValueFromObjectForField(cp.unwrapIteratorResult(objectRemoved), targetPrimaryKey, session); translationRow.add(targetPrimaryKey, targetKeyValue); } // Need a different modify row than translation row, as the same field has different values in each. DataModifyQuery removeQuery = (DataModifyQuery)this.removeTargetQuery.clone(); removeQuery.setModifyRow(modifyRow); removeQuery.setHasModifyRow(true); removeQuery.setIsExecutionClone(true); session.executeQuery(removeQuery, translationRow); } /** * INTERNAL: * Update target foreign key after a target object was removed from the source. This follows following steps. *

- Extract primary key and its value from the source object. *

- Extract target key and its value from the target object. *

- Construct an update statement with above fields and values for target table. *

- execute the statement. */ public void updateTargetRowPreDeleteSource(ObjectLevelModifyQuery query) throws DatabaseException { if (this.isReadOnly) { return; } // Extract primary key and value from the source. int size = this.sourceKeyFields.size(); AbstractRecord translationRow = new DatabaseRecord(size); AbstractRecord modifyRow = new DatabaseRecord(size); for (int index = 0; index < size; index++) { DatabaseField sourceKey = this.sourceKeyFields.get(index); DatabaseField targetForeignKey = this.targetForeignKeyFields.get(index); Object sourceKeyValue = query.getTranslationRow().get(sourceKey); translationRow.add(targetForeignKey, sourceKeyValue); // Need to set this value to null in the modify row. modifyRow.add(targetForeignKey, null); } if(listOrderField != null) { modifyRow.add(listOrderField, null); } // Need a different modify row than translation row, as the same field has different values in each. DataModifyQuery removeQuery = (DataModifyQuery)this.removeAllTargetsQuery.clone(); removeQuery.setModifyRow(modifyRow); removeQuery.setHasModifyRow(true); removeQuery.setIsExecutionClone(true); query.getSession().executeQuery(removeQuery, translationRow); } /** * INTERNAL: * Used to verify whether the specified object is deleted or not. 
*/ @Override public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException { if (this.isPrivateOwned() || isCascadeRemove()) { Object objects = getRealCollectionAttributeValueFromObject(object, session); ContainerPolicy containerPolicy = getContainerPolicy(); for (Object iter = containerPolicy.iteratorFor(objects); containerPolicy.hasNext(iter);) { if (!session.verifyDelete(containerPolicy.next(iter, session))) { return false; } } } return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/AggregateCollectionMapping.java0000664000000000000000000042107312216173130026311 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 12/30/2010-2.3 Guy Pelletier * - 312253: Descriptor exception with Embeddable on DDL gen * 07/27/2012-2.5 Chris Delahunt * - 371950: Metadata caching * 10/25/2012-2.5 Guy Pelletier * - 374688: JPA 2.1 Converter support * 02/11/2013-2.5 Guy Pelletier * - 365931: @JoinColumn(name="FK_DEPT",insertable = false, updatable = true) causes INSERT statement to include this data value that it is associated with ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.Vector; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.DescriptorEvent; import org.eclipse.persistence.descriptors.DescriptorEventManager; import org.eclipse.persistence.descriptors.changetracking.AttributeChangeTrackingPolicy; import org.eclipse.persistence.descriptors.changetracking.DeferredChangeDetectionPolicy; import org.eclipse.persistence.descriptors.changetracking.ObjectChangeTrackingPolicy; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.exceptions.DatabaseException; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.exceptions.OptimisticLockException; import org.eclipse.persistence.expressions.Expression; import org.eclipse.persistence.expressions.ExpressionBuilder; import org.eclipse.persistence.expressions.ExpressionMath; import org.eclipse.persistence.indirection.IndirectList; import org.eclipse.persistence.indirection.ValueHolder; import org.eclipse.persistence.internal.descriptors.DescriptorIterator; import org.eclipse.persistence.internal.descriptors.ObjectBuilder; import 
org.eclipse.persistence.internal.expressions.SQLUpdateStatement; import org.eclipse.persistence.internal.helper.ConversionManager; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.helper.DatabaseTable; import org.eclipse.persistence.internal.helper.IdentityHashSet; import org.eclipse.persistence.internal.helper.NonSynchronizedVector; import org.eclipse.persistence.internal.identitymaps.CacheId; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.queries.AttributeItem; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.AggregateCollectionChangeRecord; import org.eclipse.persistence.internal.sessions.ChangeRecord; import org.eclipse.persistence.internal.sessions.MergeManager; import org.eclipse.persistence.internal.sessions.ObjectChangeSet; import org.eclipse.persistence.internal.sessions.UnitOfWorkChangeSet; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.converters.Converter; import org.eclipse.persistence.mappings.foundation.MapComponentMapping; import org.eclipse.persistence.queries.DataModifyQuery; import org.eclipse.persistence.queries.DatabaseQuery; import org.eclipse.persistence.queries.DeleteAllQuery; import org.eclipse.persistence.queries.DeleteObjectQuery; import org.eclipse.persistence.queries.InsertObjectQuery; import org.eclipse.persistence.queries.ModifyQuery; import org.eclipse.persistence.queries.ObjectBuildingQuery; import org.eclipse.persistence.queries.ObjectLevelModifyQuery; import org.eclipse.persistence.queries.ObjectLevelReadQuery; import org.eclipse.persistence.queries.ReadAllQuery; import 
org.eclipse.persistence.queries.ReadQuery; import org.eclipse.persistence.queries.UpdateObjectQuery; import org.eclipse.persistence.queries.WriteObjectQuery; import org.eclipse.persistence.sessions.CopyGroup; import org.eclipse.persistence.sessions.DatabaseRecord; import org.eclipse.persistence.sessions.Project; import org.eclipse.persistence.sessions.remote.DistributedSession; /** *

Purpose: The aggregate collection mapping is used to represent the aggregate relationship between a single * source object and a collection of target objects. The target objects cannot exist without the existence of the * source object (privately owned) * Unlike the normal aggregate mapping, there is a target table being mapped from the target objects. * Unlike normal 1:m mapping, there is no 1:1 back reference mapping, as foreign key constraints have been resolved by the aggregation. * * @author King (Yaoping) Wang * @since TOPLink/Java 3.0 */ public class AggregateCollectionMapping extends CollectionMapping implements RelationalMapping, MapComponentMapping, EmbeddableMapping { /** This is a key in the target table which is a foreign key in the target table. */ protected Vector targetForeignKeyFields; /** This is a primary key in the source table that is used as foreign key in the target table */ protected Vector sourceKeyFields; /** Foreign keys in the target table to the related keys in the source table */ protected Map targetForeignKeyToSourceKeys; /** Map the name of a field in the aggregate collection descriptor to a field in the actual table specified in the mapping. */ protected Map aggregateToSourceFields; /** Map the name of an attribute of the reference descriptor mapped with AggregateCollectionMapping to aggregateToSourceFieldNames * that should be applied to this mapping. */ protected Map> nestedAggregateToSourceFields; /** In RemoteSession case the mapping needs the reference descriptor serialized from the server, * but referenceDescriptor attribute defined as transient in the superclass. To overcome that * in non-remote case referenceDescriptor is assigned to remoteReferenceDescriptor; in remote - another way around. */ protected ClassDescriptor remoteReferenceDescriptor; /** Default source table that should be used with the default source fields of this mapping. 
*/ protected DatabaseTable defaultSourceTable; /** Indicates whether the entire target object is primary key - in that case the object can't be updated in the db, * but rather deleted and then re-inserted. */ protected boolean isEntireObjectPK; /** These queries used to update listOrderField */ protected transient DataModifyQuery updateListOrderFieldQuery; protected transient DataModifyQuery bulkUpdateListOrderFieldQuery; protected transient DataModifyQuery pkUpdateListOrderFieldQuery; /** indicates whether listOrderField value could be updated in the db. Used only if listOrderField!=null */ protected boolean isListOrderFieldUpdatable; protected static final String min = "min"; protected static final String max = "max"; protected static final String shift = "shift"; protected static final String pk = "pk"; protected static final String bulk = "bulk"; /** * Indicates whether the mapping (or at least one of its nested mappings, at any nested depth) * references an entity. * To return true the mapping (or nested mapping) should be ForeignReferenceMapping with non-null and non-aggregate reference descriptor. * Lazily initialized. */ protected Boolean hasNestedIdentityReference; /** * PUBLIC: * Default constructor. 
*/ public AggregateCollectionMapping() { this.aggregateToSourceFields = new HashMap(5); this.nestedAggregateToSourceFields = new HashMap>(5); this.targetForeignKeyToSourceKeys = new HashMap(5); this.sourceKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1); this.targetForeignKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1); this.deleteAllQuery = new DeleteAllQuery(); //aggregates should always cascade all operations this.setCascadeAll(true); this.isListOrderFieldSupported = true; this.isListOrderFieldUpdatable = true; this.isPrivateOwned = true; } /** * INTERNAL: */ public boolean isRelationalMapping() { return true; } /** * INTERNAL: * In JPA users may specify overrides to apply to a many to many mapping * on a shared embeddable descriptor. These settings are applied at * initialize time, after the reference descriptor is cloned. In an * aggregate collection case, this is not supported and currently silently * ignored and does nothing. */ public void addOverrideManyToManyMapping(ManyToManyMapping mapping) { // Not supported at this time ... } /** * INTERNAL: * In JPA users may specify overrides to apply to a unidirectional one to * many mapping on a shared embeddable descriptor. These settings are * applied at initialize time, after the reference descriptor is cloned. In * an aggregate collection case, this is not supported and currently * silently ignored and does nothing. */ public void addOverrideUnidirectionalOneToManyMapping(UnidirectionalOneToManyMapping mapping) { // Not supported at this time ... } /** * Add a converter to be applied to a mapping of the aggregate descriptor. */ public void addConverter(Converter converter, String attributeName) { // Not supported at this time ... } /** * PUBLIC: * Maps a field name in the aggregate descriptor * to a field name in the source table. 
*/ public void addFieldNameTranslation(String sourceFieldName, String aggregateFieldName) { addFieldTranslation(new DatabaseField(sourceFieldName), aggregateFieldName); } /** * PUBLIC: * Maps a field name in the aggregate descriptor * to a field in the source table. */ public void addFieldTranslation(DatabaseField sourceField, String aggregateField) { aggregateToSourceFields.put(aggregateField, sourceField); } /** * PUBLIC: * * Maps a field name in the aggregate descriptor * to a field name in the source table. */ public void addFieldTranslations(Map map) { aggregateToSourceFields.putAll(map); } /** * PUBLIC: * Map the name of an attribute of the reference descriptor mapped with AggregateCollectionMapping to aggregateToSourceFieldNames * that should be applied to this mapping. */ public void addNestedFieldNameTranslation(String attributeName, String sourceFieldName, String aggregateFieldName) { addNestedFieldTranslation(attributeName, new DatabaseField(sourceFieldName), aggregateFieldName); } /** * PUBLIC: * Map the name of an attribute of the reference descriptor mapped with AggregateCollectionMapping to aggregateToSourceFieldNames * that should be applied to this mapping. */ public void addNestedFieldTranslation(String attributeName, DatabaseField sourceField, String aggregateFieldName) { Map attributeFieldNameTranslation = nestedAggregateToSourceFields.get(attributeName); if (attributeFieldNameTranslation == null) { attributeFieldNameTranslation = new HashMap(5); nestedAggregateToSourceFields.put(attributeName, attributeFieldNameTranslation); } attributeFieldNameTranslation.put(aggregateFieldName, sourceField); } /** * PUBLIC: * Map the name of an attribute of the reference descriptor mapped with AggregateCollectionMapping to aggregateToSourceFields * that should be applied to this mapping. 
*/ public void addNestedFieldNameTranslations(String attributeName, Map map) { Map attributeFieldNameTranslation = nestedAggregateToSourceFields.get(attributeName); if (attributeFieldNameTranslation == null) { nestedAggregateToSourceFields.put(attributeName, map); } else { attributeFieldNameTranslation.putAll(map); } } /** * PUBLIC: * Define the target foreign key relationship in the 1-M aggregate collection mapping. * Both the target foreign key field and the source primary key field must be specified. */ @Override public void addTargetForeignKeyField(DatabaseField targetForeignKey, DatabaseField sourceKey) { getTargetForeignKeyFields().addElement(targetForeignKey); getSourceKeyFields().addElement(sourceKey); } /** * PUBLIC: * Define the target foreign key relationship in the 1-M aggregate collection mapping. * Both the target foreign key field name and the source primary key field name must be specified. */ public void addTargetForeignKeyFieldName(String targetForeignKey, String sourceKey) { addTargetForeignKeyField(new DatabaseField(targetForeignKey), new DatabaseField(sourceKey)); } /** * INTERNAL: * Used during building the backup shallow copy to copy the vector without re-registering the target objects. 
*/ public Object buildBackupCloneForPartObject(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { ContainerPolicy containerPolicy = getContainerPolicy(); if (attributeValue == null) { return containerPolicy.containerInstance(1); } Object clonedAttributeValue = containerPolicy.containerInstance(containerPolicy.sizeFor(attributeValue)); if (isSynchronizeOnMerge) { synchronized (attributeValue) { for (Object valuesIterator = containerPolicy.iteratorFor(attributeValue); containerPolicy.hasNext(valuesIterator);) { Object wrappedElement = containerPolicy.nextEntry(valuesIterator, unitOfWork); Object cloneValue = buildElementBackupClone(containerPolicy.unwrapIteratorResult(wrappedElement), unitOfWork); containerPolicy.addInto(containerPolicy.keyFromIterator(valuesIterator), cloneValue, clonedAttributeValue, unitOfWork); } } } else { for (Object valuesIterator = containerPolicy.iteratorFor(attributeValue); containerPolicy.hasNext(valuesIterator);) { Object wrappedElement = containerPolicy.nextEntry(valuesIterator, unitOfWork); Object cloneValue = buildElementBackupClone(containerPolicy.unwrapIteratorResult(wrappedElement), unitOfWork); containerPolicy.addInto(containerPolicy.keyFromIterator(valuesIterator), cloneValue, clonedAttributeValue, unitOfWork); } } return clonedAttributeValue; } /** * INTERNAL: * Require for cloning, the part must be cloned. * Ignore the objects, use the attribute value. 
* this is identical to the super class except that the element must be added to the new * aggregates collection so that the referenced objects will be cloned correctly */ @Override public Object buildCloneForPartObject(Object attributeValue, Object original, CacheKey cacheKey, Object clone, AbstractSession cloningSession, Integer refreshCascade, boolean isExisting, boolean isFromSharedCache) { ContainerPolicy containerPolicy = getContainerPolicy(); if (attributeValue == null) { return containerPolicy.containerInstance(1); } Object clonedAttributeValue = containerPolicy.containerInstance(containerPolicy.sizeFor(attributeValue)); Object temporaryCollection = null; if (isSynchronizeOnMerge) { // I need to synchronize here to prevent the collection from changing while I am cloning it. // This will occur when I am merging into the cache and I am instantiating a UOW valueHolder at the same time // I can not synchronize around the clone, as this will cause deadlocks, so I will need to copy the collection then create the clones // I will use a temporary collection to help speed up the process synchronized (attributeValue) { temporaryCollection = containerPolicy.cloneFor(attributeValue); } } else { temporaryCollection = attributeValue; } for (Object valuesIterator = containerPolicy.iteratorFor(temporaryCollection); containerPolicy.hasNext(valuesIterator);) { Object wrappedElement = containerPolicy.nextEntry(valuesIterator, cloningSession); Object originalElement = containerPolicy.unwrapIteratorResult(wrappedElement); //need to add to aggregate list in the case that there are related objects. 
if (cloningSession.isUnitOfWork() && ((UnitOfWorkImpl)cloningSession).isOriginalNewObject(original)) { ((UnitOfWorkImpl)cloningSession).addNewAggregate(originalElement); } Object cloneValue = buildElementClone(originalElement, clone, cacheKey, refreshCascade, cloningSession, isExisting, isFromSharedCache); Object clonedKey = containerPolicy.buildCloneForKey(containerPolicy.keyFromIterator(valuesIterator), clone, cacheKey, refreshCascade, cloningSession, isExisting, isFromSharedCache); containerPolicy.addInto(clonedKey, cloneValue, clonedAttributeValue, cloningSession); } if(temporaryCollection instanceof IndirectList) { ((IndirectList)clonedAttributeValue).setIsListOrderBrokenInDb(((IndirectList)temporaryCollection).isListOrderBrokenInDb()); } return clonedAttributeValue; } /** * INTERNAL: * Clone the aggregate collection, if necessary. */ protected Object buildElementBackupClone(Object element, UnitOfWorkImpl unitOfWork) { // Do not clone for read-only. if (unitOfWork.isClassReadOnly(element.getClass(), getReferenceDescriptor())) { return element; } ClassDescriptor aggregateDescriptor = getReferenceDescriptor(element.getClass(), unitOfWork); Object clonedElement = aggregateDescriptor.getObjectBuilder().buildBackupClone(element, unitOfWork); return clonedElement; } /** * INTERNAL: * Clone the aggregate collection, if necessary. */ public Object buildElementClone(Object element, Object parent, CacheKey parentCacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isExisting, boolean isFromSharedCache) { // Do not clone for read-only. if (cloningSession.isUnitOfWork() && cloningSession.isClassReadOnly(element.getClass(), getReferenceDescriptor())) { return element; } ClassDescriptor aggregateDescriptor = getReferenceDescriptor(element.getClass(), cloningSession); // bug 2612602 as we are building the working copy make sure that we call to correct clone method. 
Object clonedElement = aggregateDescriptor.getObjectBuilder().instantiateWorkingCopyClone(element, cloningSession); aggregateDescriptor.getObjectBuilder().populateAttributesForClone(element, parentCacheKey, clonedElement, refreshCascade, cloningSession); if (cloningSession.isUnitOfWork()){ // CR 4155 add the originals to the UnitOfWork so that we can find it later in the merge // as aggregates have no identity. If we don't do this we will loose indirection information. ((UnitOfWorkImpl)cloningSession).getCloneToOriginals().put(clonedElement, element); } return clonedElement; } /** * INTERNAL: * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings * the FK field values will be used to re-issue the query when cloning the shared cache entity */ @Override public void collectQueryParameters(Set cacheFields){ for (DatabaseField field : getSourceKeyFields()) { cacheFields.add(field); } } /** * INTERNAL: * Cascade discover and persist new objects during commit. */ public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) { //aggregate objects are not registered but their mappings should be. 
Object cloneAttribute = null; cloneAttribute = getAttributeValueFromObject(object); if ((cloneAttribute == null) || (!getIndirectionPolicy().objectIsInstantiated(cloneAttribute))) { return; } ObjectBuilder builder = null; ContainerPolicy cp = getContainerPolicy(); Object cloneObjectCollection = null; cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow); Object cloneIter = cp.iteratorFor(cloneObjectCollection); while (cp.hasNext(cloneIter)) { Object wrappedObject = cp.nextEntry(cloneIter, uow); Object nextObject = cp.unwrapIteratorResult(wrappedObject); if (nextObject != null) { builder = getReferenceDescriptor(nextObject.getClass(), uow).getObjectBuilder(); builder.cascadeDiscoverAndPersistUnregisteredNewObjects(nextObject, newObjects, unregisteredExistingObjects, visitedObjects, uow, cascadeErrors); cp.cascadeDiscoverAndPersistUnregisteredNewObjects(wrappedObject, newObjects, unregisteredExistingObjects, visitedObjects, uow, cascadeErrors); } } } /** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade */ public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { // Aggregate objects are not registered but their mappings should be. Object attributeValue = getAttributeValueFromObject(object); if ((attributeValue == null) // Also check if the source is new, then must always cascade. || (!this.indirectionPolicy.objectIsInstantiated(attributeValue) && !uow.isCloneNewObject(object))) { return; } ObjectBuilder builder = null; ContainerPolicy cp = this.containerPolicy; Object cloneObjectCollection = null; cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow); Object cloneIter = cp.iteratorFor(cloneObjectCollection); while (cp.hasNext(cloneIter)) { Object wrappedObject = cp.nextEntry(cloneIter, uow); Object nextObject = cp.unwrapIteratorResult(wrappedObject); if (nextObject != null && (! 
visitedObjects.containsKey(nextObject))){ visitedObjects.put(nextObject, nextObject); builder = getReferenceDescriptor(nextObject.getClass(), uow).getObjectBuilder(); builder.cascadeRegisterNewForCreate(nextObject, uow, visitedObjects); cp.cascadeRegisterNewIfRequired(wrappedObject, uow, visitedObjects); } } } /** * INTERNAL: * Cascade registerNew for Create through mappings that require the cascade */ public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){ //aggregate objects are not registered but their mappings should be. Object cloneAttribute = getAttributeValueFromObject(object); if ((cloneAttribute == null)) { return; } // PERF: If not instantiated, then avoid instantiating, delete-all will handle deletion. if (usesIndirection() && (!mustDeleteReferenceObjectsOneByOne())) { if (!this.indirectionPolicy.objectIsInstantiated(cloneAttribute)) { return; } } ObjectBuilder builder = null; ContainerPolicy cp = getContainerPolicy(); Object cloneObjectCollection = null; cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow); Object cloneIter = cp.iteratorFor(cloneObjectCollection); while (cp.hasNext(cloneIter)) { Object wrappedObject = cp.nextEntry(cloneIter, uow); Object nextObject = cp.unwrapIteratorResult(wrappedObject); if (nextObject != null && ( ! 
visitedObjects.containsKey(nextObject) ) ){ visitedObjects.put(nextObject, nextObject); if (this.isCascadeOnDeleteSetOnDatabase) { uow.getCascadeDeleteObjects().add(nextObject); } builder = getReferenceDescriptor(nextObject.getClass(), uow).getObjectBuilder(); builder.cascadePerformRemove(nextObject, uow, visitedObjects); cp.cascadePerformRemoveIfRequired(wrappedObject, uow, visitedObjects); } } } /** * INTERNAL: * Cascade perform removal of orphaned private owned objects from the UnitOfWorkChangeSet */ public void cascadePerformRemovePrivateOwnedObjectFromChangeSetIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { // if the object is not instantiated, do not instantiate or cascade Object attributeValue = getAttributeValueFromObject(object); if (attributeValue != null && getIndirectionPolicy().objectIsInstantiated(attributeValue)) { Object cloneObjectCollection = getRealCollectionAttributeValueFromObject(object, uow); ContainerPolicy cp = getContainerPolicy(); for (Object cloneIter = cp.iteratorFor(cloneObjectCollection); cp.hasNext(cloneIter);) { Object referencedObject = cp.next(cloneIter, uow); if (referencedObject != null && !visitedObjects.containsKey(referencedObject)) { visitedObjects.put(referencedObject, referencedObject); ObjectBuilder builder = getReferenceDescriptor(referencedObject.getClass(), uow).getObjectBuilder(); builder.cascadePerformRemovePrivateOwnedObjectFromChangeSet(referencedObject, uow, visitedObjects); } } } } /** * INTERNAL: * The mapping clones itself to create deep copy. 
*/ public Object clone() { AggregateCollectionMapping mappingObject = (AggregateCollectionMapping)super.clone(); mappingObject.setTargetForeignKeyToSourceKeys(new HashMap(getTargetForeignKeyToSourceKeys())); mappingObject.setSourceKeyFields(org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(getSourceKeyFields())); mappingObject.setTargetForeignKeyFields(org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(getTargetForeignKeyFields())); mappingObject.aggregateToSourceFields = new HashMap(this.aggregateToSourceFields); mappingObject.nestedAggregateToSourceFields = new HashMap(this.nestedAggregateToSourceFields); if(updateListOrderFieldQuery != null) { mappingObject.updateListOrderFieldQuery = this.updateListOrderFieldQuery; } if(bulkUpdateListOrderFieldQuery != null) { mappingObject.bulkUpdateListOrderFieldQuery = this.bulkUpdateListOrderFieldQuery; } if(pkUpdateListOrderFieldQuery != null) { mappingObject.pkUpdateListOrderFieldQuery = this.pkUpdateListOrderFieldQuery; } return mappingObject; } /** * INTERNAL: * This method is used to create a change record from comparing two aggregate collections * @return ChangeRecord */ public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) { Object cloneAttribute = null; Object backUpAttribute = null; cloneAttribute = getAttributeValueFromObject(clone); if ((cloneAttribute != null) && (!getIndirectionPolicy().objectIsInstantiated(cloneAttribute))) { //If the clone's valueholder was not triggered then no changes were made. 
return null; } if (!owner.isNew()) { backUpAttribute = getAttributeValueFromObject(backUp); if ((backUpAttribute == null) && (cloneAttribute == null)) { return null; } ContainerPolicy cp = getContainerPolicy(); Object backupCollection = null; Object cloneCollection = null; cloneCollection = getRealCollectionAttributeValueFromObject(clone, session); backupCollection = getRealCollectionAttributeValueFromObject(backUp, session); if (cp.sizeFor(backupCollection) != cp.sizeFor(cloneCollection)) { return convertToChangeRecord(cloneCollection, backupCollection, owner, session); } boolean change = false; if (cp.isMapPolicy()){ change = compareMapCollectionForChange((Map)cloneCollection, (Map)backupCollection, session); } else { Object cloneIterator = cp.iteratorFor(cloneCollection); Object backUpIterator = cp.iteratorFor(backupCollection); // For bug 2863721 must use a different UnitOfWorkChangeSet as here just // seeing if changes are needed. If changes are needed then a // real changeSet will be created later. UnitOfWorkChangeSet uowComparisonChangeSet = new UnitOfWorkChangeSet(session); while (cp.hasNext(cloneIterator)) { Object cloneObject = cp.next(cloneIterator, session); // For CR#2285 assume that if null is added the collection has changed. 
if (cloneObject == null) { change = true; break; } Object backUpObject = null; if (cp.hasNext(backUpIterator)) { backUpObject = cp.next(backUpIterator, session); } else { change = true; break; } if (cloneObject.getClass().equals(backUpObject.getClass())) { ObjectBuilder builder = getReferenceDescriptor(cloneObject.getClass(), session).getObjectBuilder(); ObjectChangeSet initialChanges = builder.createObjectChangeSet(cloneObject, uowComparisonChangeSet, owner.isNew(), session); //compare for changes will return null if no change is detected and I need to remove the changeSet ObjectChangeSet changes = builder.compareForChange(cloneObject, backUpObject, uowComparisonChangeSet, session); if (changes != null) { change = true; break; } } else { change = true; break; } } if (cp.hasNext(backUpIterator)){ change = true; } } if ((change == true)) { return convertToChangeRecord(cloneCollection, backupCollection, owner, session); } else { return null; } } return convertToChangeRecord(getRealCollectionAttributeValueFromObject(clone, session), containerPolicy.containerInstance(), owner, session); } /** * INTERNAL: * Determine if an AggregateCollection that is contained as a map has changed by comparing the values in the * clone to the values in the backup. 
* @param cloneObjectCollection * @param backUpCollection * @param session * @return */ protected boolean compareMapCollectionForChange(Map cloneObjectCollection, Map backUpCollection, AbstractSession session){ HashMap originalKeyValues = new HashMap(10); HashMap cloneKeyValues = new HashMap(10); Object backUpIter = containerPolicy.iteratorFor(backUpCollection); while (containerPolicy.hasNext(backUpIter)) {// Make a lookup of the objects Map.Entry entry = (Map.Entry)containerPolicy.nextEntry(backUpIter, session); originalKeyValues.put(entry.getKey(), entry.getValue()); } UnitOfWorkChangeSet uowComparisonChangeSet = new UnitOfWorkChangeSet(session); Object cloneIter = containerPolicy.iteratorFor(cloneObjectCollection); while (containerPolicy.hasNext(cloneIter)) {//Compare them with the objects from the clone Map.Entry wrappedFirstObject = (Map.Entry)containerPolicy.nextEntry(cloneIter, session); Object firstValue = wrappedFirstObject.getValue(); Object firstKey = wrappedFirstObject.getKey(); Object backupValue = originalKeyValues.get(firstKey); if (!originalKeyValues.containsKey(firstKey)) { return true; } else if ((backupValue == null) && (firstValue != null)) {//the object was not in the backup return true; } else { ObjectBuilder builder = getReferenceDescriptor(firstValue.getClass(), session).getObjectBuilder(); ObjectChangeSet changes = builder.compareForChange(firstValue, backupValue, uowComparisonChangeSet, session); if (changes != null) { return true; } else { originalKeyValues.remove(firstKey); } } } return !originalKeyValues.isEmpty(); } /** * INTERNAL: * Old and new lists are compared and only the changes are written to the database. 
* Called only if listOrderField != null */ protected void compareListsAndWrite(List previousList, List currentList, WriteObjectQuery query) throws DatabaseException, OptimisticLockException { if(this.isListOrderFieldUpdatable) { compareListsAndWrite_UpdatableListOrderField(previousList, currentList, query); } else { compareListsAndWrite_NonUpdatableListOrderField(previousList, currentList, query); } } /** * INTERNAL: * Old and new lists are compared and only the changes are written to the database. * Called only if listOrderField != null */ protected void compareListsAndWrite_NonUpdatableListOrderField(List previousList, List currentList, WriteObjectQuery query) throws DatabaseException, OptimisticLockException { boolean shouldRepairOrder = false; if(currentList instanceof IndirectList) { shouldRepairOrder = ((IndirectList)currentList).isListOrderBrokenInDb(); } HashMap previousAndCurrentByKey = new HashMap(); int pkSize = getReferenceDescriptor().getPrimaryKeyFields().size(); // First index the current objects by their primary key. for (int i=0; i < currentList.size(); i++) { Object currentObject = currentList.get(i); try { CacheId primaryKey = (CacheId)getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(currentObject, query.getSession()); primaryKey.add(i); Object[] previousAndCurrent = new Object[]{null, currentObject}; previousAndCurrentByKey.put(primaryKey, previousAndCurrent); } catch (NullPointerException e) { // For CR#2646 quietly discard nulls added to a collection mapping. // This try-catch is essentially a null check on currentObject, for // ideally the customer should check for these themselves. 
if (currentObject != null) { throw e; } } } if (shouldRepairOrder) { DeleteAllQuery deleteAllQuery = (DeleteAllQuery)this.deleteAllQuery; if (this.isCascadeOnDeleteSetOnDatabase) { deleteAllQuery = (DeleteAllQuery)deleteAllQuery.clone(); deleteAllQuery.setIsInMemoryOnly(false); } deleteAllQuery.executeDeleteAll(query.getSession().getSessionForClass(getReferenceClass()), query.getTranslationRow(), new Vector(previousList)); } else { // Next index the previous objects (read from db or from backup in uow) for(int i=0; i < previousList.size(); i++) { Object previousObject = previousList.get(i); CacheId primaryKey = (CacheId)getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(previousObject, query.getSession()); primaryKey.add(i); Object[] previousAndCurrent = previousAndCurrentByKey.get(primaryKey); if (previousAndCurrent == null) { // there's no current object - that means that previous object should be deleted DatabaseRecord extraData = new DatabaseRecord(1); extraData.put(this.listOrderField, i); objectRemovedDuringUpdate(query, previousObject, extraData); } else { previousAndCurrent[0] = previousObject; } } } Iterator> it = previousAndCurrentByKey.entrySet().iterator(); while(it.hasNext()) { Map.Entry entry = it.next(); Object key = entry.getKey(); Object[] previousAndCurrent = entry.getValue(); // previousObject may be null, meaning currentObject has been added to the list Object previousObject = previousAndCurrent[0]; // currentObject is not null Object currentObject = previousAndCurrent[1]; if(previousObject == null) { // there's no previous object - that means that current object should be added. 
// index of currentObject in currentList int iCurrent = (Integer)((CacheId)key).getPrimaryKey()[pkSize]; DatabaseRecord extraData = new DatabaseRecord(1); extraData.put(this.listOrderField, iCurrent); objectAddedDuringUpdate(query, currentObject, null, extraData); } else { if(!this.isEntireObjectPK) { objectUnchangedDuringUpdate(query, currentObject, previousObject); } } } if(shouldRepairOrder) { ((IndirectList)currentList).setIsListOrderBrokenInDb(false); } } /** * INTERNAL: * Old and new lists are compared and only the changes are written to the database. * Called only if listOrderField != null */ protected void compareListsAndWrite_UpdatableListOrderField(List previousList, List currentList, WriteObjectQuery query) throws DatabaseException, OptimisticLockException { boolean shouldRepairOrder = false; if(currentList instanceof IndirectList) { shouldRepairOrder = ((IndirectList)currentList).isListOrderBrokenInDb(); } // Object[] = {previousObject, currentObject, previousIndex, currentIndex} HashMap previousAndCurrentByKey = new HashMap(); // a SortedMap, current index mapped by previous index, both indexes must exist and be not equal. TreeMap currentIndexByPreviousIndex = new TreeMap(); // First index the current objects by their primary key. for(int i=0; i < currentList.size(); i++) { Object currentObject = currentList.get(i); try { Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(currentObject, query.getSession()); Object[] previousAndCurrent = new Object[]{null, currentObject, null, i}; previousAndCurrentByKey.put(primaryKey, previousAndCurrent); } catch (NullPointerException e) { // For CR#2646 quietly discard nulls added to a collection mapping. // This try-catch is essentially a null check on currentObject, for // ideally the customer should check for these themselves. 
if (currentObject != null) { throw e; } } } // Next index the previous objects (read from db or from backup in uow), also remove the objects to be removed. for(int i=0; i < previousList.size(); i++) { Object previousObject = previousList.get(i); Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(previousObject, query.getSession()); Object[] previousAndCurrent = previousAndCurrentByKey.get(primaryKey); if(previousAndCurrent == null) { // there's no current object - that means that previous object should be deleted objectRemovedDuringUpdate(query, previousObject, null); } else { previousAndCurrent[0] = previousObject; previousAndCurrent[2] = i; int iCurrent = (Integer)previousAndCurrent[3]; if(i != iCurrent || shouldRepairOrder) { currentIndexByPreviousIndex.put(i, iCurrent); } } } // some order indexes should be changed if(!currentIndexByPreviousIndex.isEmpty()) { boolean shouldUpdateOrderUsingPk = shouldRepairOrder; if(!shouldUpdateOrderUsingPk) { // search for cycles in order changes, such as, for instance: // previous index 1, 2 // current index 2, 1 // or // previous index 1, 3, 5 // current index 3, 5, 1 // those objects order index can't be updated using their previous order index value - should use pk in where clause instead. // For now, if a cycle is found let's update all order indexes using pk. // Ideally that should be refined in the future so that only indexes participating in cycles updated using pks - others still through bulk update. 
boolean isCycleFound = false; int iCurrentMax = -1; Iterator itCurrentIndexes = currentIndexByPreviousIndex.values().iterator(); while(itCurrentIndexes.hasNext() && !isCycleFound) { int iCurrent = itCurrentIndexes.next(); if(iCurrent > iCurrentMax) { iCurrentMax = iCurrent; } else { isCycleFound = true; } } shouldUpdateOrderUsingPk = isCycleFound; } if(shouldUpdateOrderUsingPk) { Iterator> it = previousAndCurrentByKey.entrySet().iterator(); while(it.hasNext()) { Map.Entry entry = it.next(); Object key = entry.getKey(); Object[] previousAndCurrent = entry.getValue(); // previousObject may be null, meaning currentObject has been added to the list Object previousObject = previousAndCurrent[0]; if(previousObject != null) { Object currentObject = previousAndCurrent[1]; if(!this.isEntireObjectPK) { objectUnchangedDuringUpdate(query, currentObject, previousObject); } int iPrevious = (Integer)previousAndCurrent[2]; int iCurrent = (Integer)previousAndCurrent[3]; if(iPrevious != iCurrent || shouldRepairOrder) { objectChangedListOrderDuringUpdate(query, key, iCurrent); } } } } else { // update the objects - but not their order values if(!this.isEntireObjectPK) { Iterator> iterator = previousAndCurrentByKey.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry entry = iterator.next(); Object[] previousAndCurrent = entry.getValue(); // previousObject may be null, meaning currentObject has been added to the list Object previousObject = previousAndCurrent[0]; if( previousObject != null) { Object currentObject = previousAndCurrent[1]; objectUnchangedDuringUpdate(query, currentObject, previousObject); } } } // a bulk update query will be executed for each bunch of adjacent previous indexes from which current indexes could be obtained with a shift, for instance: // previous index 1, 2, 3 // current index 5, 6, 7 // the sql will look like: // UPDATE ... SET ListOrderField = ListOrderField + 4 WHERE 1 <= ListOrderField AND ListOrderField <= 3 AND FK = ... 
int iMin = -1; int iMax = -1; int iShift = 0; // each index corresponds to a bunch of objects to be shifted ArrayList iMinList = new ArrayList(); ArrayList iMaxList = new ArrayList(); ArrayList iShiftList = new ArrayList(); Iterator> itEntries = currentIndexByPreviousIndex.entrySet().iterator(); while(itEntries.hasNext()) { Map.Entry entry = itEntries.next(); int iPrevious = entry.getKey(); int iCurrent = entry.getValue(); if(iMin >= 0) { // the shift should be the same for all indexes participating in bulk update int iPreviousExpected = iMax + 1; if(iPrevious == iPreviousExpected && iCurrent == iPreviousExpected + iShift) { iMax++; } else { iMinList.add(iMin); iMaxList.add(iMax); iShiftList.add(iShift); iMin = -1; } } if(iMin == -1) { // start defining a new bulk update - define iShift, iFirst, iLast iMin = iPrevious; iMax = iPrevious; iShift = iCurrent - iPrevious; } } if(iMin >= 0) { iMinList.add(iMin); iMaxList.add(iMax); iShiftList.add(iShift); } // Order is important - shouldn't override indexes in one bunch while shifting another one. // Look for the left-most and right-most bunches and update them first. 
while(!iMinList.isEmpty()) { int iMinLeft = previousList.size() + 1; int iMinRight = -1; int indexShiftLeft = -1; int indexShiftRight = -1; for(int i=0; i < iMinList.size(); i++) { iMin = iMinList.get(i); iShift = iShiftList.get(i); if(iShift < 0) { if(iMin < iMinLeft) { iMinLeft = iMin; indexShiftLeft = i; } } else { // iShift > 0 if(iMin > iMinRight) { iMinRight = iMin; indexShiftRight = i; } } } if(indexShiftLeft >= 0) { objectChangedListOrderDuringUpdate(query, iMinList.get(indexShiftLeft), iMaxList.get(indexShiftLeft), iShiftList.get(indexShiftLeft)); } if(indexShiftRight >= 0) { objectChangedListOrderDuringUpdate(query, iMinList.get(indexShiftRight), iMaxList.get(indexShiftRight), iShiftList.get(indexShiftRight)); } if(indexShiftLeft >= 0) { iMinList.remove(indexShiftLeft); iMaxList.remove(indexShiftLeft); iShiftList.remove(indexShiftLeft); } if(indexShiftRight >= 0) { iMinList.remove(indexShiftRight); iMaxList.remove(indexShiftRight); iShiftList.remove(indexShiftRight); } } } } // Add the new objects Iterator> iterator = previousAndCurrentByKey.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry entry = iterator.next(); Object[] previousAndCurrent = entry.getValue(); // previousObject may be null, meaning currentObject has been added to the list Object previousObject = previousAndCurrent[0]; if (previousObject == null) { // there's no previous object - that means that current object should be added. 
// currentObject is not null Object currentObject = previousAndCurrent[1]; // index of currentObject in currentList int iCurrent = (Integer)previousAndCurrent[3]; DatabaseRecord extraData = new DatabaseRecord(1); extraData.put(this.listOrderField, iCurrent); objectAddedDuringUpdate(query, currentObject, null, extraData); } } if (shouldRepairOrder) { ((IndirectList)currentList).setIsListOrderBrokenInDb(false); } } protected int objectChangedListOrderDuringUpdate(WriteObjectQuery query, int iMin, int iMax, int iShift) { DataModifyQuery updateQuery; AbstractRecord translationRow = query.getTranslationRow().clone(); translationRow.put(min, iMin); if(iMin == iMax) { translationRow.put(this.listOrderField, iMin + iShift); updateQuery = updateListOrderFieldQuery; } else { translationRow.put(max, iMax); translationRow.put(shift, iShift); updateQuery = bulkUpdateListOrderFieldQuery; } return (Integer)query.getSession().executeQuery(updateQuery, translationRow); } protected int objectChangedListOrderDuringUpdate(WriteObjectQuery query, Object key, int newOrderValue) { AbstractRecord translationRow = query.getTranslationRow().clone(); translationRow.put(this.listOrderField, newOrderValue); getReferenceDescriptor().getObjectBuilder().writeIntoRowFromPrimaryKeyValues(translationRow, key, query.getSession(), true); return (Integer)query.getSession().executeQuery(this.pkUpdateListOrderFieldQuery, translationRow); } /** * INTERNAL: * Compare the attributes belonging to this mapping for the objects. 
     */
    public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
        Object firstCollection = getRealCollectionAttributeValueFromObject(firstObject, session);
        Object secondCollection = getRealCollectionAttributeValueFromObject(secondObject, session);
        if(this.listOrderField != null) {
            // Ordered collections compare positionally.
            return this.compareLists((List)firstCollection, (List)secondCollection, session);
        }
        // NOTE: local intentionally shadows the containerPolicy field.
        ContainerPolicy containerPolicy = getContainerPolicy();
        if (containerPolicy.sizeFor(firstCollection) != containerPolicy.sizeFor(secondCollection)) {
            return false;
        }
        if (containerPolicy.sizeFor(firstCollection) == 0) {
            return true;
        }
        if (isMapKeyMapping()) {
            // Map-keyed collections: compare the value's pk registered under each key.
            Object firstIter = containerPolicy.iteratorFor(firstCollection);
            Object secondIter = containerPolicy.iteratorFor(secondCollection);

            Map keyValues = new HashMap();

            while (containerPolicy.hasNext(secondIter)) {
                Map.Entry secondEntry = (Map.Entry)containerPolicy.nextEntry(secondIter, session);
                Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(secondEntry.getValue(), session);
                Object key = secondEntry.getKey();
                keyValues.put(key, primaryKey);
            }
            while (containerPolicy.hasNext(firstIter)) {
                Map.Entry firstEntry = (Map.Entry)containerPolicy.nextEntry(firstIter, session);
                Object primaryKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(firstEntry.getValue(), session);
                Object key = firstEntry.getKey();
                if (!primaryKey.equals(keyValues.get(key))) {
                    return false;
                }
            }
        } else {
            // Unordered comparison: each element of the first collection must deep-match
            // at least one element of the second (O(n^2) nested scan).
            //iterator the first aggregate collection
            for (Object iterFirst = containerPolicy.iteratorFor(firstCollection);
                     containerPolicy.hasNext(iterFirst);) {
                //fetch the next object from the first iterator.
                Object firstAggregateObject = containerPolicy.next(iterFirst, session);

                //iterator the second aggregate collection
                for (Object iterSecond = containerPolicy.iteratorFor(secondCollection); true;) {
                    //fetch the next object from the second iterator.
                    Object secondAggregateObject = containerPolicy.next(iterSecond, session);

                    //matched object found, break to outer FOR loop
                    if (getReferenceDescriptor().getObjectBuilder().compareObjects(firstAggregateObject, secondAggregateObject, session)) {
                        break;
                    }

                    if (!containerPolicy.hasNext(iterSecond)) {
                        return false;
                    }
                }
            }
        }
        return true;
    }

    /**
     * INTERNAL:
     * Compare the attributes belonging to this mapping for the objects.
     * Lists are equal when same size and element i of each list deep-compares equal.
     */
    public boolean compareLists(List firstList, List secondList, AbstractSession session) {
        if (firstList.size() != secondList.size()) {
            return false;
        }

        int size = firstList.size();
        for(int i=0; i < size; i++) {
            Object firstObject = firstList.get(i);
            Object secondObject = secondList.get(i);
            if (!getReferenceDescriptor().getObjectBuilder().compareObjects(firstObject, secondObject, session)) {
                return false;
            }
        }

        return true;
    }

    /**
     * INTERNAL:
     * This method is used to convert the contents of an aggregateCollection into a
     * changeRecord
     * @return org.eclipse.persistence.internal.sessions.AggregateCollectionChangeRecord the changerecord representing this AggregateCollectionMapping
     * @param owner org.eclipse.persistence.internal.sessions.ObjectChangeSet the ChangeSet that uses this record
     * @param cloneCollection Object the collection to convert
     * @param session org.eclipse.persistence.internal.sessions.AbstractSession
     */
    protected ChangeRecord convertToChangeRecord(Object cloneCollection, Object backupCollection, ObjectChangeSet owner, AbstractSession session) {
        ContainerPolicy cp = getContainerPolicy();
        Object cloneIter = cp.iteratorFor(cloneCollection);
        Vector collectionChanges = new Vector(2);
        while (cp.hasNext(cloneIter)) {
            Object entry = cp.nextEntry(cloneIter, session);
            Object aggregateObject = cp.unwrapIteratorResult(entry);
            // For CR#2258 quietly ignore nulls inserted into a collection.
            if (aggregateObject != null) {
                // backup of null: every element is recorded as a full new-object change set.
                ObjectChangeSet changes = getReferenceDescriptor(aggregateObject.getClass(), session).getObjectBuilder().compareForChange(aggregateObject, null, (UnitOfWorkChangeSet)owner.getUOWChangeSet(), session);
                changes.setNewKey(cp.keyFromIterator(cloneIter));
                collectionChanges.addElement(changes);
            }
        }

        //cr 3013 Removed if collection is empty return null block, which prevents recording clear() change
        AggregateCollectionChangeRecord changeRecord = new AggregateCollectionChangeRecord(owner);
        changeRecord.setAttribute(getAttributeName());
        changeRecord.setMapping(this);
        changeRecord.setChangedValues(collectionChanges);
        changeRecord.setOriginalCollection(backupCollection);

        getContainerPolicy().compareCollectionsForChange(backupCollection, cloneCollection, changeRecord, session, remoteReferenceDescriptor);

        return changeRecord;
    }

    /**
     * INTERNAL:
     * Copies member's value
     */
    @Override
    protected Object copyElement(Object original, CopyGroup group) {
        if (original == null) {
            return null;
        }
        ClassDescriptor descriptor = getReferenceDescriptor(original.getClass(), group.getSession());
        if (descriptor == null) {
            // No descriptor for this element's class - return it as-is rather than copying.
            return original;
        }

        return descriptor.getObjectBuilder().copyObject(original, group);
    }

    /**
     * INTERNAL
     * Called when a DatabaseMapping is used to map the key in a collection. Returns the key.
     */
    public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){
        // Delegates straight to valueFromRow using the query's execution session.
        return valueFromRow(dbRow, null, query, parentCacheKey, query.getExecutionSession(), isTargetProtected, null);
    }

    /**
     * To delete all the entries matching the selection criteria from the table stored in the
     * referenced descriptor
     */
    protected void deleteAll(DeleteObjectQuery query, AbstractSession session) throws DatabaseException {
        Object attribute = getAttributeValueFromObject(query.getObject());
        if (usesIndirection()) {
           if (!this.indirectionPolicy.objectIsInstantiated(attribute)) {
               // An empty Vector indicates to DeleteAllQuery that no objects should be removed from cache
               ((DeleteAllQuery)this.deleteAllQuery).executeDeleteAll(session.getSessionForClass(this.referenceClass), query.getTranslationRow(), new Vector(0));
               return;
           }
        }
        Object referenceObjects = getRealCollectionAttributeValueFromObject(query.getObject(), session);
        // PERF: Avoid delete if empty.
        if (session.isUnitOfWork() && this.containerPolicy.isEmpty(referenceObjects)) {
            return;
        }
        ((DeleteAllQuery)this.deleteAllQuery).executeDeleteAll(session.getSessionForClass(this.referenceClass), query.getTranslationRow(), this.containerPolicy.vectorFor(referenceObjects, session));
    }

    /**
     * INTERNAL:
     * Execute a descriptor event for the specified event code.
     */
    protected void executeEvent(int eventCode, ObjectLevelModifyQuery query) {
        ClassDescriptor referenceDescriptor = getReferenceDescriptor(query.getObject().getClass(), query.getSession());

        // PERF: Avoid events if no listeners.
        if (referenceDescriptor.getEventManager().hasAnyEventListeners()) {
            referenceDescriptor.getEventManager().executeEvent(new DescriptorEvent(eventCode, query));
        }
    }

    /**
     * INTERNAL:
     * Extract the source primary key value from the target row.
     * Used for batch reading, most following same order and fields as in the mapping.
*/ @Override protected Object extractKeyFromTargetRow(AbstractRecord row, AbstractSession session) { int size = this.targetForeignKeyFields.size(); Object[] key = new Object[size]; ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager(); for (int index = 0; index < size; index++) { DatabaseField targetField = this.targetForeignKeyFields.get(index); DatabaseField sourceField = this.sourceKeyFields.get(index); Object value = row.get(targetField); // Must ensure the classification gets a cache hit. try { value = conversionManager.convertObject(value, sourceField.getType()); } catch (ConversionException e) { throw ConversionException.couldNotBeConverted(this, getDescriptor(), e); } key[index] = value; } return new CacheId(key); } /** * INTERNAL: * Extract the primary key value from the source row. * Used for batch reading, most following same order and fields as in the mapping. */ @Override protected Object extractBatchKeyFromRow(AbstractRecord row, AbstractSession session) { int size = this.sourceKeyFields.size(); Object[] key = new Object[size]; ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager(); for (int index = 0; index < size; index++) { DatabaseField field = this.sourceKeyFields.get(index); Object value = row.get(field); // Must ensure the classification gets a cache hit. try { value = conversionManager.convertObject(value, field.getType()); } catch (ConversionException exception) { throw ConversionException.couldNotBeConverted(this, this.descriptor, exception); } key[index] = value; } return new CacheId(key); } /** * INTERNAL: * Return the selection criteria used to IN batch fetching. */ @Override protected Expression buildBatchCriteria(ExpressionBuilder builder, ObjectLevelReadQuery query) { int size = this.targetForeignKeyFields.size(); if (size > 1) { // Support composite keys using nested IN. 
List fields = new ArrayList(size); for (DatabaseField targetForeignKeyField : this.targetForeignKeyFields) { fields.add(builder.getField(targetForeignKeyField)); } return query.getSession().getPlatform().buildBatchCriteriaForComplexId(builder, fields); } else { return query.getSession().getPlatform().buildBatchCriteria(builder, builder.getField(this.targetForeignKeyFields.get(0))); } } /** * INTERNAL: * Allow the mapping the do any further batch preparation. */ @Override protected void postPrepareNestedBatchQuery(ReadQuery batchQuery, ObjectLevelReadQuery query) { super.postPrepareNestedBatchQuery(batchQuery, query); ReadAllQuery aggregateBatchQuery = (ReadAllQuery)batchQuery; for (DatabaseField relationField : getTargetForeignKeyFields()) { aggregateBatchQuery.getAdditionalFields().add(relationField); } } /** * INTERNAL: * return the aggregate Record with the primary keys from the source table and target table */ public AbstractRecord getAggregateRow(ObjectLevelModifyQuery query, Object object) { Vector referenceObjectKeys = getReferenceObjectKeys(query); AbstractRecord aggregateRow = new DatabaseRecord(); Vector keys = getTargetForeignKeyFields(); for (int keyIndex = 0; keyIndex < keys.size(); keyIndex++) { aggregateRow.put(keys.elementAt(keyIndex), referenceObjectKeys.elementAt(keyIndex)); } getReferenceDescriptor(object.getClass(), query.getSession()).getObjectBuilder().buildRow(aggregateRow, object, query.getSession(), WriteType.UNDEFINED); return aggregateRow; } /** * Delete all criteria is created with target foreign keys and source keys. * This criteria is then used to delete target records from the table. 
*/ protected Expression getDeleteAllCriteria(AbstractSession session) { Expression expression; Expression criteria = null; Expression builder = new ExpressionBuilder(); for (Iterator keys = getTargetForeignKeyToSourceKeys().keySet().iterator(); keys.hasNext();) { DatabaseField targetForeignKey = (DatabaseField)keys.next(); DatabaseField sourceKey = getTargetForeignKeyToSourceKeys().get(targetForeignKey); expression = builder.getField(targetForeignKey).equal(builder.getParameter(sourceKey)); criteria = expression.and(criteria); } return criteria; } /** * Overrides CollectionMappig because this mapping requires a DeleteAllQuery instead of a ModifyQuery. */ protected ModifyQuery getDeleteAllQuery() { if (deleteAllQuery == null) { deleteAllQuery = new DeleteAllQuery();//this is casted to a DeleteAllQuery } return deleteAllQuery; } /** * INTERNAL: * Return the referenceDescriptor. This is a descriptor which is associated with the reference class. * NOTE: If you are looking for the descriptor for a specific aggregate object, use * #getReferenceDescriptor(Object). This will ensure you get the right descriptor if the object's * descriptor is part of an inheritance tree. */ public ClassDescriptor getReferenceDescriptor() { if (referenceDescriptor == null) { referenceDescriptor = remoteReferenceDescriptor; } return referenceDescriptor; } /** * INTERNAL: * for inheritance purpose */ public ClassDescriptor getReferenceDescriptor(Class theClass, AbstractSession session) { if (this.referenceDescriptor.getJavaClass() == theClass) { return this.referenceDescriptor; } else { ClassDescriptor subDescriptor; // Since aggregate collection mappings clone their descriptors, for inheritance the correct child clone must be found. 
subDescriptor = this.referenceDescriptor.getInheritancePolicy().getSubclassDescriptor(theClass); if (subDescriptor == null) { throw DescriptorException.noSubClassMatch(theClass, this); } else { return subDescriptor; } } } /** * INTERNAL: * get reference object keys */ public Vector getReferenceObjectKeys(ObjectLevelModifyQuery query) throws DatabaseException, OptimisticLockException { Vector referenceObjectKeys = new Vector(getSourceKeyFields().size()); //For CR#2587-S.M. For nested aggregate collections the source keys can easily be read from the original query. AbstractRecord translationRow = query.getTranslationRow(); for (Enumeration sourcekeys = getSourceKeyFields().elements(); sourcekeys.hasMoreElements();) { DatabaseField sourceKey = (DatabaseField)sourcekeys.nextElement(); // CR#2587. Try first to get the source key from the original query. If that fails try to get it from the object. Object referenceKey = null; if ((translationRow != null) && (translationRow.containsKey(sourceKey))) { referenceKey = translationRow.get(sourceKey); } else { referenceKey = getDescriptor().getObjectBuilder().extractValueFromObjectForField(query.getObject(), sourceKey, query.getSession()); } referenceObjectKeys.addElement(referenceKey); } return referenceObjectKeys; } /** * PUBLIC: * Return the source key field names associated with the mapping. * These are in-order with the targetForeignKeyFieldNames. */ public Vector getSourceKeyFieldNames() { Vector fieldNames = new Vector(getSourceKeyFields().size()); for (Enumeration fieldsEnum = getSourceKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return the source key names associated with the mapping */ public Vector getSourceKeyFields() { return sourceKeyFields; } /** * PUBLIC: * Return the target foregin key field names associated with the mapping. * These are in-order with the sourceKeyFieldNames. 
*/ public Vector getTargetForeignKeyFieldNames() { Vector fieldNames = new Vector(getTargetForeignKeyFields().size()); for (Enumeration fieldsEnum = getTargetForeignKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return the target foregin key fields associated with the mapping */ public Vector getTargetForeignKeyFields() { return targetForeignKeyFields; } /** * INTERNAL: */ public Map getTargetForeignKeyToSourceKeys() { return targetForeignKeyToSourceKeys; } /** * INTERNAL: * For aggregate collection mapping the reference descriptor is cloned. The cloned descriptor is then * assigned primary keys and table names before initialize. Once cloned descriptor is initialized * it is assigned as reference descriptor in the aggregate mapping. This is very specific * behavior for aggregate mappings. The original descriptor is used only for creating clones and * after that mapping never uses it. * Some initialization is done in postInitialize to ensure the target descriptor's references are initialized. */ public void initialize(AbstractSession session) throws DescriptorException { if (session.hasBroker()) { if (getReferenceClass() == null) { throw DescriptorException.referenceClassNotSpecified(this); } // substitute session that owns the mapping for the session that owns reference descriptor. session = session.getBroker().getSessionForClass(getReferenceClass()); } super.initialize(session); if (getDescriptor() != null) { // descriptor will only be null in special case where the mapping has not been added to a descriptor prior to initialization. 
getDescriptor().addMappingsPostCalculateChanges(this); // always equivalent to Private Owned } if (!getReferenceDescriptor().isAggregateCollectionDescriptor()) { session.getIntegrityChecker().handleError(DescriptorException.referenceDescriptorIsNotAggregateCollection(getReferenceClass().getName(), this)); } if (shouldInitializeSelectionCriteria()) { if (isSourceKeySpecified()) { initializeTargetForeignKeyToSourceKeys(session); } else { initializeTargetForeignKeyToSourceKeysWithDefaults(session); } initializeSelectionCriteria(session); getContainerPolicy().addAdditionalFieldsToQuery(getSelectionQuery(), getAdditionalFieldsBaseExpression(getSelectionQuery())); } // Aggregate 1:m never maintains cache as target objects are aggregates. getSelectionQuery().setShouldMaintainCache(false); // Add foreign key fields to select, as field values may be required for relationships. for (DatabaseField relationField : getTargetForeignKeyFields()) { ((ReadAllQuery)getSelectionQuery()).getAdditionalFields().add(relationField); } initializeDeleteAllQuery(session); if (this.listOrderField != null) { initializeUpdateListOrderQuery(session, ""); initializeUpdateListOrderQuery(session, bulk); initializeUpdateListOrderQuery(session, pk); } if (getDescriptor() != null) { // Check if any foreign keys reference a secondary table. if (getDescriptor().getTables().size() > 1) { DatabaseTable firstTable = getDescriptor().getTables().get(0); for (DatabaseField field : getSourceKeyFields()) { if (!field.getTable().equals(firstTable)) { getDescriptor().setHasMultipleTableConstraintDependecy(true); } } } } // Aggregate collections do not have a cache key when build, so cannot be cached if they have references to isolated classes. if ((this.referenceDescriptor != null) && this.referenceDescriptor.hasNoncacheableMappings()) { this.isCacheable = false; } } /** * Initialize and set the descriptor for the referenced class in this mapping. 
*/
protected void initializeReferenceDescriptor(AbstractSession session) throws DescriptorException {
    super.initializeReferenceDescriptor(session);
    // Restored generic types on the translation maps: extraction had stripped the <...> parameters.
    HashMap<DatabaseField, DatabaseField> fieldTranslation = null;
    HashMap<DatabaseTable, DatabaseTable> tableTranslation = null;
    ClassDescriptor referenceDescriptor = getReferenceDescriptor();
    // Work on a clone; the original reference descriptor is never mutated by this mapping.
    ClassDescriptor clonedDescriptor = (ClassDescriptor) referenceDescriptor.clone();
    if (clonedDescriptor.isAggregateDescriptor()) {
        clonedDescriptor.descriptorIsAggregateCollection();
    }
    int nAggregateTables = 0;
    if (referenceDescriptor.getTables() != null) {
        nAggregateTables = referenceDescriptor.getTables().size();
    }
    if (! aggregateToSourceFields.isEmpty()) {
        DatabaseTable aggregateDefaultTable = null;
        if (nAggregateTables != 0) {
            aggregateDefaultTable = referenceDescriptor.getTables().get(0);
        } else {
            aggregateDefaultTable = new DatabaseTable();
        }
        tableTranslation = new HashMap<DatabaseTable, DatabaseTable>();
        fieldTranslation = new HashMap<DatabaseField, DatabaseField>();
        for (String aggregateFieldName : aggregateToSourceFields.keySet()) {
            DatabaseField aggregateField = new DatabaseField(aggregateFieldName);
            // 322233 - continue using a string for the Aggregate field name
            // because the table may or may not have been set. DatabaseFields without a table
            // will match any DatabaseField with a table if the name is the same, breaking
            // legacy support for AggregateCollection inheritance models
            if (! aggregateField.hasTableName()) {
                aggregateField.setTable(aggregateDefaultTable);
            }
            DatabaseField sourceField = aggregateToSourceFields.get(aggregateFieldName);
            if (! sourceField.hasTableName()) {
                if (defaultSourceTable == null) {
                    // TODO: throw exception: source field doesn't have table
                } else {
                    sourceField.setTable(defaultSourceTable);
                }
            }
            DatabaseTable sourceTable = sourceField.getTable();
            DatabaseTable savedSourceTable = tableTranslation.get(aggregateField.getTable());
            if (savedSourceTable == null) {
                tableTranslation.put(aggregateField.getTable(), sourceTable);
            } else {
                if (! sourceTable.equals(savedSourceTable)) {
                    // TODO: throw exception: aggregate table mapped to two source tables
                }
            }
            sourceField.setIsTranslated(true);
            fieldTranslation.put(aggregateField, sourceField);
        }
        // Translate the table and fields now.
        translateTablesAndFields(clonedDescriptor, fieldTranslation, tableTranslation);
    } else {
        if (nAggregateTables == 0) {
            if (defaultSourceTable == null) {
                // TODO: throw exception
            } else {
                clonedDescriptor.addTable(defaultSourceTable);
            }
        }
    }
    updateNestedAggregateMappings(clonedDescriptor, session);
    if (clonedDescriptor.isChildDescriptor()) {
        ClassDescriptor parentDescriptor = session.getDescriptor(clonedDescriptor.getInheritancePolicy().getParentClass());
        initializeParentInheritance(parentDescriptor, clonedDescriptor, session, fieldTranslation, tableTranslation);
    }
    if (clonedDescriptor.isAggregateDescriptor()) {
        clonedDescriptor.descriptorIsAggregateCollection();
    }
    setReferenceDescriptor(clonedDescriptor);
    clonedDescriptor.preInitialize(session);
    getContainerPolicy().initialize(session, clonedDescriptor.getDefaultTable());
    if (clonedDescriptor.getPrimaryKeyFields().isEmpty()) {
        // No pk on the aggregate: the whole row acts as the identity; key on the FK (and list-order field if present).
        this.isEntireObjectPK = true;
        clonedDescriptor.getAdditionalAggregateCollectionKeyFields().addAll(this.getTargetForeignKeyFields());
        if(this.listOrderField != null && !this.isListOrderFieldUpdatable) {
            clonedDescriptor.getAdditionalAggregateCollectionKeyFields().add(this.listOrderField);
        }
    }
    List<DatabaseField> identityFields = getContainerPolicy().getIdentityFieldsForMapKey();
    if (identityFields != null){
        clonedDescriptor.getAdditionalAggregateCollectionKeyFields().addAll(identityFields);
    }
    clonedDescriptor.initialize(session);
    if (clonedDescriptor.hasInheritance() && clonedDescriptor.getInheritancePolicy().hasChildren()) {
        //clone child descriptors
        initializeChildInheritance(clonedDescriptor, session, fieldTranslation, tableTranslation);
    }
}

/**
 * INTERNAL:
 * Build the DataModifyQuery used to maintain the list-order field.
 * queryType selects one of three variants and is compared by identity (==) against the
 * class constants pk / bulk — callers pass those constants, not arbitrary strings.
 */
protected void initializeUpdateListOrderQuery(AbstractSession session, String queryType) {
    DataModifyQuery query = new DataModifyQuery();
    // Intentional reference comparison against the shared constants.
    if(queryType == pk) {
        this.pkUpdateListOrderFieldQuery = query;
    } else if(queryType == bulk) {
        this.bulkUpdateListOrderFieldQuery = query;
    } else {
        this.updateListOrderFieldQuery = query;
    }
    query.setSessionName(session.getName());

    // Build where clause expression.
    Expression whereClause = null;
    Expression builder = new ExpressionBuilder();
    AbstractRecord modifyRow = new DatabaseRecord();
    if(queryType == pk) {
        // Restored Iterator<DatabaseField>: raw Iterator.next() would not compile here.
        Iterator<DatabaseField> it = getReferenceDescriptor().getPrimaryKeyFields().iterator();
        while(it.hasNext()) {
            DatabaseField pkField = it.next();
            DatabaseField sourceField = targetForeignKeyToSourceKeys.get(pkField);
            // Parameter named after the source key when the pk field is a FK, otherwise after the pk field itself.
            DatabaseField parameterField = sourceField != null ? sourceField : pkField;
            Expression expression = builder.getField(pkField).equal(builder.getParameter(parameterField));
            whereClause = expression.and(whereClause);
        }
        modifyRow.add(this.listOrderField, null);
    } else {
        Iterator<Map.Entry<DatabaseField, DatabaseField>> it = targetForeignKeyToSourceKeys.entrySet().iterator();
        while(it.hasNext()) {
            Map.Entry<DatabaseField, DatabaseField> entry = it.next();
            Expression expression = builder.getField(entry.getKey()).equal(builder.getParameter(entry.getValue()));
            whereClause = expression.and(whereClause);
        }
        Expression listOrderExpression;
        if(queryType == bulk) {
            // Shift every order value in [min, max] by 'shift'.
            listOrderExpression = builder.getField(this.listOrderField).between(builder.getParameter(min), builder.getParameter(max));
            modifyRow.add(this.listOrderField, ExpressionMath.add(builder.getField(this.listOrderField), builder.getParameter(shift)));
        } else {
            listOrderExpression = builder.getField(this.listOrderField).equal(builder.getParameter(min));
            modifyRow.add(this.listOrderField, null);
        }
        whereClause = listOrderExpression.and(whereClause);
    }
    SQLUpdateStatement statement = new SQLUpdateStatement();
    statement.setTable(getReferenceDescriptor().getDefaultTable());
    statement.setWhereClause(whereClause);
    statement.setModifyRow(modifyRow);
    query.setSQLStatement(statement);
}

/**
 * INTERNAL:
 * Clone and prepare the JoinedAttributeManager nested JoinedAttributeManager.
* This is used for nested joining as the JoinedAttributeManager passed to the joined build object. */ public ObjectLevelReadQuery prepareNestedJoins(JoinedAttributeManager joinManager, ObjectBuildingQuery baseQuery, AbstractSession session) { ObjectLevelReadQuery nestedQuery = super.prepareNestedJoins(joinManager, baseQuery, session); nestedQuery.setShouldMaintainCache(false); return nestedQuery; } /** * INTERNAL: * Called in case fieldTranslation != null * Sets new primary keys, tables, appends fieldTranslation to fieldMap so that all fields in mappings, inheritance etc. translated to the new ones. */ protected static void translateTablesAndFields(ClassDescriptor descriptor, HashMap fieldTranslation, HashMap tableTranslation) { int nTables = 0; if(descriptor.getTables() != null) { nTables = descriptor.getTables().size(); } DatabaseTable defaultAggregateTable = null; if(nTables == 0) { defaultAggregateTable = new DatabaseTable(); DatabaseTable defaultSourceTable = tableTranslation.get(defaultAggregateTable); if(defaultSourceTable == null) { //TODO: throw exception } descriptor.addTable(defaultSourceTable); } else { defaultAggregateTable = descriptor.getTables().get(0); Vector newTables = NonSynchronizedVector.newInstance(nTables); for(int i=0; i < nTables; i++) { DatabaseTable table = tableTranslation.get(descriptor.getTables().get(i)); if(table == null) { //TODO: throw exception } if(!newTables.contains(table)) { newTables.add(table); } } descriptor.setTables(newTables); } int nPrimaryKeyFields = 0; if(descriptor.getPrimaryKeyFields() != null) { nPrimaryKeyFields = descriptor.getPrimaryKeyFields().size(); } if(nPrimaryKeyFields > 0) { ArrayList newPrimaryKeyFields = new ArrayList(nPrimaryKeyFields); for(int i=0; i < nPrimaryKeyFields; i++) { DatabaseField pkField = descriptor.getPrimaryKeyFields().get(i); if(!pkField.hasTableName() && nTables > 0) { pkField = new DatabaseField(pkField.getName(), defaultAggregateTable); } DatabaseField field = 
fieldTranslation.get(pkField); if(field == null) { //TODO: throw exception: pk not translated } newPrimaryKeyFields.add(field); } descriptor.setPrimaryKeyFields(newPrimaryKeyFields); } // put fieldTranslation into fieldsMap so that all the fields in the mappings, inheritance policy etc // are translated to the new ones. descriptor.getObjectBuilder().getFieldsMap().putAll(fieldTranslation); } /** * INTERNAL: * Called in case nestedAggregateToSourceFieldNames != null * Updates AggregateObjectMappings and AggregateCollectionMappings of the * reference descriptor. */ protected void updateNestedAggregateMappings(ClassDescriptor descriptor, AbstractSession session) { if (! nestedAggregateToSourceFields.isEmpty()) { Iterator>> it = nestedAggregateToSourceFields.entrySet().iterator(); while (it.hasNext()) { Map.Entry> entry = it.next(); String attribute = entry.getKey(); String nestedAttribute = null; int indexOfDot = attribute.indexOf('.'); // attribute "homes.sellingPonts" is divided into attribute "homes" and nestedAttribute "sellingPoints" if (indexOfDot >= 0) { nestedAttribute = attribute.substring(indexOfDot + 1, attribute.length()); attribute = attribute.substring(0, indexOfDot); } DatabaseMapping mapping = descriptor.getMappingForAttributeName(attribute); if (mapping == null) { //TODO: may have been already processed by the parent, may be processed later by a child. //Should add method verifyNestedAggregateToSourceFieldNames that would go through //all the children and detect the wrong attribute. 
continue; } if (mapping.isAggregateCollectionMapping()) { AggregateCollectionMapping nestedAggregateCollectionMapping = (AggregateCollectionMapping)mapping; if (nestedAttribute == null) { nestedAggregateCollectionMapping.addFieldTranslations(entry.getValue()); } else { nestedAggregateCollectionMapping.addNestedFieldNameTranslations(nestedAttribute, entry.getValue()); } } else if (mapping.isAggregateObjectMapping()) { // We have a nested aggregate object mapping (which in turn may have more nested aggregate object mappings). // However, at this point we have all the field name translations in the nested list. Since we have the clone // of the first nested aggregate object from the aggregate collection mapping, we will add all the field name // translations to it since we do not need to look up nested mappings field names. The way nested aggregate // object mappings handle field name translations will work if we set all the translations on the root of the // nested objects. This in turn allows sharing nested aggregate objects and allowing different name translations // for each different chain. Given this aggregate chain "record.location.venue.history" where record is an // aggregate collection mapping, metadata processing from JPA will (and a direct user may opt to) add all the // attribute overrides from location, venue and history under separate attribute names, that is, // - addNestedFieldNameTranslation("location", ..., ...); // - addNestedFieldNameTranslation("location.venue", ..., ...); // - addNestedFieldNameTranslation("location.venue.history", ..., ...); // // This will add all the field name translations to the 'location' aggregate object mapping since we extract // the attribute name as the string up to the first dot. // Simply adding all the nestedFieldNameTranslations to 'location' would work as well. 
AggregateObjectMapping nestedAggregateObjectMapping = (AggregateObjectMapping) mapping; Map entries = entry.getValue(); for (String aggregateFieldName : entries.keySet()) { DatabaseField sourceField = entries.get(aggregateFieldName); nestedAggregateObjectMapping.addFieldTranslation(sourceField, aggregateFieldName); } } else { // TODO: throw exception: mapping corresponding to attribute is not a mapping that accepts field name translations. } } } } /** * INTERNAL: * For aggregate mapping the reference descriptor is cloned. Also the involved inheritance descriptor, its children * and parents all need to be cloned. The cloned descriptors are then assigned primary keys and table names before * initialize. Once cloned descriptor is initialized it is assigned as reference descriptor in the aggregate mapping. * This is very specific behavior for aggregate mappings. The original descriptor is used only for creating clones * and after that mapping never uses it. * Some initialization is done in postInitialize to ensure the target descriptor's references are initialized. 
*/ public void initializeChildInheritance(ClassDescriptor parentDescriptor, AbstractSession session, HashMap fieldTranslation, HashMap tableTranslation) throws DescriptorException { //recursive call to further children descriptors if (parentDescriptor.getInheritancePolicy().hasChildren()) { //setFields(clonedChildDescriptor.getFields()); List childDescriptors = parentDescriptor.getInheritancePolicy().getChildDescriptors(); List cloneChildDescriptors = new ArrayList(childDescriptors.size()); for (ClassDescriptor childDescriptor : childDescriptors) { ClassDescriptor clonedChildDescriptor = (ClassDescriptor)childDescriptor.clone(); if (fieldTranslation != null) { translateTablesAndFields(clonedChildDescriptor, fieldTranslation, tableTranslation); } updateNestedAggregateMappings(clonedChildDescriptor, session); if (clonedChildDescriptor.isAggregateDescriptor()) { clonedChildDescriptor.descriptorIsAggregateCollection(); } if (!clonedChildDescriptor.isAggregateCollectionDescriptor()) { session.getIntegrityChecker().handleError(DescriptorException.referenceDescriptorIsNotAggregate(clonedChildDescriptor.getJavaClass().getName(), this)); } clonedChildDescriptor.getInheritancePolicy().setParentDescriptor(parentDescriptor); clonedChildDescriptor.preInitialize(session); clonedChildDescriptor.initialize(session); cloneChildDescriptors.add(clonedChildDescriptor); initializeChildInheritance(clonedChildDescriptor, session, fieldTranslation, tableTranslation); } parentDescriptor.getInheritancePolicy().setChildDescriptors(cloneChildDescriptors); } } /** * INTERNAL: * Initialize delete all query. This query is used to delete the collection of objects from the * target table. 
*/ protected void initializeDeleteAllQuery(AbstractSession session) { DeleteAllQuery query = (DeleteAllQuery)getDeleteAllQuery(); query.setReferenceClass(getReferenceClass()); query.setDescriptor(getReferenceDescriptor()); query.setShouldMaintainCache(false); query.setIsInMemoryOnly(isCascadeOnDeleteSetOnDatabase()); if (query.getPartitioningPolicy() == null) { query.setPartitioningPolicy(getPartitioningPolicy()); } if (!hasCustomDeleteAllQuery()) { if (getSelectionCriteria() == null) { query.setSelectionCriteria(getDeleteAllCriteria(session)); } else { query.setSelectionCriteria(getSelectionCriteria()); } } } /** * INTERNAL: * For aggregate mapping the reference descriptor is cloned. Also the involved inheritance descriptor, its children * and parents all need to be cloned. The cloned descriptors are then assigned primary keys and table names before * initialize. Once cloned descriptor is initialized it is assigned as reference descriptor in the aggregate mapping. * This is very specific behavior for aggregate mappings. The original descriptor is used only for creating clones * and after that mapping never uses it. * Some initialization is done in postInitialize to ensure the target descriptor's references are initialized. 
*/ public void initializeParentInheritance(ClassDescriptor parentDescriptor, ClassDescriptor childDescriptor, AbstractSession session, HashMap fieldTranslation, HashMap tableTranslation) throws DescriptorException { ClassDescriptor clonedParentDescriptor = (ClassDescriptor)parentDescriptor.clone(); if(clonedParentDescriptor.isAggregateDescriptor()) { clonedParentDescriptor.descriptorIsAggregateCollection(); } if (!clonedParentDescriptor.isAggregateCollectionDescriptor()) { session.getIntegrityChecker().handleError(DescriptorException.referenceDescriptorIsNotAggregateCollection(parentDescriptor.getJavaClass().getName(), this)); } if (fieldTranslation != null) { translateTablesAndFields(clonedParentDescriptor, fieldTranslation, tableTranslation); } updateNestedAggregateMappings(clonedParentDescriptor, session); //recursive call to the further parent descriptors if (clonedParentDescriptor.getInheritancePolicy().isChildDescriptor()) { ClassDescriptor parentToParentDescriptor = session.getDescriptor(clonedParentDescriptor.getJavaClass()); initializeParentInheritance(parentToParentDescriptor, parentDescriptor, session, fieldTranslation, tableTranslation); } Vector children = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1); children.addElement(childDescriptor); clonedParentDescriptor.getInheritancePolicy().setChildDescriptors(children); clonedParentDescriptor.preInitialize(session); clonedParentDescriptor.initialize(session); } /** * INTERNAL: * Selection criteria is created with target foreign keys and source keys. * This criteria is then used to read records from the target table. 
*/ protected void initializeSelectionCriteria(AbstractSession session) { Expression expression; Expression criteria; Expression builder = new ExpressionBuilder(); for (Iterator keys = getTargetForeignKeyToSourceKeys().keySet().iterator(); keys.hasNext();) { DatabaseField targetForeignKey = (DatabaseField)keys.next(); DatabaseField sourceKey = getTargetForeignKeyToSourceKeys().get(targetForeignKey); expression = builder.getField(targetForeignKey).equal(builder.getParameter(sourceKey)); criteria = expression.and(getSelectionCriteria()); setSelectionCriteria(criteria); } } /** * INTERNAL: * The foreign keys and the primary key names are converted to DatabaseFields and stored. */ protected void initializeTargetForeignKeyToSourceKeys(AbstractSession session) throws DescriptorException { if (getTargetForeignKeyFields().isEmpty()) { throw DescriptorException.noTargetForeignKeysSpecified(this); } for (int index = 0; index < getTargetForeignKeyFields().size(); index++) { DatabaseField foreignKeyfield = getTargetForeignKeyFields().get(index); foreignKeyfield = getReferenceDescriptor().buildField(foreignKeyfield); getTargetForeignKeyFields().set(index, foreignKeyfield); } for (int index = 0; index < getSourceKeyFields().size(); index++) { DatabaseField sourceKeyfield = getSourceKeyFields().get(index); sourceKeyfield = getDescriptor().buildField(sourceKeyfield); if (usesIndirection()) { sourceKeyfield.setKeepInRow(true); } getSourceKeyFields().set(index, sourceKeyfield); } if (getTargetForeignKeyFields().size() != getSourceKeyFields().size()) { throw DescriptorException.targetForeignKeysSizeMismatch(this); } Iterator targetForeignKeysEnum = getTargetForeignKeyFields().iterator(); Iterator sourceKeysEnum = getSourceKeyFields().iterator(); while (targetForeignKeysEnum.hasNext()) { getTargetForeignKeyToSourceKeys().put(targetForeignKeysEnum.next(), sourceKeysEnum.next()); } } /** * INTERNAL: * The foreign keys and the primary key names are converted to DatabaseFields and stored. 
The source keys * are not specified by the user so primary keys are extracted from the reference descriptor. */ protected void initializeTargetForeignKeyToSourceKeysWithDefaults(AbstractSession session) throws DescriptorException { if (getTargetForeignKeyFields().isEmpty()) { throw DescriptorException.noTargetForeignKeysSpecified(this); } List sourceKeys = getDescriptor().getPrimaryKeyFields(); if (usesIndirection()) { for (DatabaseField field : sourceKeys) { field.setKeepInRow(true); } } setSourceKeyFields(org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(sourceKeys)); for (int index = 0; index < getTargetForeignKeyFields().size(); index++) { DatabaseField foreignKeyfield = getTargetForeignKeyFields().get(index); foreignKeyfield = getReferenceDescriptor().buildField(foreignKeyfield); getTargetForeignKeyFields().set(index, foreignKeyfield); } if (getTargetForeignKeyFields().size() != sourceKeys.size()) { throw DescriptorException.targetForeignKeysSizeMismatch(this); } for (int index = 0; index < getTargetForeignKeyFields().size(); index++) { getTargetForeignKeyToSourceKeys().put(getTargetForeignKeyFields().get(index), sourceKeys.get(index)); } } /** * INTERNAL: * Iterate on the specified element. */ public void iterateOnElement(DescriptorIterator iterator, Object element) { // CR#... Aggregate collections must iterate as aggregates, not regular mappings. // For some reason the element can be null, this makes absolutely no sense, but we have a test case for it... if (element != null) { iterator.iterateForAggregateMapping(element, this, getReferenceDescriptor(element.getClass(), iterator.getSession())); } } /** * INTERNAL: */ public boolean isAggregateCollectionMapping() { return true; } /** * INTERNAL: */ @Override public boolean isElementCollectionMapping() { return true; } /** * INTERNAL: * Return if this mapping support joining. 
*/ public boolean isJoiningSupported() { return true; } /** * INTERNAL: */ public boolean isOwned(){ return true; } /** * Checks if source key is specified or not. */ protected boolean isSourceKeySpecified() { return !(getSourceKeyFields().isEmpty()); } /** * Force instantiation of the load group. */ @Override public void load(final Object object, AttributeItem item, final AbstractSession session, final boolean fromFetchGroup) { instantiateAttribute(object, session); if (item.getGroup() != null && (!fromFetchGroup || session.isUnitOfWork())) { //if UOW make sure the nested attributes are loaded as the clones will not be instantiated Object value = getRealAttributeValueFromObject(object, session); ContainerPolicy cp = this.containerPolicy; for (Object iterator = cp.iteratorFor(value); cp.hasNext(iterator);) { Object wrappedObject = cp.nextEntry(iterator, session); Object nestedObject = cp.unwrapIteratorResult(wrappedObject); getReferenceDescriptor(nestedObject.getClass(), session).getObjectBuilder().load(nestedObject, item.getGroup(nestedObject.getClass()), session, fromFetchGroup); } } } /** * Force instantiation of all indirections. */ @Override public void loadAll(Object object, AbstractSession session, IdentityHashSet loaded) { instantiateAttribute(object, session); Object value = getRealAttributeValueFromObject(object, session); ContainerPolicy cp = this.containerPolicy; for (Object iterator = cp.iteratorFor(value); cp.hasNext(iterator);) { Object wrappedObject = cp.nextEntry(iterator, session); Object nestedObject = cp.unwrapIteratorResult(wrappedObject); getReferenceDescriptor(nestedObject.getClass(), session).getObjectBuilder().loadAll(nestedObject, session, loaded); } } /** * INTERNAL: * Merge changes from the source to the target object. 
* Because this is a collection mapping, values are added to or removed from the
* collection based on the changeset
*/
public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) {
    // Protected isolation: a non-cacheable aggregate collection must not leak into a
    // shared-cache target; reset the attribute to an empty value holder and bail out.
    if (this.descriptor.getCachePolicy().isProtectedIsolation()){
        if (!this.isCacheable && !targetSession.isProtectedSession()){
            setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null)));
        }
        return;
    }
    //Check to see if the target has an instantiated collection
    if (!isAttributeValueInstantiatedOrChanged(target)) {
        //Then do nothing.
        return;
    }
    ContainerPolicy containerPolicy = getContainerPolicy();
    AbstractSession session = mergeManager.getSession();
    Object valueOfTarget = null;

    //At this point the source's indirection must be instantiated or the changeSet would never have
    // been created
    Object sourceAggregate = null;

    //On a distributed cache if our changes are for the same version as the target object
    //then load the changes from database.
    // CR 4143
    // CR 4155 Always replace the collection with the query results as we will not be able to
    // find the originals for merging and indirection information may be lost.
    if (mergeManager.shouldMergeChangesIntoDistributedCache()) {
        ClassDescriptor descriptor = getDescriptor();
        AbstractRecord parentRow = descriptor.getObjectBuilder().extractPrimaryKeyRowFromObject(target, session);
        Object result = getIndirectionPolicy().valueFromQuery(getSelectionQuery(), parentRow, session);//fix for indirection
        setAttributeValueInObject(target, result);
        return;
    }

    // iterate over the changes and merge the collections
    Vector aggregateObjects = ((AggregateCollectionChangeRecord)changeRecord).getChangedValues();
    int size = aggregateObjects.size();
    // Build a brand-new container and fill it from the change set entries.
    valueOfTarget = containerPolicy.containerInstance(size);
    // Next iterate over the changes and add them to the container
    ObjectChangeSet objectChanges = null;
    for (int index = 0; index < size; ++index) {
        objectChanges = (ObjectChangeSet)aggregateObjects.elementAt(index);
        Class localClassType = objectChanges.getClassType(session);
        sourceAggregate = objectChanges.getUnitOfWorkClone();

        // cr 4155 Load the target from the UnitOfWork. This will be the original
        // aggregate object that has the original indirection in it.
        Object targetAggregate = ((UnitOfWorkImpl)mergeManager.getSession()).getCloneToOriginals().get(sourceAggregate);

        if (targetAggregate == null) {
            // No original registered for this clone — build a fresh instance to merge into.
            targetAggregate = getReferenceDescriptor(localClassType, session).getObjectBuilder().buildNewInstance();
        }
        getReferenceDescriptor(localClassType, session).getObjectBuilder().mergeChangesIntoObject(targetAggregate, objectChanges, sourceAggregate, mergeManager, targetSession);
        containerPolicy.addInto(objectChanges.getNewKey(), targetAggregate, valueOfTarget, session);
    }
    setRealAttributeValueInObject(target, valueOfTarget);
}

/**
 * INTERNAL:
 * Merge changes from the source to the target object.
*/
public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) {
    // Protected isolation: do not merge a non-cacheable aggregate collection into a
    // shared-cache target; reset the attribute to an empty value holder instead.
    if (this.descriptor.getCachePolicy().isProtectedIsolation()) {
        if (!this.isCacheable && !targetSession.isProtectedSession()){
            setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null)));
        }
        return;
    }
    if (isTargetUnInitialized) {
        // This will happen if the target object was removed from the cache before the commit was attempted
        if (mergeManager.shouldMergeWorkingCopyIntoOriginal() && (!isAttributeValueInstantiatedOrChanged(source))) {
            setAttributeValueInObject(target, getIndirectionPolicy().getOriginalIndirectionObject(getAttributeValueFromObject(source), targetSession));
            return;
        }
    }
    if (!shouldMergeCascadeReference(mergeManager)) {
        // This is only going to happen on mergeClone, and we should not attempt to merge the reference
        return;
    }
    if (mergeManager.shouldRefreshRemoteObject() && shouldMergeCascadeParts(mergeManager) && usesIndirection()) {
        mergeRemoteValueHolder(target, source, mergeManager);
        return;
    }
    if (mergeManager.isForRefresh()) {
        if (!isAttributeValueInstantiatedOrChanged(target)) {
            // This will occur when the clone's value has not been instantiated yet and we do not need
            // the refresh that attribute
            return;
        }
    } else if (!isAttributeValueInstantiatedOrChanged(source)) {
        // I am merging from a clone into an original. No need to do merge if the attribute was never
        // modified
        return;
    }

    ContainerPolicy containerPolicy = getContainerPolicy();
    Object valueOfSource = getRealCollectionAttributeValueFromObject(source, mergeManager.getSession());
    // Rebuild the target container from scratch, merging each source element into a new instance.
    Object valueOfTarget = containerPolicy.containerInstance(containerPolicy.sizeFor(valueOfSource));
    for (Object sourceValuesIterator = containerPolicy.iteratorFor(valueOfSource); containerPolicy.hasNext(sourceValuesIterator);) {
        Object wrappedSourceValue = containerPolicy.nextEntry(sourceValuesIterator, mergeManager.getSession());
        Object sourceValue = containerPolicy.unwrapIteratorResult(wrappedSourceValue);

        // For some odd reason support for having null in the collection was added. This does not make sense...
        Object originalValue = null;
        if (sourceValue != null) {
            //CR#2896 - TW
            originalValue = getReferenceDescriptor(sourceValue.getClass(), mergeManager.getSession()).getObjectBuilder().buildNewInstance();
            getReferenceDescriptor(sourceValue.getClass(), mergeManager.getSession()).getObjectBuilder().mergeIntoObject(originalValue, true, sourceValue, mergeManager, targetSession);
            containerPolicy.addInto(containerPolicy.keyFromIterator(sourceValuesIterator), originalValue, valueOfTarget, mergeManager.getSession());
        }
    }

    // Must re-set variable to allow for set method to re-morph changes if the collection is not being stored directly.
    setRealAttributeValueInObject(target, valueOfTarget);
}

/**
 * INTERNAL:
 * An object was added to the collection during an update, insert it if private.
 */
protected void objectAddedDuringUpdate(ObjectLevelModifyQuery query, Object objectAdded, ObjectChangeSet changeSet, Map extraData) throws DatabaseException, OptimisticLockException {
    // Insert must not be done for uow or cascaded queries and we must cascade to cascade policy.
    InsertObjectQuery insertQuery = getAndPrepareModifyQueryForInsert(query, objectAdded);
    ContainerPolicy.copyMapDataToRow(extraData, insertQuery.getModifyRow());
    if(this.listOrderField != null && extraData != null) {
        // Carry the element's list-order value into the insert row.
        insertQuery.getModifyRow().put(this.listOrderField, extraData.get(this.listOrderField));
    }
    query.getSession().executeQuery(insertQuery, insertQuery.getTranslationRow());
}

/**
 * INTERNAL:
 * An object was removed to the collection during an update, delete it if private.
 */
protected void objectRemovedDuringUpdate(ObjectLevelModifyQuery query, Object objectDeleted, Map extraData) throws DatabaseException, OptimisticLockException {
    // Delete must not be done for uow or cascaded queries and we must cascade to cascade policy.
    DeleteObjectQuery deleteQuery = new DeleteObjectQuery();
    deleteQuery.setIsExecutionClone(true);
    prepareModifyQueryForDelete(query, deleteQuery, objectDeleted, extraData);
    ContainerPolicy.copyMapDataToRow(extraData, deleteQuery.getTranslationRow());
    query.getSession().executeQuery(deleteQuery, deleteQuery.getTranslationRow());
    if (containerPolicy.shouldIncludeKeyInDeleteEvent()){
        // Map containers: the key object may itself be an entity that must be deleted.
        query.getSession().deleteObject(containerPolicy.keyFromEntry(objectDeleted));
    }
}

/**
 * INTERNAL:
 * An object is still in the collection, update it as it may have changed.
 */
@Override
protected void objectUnchangedDuringUpdate(ObjectLevelModifyQuery query, Object object, Map backupCloneKeyedCache, Object cachedKey) throws DatabaseException, OptimisticLockException {
    // Always write for updates, either private or in uow if calling this method.
    UpdateObjectQuery updateQuery = new UpdateObjectQuery();
    updateQuery.setIsExecutionClone(true);
    Object backupclone = backupCloneKeyedCache.get(cachedKey);
    updateQuery.setBackupClone(backupclone);
    prepareModifyQueryForUpdate(query, updateQuery, object);
    query.getSession().executeQuery(updateQuery, updateQuery.getTranslationRow());
}

// Variant used when the backup clone is already known (no keyed-cache lookup needed).
protected void objectUnchangedDuringUpdate(ObjectLevelModifyQuery query, Object object, Object backupClone) throws DatabaseException, OptimisticLockException {
    // Always write for updates, either private or in uow if calling this method.
    UpdateObjectQuery updateQuery = new UpdateObjectQuery();
    updateQuery.setIsExecutionClone(true);
    updateQuery.setBackupClone(backupClone);
    prepareModifyQueryForUpdate(query, updateQuery, object);
    query.getSession().executeQuery(updateQuery, updateQuery.getTranslationRow());
}

/**
 * INTERNAL:
 * For aggregate collection mapping the reference descriptor is cloned. The cloned descriptor is then
 * assigned primary keys and table names before initialize. Once the cloned descriptor is initialized
 * it is assigned as reference descriptor in the aggregate mapping. This is a very specific
 * behavior for aggregate mappings. The original descriptor is used only for creating clones and
 * after that the aggregate mapping never uses it.
 * Some initialization is done in postInitialize to ensure the target descriptor's references are initialized.
 */
public void postInitialize(AbstractSession session) throws DescriptorException {
    super.postInitialize(session);

    if (getReferenceDescriptor() != null) {
        // Changed as part of fix for bug#4410581 aggregate mapping can not be set to use change tracking if owning descriptor does not use it.
        // Basically the policies should be the same, but we also allow deferred with attribute for CMP2 (coarser grained).
        if (getDescriptor().getObjectChangePolicy().getClass().equals(DeferredChangeDetectionPolicy.class)) {
            getReferenceDescriptor().setObjectChangePolicy(new DeferredChangeDetectionPolicy());
        } else if (getDescriptor().getObjectChangePolicy().getClass().equals(ObjectChangeTrackingPolicy.class)
                && getReferenceDescriptor().getObjectChangePolicy().getClass().equals(AttributeChangeTrackingPolicy.class)) {
            getReferenceDescriptor().setObjectChangePolicy(new ObjectChangeTrackingPolicy());
        }

        getReferenceDescriptor().postInitialize(session);
    }

    // Need to set the types on the foreign key fields, as not mapped in the object.
    for (int index = 0; index < getSourceKeyFields().size(); index++) {
        DatabaseField foreignKey = getSourceKeyFields().get(index);
        DatabaseField targetKey = getTargetForeignKeyFields().get(index);
        if (targetKey.getType() == null) {
            targetKey.setType(getDescriptor().getObjectBuilder().getFieldClassification(foreignKey));
        }
    }
}

/**
 * INTERNAL:
 * Insert privately owned parts
 */
public void postInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
    if (isReadOnly()) {
        return;
    }

    Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
    int index = 0;

    // insert each object one by one
    ContainerPolicy cp = getContainerPolicy();
    for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) {
        Object wrappedObject = cp.nextEntry(iter, query.getSession());
        Object object = cp.unwrapIteratorResult(wrappedObject);
        InsertObjectQuery insertQuery = getAndPrepareModifyQueryForInsert(query, object);
        ContainerPolicy.copyMapDataToRow(cp.getKeyMappingDataForWriteQuery(wrappedObject, query.getSession()), insertQuery.getModifyRow());
        if(this.listOrderField != null) {
            // Persist the element's position in the collection.
            insertQuery.getModifyRow().add(this.listOrderField, index++);
        }
        query.getSession().executeQuery(insertQuery, insertQuery.getTranslationRow());
        cp.propogatePostInsert(query, wrappedObject);
    }
}

/**
 * INTERNAL:
 * Update the privately owned parts
 */
public void postUpdate(WriteObjectQuery writeQuery) throws DatabaseException, OptimisticLockException {
    if (this.isReadOnly) {
        return;
    }

    // If objects are not instantiated that means they are not changed.
    if (!isAttributeValueInstantiatedOrChanged(writeQuery.getObject())) {
        return;
    }

    // OLD COMMIT - TODO This should not be used.
    compareObjectsAndWrite(writeQuery);
}

/**
 * INTERNAL:
 * Delete privately owned parts
 */
public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException {
    if (isReadOnly()) {
        return;
    }
    AbstractSession session = query.getSession();

    // If privately owned parts have their privately own parts, delete those one by one
    // else delete everything in one shot.
    int index = 0;
    if (mustDeleteReferenceObjectsOneByOne()) {
        Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
        ContainerPolicy cp = getContainerPolicy();
        if (this.isCascadeOnDeleteSetOnDatabase && session.isUnitOfWork()) {
            // Database cascades the row deletes; just record the objects as cascade-deleted in the UOW.
            for (Object iterator = cp.iteratorFor(objects); cp.hasNext(iterator);) {
                Object wrappedObject = cp.nextEntry(iterator, session);
                Object object = cp.unwrapIteratorResult(wrappedObject);
                ((UnitOfWorkImpl)session).getCascadeDeleteObjects().add(object);
            }
        }
        for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) {
            Object wrappedObject = cp.nextEntry(iter, session);
            DeleteObjectQuery deleteQuery = new DeleteObjectQuery();
            deleteQuery.setIsExecutionClone(true);
            Map extraData = null;
            if (this.listOrderField != null) {
                extraData = new DatabaseRecord(1);
                extraData.put(this.listOrderField, index++);
            }
            prepareModifyQueryForDelete(query, deleteQuery, wrappedObject, extraData);
            session.executeQuery(deleteQuery, deleteQuery.getTranslationRow());
            cp.propogatePreDelete(query, wrappedObject);
        }
        if (!session.isUnitOfWork()) {
            // This deletes any objects on the database, as the collection in memory may have been changed.
            // This is not required for unit of work, as the update would have already deleted these objects,
            // and the backup copy will include the same objects causing double deletes.
            verifyDeleteForUpdate(query);
        }
    } else {
        deleteAll(query, session);
    }
}

/**
 * INTERNAL:
 * The message is passed to its reference class descriptor.
 */
public void preInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
    if (isReadOnly()) {
        return;
    }
    Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
    int index = 0;

    // pre-insert each object one by one
    ContainerPolicy cp = getContainerPolicy();
    for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) {
        Object wrappedObject = cp.nextEntry(iter, query.getSession());
        Object object = cp.unwrapIteratorResult(wrappedObject);
        InsertObjectQuery insertQuery = getAndPrepareModifyQueryForInsert(query, object);
        ContainerPolicy.copyMapDataToRow(cp.getKeyMappingDataForWriteQuery(wrappedObject, query.getSession()), insertQuery.getModifyRow());
        if(this.listOrderField != null) {
            insertQuery.getModifyRow().add(this.listOrderField, index++);
        }
        // aggregates do not actually use a query to write to the database so the pre-write must be called here
        executeEvent(DescriptorEventManager.PreWriteEvent, insertQuery);
        executeEvent(DescriptorEventManager.PreInsertEvent, insertQuery);
        getReferenceDescriptor(object.getClass(), query.getSession()).getQueryManager().preInsert(insertQuery);
        cp.propogatePreInsert(query, wrappedObject);
    }
}

/**
 * INTERNAL:
 * Returns clone of InsertObjectQuery from the reference descriptor, if it is not set - create it.
*/ protected InsertObjectQuery getInsertObjectQuery(AbstractSession session, ClassDescriptor desc) { InsertObjectQuery insertQuery = desc.getQueryManager().getInsertQuery(); if (insertQuery == null) { insertQuery = new InsertObjectQuery(); desc.getQueryManager().setInsertQuery(insertQuery); } if (insertQuery.getModifyRow() == null) { AbstractRecord modifyRow = new DatabaseRecord(); for (int i = 0; i < getTargetForeignKeyFields().size(); i++) { DatabaseField field = getTargetForeignKeyFields().elementAt(i); modifyRow.put(field, null); } desc.getObjectBuilder().buildTemplateInsertRow(session, modifyRow); getContainerPolicy().addFieldsForMapKey(modifyRow); if(this.listOrderField != null) { modifyRow.put(this.listOrderField, null); } insertQuery.setModifyRow(modifyRow); } return insertQuery; } /** * INTERNAL: * setup the modifyQuery for post insert/update and pre delete */ public InsertObjectQuery getAndPrepareModifyQueryForInsert(ObjectLevelModifyQuery originalQuery, Object object) { AbstractSession session = originalQuery.getSession(); ClassDescriptor objReferenceDescriptor = getReferenceDescriptor(object.getClass(), session); InsertObjectQuery insertQueryFromDescriptor = getInsertObjectQuery(session, objReferenceDescriptor); insertQueryFromDescriptor.checkPrepare(session, insertQueryFromDescriptor.getModifyRow()); InsertObjectQuery insertQuery = (InsertObjectQuery)insertQueryFromDescriptor.clone(); insertQuery.setObject(object); insertQuery.setDescriptor(objReferenceDescriptor); AbstractRecord targetForeignKeyRow = new DatabaseRecord(); Vector referenceObjectKeys = getReferenceObjectKeys(originalQuery); for (int keyIndex = 0; keyIndex < getTargetForeignKeyFields().size(); keyIndex++) { targetForeignKeyRow.put(getTargetForeignKeyFields().elementAt(keyIndex), referenceObjectKeys.elementAt(keyIndex)); } insertQuery.setModifyRow(targetForeignKeyRow); insertQuery.setTranslationRow(targetForeignKeyRow); insertQuery.setSession(session); 
insertQuery.setCascadePolicy(originalQuery.getCascadePolicy()); insertQuery.dontMaintainCache(); // For bug 2863721 must set a backup clone for compatibility with // old event mechanism, even though for AggregateCollections there is no // way to get a backup directly from a clone. if (session.isUnitOfWork()) { Object backupAttributeValue = getReferenceDescriptor(object.getClass(), session).getObjectBuilder().buildNewInstance(); insertQuery.setBackupClone(backupAttributeValue); } return insertQuery; } /** * INTERNAL: * setup the modifyQuery for pre delete */ public void prepareModifyQueryForDelete(ObjectLevelModifyQuery originalQuery, ObjectLevelModifyQuery modifyQuery, Object wrappedObject, Map extraData) { Object object = getContainerPolicy().unwrapIteratorResult(wrappedObject); AbstractRecord aggregateRow = getAggregateRow(originalQuery, object); ContainerPolicy.copyMapDataToRow(containerPolicy.getKeyMappingDataForWriteQuery(wrappedObject, modifyQuery.getSession()), aggregateRow); if(this.listOrderField != null && extraData != null) { aggregateRow.put(this.listOrderField, extraData.get(this.listOrderField)); } modifyQuery.setObject(object); modifyQuery.setDescriptor(getReferenceDescriptor(object.getClass(), originalQuery.getSession())); modifyQuery.setPrimaryKey(getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromRow(aggregateRow, originalQuery.getSession())); modifyQuery.setModifyRow(aggregateRow); modifyQuery.setTranslationRow(aggregateRow); modifyQuery.setSession(originalQuery.getSession()); if (originalQuery.shouldCascadeOnlyDependentParts()) { //This query is the result of being in a UnitOfWork therefor use the Aggregate Collection //specific cascade policy to prevent cascading the delete now modifyQuery.setCascadePolicy(DatabaseQuery.CascadeAggregateDelete); } else { modifyQuery.setCascadePolicy(originalQuery.getCascadePolicy()); } modifyQuery.dontMaintainCache(); } /** * INTERNAL: * setup the modifyQuery for update, */ public void 
prepareModifyQueryForUpdate(ObjectLevelModifyQuery originalQuery, ObjectLevelModifyQuery modifyQuery, Object object) { AbstractRecord aggregateRow = getAggregateRow(originalQuery, object); modifyQuery.setObject(object); modifyQuery.setDescriptor(getReferenceDescriptor(object.getClass(), originalQuery.getSession())); modifyQuery.setPrimaryKey(getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromRow(aggregateRow, originalQuery.getSession())); modifyQuery.setTranslationRow(aggregateRow); modifyQuery.setSession(originalQuery.getSession()); modifyQuery.setCascadePolicy(originalQuery.getCascadePolicy()); modifyQuery.dontMaintainCache(); } /** * INTERNAL: * Set the referenceDescriptor. This is a descriptor which is associated with * the reference class. */ protected void setReferenceDescriptor(ClassDescriptor aDescriptor) { this.referenceDescriptor = aDescriptor; this.remoteReferenceDescriptor = this.referenceDescriptor; } /** * PUBLIC: * Set the source key field names associated with the mapping. * These must be in-order with the targetForeignKeyFieldNames. */ public void setSourceKeyFieldNames(Vector fieldNames) { Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size()); for (Enumeration fieldNamesEnum = fieldNames.elements(); fieldNamesEnum.hasMoreElements();) { fields.addElement(new DatabaseField((String)fieldNamesEnum.nextElement())); } setSourceKeyFields(fields); } /** * INTERNAL: * set all the primary key names associated with this mapping */ public void setSourceKeyFields(Vector sourceKeyFields) { this.sourceKeyFields = sourceKeyFields; } /** * PUBLIC: * Set the target foregin key field names associated with the mapping. * These must be in-order with the sourceKeyFieldNames. 
*/ public void setTargetForeignKeyFieldNames(Vector fieldNames) { Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size()); for (Enumeration fieldNamesEnum = fieldNames.elements(); fieldNamesEnum.hasMoreElements();) { fields.addElement(new DatabaseField((String)fieldNamesEnum.nextElement())); } setTargetForeignKeyFields(fields); } /** * INTERNAL: * set the target foregin key fields associated with the mapping */ public void setTargetForeignKeyFields(Vector targetForeignKeyFields) { this.targetForeignKeyFields = targetForeignKeyFields; } protected void setTargetForeignKeyToSourceKeys(Map targetForeignKeyToSourceKeys) { this.targetForeignKeyToSourceKeys = targetForeignKeyToSourceKeys; } /** * Returns true as any process leading to object modification should also affect its privately owned parts * Usually used by write, insert, update and delete. */ protected boolean shouldObjectModifyCascadeToParts(ObjectLevelModifyQuery query) { if (isReadOnly()) { return false; } return true; } /** * ADVANCED: * This method is used to have an object add to a collection once the changeSet is applied * The referenceKey parameter should only be used for direct Maps. PLEASE ENSURE that the changes * have been made in the object model first. */ public void simpleAddToCollectionChangeRecord(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) { AggregateCollectionChangeRecord collectionChangeRecord = (AggregateCollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName()); if (collectionChangeRecord == null) { //if there is no change for this attribute then create a changeSet for it. 
no need to modify the resulting // change record as it should be built from the clone which has the changes allready Object cloneObject = ((UnitOfWorkChangeSet)changeSet.getUOWChangeSet()).getUOWCloneForObjectChangeSet(changeSet); Object cloneCollection = this.getRealAttributeValueFromObject(cloneObject, session); collectionChangeRecord = (AggregateCollectionChangeRecord)convertToChangeRecord(cloneCollection, containerPolicy.containerInstance(), changeSet, session); changeSet.addChange(collectionChangeRecord); } else { collectionChangeRecord.getChangedValues().add(changeSetToAdd); } } /** * ADVANCED: * This method is used to have an object removed from a collection once the changeSet is applied * The referenceKey parameter should only be used for direct Maps. PLEASE ENSURE that the changes * have been made in the object model first. */ public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object changeSetToRemove, ObjectChangeSet changeSet, AbstractSession session) { AggregateCollectionChangeRecord collectionChangeRecord = (AggregateCollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName()); if (collectionChangeRecord == null) { //if there is no change for this attribute then create a changeSet for it. no need to modify the resulting // change record as it should be built from the clone which has the changes allready Object cloneObject = ((UnitOfWorkChangeSet)changeSet.getUOWChangeSet()).getUOWCloneForObjectChangeSet(changeSet); Object cloneCollection = this.getRealAttributeValueFromObject(cloneObject, session); collectionChangeRecord = (AggregateCollectionChangeRecord)convertToChangeRecord(cloneCollection, containerPolicy.containerInstance(), changeSet, session); changeSet.addChange(collectionChangeRecord); } else { collectionChangeRecord.getChangedValues().remove(changeSetToRemove); } } /** * INTERNAL: * Checks if object is deleted from the database or not. 
*/ public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException { // Row is built for translation if (isReadOnly()) { return true; } AbstractRecord row = getDescriptor().getObjectBuilder().buildRowForTranslation(object, session); Object value = session.executeQuery(getSelectionQuery(), row); return getContainerPolicy().isEmpty(value); } /** * Verifying deletes make sure that all the records privately owned by this mapping are * actually removed. If such records are found than those are all read and removed one * by one taking their privately owned parts into account. */ protected void verifyDeleteForUpdate(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { Object objects = readPrivateOwnedForObject(query); // Delete all these object one by one. ContainerPolicy cp = getContainerPolicy(); for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) { query.getSession().deleteObject(cp.next(iter, query.getSession())); } } /** * INTERNAL: * AggregateCollection contents should not be considered for addition to the UnitOfWork * private owned objects list for removal. */ public boolean isCandidateForPrivateOwnedRemoval() { return false; } /** * INTERNAL * Return true if this mapping supports cascaded version optimistic locking. */ public boolean isCascadedLockingSupported() { return true; } /** * INTERNAL: * Return if this mapping supports change tracking. */ public boolean isChangeTrackingSupported(Project project) { return false; } /** * INTERNAL: * Once a descriptor is serialized to the remote session, all its mappings and reference descriptors are traversed. * Usually the mappings are initialized and the serialized reference descriptors are replaced with local descriptors * if they already exist in the remote session. 
*/ public void remoteInitialization(DistributedSession session) { super.remoteInitialization(session); getReferenceDescriptor().remoteInitialization(session); } /** * PUBLIC: * indicates whether listOrderField value could be updated in the db. Used only if listOrderField!=null */ public boolean isListOrderFieldUpdatable() { return this.isListOrderFieldUpdatable; } /** * PUBLIC: * indicates whether listOrderField value could be updated in the db. Used only if listOrderField!=null * Default value is true. */ public void setIsListOrderFieldUpdatable(boolean isUpdatable) { this.isListOrderFieldUpdatable = isUpdatable; } /** * PUBLIC: * Set a default source table to use with the source fields of this mapping. */ public void setDefaultSourceTable(DatabaseTable table) { defaultSourceTable = table; } /** * INTERNAL: * Indicates whether the mapping (or at least one of its nested mappings, at any nested depth) * references an entity. * To return true the mapping (or nested mapping) should be ForeignReferenceMapping with non-null and non-aggregate reference descriptor. */ @Override public boolean hasNestedIdentityReference() { if (hasNestedIdentityReference == null) { hasNestedIdentityReference = getReferenceDescriptor().hasNestedIdentityReference(true); } return hasNestedIdentityReference; } }eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/ObjectReferenceMapping.java0000664000000000000000000020652512216173130025437 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.changetracking.ChangeTracker; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.indirection.ValueHolder; import org.eclipse.persistence.indirection.ValueHolderInterface; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.descriptors.changetracking.ObjectChangeListener; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.indirection.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.sessions.CopyGroup; import org.eclipse.persistence.sessions.Project; /** *

Purpose: Abstract class for 1:1, variable 1:1 and reference mappings */ public abstract class ObjectReferenceMapping extends ForeignReferenceMapping { /** Keeps track if any of the fields are foreign keys. */ protected boolean isForeignKeyRelationship; /** Keeps track of which fields are foreign keys on a per field basis (can have mixed foreign key relationships). */ protected Vector foreignKeyFields; protected ObjectReferenceMapping() { super(); this.setWeight(WEIGHT_TO_ONE); } /** * INTERNAL: * Used during building the backup shallow copy to copy the vector without re-registering the target objects. * For 1-1 or ref the reference is from the clone so it is already registered. */ @Override public Object buildBackupCloneForPartObject(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { return attributeValue; } /** * INTERNAL: * Require for cloning, the part must be cloned. * Ignore the objects, use the attribute value. */ @Override public Object buildCloneForPartObject(Object attributeValue, Object original, CacheKey cacheKey, Object clone, AbstractSession cloningSession, Integer refreshCascade, boolean isExisting, boolean isFromSharedCache) { if (attributeValue == null) { return null; } if (cloningSession.isUnitOfWork()){ return buildUnitofWorkCloneForPartObject(attributeValue, original, clone, refreshCascade, (UnitOfWorkImpl)cloningSession, isExisting); } // Not a unit Of Work clone so must have been a PROTECTED object if (this.referenceDescriptor.getCachePolicy().isProtectedIsolation()) { ClassDescriptor descriptor = this.referenceDescriptor; if (descriptor.hasInterfacePolicy()){ descriptor = cloningSession.getClassDescriptor(attributeValue.getClass()); } return cloningSession.createProtectedInstanceFromCachedData(attributeValue, refreshCascade, descriptor); } return attributeValue; } /** * INTERNAL: * Require for cloning, the part must be cloned. * Ignore the objects, use the attribute value. 
*/ public Object buildUnitofWorkCloneForPartObject(Object attributeValue, Object original, Object clone, Integer refreshCascade, UnitOfWorkImpl unitOfWork, boolean isExisting) { if (attributeValue == null) { return null; } if (refreshCascade != null ){ switch(refreshCascade){ case ObjectBuildingQuery.CascadeAllParts : return unitOfWork.mergeClone(attributeValue, MergeManager.CASCADE_ALL_PARTS, true); case ObjectBuildingQuery.CascadePrivateParts : return unitOfWork.mergeClone(attributeValue, MergeManager.CASCADE_PRIVATE_PARTS, true); case ObjectBuildingQuery.CascadeByMapping : return unitOfWork.mergeClone(attributeValue, MergeManager.CASCADE_BY_MAPPING, true); default: return unitOfWork.mergeClone(attributeValue, MergeManager.NO_CASCADE, true); } }else{ // Optimize registration to knowledge of existence. Object registeredObject = null; if (isExisting) { registeredObject = unitOfWork.registerExistingObject(attributeValue, true); } else { // Not known whether existing or not. registeredObject = unitOfWork.registerObject(attributeValue); // if the mapping is privately owned, keep track of the privately owned reference in the UnitOfWork if (isCandidateForPrivateOwnedRemoval() && unitOfWork.shouldDiscoverNewObjects() && registeredObject != null && unitOfWork.isCloneNewObject(registeredObject)) { unitOfWork.addPrivateOwnedObject(this, registeredObject); } } return registeredObject; } } /** * INTERNAL: * Copy of the attribute of the object. * This is NOT used for unit of work but for templatizing an object. */ @Override public void buildCopy(Object copy, Object original, CopyGroup group) { Object attributeValue = getRealAttributeValueFromObject(original, group.getSession()); if ((attributeValue != null) && (group.shouldCascadeAllParts() || (group.shouldCascadePrivateParts() && isPrivateOwned()) || group.shouldCascadeTree())) { attributeValue = group.getSession().copyInternal(attributeValue, group); } else if (attributeValue != null) { // Check for copy of part, i.e. 
back reference. Object copyValue = group.getCopies().get(attributeValue); if (copyValue != null) { attributeValue = copyValue; } } // if value holder is used, then the value holder shared with original substituted for a new ValueHolder. getIndirectionPolicy().reset(copy); setRealAttributeValueInObject(copy, attributeValue); } /** * INTERNAL: * In case Query By Example is used, this method generates an expression from a attribute value pair. Since * this is a ObjectReference mapping, a recursive call is made to the buildExpressionFromExample method of * ObjectBuilder. */ @Override public Expression buildExpression(Object queryObject, QueryByExamplePolicy policy, Expression expressionBuilder, Map processedObjects, AbstractSession session) { String attributeName = this.getAttributeName(); Object attributeValue = this.getRealAttributeValueFromObject(queryObject, session); if (!policy.shouldIncludeInQuery(queryObject.getClass(), attributeName, attributeValue)) { //the attribute name and value pair is not to be included in the query. return null; } if (attributeValue == null) { //even though it is null, it is to be always included in the query Expression expression = expressionBuilder.get(attributeName); return policy.completeExpressionForNull(expression); } ObjectBuilder objectBuilder = getReferenceDescriptor().getObjectBuilder(); return objectBuilder.buildExpressionFromExample(attributeValue, policy, expressionBuilder.get(attributeName), processedObjects, session); } /** * INTERNAL: * Return an ObjectReferenceChangeRecord describing the change, or null if no change. * Used to compute changes for deferred change tracking. 
*/ @Override public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) { Object cloneAttribute = null; Object backUpAttribute = null; cloneAttribute = getAttributeValueFromObject(clone); if (!owner.isNew()) { backUpAttribute = getAttributeValueFromObject(backUp); if ((backUpAttribute == null) && (cloneAttribute == null)) { return null; } } if ((cloneAttribute != null) && (!this.indirectionPolicy.objectIsInstantiated(cloneAttribute))) { //the clone's valueholder was never triggered so there will be no change return null; } Object cloneAttributeValue = null; Object backUpAttributeValue = null; if (cloneAttribute != null) { cloneAttributeValue = getRealAttributeValueFromAttribute(cloneAttribute, clone, session); } if (backUpAttribute != null) { backUpAttributeValue = getRealAttributeValueFromAttribute(backUpAttribute, backUp, session); } if ((cloneAttributeValue == backUpAttributeValue) && (!owner.isNew())) {// if it is new record the value return null; } ObjectReferenceChangeRecord record = internalBuildChangeRecord(cloneAttributeValue, owner, session); if (!owner.isNew()) { record.setOldValue(backUpAttributeValue); } return record; } /** * INTERNAL: * Directly build a change record based on the newValue without comparison */ public ObjectReferenceChangeRecord internalBuildChangeRecord(Object newValue, ObjectChangeSet owner, AbstractSession session) { ObjectReferenceChangeRecord changeRecord = new ObjectReferenceChangeRecord(owner); changeRecord.setAttribute(getAttributeName()); changeRecord.setMapping(this); setNewValueInChangeRecord(newValue, changeRecord, owner, session); return changeRecord; } /** * INTERNAL: * Set the newValue in the change record */ public void setNewValueInChangeRecord(Object newValue, ObjectReferenceChangeRecord changeRecord, ObjectChangeSet owner, AbstractSession session) { if (newValue != null) { // Bug 2612571 - added more flexible manner of getting descriptor ObjectChangeSet newSet = 
getDescriptorForTarget(newValue, session).getObjectBuilder().createObjectChangeSet(newValue, (UnitOfWorkChangeSet)owner.getUOWChangeSet(), session); changeRecord.setNewValue(newSet); } else { changeRecord.setNewValue(null); } } /** * INTERNAL: * Compare the references of the two objects are the same, not the objects themselves. * Used for independent relationships. * This is used for testing and validation purposes. */ @Override protected boolean compareObjectsWithoutPrivateOwned(Object firstObject, Object secondObject, AbstractSession session) { Object firstReferencedObject = getRealAttributeValueFromObject(firstObject, session); Object secondReferencedObject = getRealAttributeValueFromObject(secondObject, session); if ((firstReferencedObject == null) && (secondReferencedObject == null)) { return true; } if ((firstReferencedObject == null) || (secondReferencedObject == null)) { return false; } Object firstKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(firstReferencedObject, session); Object secondKey = getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(secondReferencedObject, session); if (firstKey == null) { if (secondKey == null) { return true; } return false; } return firstKey.equals(secondKey); } /** * INTERNAL: * Compare the references of the two objects are the same, and the objects themselves are the same. * Used for private relationships. * This is used for testing and validation purposes. 
*/ @Override protected boolean compareObjectsWithPrivateOwned(Object firstObject, Object secondObject, AbstractSession session) { Object firstPrivateObject = getRealAttributeValueFromObject(firstObject, session); Object secondPrivateObject = getRealAttributeValueFromObject(secondObject, session); return session.compareObjects(firstPrivateObject, secondPrivateObject); } /** * INTERNAL: * We are not using a remote valueholder * so we need to replace the reference object(s) with * the corresponding object(s) from the remote session. * * ObjectReferenceMappings need to unwrap and wrap the * reference object. */ @Override public void fixRealObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { //bug 4147755 getRealAttribute... / setReal... Object attributeValue = getRealAttributeValueFromObject(object, session); attributeValue = getReferenceDescriptor().getObjectBuilder().unwrapObject(attributeValue, session); ObjectLevelReadQuery tempQuery = query; if (!tempQuery.shouldMaintainCache()) { if ((!tempQuery.shouldCascadeParts()) || (tempQuery.shouldCascadePrivateParts() && (!isPrivateOwned()))) { tempQuery = null; } } Object remoteAttributeValue = session.getObjectCorrespondingTo(attributeValue, objectDescriptors, processedObjects, tempQuery); remoteAttributeValue = getReferenceDescriptor().getObjectBuilder().wrapObject(remoteAttributeValue, session); setRealAttributeValueInObject(object, remoteAttributeValue); } /** * INTERNAL: * Return a descriptor for the target of this mapping * @see org.eclipse.persistence.mappings.VariableOneToOneMapping * Bug 2612571 */ public ClassDescriptor getDescriptorForTarget(Object object, AbstractSession session) { return session.getDescriptor(object); } /** * INTERNAL: * Object reference must unwrap the reference object if required. 
*/ @Override public Object getRealAttributeValueFromAttribute(Object attributeValue, Object object, AbstractSession session) { Object value = super.getRealAttributeValueFromAttribute(attributeValue, object, session); value = getReferenceDescriptor().getObjectBuilder().unwrapObject(value, session); return value; } /** * INTERNAL: * Related mapping should implement this method to return true. */ @Override public boolean isObjectReferenceMapping() { return true; } /** * INTERNAL: * Iterate on the attribute value. * The value holder has already been processed. */ @Override public void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue) { // This may be wrapped as the caller in iterate on foreign reference does not unwrap as the type is generic. Object unwrappedAttributeValue = getReferenceDescriptor().getObjectBuilder().unwrapObject(realAttributeValue, iterator.getSession()); iterator.iterateReferenceObjectForMapping(unwrappedAttributeValue, this); } /** * Force instantiation of all indirections. */ @Override public void loadAll(Object object, AbstractSession session, IdentityHashSet loaded) { Object value = getRealAttributeValueFromObject(object, session); if (value != null) { getReferenceDescriptor().getObjectBuilder().loadAll(value, session, loaded); } } /** * INTERNAL: * Merge changes from the source to the target object. Which is the original from the parent UnitOfWork */ @Override public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { if (this.descriptor.getCachePolicy().isProtectedIsolation()&& !this.isCacheable && !targetSession.isProtectedSession()){ setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null))); return; } Object targetValueOfSource = null; // The target object must be completely merged before setting it otherwise // another thread can pick up the partial object. 
        if (shouldMergeCascadeParts(mergeManager)) {
            ObjectChangeSet set = (ObjectChangeSet)((ObjectReferenceChangeRecord)changeRecord).getNewValue();
            if (set != null) {
                if (mergeManager.shouldMergeChangesIntoDistributedCache()) {
                    //Let's try and find it first.  We may have merged it already. In which case merge
                    //changes will stop the recursion
                    targetValueOfSource = set.getTargetVersionOfSourceObject(mergeManager, targetSession, false);
                    if ((targetValueOfSource == null) && (set.isNew() || set.isAggregate()) && set.containsChangesFromSynchronization()) {
                        if (!mergeManager.isAlreadyMerged(set, targetSession)) {
                            // if we haven't merged this object already then build a new object
                            // otherwise leave it as null which will stop the recursion
                            // CR 2855
                            // CR 3424 Need to build the right instance based on class type instead of refernceDescriptor
                            Class objectClass = set.getClassType(mergeManager.getSession());
                            targetValueOfSource = mergeManager.getSession().getDescriptor(objectClass).getObjectBuilder().buildNewInstance();
                            //Store the changeset to prevent us from creating this new object again
                            mergeManager.recordMerge(set, targetValueOfSource, targetSession);
                        } else {
                            //CR 4012
                            //we have all ready created the object, must be in a cyclic
                            //merge on a new object so get it out of the already merged collection
                            targetValueOfSource = mergeManager.getMergedObject(set, targetSession);
                        }
                    } else {
                        // If We have not found it anywhere else load it from the database
                        targetValueOfSource = set.getTargetVersionOfSourceObject(mergeManager, targetSession, true);
                    }
                    if (set.containsChangesFromSynchronization()) {
                        mergeManager.mergeChanges(targetValueOfSource, set, targetSession);
                    }
                    //bug:3604593 - ensure reference not changed source is invalidated if target object not found
                    if (targetValueOfSource == null) {
                        mergeManager.getSession().getIdentityMapAccessorInstance().invalidateObject(target);
                        return;
                    }
                } else {
                    mergeManager.mergeChanges(set.getUnitOfWorkClone(), set, targetSession);
                }
            }
        }
        // No cascade merge (or no change set): resolve the target version directly.
        if ((targetValueOfSource == null) && (((ObjectReferenceChangeRecord)changeRecord).getNewValue() != null)) {
            targetValueOfSource = ((ObjectChangeSet)((ObjectReferenceChangeRecord)changeRecord).getNewValue()).getTargetVersionOfSourceObject(mergeManager, targetSession);
        }
        // Register new object in nested units of work must not be registered into the parent,
        // so this records them in the merge to parent case.
        if (isPrivateOwned() && (source != null)) {
            mergeManager.registerRemovedNewObjectIfRequired(getRealAttributeValueFromObject(source, mergeManager.getSession()));
        }
        targetValueOfSource = getReferenceDescriptor().getObjectBuilder().wrapObject(targetValueOfSource, targetSession);
        // if value holder is used, then the value holder shared with original substituted for a new ValueHolder.
        getIndirectionPolicy().reset(target);
        setRealAttributeValueInObject(target, targetValueOfSource);
    }

    /**
     * INTERNAL:
     * Merge changes from the source to the target object.
     */
    @Override
    public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) {
        // Non-cacheable attribute under protected isolation: reset to an empty
        // value holder rather than merging a value into the shared target.
        if (this.descriptor.getCachePolicy().isProtectedIsolation()&& !this.isCacheable && !targetSession.isProtectedSession()){
            setAttributeValueInObject(target, this.indirectionPolicy.buildIndirectObject(new ValueHolder(null)));
            return;
        }
        if (isTargetUnInitialized) {
            // This will happen if the target object was removed from the cache before the commit was attempted,
            // or for new objects.
            if (mergeManager.shouldMergeWorkingCopyIntoOriginal()) {
                if (!isAttributeValueInstantiated(source)) {
                    setAttributeValueInObject(target, this.indirectionPolicy.getOriginalIndirectionObject(getAttributeValueFromObject(source), targetSession));
                    return;
                } else {
                    // Must clear the old value holder to cause it to be reset.
                    this.indirectionPolicy.reset(target);
                }
            }
        }
        if (!shouldMergeCascadeReference(mergeManager)) {
            // This is only going to happen on mergeClone, and we should not attempt to merge the reference
            return;
        }
        if (mergeManager.shouldRefreshRemoteObject() && usesIndirection()) {
            mergeRemoteValueHolder(target, source, mergeManager);
            return;
        }
        if (mergeManager.isForRefresh()) {
            if (!isAttributeValueInstantiated(target)) {
                // This will occur when the clone's value has not been instantiated yet and we do not need
                // the refresh that attribute
                if (shouldRefreshCascadeParts(mergeManager)){
                    Object attributeValue = getAttributeValueFromObject(source);
                    Integer refreshCascade = null;
                    // Carry the cascade-refresh policy through to the clone only when the
                    // selection query itself refreshes the identity map result.
                    if (selectionQuery != null && selectionQuery.isObjectBuildingQuery() && ((ObjectBuildingQuery)selectionQuery).shouldRefreshIdentityMapResult()){
                        refreshCascade = selectionQuery.getCascadePolicy();
                    }
                    // building clone from an original not a row.
                    Object clonedAttributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, source, null, target, refreshCascade, mergeManager.getSession(), false);
                    setAttributeValueInObject(target, clonedAttributeValue);
                }
                return;
            }
        } else if (!isAttributeValueInstantiated(source)) {
            // I am merging from a clone into an original.  No need to do merge if the attribute was never
            // modified
            return;
        }
        Object valueOfSource = getRealAttributeValueFromObject(source, mergeManager.getSession());

        Object targetValueOfSource = null;

        // The target object must be completely merged before setting it otherwise
        // another thread can pick up the partial object.
        if (shouldMergeCascadeParts(mergeManager) && (valueOfSource != null)) {
            if ((mergeManager.getSession().isUnitOfWork()) && (((UnitOfWorkImpl)mergeManager.getSession()).getUnitOfWorkChangeSet() != null)) {
                // If it is a unit of work, we have to check if I have a change Set fot this object
                Object targetValue = mergeManager.mergeChanges(mergeManager.getObjectToMerge(valueOfSource, referenceDescriptor, targetSession), (ObjectChangeSet)((UnitOfWorkChangeSet)((UnitOfWorkImpl)mergeManager.getSession()).getUnitOfWorkChangeSet()).getObjectChangeSetForClone(valueOfSource), targetSession);
                // Self-referencing merge with change tracking: the merge produced a
                // different instance, so the tracker's listener must be re-pointed.
                if (target == source && targetValue != valueOfSource && (this.descriptor.getObjectChangePolicy().isObjectChangeTrackingPolicy()) && (target instanceof ChangeTracker) && (((ChangeTracker)target)._persistence_getPropertyChangeListener() != null)) {
                    ObjectChangeListener listener = (ObjectChangeListener)((ChangeTracker)target)._persistence_getPropertyChangeListener();
                    if (listener != null){
                        //update the ChangeSet recorded within the parents ObjectChangeSet as the parent is referenceing the ChangeSet
                        //for a detached or new Entity.
                        this.descriptor.getObjectChangePolicy().updateListenerForSelfMerge(listener, this, valueOfSource, targetValue, (UnitOfWorkImpl) mergeManager.getSession());
                    }
                }
            } else {
                mergeManager.mergeChanges(mergeManager.getObjectToMerge(valueOfSource, referenceDescriptor, targetSession), null, targetSession);
            }
        }

        if (valueOfSource != null) {
            // Need to do this after merge so that an object exists in the database
            targetValueOfSource = mergeManager.getTargetVersionOfSourceObject(valueOfSource, referenceDescriptor, targetSession);
        }

        // If merge into the unit of work, must only merge and raise the event is the value changed.
if ((mergeManager.shouldMergeCloneIntoWorkingCopy() || mergeManager.shouldMergeCloneWithReferencesIntoWorkingCopy()) && !mergeManager.isForRefresh() && this.descriptor.getObjectChangePolicy().isObjectChangeTrackingPolicy()) { // Object level or attribute level so lets see if we need to raise the event? Object valueOfTarget = getRealAttributeValueFromObject(target, mergeManager.getSession()); if (valueOfTarget != targetValueOfSource) { //equality comparison cause both are uow clones this.descriptor.getObjectChangePolicy().raiseInternalPropertyChangeEvent(target, getAttributeName(), valueOfTarget, targetValueOfSource); } else { // No change. return; } } targetValueOfSource = this.referenceDescriptor.getObjectBuilder().wrapObject(targetValueOfSource, mergeManager.getSession()); setRealAttributeValueInObject(target, targetValueOfSource); } /** * INTERNAL: * Return all the fields populated by this mapping, these are foreign keys only. */ @Override protected Vector collectFields() { return getForeignKeyFields(); } /** * INTERNAL: * Returns the foreign key names associated with the mapping. * These are the fields that will be populated by the 1-1 mapping when writing. */ public Vector getForeignKeyFields() { return foreignKeyFields; } /** * INTERNAL: * Set the foreign key fields associated with the mapping. * These are the fields that will be populated by the 1-1 mapping when writing. */ protected void setForeignKeyFields(Vector foreignKeyFields) { this.foreignKeyFields = foreignKeyFields; if (!foreignKeyFields.isEmpty()) { setIsForeignKeyRelationship(true); } } /** * INTERNAL: * Return if the 1-1 mapping has a foreign key dependency to its target. * This is true if any of the foreign key fields are true foreign keys, * i.e. populated on write from the targets primary key. */ public boolean isForeignKeyRelationship() { return isForeignKeyRelationship; } /** * INTERNAL: * Set if the 1-1 mapping has a foreign key dependency to its target. 
     * This is true if any of the foreign key fields are true foreign keys,
     * i.e. populated on write from the targets primary key.
     */
    public void setIsForeignKeyRelationship(boolean isForeignKeyRelationship) {
        this.isForeignKeyRelationship = isForeignKeyRelationship;
    }

    /**
     * INTERNAL:
     * Insert privately owned parts
     */
    @Override
    public void preInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        // When the source row holds the foreign key the target must be inserted first.
        if (isForeignKeyRelationship()) {
            insert(query);
        }
    }

    /**
     * INTERNAL:
     * Reads the private owned object.
     */
    protected Object readPrivateOwnedForObject(ObjectLevelModifyQuery modifyQuery) throws DatabaseException {
        if (modifyQuery.getSession().isUnitOfWork()) {
            if (modifyQuery.getObjectChangeSet() != null) {
                // Change-tracked commit: the old value is recorded on the change record.
                ObjectReferenceChangeRecord record = (ObjectReferenceChangeRecord) modifyQuery.getObjectChangeSet().getChangesForAttributeNamed(getAttributeName());
                if (record != null) {
                    return record.getOldValue();
                }
            } else {
                // Old commit.
                return getRealAttributeValueFromObject(modifyQuery.getBackupClone(), modifyQuery.getSession());
            }
        }

        return null;
    }

    /**
     * INTERNAL:
     * Update privately owned parts
     */
    @Override
    public void preUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        if (!isAttributeValueInstantiated(query.getObject())) {
            return;
        }

        if (isPrivateOwned()) {
            // Remember the previously referenced object so postUpdate can delete it
            // if it is no longer referenced.
            Object objectInDatabase = readPrivateOwnedForObject(query);
            if (objectInDatabase != null) {
                query.setProperty(this, objectInDatabase);
            }
        }

        if (!isForeignKeyRelationship()) {
            return;
        }

        update(query);
    }

    /**
     * INTERNAL:
     * Overridden by mappings that require additional processing of the change record after the record has been calculated.
     */
    @Override
    public void postCalculateChanges(org.eclipse.persistence.sessions.changesets.ChangeRecord changeRecord, UnitOfWorkImpl uow) {
        // no need for private owned check.  This code is only registered for private owned mappings.
        // targets are added to and/or removed to/from the source.
        Object oldValue = ((ObjectReferenceChangeRecord)changeRecord).getOldValue();
        if (oldValue != null) {
            // The previously referenced private-owned object is now orphaned.
            uow.addDeletedPrivateOwnedObjects(this, oldValue);
        }
    }

    /**
     * INTERNAL:
     * Overridden by mappings that require additional processing of the change record after the record has been calculated.
     */
    @Override
    public void recordPrivateOwnedRemovals(Object object, UnitOfWorkImpl uow) {
        Object target = getRealAttributeValueFromObject(object, uow);
        if (target != null){
            this.referenceDescriptor.getObjectBuilder().recordPrivateOwnedRemovals(target, uow, false);
        }
    }

    /**
     * INTERNAL:
     * Delete privately owned parts
     */
    @Override
    public void postDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException {
        // Deletion takes place only if it has privately owned parts and mapping is not read only.
        if (!shouldObjectModifyCascadeToParts(query)) {
            return;
        }

        Object object = query.getProperty(this);
        // The object is stored in the query by preDeleteForObjectUsing(...).
        if (isForeignKeyRelationship()) {
            if (object != null) {
                query.removeProperty(this);
                AbstractSession session = query.getSession();

                //if the query is being passed from an aggregate collection descriptor then
                // The delete will have been cascaded at update time.  This will cause sub objects
                // to be ignored, and real only classes to throw exceptions.
                // If it is an aggregate Collection then delay deletes until they should be deleted
                //CR 2811
                if (query.isCascadeOfAggregateDelete()) {
                    session.getCommitManager().addObjectToDelete(object);
                } else {
                    // PERF: Avoid query execution if already deleted.
                    if (session.getCommitManager().isCommitCompletedInPostOrIgnore(object)) {
                        return;
                    }
                    if (this.isCascadeOnDeleteSetOnDatabase && !hasRelationTableMechanism() && session.isUnitOfWork()) {
                        // The database cascades the delete; record the object as cascade-deleted.
                        ((UnitOfWorkImpl)session).getCascadeDeleteObjects().add(object);
                    }
                    DeleteObjectQuery deleteQuery = new DeleteObjectQuery();
                    deleteQuery.setIsExecutionClone(true);
                    deleteQuery.setObject(object);
                    deleteQuery.setCascadePolicy(query.getCascadePolicy());
                    session.executeQuery(deleteQuery);
                }
            }
        }
    }

    /**
     * INTERNAL:
     * Insert privately owned parts
     */
    @Override
    public void postInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        // When the target row holds the foreign key the source must be inserted first.
        if (!isForeignKeyRelationship()) {
            insert(query);
        }
    }

    /**
     * INTERNAL:
     * Update privately owned parts
     */
    @Override
    public void postUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        if (!isAttributeValueInstantiated(query.getObject())) {
            return;
        }

        if (!isForeignKeyRelationship()) {
            update(query);
        }

        // If a private owned reference was changed the old value will be set on the query as a property.
        Object objectInDatabase = query.getProperty(this);
        if (objectInDatabase != null) {
            query.removeProperty(this);
        } else {
            return;
        }

        // If there is no change (old commit), it must be determined if the value changed.
        if (query.getObjectChangeSet() == null) {
            Object objectInMemory = getRealAttributeValueFromObject(query.getObject(), query.getSession());

            // delete the object in the database if it is no more a referenced object.
            if (objectInDatabase != objectInMemory) {
                // Identity may not be maintained, so also compare primary keys
                // before treating the old value as orphaned.
                Object keyForObjectInDatabase = getPrimaryKeyForObject(objectInDatabase, query.getSession());
                Object keyForObjectInMemory = null;
                if (objectInMemory != null) {
                    keyForObjectInMemory = getPrimaryKeyForObject(objectInMemory, query.getSession());
                }
                if ((keyForObjectInMemory != null) && keyForObjectInDatabase.equals(keyForObjectInMemory)) {
                    return;
                }
            } else {
                return;
            }
        }
        if (!query.shouldCascadeOnlyDependentParts()) {
            // The old private-owned value is no longer referenced: delete it.
            query.getSession().deleteObject(objectInDatabase);
        }
    }

    /**
     * INTERNAL:
     * Delete privately owned parts
     */
    @Override
    public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException {
        // Deletion takes place according the the cascading policy
        if (!shouldObjectModifyCascadeToParts(query)) {
            return;
        }

        AbstractSession session = query.getSession();
        // Get the privately owned parts.
        Object objectInMemory = getRealAttributeValueFromObject(query.getObject(), session);
        Object objectFromDatabase = null;

        // Because the value in memory may have been changed we check the previous value or database value.
        objectFromDatabase = readPrivateOwnedForObject(query);

        // If the value was changed, both values must be deleted (uow will have inserted the new one).
        if ((objectFromDatabase != null) && (objectFromDatabase != objectInMemory)) {
            // Also check pk as may not be maintaining identity.
            Object keyForObjectInMemory = null;
            Object keyForObjectInDatabase = getPrimaryKeyForObject(objectFromDatabase, session);
            if (objectInMemory != null) {
                keyForObjectInMemory = getPrimaryKeyForObject(objectInMemory, session);
            }
            if ((keyForObjectInMemory == null) || !keyForObjectInDatabase.equals(keyForObjectInMemory)) {
                if (objectFromDatabase != null) {
                    if (this.isCascadeOnDeleteSetOnDatabase && !hasRelationTableMechanism() && session.isUnitOfWork()) {
                        // The database cascades the delete; just record it.
                        ((UnitOfWorkImpl)session).getCascadeDeleteObjects().add(objectFromDatabase);
                    }
                    DeleteObjectQuery deleteQuery = new DeleteObjectQuery();
                    deleteQuery.setIsExecutionClone(true);
                    deleteQuery.setObject(objectFromDatabase);
                    deleteQuery.setCascadePolicy(query.getCascadePolicy());
                    session.executeQuery(deleteQuery);
                }
            }
        }
        if (!isForeignKeyRelationship()) {
            // The target row references the source, so the target must be deleted
            // now, before the source row is removed.
            if (objectInMemory != null) {
                if (this.isCascadeOnDeleteSetOnDatabase && !hasRelationTableMechanism() && session.isUnitOfWork()) {
                    ((UnitOfWorkImpl)session).getCascadeDeleteObjects().add(objectInMemory);
                }
                // PERF: Avoid query execution if already deleted.
                if (session.getCommitManager().isCommitCompletedInPostOrIgnore(objectInMemory)) {
                    return;
                }
                DeleteObjectQuery deleteQuery = new DeleteObjectQuery();
                deleteQuery.setIsExecutionClone(true);
                deleteQuery.setObject(objectInMemory);
                deleteQuery.setCascadePolicy(query.getCascadePolicy());
                session.executeQuery(deleteQuery);
            }
        } else {
            // The actual deletion of part takes place in postDeleteForObjectUsing(...).
            if (objectInMemory != null) {
                query.setProperty(this, objectInMemory);
            }
        }
    }

    /**
     * INTERNAL:
     * Record deletion dependencies for foreign key constraints.
     * This is used during deletion to resolve deletion cycles.
     */
    @Override
    public void earlyPreDelete(DeleteObjectQuery query, Object object) {
        AbstractSession session = query.getSession();
        // Avoid instantiating objects.
        Object attributeValue = getAttributeValueFromObject(object);
        Object targetObject = null;
        if (!this.indirectionPolicy.objectIsInstantiated(attributeValue) && !this.indirectionPolicy.objectIsEasilyInstantiated(attributeValue)) {
            // Not instantiated: probe the cache with the foreign key row instead of
            // triggering the indirection and hitting the database.
            AbstractRecord referenceRow = this.indirectionPolicy.extractReferenceRow(attributeValue);
            targetObject = this.selectionQuery.checkEarlyReturn(session, referenceRow);
        } else {
            targetObject = getRealAttributeValueFromAttribute(attributeValue, object, session);
        }
        UnitOfWorkImpl unitOfWork = (UnitOfWorkImpl)session;
        if ((targetObject != null) && unitOfWork.getDeletedObjects().containsKey(targetObject)) {
            // The referenced target is also being deleted: record the ordering
            // dependency so FK constraints are not violated.
            unitOfWork.addDeletionDependency(targetObject, object);
        }
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade
     */
    @Override
    public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){
        cascadePerformRemoveIfRequired(object, uow, visitedObjects, true);
    }

    /**
     * INTERNAL:
     * Cascade remove through mappings that require the cascade.
     * @param object is either the source object, or attribute value if getAttributeValueFromObject is true.
     */
    public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject) {
        if (!this.cascadeRemove) {
            return;
        }
        Object attributeValue = null;
        if (getAttributeValueFromObject) {
            attributeValue = getAttributeValueFromObject(object);
        } else {
            attributeValue = object;
        }
        if (attributeValue != null) {
            if (getAttributeValueFromObject) {
                attributeValue = this.indirectionPolicy.getRealAttributeValueFromObject(object, attributeValue);
            }
            // visitedObjects guards against cycles in the object graph.
            if (attributeValue != null && (!
                    visitedObjects.containsKey(attributeValue))) {
                visitedObjects.put(attributeValue, attributeValue);
                if (this.isCascadeOnDeleteSetOnDatabase && !hasRelationTableMechanism()) {
                    uow.getCascadeDeleteObjects().add(attributeValue);
                }
                uow.performRemove(attributeValue, visitedObjects);
            }
        }
    }

    /**
     * INTERNAL:
     * Cascade removal of orphaned private owned objects from the UnitOfWorkChangeSet
     */
    @Override
    public void cascadePerformRemovePrivateOwnedObjectFromChangeSetIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) {
        // if the object is not instantiated, do not instantiate or cascade
        Object attributeValue = getAttributeValueFromObject(object);
        if (attributeValue != null && this.indirectionPolicy.objectIsInstantiated(attributeValue)) {
            Object realValue = getRealAttributeValueFromObject(object, uow);
            if (!visitedObjects.containsKey(realValue)){
                visitedObjects.put(realValue, realValue);
                // remove private owned object from UnitOfWork ChangeSet
                uow.performRemovePrivateOwnedObjectFromChangeSet(realValue, visitedObjects);
            }
        }
    }

    /**
     * INTERNAL:
     * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings
     * the FK field values will be used to re-issue the query when cloning the shared cache entity
     */
    @Override
    public void collectQueryParameters(Set cacheFields){
        for (DatabaseField field : foreignKeyFields) {
            cacheFields.add(field);
        }
    }

    /**
     * INTERNAL:
     * Cascade discover and persist new objects during commit.
     */
    @Override
    public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, Set cascadeErrors) {
        cascadeDiscoverAndPersistUnregisteredNewObjects(object, newObjects, unregisteredExistingObjects, visitedObjects, uow, true, cascadeErrors);
    }

    /**
     * INTERNAL:
     * Cascade discover and persist new objects during commit.
     */
    public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow, boolean getAttributeValueFromObject, Set cascadeErrors) {
        Object attributeValue = null;
        if (getAttributeValueFromObject){
            attributeValue = getAttributeValueFromObject(object);
        } else {
            attributeValue = object;
        }
        // Only cascade into instantiated values; never trigger indirection here.
        if (attributeValue != null && this.indirectionPolicy.objectIsInstantiated(attributeValue)) {
            if (getAttributeValueFromObject){
                attributeValue = this.indirectionPolicy.getRealAttributeValueFromObject(object, attributeValue);
            }
            // remove private owned object from uow list
            if (isCandidateForPrivateOwnedRemoval()) {
                uow.removePrivateOwnedObject(this, attributeValue);
            }
            uow.discoverAndPersistUnregisteredNewObjects(attributeValue, isCascadePersist(), newObjects, unregisteredExistingObjects, visitedObjects, cascadeErrors);
        }
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade
     */
    @Override
    public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){
        cascadeRegisterNewIfRequired(object, uow, visitedObjects, true);
    }

    /**
     * INTERNAL:
     * Cascade registerNew for Create through mappings that require the cascade
     * @param object is either the source object, or attribute value if getAttributeValueFromObject is true.
     */
    public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject) {
        if (!isCascadePersist()) {
            return;
        }
        Object attributeValue = null;
        if (getAttributeValueFromObject) {
            attributeValue = getAttributeValueFromObject(object);
        } else {
            attributeValue = object;
        }
        if ((attributeValue != null)
                // no need to check for new as persist must be cascaded.
                && (this.indirectionPolicy.objectIsInstantiated(attributeValue) || uow.isCloneNewObject(object))) {
            if (getAttributeValueFromObject){
                attributeValue = this.indirectionPolicy.getRealAttributeValueFromObject(object, attributeValue);
            }
            uow.registerNewObjectForPersist(attributeValue, visitedObjects);
            // add private owned object to uow list if mapping is a candidate and uow should discover new objects and the source object is new.
            if (isCandidateForPrivateOwnedRemoval() && uow.shouldDiscoverNewObjects() && (attributeValue != null) && uow.isCloneNewObject(object)) {
                uow.addPrivateOwnedObject(this, attributeValue);
            }
        }
    }

    /**
     * INTERNAL:
     * Return the primary key of the given reference object, extracted by the
     * reference descriptor's object builder.
     */
    protected Object getPrimaryKeyForObject(Object object, AbstractSession session) {
        return getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(object, session);
    }

    /**
     * INTERNAL:
     * The returns if the mapping has any constraint dependencies, such as foreign keys and join tables.
     */
    @Override
    public boolean hasConstraintDependency() {
        return isForeignKeyRelationship();
    }

    /**
     * INTERNAL:
     * Builder the unit of work value holder.
     * @param buildDirectlyFromRow indicates that we are building the clone directly
     * from a row as opposed to building the original from the row, putting it in
     * the shared cache, and then cloning the original.
     */
    @Override
    public DatabaseValueHolder createCloneValueHolder(ValueHolderInterface attributeValue, Object original, Object clone, AbstractRecord row, AbstractSession cloningSession, boolean buildDirectlyFromRow) {
        DatabaseValueHolder valueHolder = null;
        if ((row == null) && (isPrimaryKeyMapping())) {
            // The row must be built if a primary key mapping for remote case.
            AbstractRecord rowFromTargetObject = extractPrimaryKeyRowForSourceObject(original, cloningSession);
            valueHolder = cloningSession.createCloneQueryValueHolder(attributeValue, clone, rowFromTargetObject, this);
        } else {
            valueHolder = cloningSession.createCloneQueryValueHolder(attributeValue, clone, row, this);
        }

        // In case of joined attributes it so happens that the attributeValue
        // contains a registered clone, as valueFromRow was called with a
        // UnitOfWork.  So switch the values.
        // Note that this UOW valueholder starts off as instantiated but that
        // is fine, for the reality is that it is.
        if (buildDirectlyFromRow && attributeValue.isInstantiated()) {
            Object cloneAttributeValue = attributeValue.getValue();
            valueHolder.privilegedSetValue(cloneAttributeValue);
            valueHolder.setInstantiated();

            // PERF: Do not modify the original value-holder, it is never used.
        }
        return valueHolder;
    }

    /**
     * INTERNAL:
     * Extract the reference pk for rvh usage in remote model.
     */
    public AbstractRecord extractPrimaryKeyRowForSourceObject(Object domainObject, AbstractSession session) {
        AbstractRecord databaseRow = getDescriptor().getObjectBuilder().createRecord(session);
        writeFromObjectIntoRow(domainObject, databaseRow, session, WriteType.UNDEFINED);
        return databaseRow;
    }

    /**
     * INTERNAL:
     * Extract the reference pk for rvh usage in remote model.
     */
    public Object extractPrimaryKeysForReferenceObject(Object domainObject, AbstractSession session) {
        return this.indirectionPolicy.extractPrimaryKeyForReferenceObject(getAttributeValueFromObject(domainObject), session);
    }

    /**
     * INTERNAL:
     * Return the primary key for the reference object (i.e. the object
     * object referenced by domainObject and specified by mapping).
     * This key will be used by a RemoteValueHolder.
     * This base implementation returns null; subclasses that can derive the key
     * from the row override it.
     */
    public Object extractPrimaryKeysForReferenceObjectFromRow(AbstractRecord row) {
        return null;
    }

    /**
     * INTERNAL:
     * Extract the reference pk for rvh usage in remote model.
     */
    public Object extractPrimaryKeysFromRealReferenceObject(Object object, AbstractSession session) {
        if (object == null) {
            return null;
        } else {
            // Unwrap first in case the reference is a descriptor wrapper.
            Object implementation = getReferenceDescriptor().getObjectBuilder().unwrapObject(object, session);
            return getReferenceDescriptor().getObjectBuilder().extractPrimaryKeyFromObject(implementation, session);
        }
    }

    /**
     * INTERNAL:
     * Initialize the state of mapping.
     */
    @Override
    public void preInitialize(AbstractSession session) throws DescriptorException {
        super.preInitialize(session);

        //Bug#4251902 Make Proxy Indirection writable and readable to deployment xml.  If ProxyIndirectionPolicy does not
        //have any targetInterfaces, build a new set.
        if ((this.indirectionPolicy instanceof ProxyIndirectionPolicy) && !((ProxyIndirectionPolicy)this.indirectionPolicy).hasTargetInterfaces()) {
            useProxyIndirection();
        }
    }

    /**
     * INTERNAL:
     * Insert privately owned parts
     */
    protected void insert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        // Checks if privately owned parts should be inserted or not.
        if (!shouldObjectModifyCascadeToParts(query)) {
            return;
        }

        // Get the privately owned parts
        Object object = getRealAttributeValueFromObject(query.getObject(), query.getSession());

        if (object == null) {
            return;
        }
        AbstractSession session = query.getSession();
        // PERF: Avoid query execution if already written.
        if (session.getCommitManager().isCommitCompletedInPostOrIgnore(object)) {
            return;
        }

        ObjectChangeSet changeSet = null;
        // Get changeSet for referenced object.  Change record may not exist for new objects, so always lookup.
        if (session.isUnitOfWork() && (((UnitOfWorkImpl)session).getUnitOfWorkChangeSet() != null)) {
            UnitOfWorkChangeSet uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)session).getUnitOfWorkChangeSet();
            changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(object);
            // PERF: If the changeSet is null it must be existing, if it is not new, then cascading is not required.
            if (changeSet == null || !changeSet.isNew()) {
                return;
            }
        }
        WriteObjectQuery writeQuery = null;
        // If private owned, the dependent objects should also be new.
        // However a bug was logged was put in to allow dependent objects to be existing in a unit of work,
        // so this allows existing dependent objects in the unit of work.
        if (this.isPrivateOwned && ((changeSet == null) || (changeSet.isNew()))) {
            // no identity check needed for private owned
            writeQuery = new InsertObjectQuery();
        } else {
            writeQuery = new WriteObjectQuery();
        }
        writeQuery.setIsExecutionClone(true);
        writeQuery.setObject(object);
        writeQuery.setObjectChangeSet(changeSet);
        writeQuery.setCascadePolicy(query.getCascadePolicy());
        session.executeQuery(writeQuery);
    }

    /**
     * INTERNAL:
     * Update the private owned part.
     */
    protected void update(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        if (!shouldObjectModifyCascadeToParts(query)) {
            return;
        }
        Object sourceObject = query.getObject();
        Object attributeValue = getAttributeValueFromObject(sourceObject);
        // If objects are not instantiated that means they are not changed.
        if (!this.indirectionPolicy.objectIsInstantiated(attributeValue)) {
            return;
        }

        // Get the privately owned parts in the memory
        AbstractSession session = query.getSession();
        Object object = getRealAttributeValueFromAttribute(attributeValue, sourceObject, session);
        if (object != null) {
            ObjectChangeSet changeSet = query.getObjectChangeSet();
            if (changeSet != null) {
                ObjectReferenceChangeRecord changeRecord = (ObjectReferenceChangeRecord)query.getObjectChangeSet().getChangesForAttributeNamed(getAttributeName());
                if (changeRecord != null) {
                    changeSet = (ObjectChangeSet)changeRecord.getNewValue();
                    // PERF: If it is not new, then cascading is not required.
                    if (!changeSet.isNew()) {
                        return;
                    }
                } else {
                    // no changeRecord no change to reference.
                    return;
                }
            } else {
                UnitOfWorkChangeSet uowChangeSet = null;

                // Get changeSet for referenced object.
                if (session.isUnitOfWork() && (((UnitOfWorkImpl)session).getUnitOfWorkChangeSet() != null)) {
                    uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)session).getUnitOfWorkChangeSet();
                    changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(object);
                    // PERF: If the changeSet is null it must be existing, if it is not new, then cascading is not required.
                    if (changeSet == null || !changeSet.isNew()) {
                        return;
                    }
                }
            }
            // PERF: Only write dependent object if they are new.
            if ((!query.shouldCascadeOnlyDependentParts()) || (changeSet == null) || changeSet.isNew()) {
                // PERF: Avoid query execution if already written.
                if (session.getCommitManager().isCommitCompletedInPostOrIgnore(object)) {
                    return;
                }
                WriteObjectQuery writeQuery = new WriteObjectQuery();
                writeQuery.setIsExecutionClone(true);
                writeQuery.setObject(object);
                writeQuery.setObjectChangeSet(changeSet);
                writeQuery.setCascadePolicy(query.getCascadePolicy());
                session.executeQuery(writeQuery);
            }
        }
    }

    /**
     * PUBLIC:
     * Indicates whether the mapping has RelationTableMechanism.
     */
    public boolean hasRelationTableMechanism() {
        return false;
    }

    /**
     * PUBLIC:
     * Set this mapping to use Proxy Indirection.
     *
     * Proxy Indirection uses the <CODE>Proxy</CODE> and <CODE>InvocationHandler</CODE> features
     * of JDK 1.3 to provide "transparent indirection" for 1:1 relationships.  In order to use Proxy
     * Indirection:
     * <UL>
     * <LI>The target class must implement at least one public interface</LI>
     * <LI>The attribute on the source class must be typed as that public interface</LI>
     * <LI>get() and set() methods for the attribute must use the interface</LI>
     * </UL>
     *
     * With this policy, proxy objects are returned during object creation.  When a message other than
     * <CODE>toString</CODE> is called on the proxy the real object data is retrieved from the database.
     *
     * By default, use the target class' full list of interfaces for the proxy.
     */
    public void useProxyIndirection() {
        Class[] targetInterfaces = getReferenceClass().getInterfaces();
        // NOTE(review): this first branch only fires when the reference class has no
        // superclass at all (java.lang.Object); ordinary classes fall through to the
        // HashSet branch which also collects inherited interfaces — confirm intended.
        if (!getReferenceClass().isInterface() && getReferenceClass().getSuperclass() == null) {
            setIndirectionPolicy(new ProxyIndirectionPolicy(targetInterfaces));
        } else {
            HashSet targetInterfacesCol = new HashSet();
            //Bug#4432781 Include all the interfaces and the super interfaces of the target class
            if (getReferenceClass().getSuperclass() != null) {
                buildTargetInterfaces(getReferenceClass(), targetInterfacesCol);
            }
            //Bug#4251902 Make Proxy Indirection writable and readable to deployment xml.  If
            //ReferenceClass is an interface, it needs to be included in the array.
            if (getReferenceClass().isInterface()) {
                targetInterfacesCol.add(getReferenceClass());
            }
            targetInterfaces = (Class[])targetInterfacesCol.toArray(targetInterfaces);
            setIndirectionPolicy(new ProxyIndirectionPolicy(targetInterfaces));
        }
    }

    /**
     * INTERNAL: This method will access the target relationship and create a
     * list of PKs of the target entities. This method is used in combination
     * with the CachedValueHolder to store references to PK's to be loaded from
     * a cache instead of a query.
* @see ContainerPolicy.buildReferencesPKList() * @see MappedKeyMapContainerPolicy() */ @Override public Object[] buildReferencesPKList(Object entity, Object attribute, AbstractSession session) { ClassDescriptor referenceDescriptor = getReferenceDescriptor(); Object target = this.indirectionPolicy.getRealAttributeValueFromObject(entity, attribute); if (target != null){ Object[] result = new Object[1]; result[0] = referenceDescriptor.getObjectBuilder().extractPrimaryKeyFromObject(target, session); return result; } return new Object[]{}; } /** * INTERNAL: * Build a list of all the interfaces and super interfaces for a given class. */ public Collection buildTargetInterfaces(Class aClass, Collection targetInterfacesCol) { Class[] targetInterfaces = aClass.getInterfaces(); for (int index = 0; index < targetInterfaces.length; index++) { targetInterfacesCol.add(targetInterfaces[index]); } if (aClass.getSuperclass() == null) { return targetInterfacesCol; } else { return buildTargetInterfaces(aClass.getSuperclass(), targetInterfacesCol); } } /** * PUBLIC: * Set this mapping to use Proxy Indirection. * * Proxy Indirection uses the Proxy and InvocationHandler features * of JDK 1.3 to provide "transparent indirection" for 1:1 relationships. In order to use Proxy * Indirection:

     * <ul>
     * <li>The target class must implement at least one public interface</li>
     * <li>The attribute on the source class must be typed as that public interface</li>
     * <li>get() and set() methods for the attribute must use the interface</li>
     * </ul>
* * With this policy, proxy objects are returned during object creation. When a message other than * toString is called on the proxy the real object data is retrieved from the database. * * @param proxyInterfaces The interfaces that the target class implements. The attribute must be typed * as one of these interfaces. */ public void useProxyIndirection(Class[] targetInterfaces) { setIndirectionPolicy(new ProxyIndirectionPolicy(targetInterfaces)); } /** * PUBLIC: * Set this mapping to use Proxy Indirection. * * Proxy Indirection uses the Proxy and InvocationHandler features * of JDK 1.3 to provide "transparent indirection" for 1:1 relationships. In order to use Proxy * Indirection:

     * <ul>
     * <li>The target class must implement at least one public interface</li>
     * <li>The attribute on the source class must be typed as that public interface</li>
     * <li>get() and set() methods for the attribute must use the interface</li>
     * </ul>
* * With this policy, proxy objects are returned during object creation. When a message other than * toString is called on the proxy the real object data is retrieved from the database. * * @param proxyInterface The interface that the target class implements. The attribute must be typed * as this interface. */ public void useProxyIndirection(Class targetInterface) { Class[] targetInterfaces = new Class[] { targetInterface }; setIndirectionPolicy(new ProxyIndirectionPolicy(targetInterfaces)); } /** * INTERNAL: * This method is used to load a relationship from a list of PKs. * This list may be available if the relationship has been cached. */ @Override public Object valueFromPKList(Object[] pks, AbstractRecord foreignKeys, AbstractSession session) { if (pks.length == 0 || pks[0] == null) return null; ReadObjectQuery query = new ReadObjectQuery(); query.setReferenceClass(getReferenceClass()); query.setSelectionId(pks[0]); query.setIsExecutionClone(true); query.setSession(session); return session.executeQuery(query); } /** * INTERNAL: * To verify if the specified object is deleted or not. */ @Override public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException { if (isPrivateOwned() || isCascadeRemove()) { Object attributeValue = getRealAttributeValueFromObject(object, session); if (attributeValue != null) { return session.verifyDelete(attributeValue); } } return true; } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. * But before that check if the reference object is instantiated or not. 
*/ @Override public void writeFromObjectIntoRowForUpdate(WriteObjectQuery query, AbstractRecord databaseRow) { Object object = query.getObject(); AbstractSession session = query.getSession(); if (!isAttributeValueInstantiated(object)) { return; } if (session.isUnitOfWork()) { if (compareObjectsWithoutPrivateOwned(query.getBackupClone(), object, session)) { return; } } writeFromObjectIntoRow(object, databaseRow, session, WriteType.UPDATE); } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. */ @Override public void writeFromObjectIntoRowForWhereClause(ObjectLevelModifyQuery query, AbstractRecord databaseRow) { if (isReadOnly()) { return; } if (query.isDeleteObjectQuery()) { writeFromObjectIntoRow(query.getObject(), databaseRow, query.getSession(), WriteType.UNDEFINED); } else { // If the original was never instantiated the backup clone has a ValueHolder of null // so for this case we must extract from the original object. if (isAttributeValueInstantiated(query.getObject())) { writeFromObjectIntoRow(query.getBackupClone(), databaseRow, query.getSession(), WriteType.UNDEFINED); } else { writeFromObjectIntoRow(query.getObject(), databaseRow, query.getSession(), WriteType.UNDEFINED); } } } /** * INTERNAL: * Return if this mapping supports change tracking. */ @Override public boolean isChangeTrackingSupported(Project project) { return true; } /** * INTERNAL: * Either create a new change record or update the change record with the new value. * This is used by attribute change tracking. */ @Override public void updateChangeRecord(Object clone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) { // Must ensure values are unwrapped. 
        Object unwrappedNewValue = newValue;
        Object unwrappedOldValue = oldValue;
        // Unwrap any descriptor-wrapped values before recording them in the change set.
        if (newValue != null) {
            unwrappedNewValue = getReferenceDescriptor().getObjectBuilder().unwrapObject(newValue, uow);
        }
        if (oldValue != null) {
            unwrappedOldValue = getReferenceDescriptor().getObjectBuilder().unwrapObject(oldValue, uow);
        }
        ObjectReferenceChangeRecord changeRecord = (ObjectReferenceChangeRecord)objectChangeSet.getChangesForAttributeNamed(this.getAttributeName());
        if (changeRecord == null) {
            // First change for this attribute - build a new record holding both values.
            changeRecord = internalBuildChangeRecord(unwrappedNewValue, objectChangeSet, uow);
            changeRecord.setOldValue(unwrappedOldValue);
            objectChangeSet.addChange(changeRecord);
        } else {
            // Subsequent change - only the new value is replaced; the recorded old value is kept.
            setNewValueInChangeRecord(unwrappedNewValue, changeRecord, objectChangeSet, uow);
        }
    }

    /**
     * INTERNAL:
     * Update a ChangeRecord to replace the ChangeSet for the old entity with the changeSet for the new Entity. This is
     * used when an Entity is merged into itself and the Entity reference new or detached entities.
     */
    public void updateChangeRecordForSelfMerge(ChangeRecord changeRecord, Object source, Object target, UnitOfWorkChangeSet parentUOWChangeSet, UnitOfWorkImpl unitOfWork){
        ((ObjectReferenceChangeRecord)changeRecord).setNewValue(((UnitOfWorkChangeSet)unitOfWork.getUnitOfWorkChangeSet()).findOrCreateLocalObjectChangeSet(target, referenceDescriptor, unitOfWork.isCloneNewObject(target)));
    }

    /**
     * INTERNAL:
     * Directly build a change record without comparison
     */
    @Override
    public ChangeRecord buildChangeRecord(Object clone, ObjectChangeSet owner, AbstractSession session) {
        return internalBuildChangeRecord(getRealAttributeValueFromObject(clone, session), owner, session);
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/PropertyAssociation.java0000664000000000000000000000207112216173130025105 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; /** *

Purpose: Generic association object. * This can be used to map hashtable/map containers where the key and value primitives or independent objects. * * @author Mike Norman * @since TopLink 11.1.0.0 */ public class PropertyAssociation extends Association { } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/RelationTableMechanism.java0000664000000000000000000013511212216173130025441 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * 07/16/2009 Andrei Ilitchev * - Bug 282553: JPA 2.0 JoinTable support for OneToOne and ManyToOne * 14/05/2012-2.4 Guy Pelletier * - 376603: Provide for table per tenant support for multitenant applications ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.ArrayList; import java.util.Enumeration; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Vector; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.TablePerMultitenantPolicy; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.expressions.Expression; import org.eclipse.persistence.expressions.ExpressionBuilder; import org.eclipse.persistence.internal.databaseaccess.DatasourcePlatform; import org.eclipse.persistence.internal.databaseaccess.Platform; import org.eclipse.persistence.internal.descriptors.ObjectBuilder; import 
org.eclipse.persistence.internal.expressions.FieldExpression; import org.eclipse.persistence.internal.expressions.ForUpdateClause; import org.eclipse.persistence.internal.expressions.ForUpdateOfClause; import org.eclipse.persistence.internal.expressions.SQLDeleteStatement; import org.eclipse.persistence.internal.expressions.SQLInsertStatement; import org.eclipse.persistence.internal.expressions.SQLSelectStatement; import org.eclipse.persistence.internal.helper.ConversionManager; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.helper.DatabaseTable; import org.eclipse.persistence.internal.identitymaps.CacheId; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.mappings.ForeignReferenceMapping.ExtendPessimisticLockScope; import org.eclipse.persistence.queries.Call; import org.eclipse.persistence.queries.DataModifyQuery; import org.eclipse.persistence.queries.DirectReadQuery; import org.eclipse.persistence.queries.ObjectBuildingQuery; import org.eclipse.persistence.queries.ObjectLevelReadQuery; import org.eclipse.persistence.queries.ReadAllQuery; import org.eclipse.persistence.queries.ReadQuery; import org.eclipse.persistence.sessions.DatabaseRecord; /** *

 * <p><b>Purpose</b>: Contains relation table functionality
 * that was originally defined in ManyToManyMapping
 * and now is shared with OneToOneMapping.
 */
public class RelationTableMechanism implements Cloneable, java.io.Serializable {
    /** The intermediate relation table. */
    protected DatabaseTable relationTable;

    /** The field in the source table that corresponds to the key in the relation table */
    protected Vector sourceKeyFields;

    /** The field in the target table that corresponds to the key in the relation table */
    protected Vector targetKeyFields;

    /** The field in the intermediate table that corresponds to the key in the source table */
    protected Vector sourceRelationKeyFields;

    /** The field in the intermediate table that corresponds to the key in the target table */
    protected Vector targetRelationKeyFields;

    /** Query used for single row deletion. */
    protected DataModifyQuery deleteQuery;
    // True when the user supplied a custom delete query (default SQL generation is skipped).
    protected boolean hasCustomDeleteQuery;

    /** Used for insertion. */
    protected DataModifyQuery insertQuery;
    // True when the user supplied a custom insert query (default SQL generation is skipped).
    protected boolean hasCustomInsertQuery;

    // Query used when a dedicated query must pessimistically lock the relation table row(s).
    protected ReadQuery lockRelationTableQuery;

    /**
     * Default constructor: creates empty key-field collections and default
     * (non-custom) insert/delete queries.
     */
    public RelationTableMechanism() {
        this.insertQuery = new DataModifyQuery();
        this.deleteQuery = new DataModifyQuery();
        this.sourceRelationKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
        this.targetRelationKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
        this.sourceKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
        this.targetKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
        this.hasCustomDeleteQuery = false;
        this.hasCustomInsertQuery = false;
    }

    /**
     * PUBLIC:
     * Add the fields in the intermediate table that corresponds to the primary
     * key in the source table. This method is used if the keys are composite.
*/ public void addSourceRelationKeyField(DatabaseField sourceRelationKeyField, DatabaseField sourcePrimaryKeyField) { getSourceRelationKeyFields().addElement(sourceRelationKeyField); getSourceKeyFields().addElement(sourcePrimaryKeyField); } /** * PUBLIC: * Add the fields in the intermediate table that corresponds to the primary * key in the source table. This method is used if the keys are composite. */ public void addSourceRelationKeyFieldName(String sourceRelationKeyFieldName, String sourcePrimaryKeyFieldName) { addSourceRelationKeyField(new DatabaseField(sourceRelationKeyFieldName), new DatabaseField(sourcePrimaryKeyFieldName)); } /** * PUBLIC: * Add the fields in the intermediate table that corresponds to the primary * key in the target table. This method is used if the keys are composite. */ public void addTargetRelationKeyField(DatabaseField targetRelationKeyField, DatabaseField targetPrimaryKeyField) { getTargetRelationKeyFields().addElement(targetRelationKeyField); getTargetKeyFields().addElement(targetPrimaryKeyField); } /** * PUBLIC: * Add the fields in the intermediate table that corresponds to the primary * key in the target table. This method is used if the keys are composite. */ public void addTargetRelationKeyFieldName(String targetRelationKeyFieldName, String targetPrimaryKeyFieldName) { addTargetRelationKeyField(new DatabaseField(targetRelationKeyFieldName), new DatabaseField(targetPrimaryKeyFieldName)); } /** * INTERNAL: * Selection criteria is created to read target records from the table. 
*/ Expression buildSelectionCriteria(ForeignReferenceMapping mapping, Expression criteria) { return buildSelectionCriteriaAndAddFieldsToQueryInternal(mapping, criteria, true, false); } Expression buildSelectionCriteriaAndAddFieldsToQuery(ForeignReferenceMapping mapping, Expression criteria) { return buildSelectionCriteriaAndAddFieldsToQueryInternal(mapping, criteria, true, true); } /** * INTERNAL: * Build the selection criteria to join the source, relation, and target tables. */ public Expression buildSelectionCriteriaAndAddFieldsToQueryInternal(ForeignReferenceMapping mapping, Expression criteria, boolean shouldAddTargetFields, boolean shouldAddFieldsToQuery) { Expression builder = new ExpressionBuilder(); Expression linkTable = builder.getTable(this.relationTable); if (shouldAddTargetFields) { Iterator targetKeyIterator = getTargetKeyFields().iterator(); Iterator relationKeyIterator = getTargetRelationKeyFields().iterator(); while (targetKeyIterator.hasNext()) { DatabaseField relationKey = relationKeyIterator.next(); DatabaseField targetKey = targetKeyIterator.next(); Expression expression = builder.getField(targetKey).equal(linkTable.getField(relationKey)); if (criteria == null) { criteria = expression; } else { criteria = expression.and(criteria); } } } Iterator relationKeyIterator = getSourceRelationKeyFields().iterator(); Iterator sourceKeyIterator = getSourceKeyFields().iterator(); while (relationKeyIterator.hasNext()) { DatabaseField relationKey = relationKeyIterator.next(); DatabaseField sourceKey = sourceKeyIterator.next(); Expression expression = linkTable.getField(relationKey).equal(builder.getParameter(sourceKey)); if (criteria == null) { criteria = expression; } else { criteria = expression.and(criteria); } } if (shouldAddFieldsToQuery && mapping.isCollectionMapping()) { ((CollectionMapping)mapping).getContainerPolicy().addAdditionalFieldsToQuery(mapping.getSelectionQuery(), linkTable); } return criteria; } /** * INTERNAL: * This method is used to 
store the FK fields that can be cached that correspond to noncacheable mappings * the FK field values will be used to re-issue the query when cloning the shared cache entity */ protected void collectQueryParameters(Set cacheFields){ for (DatabaseField field : getSourceKeyFields()) { cacheFields.add(field); } } /** * INTERNAL: * The mapping clones itself to create deep copy. */ public Object clone() { RelationTableMechanism clone; try { clone = (RelationTableMechanism)super.clone(); } catch (CloneNotSupportedException e) { throw new InternalError(); } clone.setTargetKeyFields(cloneFields(getTargetKeyFields())); clone.setSourceKeyFields(cloneFields(getSourceKeyFields())); clone.setTargetRelationKeyFields(cloneFields(getTargetRelationKeyFields())); clone.setSourceRelationKeyFields(cloneFields(getSourceRelationKeyFields())); clone.setInsertQuery((DataModifyQuery) insertQuery.clone()); clone.setDeleteQuery((DataModifyQuery) deleteQuery.clone()); if(lockRelationTableQuery != null) { clone.lockRelationTableQuery = (DirectReadQuery)lockRelationTableQuery.clone(); } return clone; } /** * INTERNAL: * Helper method to clone vector of fields (used in aggregate initialization cloning). 
*/ protected Vector cloneFields(Vector fields) { Vector clonedFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(); for (Enumeration fieldsEnum = fields.elements(); fieldsEnum.hasMoreElements();) { clonedFields.addElement(((DatabaseField)fieldsEnum.nextElement()).clone()); } return clonedFields; } protected DataModifyQuery getDeleteQuery() { return deleteQuery; } /** * INTERNAL: * Returns a query that */ ReadQuery getLockRelationTableQueryClone(AbstractSession session, short lockMode) { DirectReadQuery lockRelationTableQueryClone = (DirectReadQuery)lockRelationTableQuery.clone(); SQLSelectStatement statement = new SQLSelectStatement(); statement.addTable(this.relationTable); statement.addField(this.sourceRelationKeyFields.get(0).clone()); statement.setWhereClause((Expression)lockRelationTableQuery.getSelectionCriteria().clone()); statement.setLockingClause(new ForUpdateClause(lockMode)); statement.normalize(session, null); lockRelationTableQueryClone.setSQLStatement(statement); lockRelationTableQueryClone.setIsExecutionClone(true); return lockRelationTableQueryClone; } /** * INTERNAL: * Return relation table locking clause. */ public void setRelationTableLockingClause(ObjectLevelReadQuery targetQuery, ObjectBuildingQuery sourceQuery) { ForUpdateOfClause lockingClause = new ForUpdateOfClause(); lockingClause.setLockMode(sourceQuery.getLockMode()); FieldExpression exp = (FieldExpression)targetQuery.getExpressionBuilder().getTable(this.relationTable).getField(this.sourceRelationKeyFields.get(0)); lockingClause.addLockedExpression(exp); targetQuery.setLockingClause(lockingClause); // locking clause is not compatible with DISTINCT targetQuery.setShouldOuterJoinSubclasses(true); } protected DataModifyQuery getInsertQuery() { return insertQuery; } /** * INTERNAL: * Return the relation table associated with the mapping. 
*/ public DatabaseTable getRelationTable() { return relationTable; } /** * PUBLIC: * Return the relation table name associated with the mapping. */ public String getRelationTableName() { if (relationTable == null) { return null; } return relationTable.getName(); } /** * PUBLIC: * Return the relation table qualified name associated with the mapping. */ public String getRelationTableQualifiedName() { if (relationTable == null) { return null; } return relationTable.getQualifiedName(); } /** * PUBLIC: * Return the source key field names associated with the mapping. * These are in-order with the sourceRelationKeyFieldNames. */ public Vector getSourceKeyFieldNames() { Vector fieldNames = new Vector(getSourceKeyFields().size()); for (Enumeration fieldsEnum = getSourceKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return the selection criteria used to IN batch fetching. */ protected Expression buildBatchCriteria(ExpressionBuilder builder, ObjectLevelReadQuery query) { Expression linkTable = builder.getTable(this.relationTable); Expression criteria = null; int size = this.targetRelationKeyFields.size(); for (int index = 0; index < size; index++) { DatabaseField relationKey = this.targetRelationKeyFields.get(index); DatabaseField targetKey = this.targetKeyFields.get(index); criteria = builder.getField(targetKey).equal(linkTable.getField(relationKey)).and(criteria); } size = this.sourceRelationKeyFields.size(); if (size > 1) { // Support composite keys using nested IN. 
List fields = new ArrayList(size); for (DatabaseField sourceRelationKeyField : this.sourceRelationKeyFields) { fields.add(linkTable.getField(sourceRelationKeyField)); } return criteria.and(query.getSession().getPlatform().buildBatchCriteriaForComplexId(builder, fields)); } else { return criteria.and(query.getSession().getPlatform().buildBatchCriteria(builder, linkTable.getField(this.sourceRelationKeyFields.get(0)))); } } /** * INTERNAL: * Add the addition join fields to the batch query. */ public void postPrepareNestedBatchQuery(ReadQuery batchQuery, ObjectLevelReadQuery query) { ReadAllQuery mappingBatchQuery = (ReadAllQuery)batchQuery; mappingBatchQuery.setShouldIncludeData(true); Expression linkTable = mappingBatchQuery.getExpressionBuilder().getTable(this.relationTable); for (DatabaseField relationField : this.sourceRelationKeyFields) { mappingBatchQuery.getAdditionalFields().add(linkTable.getField(relationField)); } } /** * INTERNAL: * Extract the foreign key value from the source row. */ protected Object extractBatchKeyFromRow(AbstractRecord row, AbstractSession session) { Object[] key; ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager(); List sourceKeyFields = this.sourceKeyFields; int size = sourceKeyFields.size(); key = new Object[size]; for (int index = 0; index < size; index++) { DatabaseField field = sourceKeyFields.get(index); Object value = row.get(field); // Must ensure the classification gets a cache hit. key[index] = conversionManager.convertObject(value, field.getType()); } return new CacheId(key); } /** * INTERNAL: * Extract the source primary key value from the relation row. * Used for batch reading, most following same order and fields as in the mapping. 
*/ protected Object extractKeyFromTargetRow(AbstractRecord row, AbstractSession session) { int size = getSourceRelationKeyFields().size(); Object[] key = new Object[size]; ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager(); for (int index = 0; index < size; index++) { DatabaseField relationField = this.sourceRelationKeyFields.get(index); DatabaseField sourceField = this.sourceKeyFields.get(index); Object value = row.get(relationField); // Must ensure the classification gets a cache hit. value = conversionManager.convertObject(value, sourceField.getType()); key[index] = value; } return new CacheId(key); } /** * INTERNAL: * Return all the source key fields associated with the mapping. */ public Vector getSourceKeyFields() { return sourceKeyFields; } /** * PUBLIC: * Return the source relation key field names associated with the mapping. * These are in-order with the sourceKeyFieldNames. */ public Vector getSourceRelationKeyFieldNames() { Vector fieldNames = new Vector(getSourceRelationKeyFields().size()); for (Enumeration fieldsEnum = getSourceRelationKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return all the source relation key fields associated with the mapping. */ public Vector getSourceRelationKeyFields() { return sourceRelationKeyFields; } /** * PUBLIC: * Return the target key field names associated with the mapping. * These are in-order with the targetRelationKeyFieldNames. */ public Vector getTargetKeyFieldNames() { Vector fieldNames = new Vector(getTargetKeyFields().size()); for (Enumeration fieldsEnum = getTargetKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return the relation field for the target field. 
*/ public DatabaseField getRelationFieldForTargetField(DatabaseField targetField) { int index = this.targetKeyFields.indexOf(targetField); if (index == -1) { return null; } return this.targetRelationKeyFields.get(index); } /** * INTERNAL: * Return all the target keys associated with the mapping. */ public Vector getTargetKeyFields() { return targetKeyFields; } /** * PUBLIC: * Return the target relation key field names associated with the mapping. * These are in-order with the targetKeyFieldNames. */ public Vector getTargetRelationKeyFieldNames() { Vector fieldNames = new Vector(getTargetRelationKeyFields().size()); for (Enumeration fieldsEnum = getTargetRelationKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return all the target relation key fields associated with the mapping. */ public Vector getTargetRelationKeyFields() { return targetRelationKeyFields; } protected boolean hasCustomDeleteQuery() { return hasCustomDeleteQuery; } protected boolean hasCustomInsertQuery() { return hasCustomInsertQuery; } /** * INTERNAL: * Indicates whether the mechanism has relation table. */ public boolean hasRelationTable() { return relationTable != null && relationTable.getName().length() > 0; } /** * INTERNAL: * Initialize */ public void initialize(AbstractSession session, ForeignReferenceMapping mapping) throws DescriptorException { initializeRelationTable(session, mapping); initializeSourceRelationKeys(mapping); initializeTargetRelationKeys(mapping); if (isSingleSourceRelationKeySpecified()) { initializeSourceKeysWithDefaults(mapping); } else { initializeSourceKeys(mapping); } if (isSingleTargetRelationKeySpecified()) { initializeTargetKeysWithDefaults(session, mapping); } else { initializeTargetKeys(session, mapping); } if (getRelationTable().getName().indexOf(' ') != -1) { //table names contains a space so needs to be quoted. 
String beginQuote = ((DatasourcePlatform)session.getDatasourcePlatform()).getStartDelimiter(); String endQuote = ((DatasourcePlatform)session.getDatasourcePlatform()).getEndDelimiter(); //Ensure this table name hasn't already been quoted. if (getRelationTable().getName().indexOf(beginQuote) == -1) { getRelationTable().setName(beginQuote + getRelationTable().getName() + endQuote); } } if (mapping.isCollectionMapping()) { ((CollectionMapping)mapping).getContainerPolicy().initialize(session, getRelationTable()); } initializeInsertQuery(session, mapping); initializeDeleteQuery(session, mapping); if (mapping.extendPessimisticLockScope != ExtendPessimisticLockScope.NONE) { initializeExtendPessipisticLockScope(session, mapping); } } /** * INTERNAL: * Initialize delete query. This query is used to delete a specific row from the join table in uow, * given the objects on both sides of the relation. */ protected void initializeDeleteQuery(AbstractSession session, ForeignReferenceMapping mapping) { if (!getDeleteQuery().hasSessionName()) { getDeleteQuery().setSessionName(session.getName()); } if (getDeleteQuery().getPartitioningPolicy() == null) { getDeleteQuery().setPartitioningPolicy(mapping.getPartitioningPolicy()); } getInsertQuery().setName(mapping.getAttributeName()); if (hasCustomDeleteQuery()) { return; } // Build where clause expression. 
Expression whereClause = null; Expression builder = new ExpressionBuilder(); for (DatabaseField relationKey : getSourceRelationKeyFields()) { Expression expression = builder.getField(relationKey).equal(builder.getParameter(relationKey)); whereClause = expression.and(whereClause); } if (mapping.isCollectionMapping()) { for (DatabaseField relationKey : getTargetRelationKeyFields()) { Expression expression = builder.getField(relationKey).equal(builder.getParameter(relationKey)); whereClause = expression.and(whereClause); } } SQLDeleteStatement statement = new SQLDeleteStatement(); statement.setTable(getRelationTable()); statement.setWhereClause(whereClause); getDeleteQuery().setSQLStatement(statement); } /** * INTERNAL: * Initialize extendPessimisticLockeScope and lockRelationTableQuery (if required). */ protected void initializeExtendPessipisticLockScope(AbstractSession session, ForeignReferenceMapping mapping) { if(mapping.usesIndirection()) { if(session.getPlatform().isForUpdateCompatibleWithDistinct() && session.getPlatform().supportsLockingQueriesWithMultipleTables()) { mapping.extendPessimisticLockScope = ExtendPessimisticLockScope.SOURCE_QUERY; } else { mapping.extendPessimisticLockScope = ExtendPessimisticLockScope.DEDICATED_QUERY; } } else { if(session.getPlatform().supportsIndividualTableLocking() && session.getPlatform().supportsLockingQueriesWithMultipleTables()) { mapping.extendPessimisticLockScope = ExtendPessimisticLockScope.TARGET_QUERY; } else { mapping.extendPessimisticLockScope = ExtendPessimisticLockScope.DEDICATED_QUERY; } } if(mapping.extendPessimisticLockScope == ExtendPessimisticLockScope.DEDICATED_QUERY) { Expression startCriteria = mapping.getSelectionQuery().getSelectionCriteria(); if(startCriteria != null) { startCriteria = (Expression)startCriteria.clone(); } initializeLockRelationTableQuery(session, mapping, startCriteria); } } /** * INTERNAL: * Initialize insert query. 
This query is used to insert the collection of objects into the * relation table. */ protected void initializeInsertQuery(AbstractSession session, ForeignReferenceMapping mapping) { if (!getInsertQuery().hasSessionName()) { getInsertQuery().setSessionName(session.getName()); } if (getInsertQuery().getPartitioningPolicy() == null) { getInsertQuery().setPartitioningPolicy(mapping.getPartitioningPolicy()); } getInsertQuery().setName(mapping.getAttributeName()); if (hasCustomInsertQuery()) { return; } SQLInsertStatement statement = new SQLInsertStatement(); statement.setTable(getRelationTable()); AbstractRecord joinRow = new DatabaseRecord(); for (DatabaseField field : getTargetRelationKeyFields()) { joinRow.put(field, null); } for (DatabaseField field : getSourceRelationKeyFields()) { joinRow.put(field, null); } if (mapping.isCollectionMapping()) { CollectionMapping collectionMapping = (CollectionMapping)mapping; if (collectionMapping.getListOrderField() != null) { joinRow.put(collectionMapping.getListOrderField(), null); } collectionMapping.getContainerPolicy().addFieldsForMapKey(joinRow); } statement.setModifyRow(joinRow); getInsertQuery().setSQLStatement(statement); getInsertQuery().setModifyRow(joinRow); } /** * INTERNAL: * Initialize lockRelationTableQuery. 
*/ protected void initializeLockRelationTableQuery(AbstractSession session, ForeignReferenceMapping mapping, Expression startCriteria) { lockRelationTableQuery = new DirectReadQuery(); Expression criteria = buildSelectionCriteriaAndAddFieldsToQueryInternal(mapping, startCriteria, false, false); SQLSelectStatement statement = new SQLSelectStatement(); statement.addTable(this.relationTable); statement.addField(this.sourceRelationKeyFields.get(0).clone()); statement.setWhereClause(criteria); statement.normalize(session, null); lockRelationTableQuery.setSQLStatement(statement); lockRelationTableQuery.setSessionName(session.getName()); } /** * INTERNAL: * Set the table qualifier on the relation table if required */ protected void initializeRelationTable(AbstractSession session, ForeignReferenceMapping mapping) throws DescriptorException { Platform platform = session.getDatasourcePlatform(); // We need to look up the relation table name from the reference // descriptor if we are the non owning side of a bidirectional mapping // to a table per tenant descriptor. if (mapping.isReadOnly() && mapping.getReferenceDescriptor().hasTablePerMultitenantPolicy()) { setRelationTable(((TablePerMultitenantPolicy) mapping.getReferenceDescriptor().getMultitenantPolicy()).getTable(getRelationTable())); } if (!hasRelationTable()) { throw DescriptorException.noRelationTable(mapping); } if (platform.getTableQualifier().length() > 0) { if (getRelationTable().getTableQualifier().length() == 0) { getRelationTable().setTableQualifier(platform.getTableQualifier()); } } } /** * INTERNAL: * All the source key field names are converted to DatabaseField and stored. 
*/
    protected void initializeSourceKeys(ForeignReferenceMapping mapping) {
        for (int index = 0; index < getSourceKeyFields().size(); index++) {
            DatabaseField field = mapping.getDescriptor().buildField(getSourceKeyFields().get(index));
            // Indirection needs the key value retained in the row so the
            // value holder's selection criteria can be built later.
            if (((ForeignReferenceMapping)mapping).usesIndirection()) {
                field.setKeepInRow(true);
            }
            getSourceKeyFields().set(index, field);
        }
    }

    /**
     * INTERNAL:
     * If a user does not specify the source key then the primary keys of the source table are used.
     */
    protected void initializeSourceKeysWithDefaults(DatabaseMapping mapping) {
        List primaryKeyFields = mapping.getDescriptor().getPrimaryKeyFields();
        for (int index = 0; index < primaryKeyFields.size(); index++) {
            DatabaseField field = primaryKeyFields.get(index);
            // NOTE(review): this mutates the descriptor's shared primary-key
            // field instance rather than a copy — presumably intentional;
            // confirm before changing.
            if (((ForeignReferenceMapping)mapping).usesIndirection()) {
                field.setKeepInRow(true);
            }
            getSourceKeyFields().addElement(field);
        }
    }

    /**
     * INTERNAL:
     * All the source relation key field names are converted to DatabaseField and stored.
     */
    protected void initializeSourceRelationKeys(ForeignReferenceMapping mapping) throws DescriptorException {
        if (getSourceRelationKeyFields().size() == 0) {
            throw DescriptorException.noSourceRelationKeysSpecified(mapping);
        }

        for (Enumeration entry = getSourceRelationKeyFields().elements(); entry.hasMoreElements();) {
            DatabaseField field = (DatabaseField)entry.nextElement();

            // Update the fields table first if the mapping is from a table per tenant entity.
            ClassDescriptor sourceDescriptor = mapping.getDescriptor();
            if (sourceDescriptor.hasTablePerMultitenantPolicy()) {
                field.setTable(((TablePerMultitenantPolicy) sourceDescriptor.getMultitenantPolicy()).getTable(field.getTable()));
            }

            // A qualified field name must name the relation table itself.
            if (field.hasTableName() && (!(field.getTableName().equals(getRelationTable().getName())))) {
                throw DescriptorException.relationKeyFieldNotProperlySpecified(field, mapping);
            }
            field.setTable(getRelationTable());
        }
    }

    /**
     * INTERNAL:
     * All the target key field names are converted to DatabaseField and stored.
*/
    protected void initializeTargetKeys(AbstractSession session, ForeignReferenceMapping mapping) {
        for (int index = 0; index < getTargetKeyFields().size(); index++) {
            DatabaseField field = mapping.getReferenceDescriptor().buildField(getTargetKeyFields().get(index));
            getTargetKeyFields().set(index, field);
        }
    }

    /**
     * INTERNAL:
     * If a user does not specify the target key then the primary keys of the target table are used.
     */
    protected void initializeTargetKeysWithDefaults(AbstractSession session, ForeignReferenceMapping mapping) {
        List primaryKeyFields = mapping.getReferenceDescriptor().getPrimaryKeyFields();
        for (int index = 0; index < primaryKeyFields.size(); index++) {
            getTargetKeyFields().addElement(primaryKeyFields.get(index));
        }
    }

    /**
     * INTERNAL:
     * All the target relation key field names are converted to DatabaseField and stored.
     */
    protected void initializeTargetRelationKeys(ForeignReferenceMapping mapping) {
        if (getTargetRelationKeyFields().size() == 0) {
            throw DescriptorException.noTargetRelationKeysSpecified(mapping);
        }

        for (Enumeration targetEnum = getTargetRelationKeyFields().elements(); targetEnum.hasMoreElements();) {
            DatabaseField field = (DatabaseField)targetEnum.nextElement();

            // Update the fields table first if the mapping is from a table per tenant entity.
            ClassDescriptor referenceDescriptor = mapping.getReferenceDescriptor();
            if (referenceDescriptor.hasTablePerMultitenantPolicy()) {
                field.setTable(((TablePerMultitenantPolicy) referenceDescriptor.getMultitenantPolicy()).getTable(field.getTable()));
            }

            // A qualified field name must name the relation table itself.
            if (field.hasTableName() && (!(field.getTableName().equals(getRelationTable().getName())))) {
                throw DescriptorException.relationKeyFieldNotProperlySpecified(field, mapping);
            }
            field.setTable(getRelationTable());
        }
    }

    /**
     * INTERNAL:
     * Checks if a single source key was specified.
     */
    protected boolean isSingleSourceRelationKeySpecified() {
        // NOTE(review): returns true when NO source key fields were specified
        // (defaults will then be used) — the name suggests "exactly one";
        // confirm intended semantics before relying on it.
        return getSourceKeyFields().isEmpty();
    }

    /**
     * INTERNAL:
     * Checks if a single target key was specified.
*/ protected boolean isSingleTargetRelationKeySpecified() { return getTargetKeyFields().isEmpty(); } /** * INTERNAL: * Adds to the passed expression a single relation table field joined to source field. * Used to extend pessimistic locking clause in source query. */ public Expression joinRelationTableField(Expression expression, Expression baseExpression) { return baseExpression.getField(this.sourceKeyFields.get(0)).equal(baseExpression.getTable(relationTable).getField(this.sourceRelationKeyFields.get(0))).and(expression); } /** * PUBLIC: * The default delete query for mapping can be overridden by specifying the new query. * This query must delete the row from the M-M join table. */ public void setCustomDeleteQuery(DataModifyQuery query) { setDeleteQuery(query); setHasCustomDeleteQuery(true); } /** * PUBLIC: * The default insert query for mapping can be overridden by specifying the new query. * This query must insert the row into the M-M join table. */ public void setCustomInsertQuery(DataModifyQuery query) { setInsertQuery(query); setHasCustomInsertQuery(true); } protected void setDeleteQuery(DataModifyQuery deleteQuery) { this.deleteQuery = deleteQuery; } /** * PUBLIC: * Set the receiver's delete SQL string. This allows the user to override the SQL * generated by TOPLink, with there own SQL or procedure call. The arguments are * translated from the fields of the source row, through replacing the field names * marked by '#' with the values for those fields. * This is used to delete a single entry from the M-M join table. * Example, 'delete from PROJ_EMP where PROJ_ID = #PROJ_ID AND EMP_ID = #EMP_ID'. */ public void setDeleteSQLString(String sqlString) { DataModifyQuery query = new DataModifyQuery(); query.setSQLString(sqlString); setCustomDeleteQuery(query); } /** * PUBLIC: * Set the receiver's delete Call. This allows the user to override the SQL * generated by TOPLink, with there own SQL or procedure call. 
The arguments are * translated from the fields of the source row. * This is used to delete a single entry from the M-M join table. * Example, 'new SQLCall("delete from PROJ_EMP where PROJ_ID = #PROJ_ID AND EMP_ID = #EMP_ID")'. */ public void setDeleteCall(Call call) { DataModifyQuery query = new DataModifyQuery(); query.setCall(call); setCustomDeleteQuery(query); } protected void setHasCustomDeleteQuery(boolean hasCustomDeleteQuery) { this.hasCustomDeleteQuery = hasCustomDeleteQuery; } protected void setHasCustomInsertQuery(boolean bool) { hasCustomInsertQuery = bool; } protected void setInsertQuery(DataModifyQuery insertQuery) { this.insertQuery = insertQuery; } /** * PUBLIC: * Set the receiver's insert SQL string. This allows the user to override the SQL * generated by TOPLink, with there own SQL or procedure call. The arguments are * translated from the fields of the source row, through replacing the field names * marked by '#' with the values for those fields. * This is used to insert an entry into the M-M join table. * Example, 'insert into PROJ_EMP (EMP_ID, PROJ_ID) values (#EMP_ID, #PROJ_ID)'. */ public void setInsertSQLString(String sqlString) { DataModifyQuery query = new DataModifyQuery(); query.setSQLString(sqlString); setCustomInsertQuery(query); } /** * PUBLIC: * Set the receiver's insert Call. This allows the user to override the SQL * generated by TOPLink, with there own SQL or procedure call. The arguments are * translated from the fields of the source row. * This is used to insert an entry into the M-M join table. * Example, 'new SQLCall("insert into PROJ_EMP (EMP_ID, PROJ_ID) values (#EMP_ID, #PROJ_ID)")'. */ public void setInsertCall(Call call) { DataModifyQuery query = new DataModifyQuery(); query.setCall(call); setCustomInsertQuery(query); } /** * PUBLIC: * Set the relational table. * This is the join table that store both the source and target primary keys. 
*/
    public void setRelationTable(DatabaseTable relationTable) {
        this.relationTable = relationTable;
    }

    /**
     * PUBLIC:
     * Set the relation table by name.
     * This is the join table that stores both the source and target primary keys.
     */
    public void setRelationTableName(String tableName) {
        this.relationTable = new DatabaseTable(tableName);
    }

    /**
     * PUBLIC:
     * Set the name of the session under which the mapping's queries execute.
     * A session broker can use this to override the default session for the
     * target class.
     */
    public void setSessionName(String name) {
        getInsertQuery().setSessionName(name);
        getDeleteQuery().setSessionName(name);
    }

    /**
     * PUBLIC:
     * Set the source key field names associated with the mapping.
     * These must be in-order with the sourceRelationKeyFieldNames.
     */
    public void setSourceKeyFieldNames(Vector fieldNames) {
        Vector keyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
        for (int index = 0; index < fieldNames.size(); index++) {
            keyFields.addElement(new DatabaseField((String)fieldNames.elementAt(index)));
        }
        setSourceKeyFields(keyFields);
    }

    /**
     * INTERNAL:
     * Set the source fields.
     */
    public void setSourceKeyFields(Vector sourceKeyFields) {
        this.sourceKeyFields = sourceKeyFields;
    }

    /**
     * PUBLIC:
     * Set the source key field in the relation table.
     * This is the name of the foreign key in the relation table to the source's primary key field.
     * This method is used if the source primary key is a singleton only.
     */
    public void setSourceRelationKeyFieldName(String sourceRelationKeyFieldName) {
        getSourceRelationKeyFields().addElement(new DatabaseField(sourceRelationKeyFieldName));
    }

    /**
     * PUBLIC:
     * Set the source relation key field names associated with the mapping.
     * These must be in-order with the sourceKeyFieldNames.
*/
    public void setSourceRelationKeyFieldNames(Vector fieldNames) {
        Vector relationKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
        for (int index = 0; index < fieldNames.size(); index++) {
            relationKeyFields.addElement(new DatabaseField((String)fieldNames.elementAt(index)));
        }
        setSourceRelationKeyFields(relationKeyFields);
    }

    /**
     * INTERNAL:
     * Set the source fields.
     */
    public void setSourceRelationKeyFields(Vector sourceRelationKeyFields) {
        this.sourceRelationKeyFields = sourceRelationKeyFields;
    }

    /**
     * INTERNAL:
     * Set the target key field names associated with the mapping.
     * These must be in-order with the targetRelationKeyFieldNames.
     */
    public void setTargetKeyFieldNames(Vector fieldNames) {
        Vector keyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
        for (int index = 0; index < fieldNames.size(); index++) {
            keyFields.addElement(new DatabaseField((String)fieldNames.elementAt(index)));
        }
        setTargetKeyFields(keyFields);
    }

    /**
     * INTERNAL:
     * Set the target fields.
     */
    public void setTargetKeyFields(Vector targetKeyFields) {
        this.targetKeyFields = targetKeyFields;
    }

    /**
     * PUBLIC:
     * Set the target key field in the relation table.
     * This is the name of the foreign key in the relation table to the target's primary key field.
     * This method is used if the target's primary key is a singleton only.
     */
    public void setTargetRelationKeyFieldName(String targetRelationKeyFieldName) {
        getTargetRelationKeyFields().addElement(new DatabaseField(targetRelationKeyFieldName));
    }

    /**
     * INTERNAL:
     * Set the target relation key field names associated with the mapping.
     * These must be in-order with the targetKeyFieldNames.
*/
    public void setTargetRelationKeyFieldNames(Vector fieldNames) {
        Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size());
        for (Enumeration fieldNamesEnum = fieldNames.elements(); fieldNamesEnum.hasMoreElements();) {
            fields.addElement(new DatabaseField((String)fieldNamesEnum.nextElement()));
        }
        setTargetRelationKeyFields(fields);
    }

    /**
     * INTERNAL:
     * Set the target fields.
     */
    public void setTargetRelationKeyFields(Vector targetRelationKeyFields) {
        this.targetRelationKeyFields = targetRelationKeyFields;
    }

    /**
     * INTERNAL:
     * Create a row that contains source relation fields with values extracted from the source object.
     */
    public AbstractRecord buildRelationTableSourceRow(Object sourceObject, AbstractSession session, ForeignReferenceMapping mapping) {
        AbstractRecord databaseRow = new DatabaseRecord();
        return addRelationTableSourceRow(sourceObject, session, databaseRow, mapping);
    }

    /**
     * INTERNAL:
     * Add to a row source relation fields with values extracted from the source object.
     */
    public AbstractRecord addRelationTableSourceRow(Object sourceObject, AbstractSession session, AbstractRecord databaseRow, ForeignReferenceMapping mapping) {
        ObjectBuilder builder = mapping.getDescriptor().getObjectBuilder();
        int size = sourceKeyFields.size();
        // Map each source key value to its corresponding relation-table column.
        for(int i=0; i < size; i++) {
            Object sourceValue = builder.extractValueFromObjectForField(sourceObject, sourceKeyFields.get(i), session);
            databaseRow.put(sourceRelationKeyFields.get(i), sourceValue);
        }
        return databaseRow;
    }

    /**
     * INTERNAL:
     * Create a row that contains source relation fields with values extracted from the source row.
     */
    public AbstractRecord buildRelationTableSourceRow(AbstractRecord sourceRow) {
        AbstractRecord databaseRow = new DatabaseRecord();
        return addRelationTableSourceRow(sourceRow, databaseRow);
    }

    /**
     * INTERNAL:
     * Add to a row source relation fields with values extracted from the source row.
*/
    public AbstractRecord addRelationTableSourceRow(AbstractRecord sourceRow, AbstractRecord databaseRow) {
        int size = sourceKeyFields.size();
        // Copy each source key value under its relation-table column.
        for(int i=0; i < size; i++) {
            Object sourceValue = sourceRow.get(sourceKeyFields.get(i));
            databaseRow.put(sourceRelationKeyFields.get(i), sourceValue);
        }
        return databaseRow;
    }

    /**
     * INTERNAL:
     * Add to a row target relation fields with values extracted from the target object.
     */
    public AbstractRecord addRelationTableTargetRow(Object targetObject, AbstractSession session, AbstractRecord databaseRow, ForeignReferenceMapping mapping) {
        ObjectBuilder builder = mapping.getReferenceDescriptor().getObjectBuilder();
        int size = targetKeyFields.size();
        // Map each target key value to its corresponding relation-table column.
        for(int i=0; i < size; i++) {
            Object sourceValue = builder.extractValueFromObjectForField(targetObject, targetKeyFields.get(i), session);
            databaseRow.put(targetRelationKeyFields.get(i), sourceValue);
        }
        return databaseRow;
    }

    /**
     * INTERNAL:
     * Create a row that contains source relation fields with values extracted from the source object
     * and target relation fields with values extracted from the target object.
     */
    public AbstractRecord buildRelationTableSourceAndTargetRow(Object sourceObject, Object targetObject, AbstractSession session, ForeignReferenceMapping mapping) {
        AbstractRecord databaseRow = buildRelationTableSourceRow(sourceObject, session, mapping);
        databaseRow = addRelationTableTargetRow(targetObject, session, databaseRow, mapping);
        return databaseRow;
    }

    /**
     * INTERNAL:
     * Create a row that contains source relation fields with values extracted from the source row
     * and target relation fields with values extracted from the target object.
*/
    public AbstractRecord buildRelationTableSourceAndTargetRow(AbstractRecord sourceRow, Object targetObject, AbstractSession session, ForeignReferenceMapping mapping) {
        AbstractRecord databaseRow = buildRelationTableSourceRow(sourceRow);
        databaseRow = addRelationTableTargetRow(targetObject, session, databaseRow, mapping);
        return databaseRow;
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/UnidirectionalOneToManyMapping.java0000664000000000000000000004007512216173130027151 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
* * Contributors: * ailitchev - Uni-directional OneToMany * 07/19/2011-2.2.1 Guy Pelletier * - 338812: ManyToMany mapping in aggregate object violate integrity constraint on deletion ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.Iterator; import java.util.Vector; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.exceptions.DatabaseException; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.exceptions.OptimisticLockException; import org.eclipse.persistence.internal.descriptors.CascadeLockingPolicy; import org.eclipse.persistence.internal.helper.ConversionManager; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.ChangeRecord; import org.eclipse.persistence.internal.sessions.CollectionChangeRecord; import org.eclipse.persistence.internal.sessions.ObjectChangeSet; import org.eclipse.persistence.internal.sessions.UnitOfWorkChangeSet; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.queries.DeleteObjectQuery; import org.eclipse.persistence.queries.ObjectLevelModifyQuery; import org.eclipse.persistence.queries.ObjectLevelReadQuery; import org.eclipse.persistence.queries.ReadAllQuery; import org.eclipse.persistence.queries.ReadQuery; import org.eclipse.persistence.sessions.DatabaseRecord; /** *

Purpose: UnidirectionalOneToManyMapping doesn't have 1:1 back reference mapping.
 *
 * @author Andrei Ilitchev
 * @since Eclipselink 1.1
 */
public class UnidirectionalOneToManyMapping extends OneToManyMapping {
    /**
     * Indicates whether target's optimistic locking value should be incremented on
     * target being added to / removed from a source.
     **/
    protected boolean shouldIncrementTargetLockValueOnAddOrRemoveTarget;

    /**
     * Indicates whether target's optimistic locking value should be incremented on
     * the source deletion.
     * Note that if the flag is set to true then the indirection will be triggered on
     * source delete - in order to verify all targets' versions.
     **/
    protected boolean shouldIncrementTargetLockValueOnDeleteSource;

    /**
     * PUBLIC:
     * Default constructor.
     */
    public UnidirectionalOneToManyMapping() {
        super();
        // Both optimistic-lock increments are enabled by default.
        this.shouldIncrementTargetLockValueOnAddOrRemoveTarget = true;
        this.shouldIncrementTargetLockValueOnDeleteSource = true;
    }

    /**
     * INTERNAL:
     * Build a row containing the keys for use in the query that updates the row for the
     * target object during an insert or update.
     */
    protected AbstractRecord buildKeyRowForTargetUpdate(ObjectLevelModifyQuery query){
        AbstractRecord keyRow = new DatabaseRecord();

        // Extract primary key and value from the source.
        int size = sourceKeyFields.size();
        for (int index = 0; index < size; index++) {
            DatabaseField sourceKey = sourceKeyFields.get(index);
            DatabaseField targetForeignKey = targetForeignKeyFields.get(index);
            Object sourceKeyValue = query.getTranslationRow().get(sourceKey);
            keyRow.put(targetForeignKey, sourceKeyValue);
        }
        return keyRow;
    }

    /**
     * INTERNAL:
     * This method is used to create a change record from comparing two collections.
     * @return org.eclipse.persistence.internal.sessions.ChangeRecord
     */
    @Override
    public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession uow) {
        ChangeRecord record = super.compareForChange(clone, backUp, owner, uow);
        // When the target is versioned, added/removed targets may need their
        // lock values incremented; post-process the computed change record.
        if(record != null && getReferenceDescriptor().getOptimisticLockingPolicy() != null) {
            postCalculateChanges(record, (UnitOfWorkImpl)uow);
        }
        return record;
    }

    /**
     * INTERNAL:
     * Extract the source primary key value from the target row.
     * Used for batch reading, most following same order and fields as in the mapping.
     */
    protected Vector extractSourceKeyFromRow(AbstractRecord row, AbstractSession session) {
        int size = sourceKeyFields.size();
        Vector key = new Vector(size);
        ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager();
        for (int index = 0; index < size; index++) {
            DatabaseField targetField = targetForeignKeyFields.get(index);
            DatabaseField sourceField = sourceKeyFields.get(index);
            Object value = row.get(targetField);

            // Must ensure the classification gets a cache hit.
            try {
                value = conversionManager.convertObject(value, sourceField.getType());
            } catch (ConversionException e) {
                throw ConversionException.couldNotBeConverted(this, getDescriptor(), e);
            }

            key.addElement(value);
        }
        return key;
    }

    /**
     * INTERNAL:
     */
    public boolean isOwned(){
        return true;
    }

    /**
     * INTERNAL:
     */
    public boolean isUnidirectionalOneToManyMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Initialize the mapping.
*/
    public void initialize(AbstractSession session) throws DescriptorException {
        super.initialize(session);
        // Register for change-record post-processing only when the target is
        // versioned (optimistic locking); see the shouldIncrement... flags.
        if (getReferenceDescriptor().getOptimisticLockingPolicy() != null) {
            if (this.shouldIncrementTargetLockValueOnAddOrRemoveTarget) {
                this.descriptor.addMappingsPostCalculateChanges(this);
            }
            // Privately-owned targets are deleted with the source, so their
            // versions need no bump on source delete.
            if (this.shouldIncrementTargetLockValueOnDeleteSource && !this.isPrivateOwned) {
                this.descriptor.addMappingsPostCalculateChangesOnDeleted(this);
            }
        }
    }

    /**
     * Initialize the type of the target foreign key, as it will be null as it is not mapped in the target.
     */
    public void postInitialize(AbstractSession session) {
        super.postInitialize(session);

        Iterator targetForeignKeys = getTargetForeignKeyFields().iterator();
        Iterator sourceKeys = getSourceKeyFields().iterator();
        while (targetForeignKeys.hasNext()) {
            DatabaseField targetForeignKey = targetForeignKeys.next();
            DatabaseField sourcePrimaryKey = sourceKeys.next();
            if (targetForeignKey.getType() == null) {
                DatabaseMapping mapping = getDescriptor().getObjectBuilder().getMappingForField(sourcePrimaryKey);

                // If we have a mapping, set the type, otherwise at this point
                // there is not much more we can do. This case will likely hit
                // when we have a UnidirectionalOneToManyMapping on an aggregate
                // outside of JPA. Within JPA, in most cases, the metadata
                // processing should set the type on the targetForeignKey for us.
                // Bug 278263 has been entered to revisit this code.
                if (mapping != null) {
                    targetForeignKey.setType(mapping.getFieldClassification(sourcePrimaryKey));
                }
            }
        }
    }

    /**
     * INTERNAL:
     */
    protected AbstractRecord createModifyRowForAddTargetQuery() {
        AbstractRecord modifyRow = super.createModifyRowForAddTargetQuery();
        // Include every target foreign key column so the add-target update
        // can (re)assign them.
        int size = targetForeignKeyFields.size();
        for (int index = 0; index < size; index++) {
            DatabaseField targetForeignKey = targetForeignKeyFields.get(index);
            modifyRow.put(targetForeignKey, null);
        }
        return modifyRow;
    }

    /**
     * INTERNAL:
     * Delete the reference objects.
*/
    public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException {
        if (shouldObjectModifyCascadeToParts(query)) {
            super.preDelete(query);
        } else {
            // Not cascading the delete: null out the targets' foreign keys so
            // no rows keep referencing the deleted source.
            updateTargetRowPreDeleteSource(query);
        }
    }

    /**
     * Prepare a cascade locking policy.
     */
    public void prepareCascadeLockingPolicy() {
        CascadeLockingPolicy policy = new CascadeLockingPolicy(getDescriptor(), getReferenceDescriptor());
        policy.setQueryKeyFields(getSourceKeysToTargetForeignKeys());
        // The target foreign key is not mapped on the target class.
        policy.setShouldHandleUnmappedFields(true);
        getReferenceDescriptor().addCascadeLockingPolicy(policy);
    }

    /**
     * INTERNAL:
     * Overridden by mappings that require additional processing of the change record after the record has been calculated.
     */
    @Override
    public void postCalculateChanges(org.eclipse.persistence.sessions.changesets.ChangeRecord changeRecord, UnitOfWorkImpl uow) {
        // targets are added to and/or removed to/from the source.
        CollectionChangeRecord collectionChangeRecord = (CollectionChangeRecord)changeRecord;
        Iterator it = collectionChangeRecord.getAddObjectList().values().iterator();
        while(it.hasNext()) {
            ObjectChangeSet change = (ObjectChangeSet)it.next();
            if(!change.hasChanges()) {
                // Force a version bump for an otherwise-unchanged added target.
                change.setShouldModifyVersionField(Boolean.TRUE);
                ((org.eclipse.persistence.internal.sessions.UnitOfWorkChangeSet)change.getUOWChangeSet()).addObjectChangeSet(change, uow, false);
            }
        }
        // if the mapping is privately owned then the target will be deleted - no need to modify target version.
        it = collectionChangeRecord.getRemoveObjectList().values().iterator();
        while(it.hasNext()) {
            ObjectChangeSet change = (ObjectChangeSet)it.next();
            if (!isPrivateOwned()){
                if(!change.hasChanges()) {
                    change.setShouldModifyVersionField(Boolean.TRUE);
                    ((org.eclipse.persistence.internal.sessions.UnitOfWorkChangeSet)change.getUOWChangeSet()).addObjectChangeSet(change, uow, false);
                }
            }else{
                containerPolicy.postCalculateChanges(change, referenceDescriptor, this, uow);
            }
        }
    }

    /**
     * INTERNAL:
     * Overridden by mappings that require objects to be deleted contribute to change set creation.
     */
    @Override
    public void postCalculateChangesOnDeleted(Object deletedObject, UnitOfWorkChangeSet uowChangeSet, UnitOfWorkImpl uow) {
        // the source is deleted:
        // trigger the indirection - we have to get optimistic lock exception
        // in case another thread has updated one of the targets:
        // triggered indirection caches the target with the old version,
        // then the version update waits until the other thread (which is locking the version field) commits,
        // then the version update is executed and it throws optimistic lock exception.
        Object col = getRealCollectionAttributeValueFromObject(deletedObject, uow);
        if (col != null) {
            Object iterator = this.containerPolicy.iteratorFor(col);
            while (this.containerPolicy.hasNext(iterator)) {
                Object target = this.containerPolicy.next(iterator, uow);
                ObjectChangeSet change = this.referenceDescriptor.getObjectBuilder().createObjectChangeSet(target, uowChangeSet, uow);
                if (!change.hasChanges()) {
                    change.setShouldModifyVersionField(Boolean.TRUE);
                    ((org.eclipse.persistence.internal.sessions.UnitOfWorkChangeSet)change.getUOWChangeSet()).addObjectChangeSet(change, uow, false);
                }
            }
        }
    }

    /**
     * INTERNAL:
     * Add additional fields.
     */
    @Override
    protected void postPrepareNestedBatchQuery(ReadQuery batchQuery, ObjectLevelReadQuery query) {
        super.postPrepareNestedBatchQuery(batchQuery, query);
        ReadAllQuery mappingBatchQuery = (ReadAllQuery)batchQuery;
        // Also select the target foreign keys so each batched row can be
        // matched back to its source.
        int size = this.targetForeignKeyFields.size();
        for (int i=0; i < size; i++) {
            mappingBatchQuery.addAdditionalField(this.targetForeignKeyFields.get(i));
        }
    }

    /**
     * INTERNAL:
     * The translation row may require additional fields than the primary key if the mapping is not on the primary key.
     */
    @Override
    protected void prepareTranslationRow(AbstractRecord translationRow, Object object, ClassDescriptor descriptor, AbstractSession session) {
        // Make sure that each source key field is in the translation row.
        int size = sourceKeyFields.size();
        for(int i=0; i < size; i++) {
            DatabaseField sourceKey = sourceKeyFields.get(i);
            if (!translationRow.containsKey(sourceKey)) {
                Object value = descriptor.getObjectBuilder().extractValueFromObjectForField(object, sourceKey, session);
                translationRow.put(sourceKey, value);
            }
        }
    }

    /**
     * INTERNAL:
     * Overridden by mappings that require additional processing of the change record after the record has been calculated.
*/
    @Override
    public void recordPrivateOwnedRemovals(Object object, UnitOfWorkImpl uow) {
        //need private owned check for this mapping as this method is called for any mapping
        // that also registers a postCalculateChanges() method. Most mappings only register the
        // postCalculateChanges if they are privately owned. This Mapping is a special case and
        // always registers a postCalculateChanges mapping when the target has OPT locking.
        if (isPrivateOwned){
            super.recordPrivateOwnedRemovals(object, uow);
        }
    }

    /**
     * INTERNAL:
     * UnidirectionalOneToManyMapping performs some events after INSERT/UPDATE to maintain the keys.
     * @return always true for this mapping
     */
    @Override
    public boolean requiresDataModificationEvents(){
        return true;
    }

    /**
     * PUBLIC:
     * Set value that indicates whether target's optimistic locking value should be incremented on
     * target being added to / removed from a source (default value is true).
     **/
    public void setShouldIncrementTargetLockValueOnAddOrRemoveTarget(boolean shouldIncrementTargetLockValueOnAddOrRemoveTarget) {
        this.shouldIncrementTargetLockValueOnAddOrRemoveTarget = shouldIncrementTargetLockValueOnAddOrRemoveTarget;
    }

    /**
     * PUBLIC:
     * Set value that indicates whether target's optimistic locking value should be incremented on
     * the source deletion (default value is true).
     **/
    public void setShouldIncrementTargetLockValueOnDeleteSource(boolean shouldIncrementTargetLockValueOnDeleteSource) {
        this.shouldIncrementTargetLockValueOnDeleteSource = shouldIncrementTargetLockValueOnDeleteSource;
    }

    /**
     * PUBLIC:
     * Indicates whether target's optimistic locking value should be incremented on
     * target being added to / removed from a source (default value is true).
     **/
    public boolean shouldIncrementTargetLockValueOnAddOrRemoveTarget() {
        return shouldIncrementTargetLockValueOnAddOrRemoveTarget;
    }

    /**
     * PUBLIC:
     * Indicates whether target's optimistic locking value should be incremented on
     * the source deletion (default value is true).
**/
    public boolean shouldIncrementTargetLockValueOnDeleteSource() {
        return shouldIncrementTargetLockValueOnDeleteSource;
    }

    /**
     * INTERNAL:
     * Target foreign key of the removed object should be modified (set to null).
     */
    protected boolean shouldRemoveTargetQueryModifyTargetForeignKey() {
        return true;
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/MultitenantPrimaryKeyMapping.java0000664000000000000000000002121112216173130026720 0ustar /*******************************************************************************
 * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     11/10/2011-2.4 Guy Pelletier
 *       - 357474: Address primaryKey option from tenant discriminator column
 ******************************************************************************/
package org.eclipse.persistence.mappings;

import org.eclipse.persistence.exceptions.DescriptorException;
import org.eclipse.persistence.internal.descriptors.MultitenantPrimaryKeyAccessor;
import org.eclipse.persistence.internal.helper.DatabaseField;
import org.eclipse.persistence.internal.identitymaps.CacheKey;
import org.eclipse.persistence.internal.queries.JoinedAttributeManager;
import org.eclipse.persistence.internal.sessions.AbstractRecord;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.internal.sessions.ChangeRecord;
import org.eclipse.persistence.internal.sessions.MergeManager;
import org.eclipse.persistence.internal.sessions.ObjectChangeSet;
import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl;
import
org.eclipse.persistence.mappings.foundation.AbstractColumnMapping;
import org.eclipse.persistence.queries.ObjectBuildingQuery;
import org.eclipse.persistence.sessions.Session;

/**
 * Purpose: Maps a multitenant property to the corresponding database
 * field type. The list of field types that are supported by EclipseLink's
 * direct to field mapping is dependent on the relational database being used.
 *
 * @author Guy Pelletier
 * @since EclipseLink 2.4
 */
public class MultitenantPrimaryKeyMapping extends AbstractColumnMapping {
    // Accessor that supplies the tenant discriminator value from the session;
    // the mapping itself is write-only (no attribute on the entity).
    private MultitenantPrimaryKeyAccessor accessor;

    /**
     * Constructor
     */
    public MultitenantPrimaryKeyMapping() {
        super();
        // The tenant column is written on insert but never updated afterwards.
        isInsertable = true;
        isUpdatable = false;
        setIsOptional(false);

        accessor = new MultitenantPrimaryKeyAccessor();
        setAttributeAccessor(accessor);
    }

    /**
     * INTERNAL:
     * Clone the attribute from the clone and assign it to the backup.
     *
     * This is an override from DatabaseMapping and must be implemented.
     */
    @Override
    public void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork) {
        // Mapping is write only so nothing to do.
    }

    /**
     * INTERNAL:
     * Clone the attribute from the original and assign it to the clone.
     *
     * This is an override from DatabaseMapping and must be implemented.
     */
    @Override
    public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) {
        // Mapping is write only so nothing to do.
    }

    /**
     * INTERNAL:
     * Extract value from the row and set the attribute to this value in the
     * working copy clone.
     * In order to bypass the shared cache when in transaction a UnitOfWork must
     * be able to populate working copies directly from the row.
     */
    @Override
    public void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) {
        // Mapping is write only so nothing to do.
    }

    /**
     * INTERNAL:
     * Compare the clone and backup clone values and return a change record if
     * the value changed.
     *
     * This is an override from DatabaseMapping and must be implemented.
     */
    @Override
    public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) {
        // Mapping is write only so nothing to do.
        return null;
    }

    /**
     * INTERNAL:
     * Compare the attributes belonging to this mapping for the objects.
     *
     * This is an override from DatabaseMapping and must be implemented.
     */
    @Override
    public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
        // Mapping is write only so nothing to do.
        return true;
    }

    /**
     * INTERNAL:
     */
    @Override
    public Object getFieldValue(Object propertyValue, AbstractSession session) {
        // The value always comes from the session's tenant context, never
        // from the (nonexistent) entity attribute.
        return accessor.getValue(session);
    }

    /**
     * INTERNAL:
     */
    @Override
    public Object getObjectValue(Object fieldValue, Session session) {
        return accessor.getValue(session);
    }

    /**
     * INTERNAL:
     * The mapping is initialized with the given session. This mapping is fully
     * initialized after this.
     */
    @Override
    public void initialize(AbstractSession session) throws DescriptorException {
        super.initialize(session);

        if (getField() == null) {
            session.getIntegrityChecker().handleError(DescriptorException.fieldNameNotSetInMapping(this));
        }

        setField(getDescriptor().buildField(getField()));
        setFields(collectFields());

        // Must unwrap Struct types on WLS.
        if (getField().getSqlType() == java.sql.Types.STRUCT) {
            getDescriptor().setIsNativeConnectionRequired(true);
        }
    }

    /**
     * INTERNAL:
     * Return if this mapping requires its attribute value to be cloned.
     */
    @Override
    public boolean isCloningRequired() {
        return false;
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isMultitenantPrimaryKeyMapping() {
        return true;
    }

    /**
     * INTERNAL:
     */
    public boolean isRelationalMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * This mapping must be write only as there is no attribute to read into.
*/ @Override public boolean isWriteOnly() { return true; } /** * INTERNAL: * Merge changes from the source to the target object. * * This is an override from DatabaseMapping and must be implemented. */ @Override public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { // Mapping is write only so do nothing. } /** * INTERNAL: * Merge changes from the source to the target object. This merge is only * called when a changeSet for the target does not exist or the target is * uninitialized * * This is an override from DatabaseMapping and must be implemented. */ @Override public void mergeIntoObject(Object target, boolean isTargetUninitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) { // Mapping is write only so do nothing. } /** * INTERNAL: * The context property that is used to write this mapping must be set. It * is set as the attribute name (which gets set on the accessor) */ public void setContextProperty(String contextProperty) { setAttributeName(contextProperty); } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. */ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord row, AbstractSession session, WriteType writeType) { writeValueIntoRow(row, getField(), getFieldValue(null, session)); } /** * INTERNAL: * Return the Value from the object. */ @Override public Object valueFromObject(Object anObject, DatabaseField field, AbstractSession session) { return accessor.getValue(session); } /** * INTERNAL: * Write fields needed for insert into the template for with null values. 
*/ @Override public void writeInsertFieldsIntoRow(AbstractRecord databaseRow, AbstractSession session) { databaseRow.add(getField(), null); } /** * INTERNAL: */ @Override protected void writeValueIntoRow(AbstractRecord row, DatabaseField field, Object fieldValue) { row.add(getField(), fieldValue); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/0000775000000000000000000000000012216174372022455 5ustar ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/ObjectRelationalDatabaseField.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/ObjectRelationalDatabaseField.jav0000664000000000000000000000777012216173130030773 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.structures; import org.eclipse.persistence.internal.helper.DatabaseField; /** * INTERNAL: * A database field of object-relational type: either a java.sql.Array, * java.sql.Struct, or java.sql.Ref. * Oracle drivers require the user defined field type name for these fields, * along with the generic sqlType: ARRAY, STRUCT, or REF. * Toplink can only recognize primitive field types like Integer * or String, but here custom java objects are being written to a single field. 
 * Thus instead of DatabaseField#type the driver needs a string representing
 * the user defined type of the structure on the database, and the type of
 * field: either ARRAY, STRUCT, or REF.
 * Added for bug 2730536.
 * @author Stephen McRitchie
 * @since OracleAS TopLink 10g (9.0.4)
 */
public class ObjectRelationalDatabaseField extends DatabaseField {
    // Name of the user-defined database type (ADT) for ARRAY/STRUCT fields,
    // or of the referenced type for REF fields. Never null; defaults to "".
    protected String sqlTypeName;
    // For ARRAY fields only: describes the element type contained in the array.
    protected DatabaseField nestedTypeField;

    /**
     * Copy constructor: builds an object-relational field from a plain
     * DatabaseField, carrying over its identity and comparison settings.
     */
    public ObjectRelationalDatabaseField(DatabaseField field) {
        this.index = field.index;
        this.name = field.getName();
        this.table = field.getTable();
        this.type = field.type;
        this.useDelimiters = field.shouldUseDelimiters();
        this.useUpperCaseForComparisons = field.getUseUpperCaseForComparisons();
        this.nameForComparisons = field.getNameForComparisons();
        this.typeName = field.getTypeName();
        this.sqlTypeName = "";
    }

    public ObjectRelationalDatabaseField(String name) {
        super(name);
        this.sqlTypeName = "";
    }

    /**
     * INTERNAL:
     * Convert all the class-name-based settings in this mapping to actual
     * class-based settings. This method is implemented by subclasses as
     * necessary. Also cascades to the nested type field, if one is set.
     * @param classLoader the loader used to resolve class names
     */
    public void convertClassNamesToClasses(ClassLoader classLoader) {
        super.convertClassNamesToClasses(classLoader);
        if (nestedTypeField != null) {
            nestedTypeField.convertClassNamesToClasses(classLoader);
        }
    }

    /**
     * ADVANCED:
     * For ARRAY and STRUCT fields, this is the user defined type for the field.
     * For REF fields, this is the user defined type of the entity it points to.
     */
    public String getSqlTypeName() {
        return sqlTypeName;
    }

    /**
     * PUBLIC:
     * Return if this is an ObjectRelationalDatabaseField.
     */
    public boolean isObjectRelationalDatabaseField(){
        return true;
    }

    /**
     * ADVANCED:
     * For ARRAY and STRUCT fields, this is the user defined type for the field.
     * For REF fields, this is the user defined type of the entity it points to.
*/ public void setSqlTypeName(String sqlTypeName) { this.sqlTypeName = sqlTypeName; } /** * ADVANCED: * For ARRAY fields, this field's type represents the type contained in the ARRAY. */ public DatabaseField getNestedTypeField() { return nestedTypeField; } /** * ADVANCED: * For ARRAY fields, this field's type represents the type contained in the ARRAY. */ public void setNestedTypeField(DatabaseField nestedTypeField) { this.nestedTypeField = nestedTypeField; } } ././@LongLink0000000000000000000000000000015300000000000011564 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/ObjectRelationalDataTypeDescriptor.javaeclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/ObjectRelationalDataTypeDescripto0000664000000000000000000004323012216173130031123 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 14/05/2012-2.4 Guy Pelletier * - 376603: Provide for table per tenant support for multitenant applications ******************************************************************************/ package org.eclipse.persistence.mappings.structures; import java.util.*; import java.sql.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.expressions.*; import org.eclipse.persistence.internal.databaseaccess.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.descriptors.RelationalDescriptor; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.sessions.DatabaseRecord; /** *

Purpose:
 * Differentiates object-relational descriptors from normal relational descriptors.
 * The object-relational descriptor describes a type not a table, (although there
 * is normally a table associated with the type, unless it is aggregate).
 */
@SuppressWarnings("unchecked")
public class ObjectRelationalDataTypeDescriptor extends RelationalDescriptor {
    // Name of the user-defined database structure (ADT) this descriptor maps.
    protected String structureName;
    // Fields in the positional order the database structure declares them;
    // defaults to getAllFields() when not configured (see initialize).
    protected Vector orderedFields;

    public ObjectRelationalDataTypeDescriptor() {
        this.orderedFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance();
    }

    /**
     * INTERNAL:
     * Auto-Default orderedFields to fields
     */
    @Override
    public void initialize(AbstractSession session) throws DescriptorException {
        super.initialize(session);
        if (orderedFields==null || orderedFields.size()==0){
            orderedFields=getAllFields();
        }
    }

    /**
     * PUBLIC:
     * Order the fields in a specific order.
     * Add the field ordering; fields are ordered in the sequence this method is
     * called.
     * @param fieldName the name of the field to add ordering on.
     */
    public void addFieldOrdering(String fieldName) {
        getOrderedFields().addElement(new DatabaseField(fieldName));
    }

    /**
     * INTERNAL:
     * Extract the direct values from the specified field value.
     * Return them in a vector.
     * The field value better be an Array.
     *
     * @return a Vector of the array's elements, or null when fieldValue is null.
     */
    @Override
    public Vector buildDirectValuesFromFieldValue(Object fieldValue) throws DatabaseException {
        if(fieldValue == null) {
            return null;
        }
        return Helper.vectorFromArray((Object[])fieldValue);
    }

    /**
     * INTERNAL:
     * Build the appropriate field value for the specified
     * set of direct values.
     * The database better be expecting an ARRAY.
*/ @Override public Object buildFieldValueFromDirectValues(Vector directValues, String elementDataTypeName, AbstractSession session) throws DatabaseException { Object[] fields = Helper.arrayFromVector(directValues); try { ((DatabaseAccessor)session.getAccessor()).incrementCallCount(session); java.sql.Connection connection = ((DatabaseAccessor)session.getAccessor()).getConnection(); return session.getPlatform().createArray(elementDataTypeName, fields, session,connection); } catch (java.sql.SQLException ex) { throw DatabaseException.sqlException(ex, session, false); } finally { ((DatabaseAccessor)session.getAccessor()).decrementCallCount(); } } /** * INTERNAL: * Build and return the field value from the specified nested database row. * The database better be expecting a Struct. */ @Override public Object buildFieldValueFromNestedRow(AbstractRecord nestedRow, AbstractSession session) throws DatabaseException { java.sql.Connection connection = ((DatabaseAccessor)session.getAccessor()).getConnection(); return this.buildStructureFromRow(nestedRow, session, connection); } /** * INTERNAL: * Build and return the appropriate field value for the specified * set of nested rows. * The database better be expecting an ARRAY. * It looks like we can ignore inheritance here.... 
*/ @Override public Object buildFieldValueFromNestedRows(Vector nestedRows, String structureName, AbstractSession session) throws DatabaseException { Object[] fields = new Object[nestedRows.size()]; java.sql.Connection connection = ((DatabaseAccessor)session.getAccessor()).getConnection(); boolean reconnected = false; try { if (connection == null) { ((DatabaseAccessor)session.getAccessor()).incrementCallCount(session); reconnected = true; connection = ((DatabaseAccessor)session.getAccessor()).getConnection(); } int i = 0; for (Enumeration stream = nestedRows.elements(); stream.hasMoreElements();) { AbstractRecord nestedRow = (AbstractRecord)stream.nextElement(); fields[i++] = this.buildStructureFromRow(nestedRow, session, connection); } return session.getPlatform().createArray(structureName, fields, session,connection); } catch (java.sql.SQLException exception) { throw DatabaseException.sqlException(exception, session, false); } finally { if (reconnected) { ((DatabaseAccessor)session.getAccessor()).decrementCallCount(); } } } /** * INTERNAL: * Build and return the nested rows from the specified field value. * This method allows the field value to be an ARRAY containing other structures * such as arrays or Struct, or direct values. 
*/ static public Object buildContainerFromArray(Array fieldValue, ObjectRelationalDatabaseField arrayField, AbstractSession session) throws DatabaseException { if (arrayField.getType()==null){ return fieldValue; } Object[] objects = null; try { objects = (Object[])fieldValue.getArray(); } catch (java.sql.SQLException ex) { throw DatabaseException.sqlException(ex, session, false); } if (objects == null) { return null; } boolean isNestedStructure = false; ObjectRelationalDataTypeDescriptor ord=null; DatabaseField nestedType = null; if (arrayField != null){ nestedType = arrayField.getNestedTypeField(); if ((nestedType != null) && nestedType.getSqlType()==Types.STRUCT){ ClassDescriptor descriptor = session.getDescriptor(nestedType.getType()); if ((descriptor != null) && (descriptor.isObjectRelationalDataTypeDescriptor())) { //this is used to convert non-null objects passed through stored procedures and custom SQL to structs ord=(ObjectRelationalDataTypeDescriptor)descriptor; } } else if ((nestedType != null) && (nestedType instanceof ObjectRelationalDatabaseField) ){ isNestedStructure = true; } } //handle ARRAY conversions ReadObjectQuery query = new ReadObjectQuery(); query.setSession(session); ContainerPolicy cp = ContainerPolicy.buildPolicyFor(arrayField.getType()); Object container = cp.containerInstance(objects.length); for (int i = 0; i < objects.length; i++) { Object arrayValue = objects[i]; if (arrayValue == null) { return null; } if (ord!=null){ AbstractRecord nestedRow = ord.buildRowFromStructure( (Struct)arrayValue); ClassDescriptor descriptor = ord; if (descriptor.hasInheritance()) { Class newElementClass = descriptor.getInheritancePolicy().classFromRow(nestedRow, session); if (!descriptor.getJavaClass().equals(newElementClass)) { descriptor = session.getDescriptor(newElementClass); if (descriptor==null){ descriptor=ord; } } } arrayValue = descriptor.getObjectBuilder().buildNewInstance(); descriptor.getObjectBuilder().buildAttributesIntoObject(arrayValue, 
null, nestedRow, query, null, null, false, session); } else if (isNestedStructure && (arrayValue instanceof Array)){ arrayValue = buildContainerFromArray((Array)arrayValue, (ObjectRelationalDatabaseField)nestedType, session); } cp.addInto(arrayValue, container, session); } return container; } /** * INTERNAL: * Build and return the nested database row from the specified field value. * The field value better be an Struct. */ @Override public AbstractRecord buildNestedRowFromFieldValue(Object fieldValue) throws DatabaseException { AbstractRecord row = new DatabaseRecord(); Object[] attributes = (Object[])fieldValue; for (int index = 0; index < getOrderedFields().size(); index++) { DatabaseField field = (DatabaseField)getOrderedFields().elementAt(index); row.put(field, attributes[index]); } return row; } /** * INTERNAL: * Build and return the nested rows from the specified field value. * The field value better be an ARRAY. */ @Override public Vector buildNestedRowsFromFieldValue(Object fieldValue, AbstractSession session) throws DatabaseException { if(fieldValue==null){ return null; } Object[] structs = (Object[])fieldValue; Vector nestedRows = new Vector(structs.length); for (int i = 0; i < structs.length; i++) { Object[] struct = (Object[])structs[i]; if (struct == null) { return null; } nestedRows.addElement(this.buildNestedRowFromFieldValue(struct)); } return nestedRows; } /** * INTERNAL: * Build a row representation from the ADT structure field array. * TopLink will then build the object from the row. */ public AbstractRecord buildRowFromStructure(Struct structure) throws DatabaseException { Object[] attributes; try { attributes = structure.getAttributes(); } catch (java.sql.SQLException exception) { throw DatabaseException.sqlException(exception); } if(attributes!=null){ for(int i=0;iPurpose: * In an object-relational data model, structures reference each other through "Refs"; not through foreign keys as * in the relational data model. 
TopLink supports using the Ref to reference the target object. */ public class ReferenceMapping extends ObjectReferenceMapping { /** A ref is always stored in a single field. */ protected DatabaseField field; public ReferenceMapping() { super(); this.setWeight(WEIGHT_AGGREGATE); } /** * Returns all the aggregate fields. */ protected Vector collectFields() { Vector fields = new Vector(1); fields.addElement(getField()); return fields; } /** * INTERNAL: * Returns the field which this mapping represents. */ public DatabaseField getField() { return field; } /** * PUBLIC: * Return the name of the field this mapping represents. */ public String getFieldName() { return getField().getName(); } /** * INTERNAL: * Join criteria is created to read target records (nested table) from the table. */ @Override public Expression getJoinCriteria(ObjectExpression context, Expression base) { return null; } /** * INTERNAL: * The returns if the mapping has any constraint dependencies, such as foreign keys and join tables. */ @Override public boolean hasConstraintDependency() { return true; } /** * INTERNAL: * Initialize the mapping. */ @Override public void initialize(AbstractSession session) throws DescriptorException { setReferenceDescriptor(session.getDescriptor(getReferenceClass())); if (referenceDescriptor == null) { throw DescriptorException.descriptorIsMissing(getReferenceClass().getName(), this); } // For bug 2730536 convert the field to be an ObjectRelationalDatabaseField. ObjectRelationalDatabaseField field = (ObjectRelationalDatabaseField)getField(); field.setSqlType(java.sql.Types.REF); if (referenceDescriptor instanceof ObjectRelationalDataTypeDescriptor) { field.setSqlTypeName(((ObjectRelationalDataTypeDescriptor)referenceDescriptor).getStructureName()); } setField(getDescriptor().buildField(getField())); setFields(collectFields()); // Ref mapping requires native connection in WLS as the Ref is wrapped. 
getDescriptor().setIsNativeConnectionRequired(true); } /** * INTERNAL: */ @Override public boolean isReferenceMapping() { return true; } /** * INTERNAL: * Insert privately owned parts */ @Override public void preInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { // Checks if privately owned parts should be inserted or not. if (!shouldObjectModifyCascadeToParts(query)) { return; } // Get the privately owned parts Object object = getRealAttributeValueFromObject(query.getObject(), query.getSession()); if (object == null) { return; } if (isPrivateOwned()) { // No need to set changeSet as insert is a straight copy anyway InsertObjectQuery insertQuery = new InsertObjectQuery(); insertQuery.setIsExecutionClone(true); insertQuery.setObject(object); insertQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(insertQuery); } else { ObjectChangeSet changeSet = null; UnitOfWorkChangeSet uowChangeSet = null; if (query.getSession().isUnitOfWork() && (((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet() != null)) { uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet(); changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(object); } WriteObjectQuery writeQuery = new WriteObjectQuery(); writeQuery.setIsExecutionClone(true); writeQuery.setObject(object); writeQuery.setObjectChangeSet(changeSet); writeQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(writeQuery); } } /** * INTERNAL: * Update privately owned parts */ @Override public void preUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!isAttributeValueInstantiated(query.getObject())) { return; } if (isPrivateOwned()) { Object objectInDatabase = readPrivateOwnedForObject(query); if (objectInDatabase != null) { query.setProperty(this, objectInDatabase); } } if (!shouldObjectModifyCascadeToParts(query)) { return; } // Get the privately 
owned parts in the memory Object object = getRealAttributeValueFromObject(query.getObject(), query.getSession()); if (object != null) { ObjectChangeSet changeSet = null; UnitOfWorkChangeSet uowChangeSet = null; if (query.getSession().isUnitOfWork() && (((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet() != null)) { uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet(); changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(object); } WriteObjectQuery writeQuery = new WriteObjectQuery(); writeQuery.setIsExecutionClone(true); writeQuery.setObject(object); writeQuery.setObjectChangeSet(changeSet); writeQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(writeQuery); } } /** * INTERNAL: * Insert privately owned parts */ @Override public void postInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { return; } /** * INTERNAL: * Delete privately owned parts */ @Override public void postDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { return; } /** * INTERNAL: * Update privately owned parts */ @Override public void postUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { return; } /** * INTERNAL: * Delete privately owned parts */ @Override public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { return; } /** * Set the field in the mapping. */ protected void setField(DatabaseField field) { this.field = field; } /** * PUBLIC: * Set the field name in the mapping. */ public void setFieldName(String fieldName) { setField(new ObjectRelationalDatabaseField(fieldName)); } /** * PUBLIC: * This is a reference class whose instances this mapping will store in the domain objects. 
 */
    @Override
    public void setReferenceClass(Class referenceClass) {
        this.referenceClass = referenceClass;
    }

    /**
     * INTERNAL:
     * Return the value of the field from the row or a value holder on the query to obtain the object.
     * Check for batch + aggregation reading.
     *
     * Flow: (1) honor protected-cache isolation, (2) short-circuit through an
     * embedded SOP object when the row carries one, (3) otherwise dereference
     * the JDBC Ref from the row into a Struct and build the target object from
     * it. Statement order matters here (call-count bracketing around the raw
     * connection use); do not reorder.
     */
    @Override
    public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery query, CacheKey cacheKey, AbstractSession executionSession, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException {
        if (this.descriptor.getCachePolicy().isProtectedIsolation()) {
            if (this.isCacheable && isTargetProtected && cacheKey != null) {
                //cachekey will be null when isolating to uow
                //used cached collection
                Object result = null;
                Object cached = cacheKey.getObject();
                if (cached != null) {
                    if (wasCacheUsed != null){
                        wasCacheUsed[0] = Boolean.TRUE;
                    }
                    return this.getAttributeValueFromObject(cached);
                }
                return result;
            } else if (!this.isCacheable && !isTargetProtected && cacheKey != null) {
                // Non-cacheable in a protected context: hand back an empty holder.
                return this.indirectionPolicy.buildIndirectObject(new ValueHolder(null));
            }
        }
        AbstractRecord targetRow = null;
        if (row.hasSopObject()) {
            Object sopAttributeValue = getAttributeValueFromObject(row.getSopObject());
            if (sopAttributeValue == null) {
                return this.indirectionPolicy.nullValueFromRow();
            }
            // As part of SOP object the indirection should be already triggered
            Object sopRealAttributeValue = getIndirectionPolicy().getRealAttributeValueFromObject(row.getSopObject(), sopAttributeValue);
            if (sopRealAttributeValue == null) {
                return sopAttributeValue;
            }
            targetRow = new DatabaseRecord(0);
            targetRow.setSopObject(sopRealAttributeValue);
            // As part of SOP object the indirection should be already triggered and should be no references outside of sopObject (and its privately owned (possibly nested privately owned) objects)
            return getReferenceDescriptor().getObjectBuilder().buildObject(query, targetRow, null);
        }
        Ref ref = (Ref)row.get(getField());
        if (ref == null) {
            return null;
        }
        Struct struct;
        try {
            // Bracket raw-connection use with the accessor's call count so the
            // connection is held for the duration of the dereference.
            ((DatabaseAccessor)executionSession.getAccessor()).incrementCallCount(executionSession);
            java.sql.Connection connection = ((DatabaseAccessor)executionSession.getAccessor()).getConnection();
            struct = (Struct)executionSession.getPlatform().getRefValue(ref,executionSession,connection);
            targetRow = ((ObjectRelationalDataTypeDescriptor)getReferenceDescriptor()).buildRowFromStructure(struct);
        } catch (java.sql.SQLException exception) {
            throw DatabaseException.sqlException(exception, executionSession, false);
        } finally {
            ((DatabaseAccessor)executionSession.getAccessor()).decrementCallCount();
        }
        return getReferenceDescriptor().getObjectBuilder().buildObject(query, targetRow, joinManager);
    }

    /**
     * INTERNAL:
     * Get a value from the object and set that in the respective field of the row.
     */
    @Override
    public void writeFromObjectIntoRow(Object object, AbstractRecord record, AbstractSession session, WriteType writeType) {
        if (isReadOnly()) {
            return;
        }
        writeFromObjectIntoRowInternal(object, record, session, false);
    }

    /**
     * INTERNAL:
     * Get a value from the object and set that in the respective field of the row.
     *
     * @param shouldIgnoreNull when false, a null reference still writes an
     *        explicit null into the row (fix for 2730536); when true, a null
     *        reference writes nothing.
     */
    public void writeFromObjectIntoRowInternal(Object object, AbstractRecord record, AbstractSession session, boolean shouldIgnoreNull) {
        Object referenceObject = getRealAttributeValueFromObject(object, session);
        if (referenceObject == null) {
            if (!shouldIgnoreNull) {
                // Fix for 2730536, must put something in modify row, even if it is null.
                record.put(getField(), null);
            }
            return;
        }
        Ref ref = ((ObjectRelationalDataTypeDescriptor)getReferenceDescriptor()).getRef(referenceObject, session);
        record.put(getField(), ref);
    }

    /**
     * INTERNAL:
     * Get a value from the object and set that in the respective field of the row.
*/ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord record, AbstractSession session, WriteType writeType) { if (isReadOnly()) { return; } ObjectChangeSet changeSet = (ObjectChangeSet)((ObjectReferenceChangeRecord)changeRecord).getNewValue(); Object referenceObject = changeSet.getUnitOfWorkClone(); if (referenceObject == null) { return; } Ref ref = ((ObjectRelationalDataTypeDescriptor)getReferenceDescriptor()).getRef(referenceObject, session); record.put(getField(), ref); } /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. * The foreign keys must be set to null to avoid constraints. */ @Override public void writeFromObjectIntoRowForShallowInsert(Object object, AbstractRecord record, AbstractSession session) { if (isReadOnly()) { return; } if (getField().isNullable()) { record.put(getField(), null); } else { writeFromObjectIntoRowInternal(object, record, session, false); } } /** * INTERNAL: * This row is built for update after shallow insert which happens in case of bidirectional inserts. * It contains the foreign keys with non null values that were set to null for shallow insert. */ @Override public void writeFromObjectIntoRowForUpdateAfterShallowInsert(Object object, AbstractRecord record, AbstractSession session, DatabaseTable table) { if (this.isReadOnly) { return; } if (!getField().getTable().equals(table) || !getField().isNullable()) { return; } writeFromObjectIntoRowInternal(object, record, session, true); } /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. * The foreign keys must be set to null to avoid constraints. 
*/ @Override public void writeFromObjectIntoRowForShallowInsertWithChangeRecord(ChangeRecord changeRecord, AbstractRecord record, AbstractSession session) { if (isReadOnly()) { return; } record.put(getField(), null); } /** * INTERNAL: * Write fields needed for insert into the template for with null values. */ @Override public void writeInsertFieldsIntoRow(AbstractRecord record, AbstractSession session) { if (isReadOnly()) { return; } record.put(getField(), null); } /** * INTERNAL: */ @Override public boolean isRelationalMapping() { return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/ObjectArrayMapping.java0000664000000000000000000001066312216173130027036 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.structures; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.mappings.foundation.AbstractCompositeCollectionMapping; import org.eclipse.persistence.queries.ObjectBuildingQuery; import org.eclipse.persistence.descriptors.ClassDescriptor; /** *

Purpose: * In an object-relational data model, structures can contain "Arrays" or collections of other data-types. * In Oracle 8i, a Varray is typically used to represent a collection of primitive data or aggregate structures. * These arrays are stored with their parent structure in the same table.

* * ArrayMapping is used to map a collection of primitive data

* ObjectArrayMapping is used to map a collection of Oracle data-type * *

NOTE: Only Oracle8i supports Varray type.
 *
 * @author King (Yaoping) Wang
 * @since TOPLink/Java 3.0
 *
 * @see ArrayMapping
 */
public class ObjectArrayMapping extends AbstractCompositeCollectionMapping {

    /** Arrays require a structure name, this is the ADT defined for the VARRAY. */
    protected String structureName;

    /**
     * PUBLIC:
     * Return the name of the structure.
     * This is the name of the user defined data type as defined on the database.
     */
    public String getStructureName() {
        return structureName;
    }

    /**
     * INTERNAL:
     * Initialize the mapping. Requires a structure name and converts the
     * mapped field into an ARRAY-typed ObjectRelationalDatabaseField.
     *
     * @throws DescriptorException if no structure name has been set.
     */
    @Override
    public void initialize(AbstractSession session) throws DescriptorException {
        super.initialize(session);
        if ((getStructureName() == null) || getStructureName().length() == 0) {
            throw DescriptorException.structureNameNotSetInMapping(this);
        }

        // For bug 2730536 convert the field to be an ObjectRelationalDatabaseField.
        ObjectRelationalDatabaseField field = (ObjectRelationalDatabaseField)getField();
        field.setSqlType(java.sql.Types.ARRAY);
        field.setSqlTypeName(getStructureName());

        // May require a native connection in WLS to avoid the driver wrapper.
        // NOTE(review): original comment was garbled ("avoid wrapping wrapped");
        // confirm intended meaning against the WLS-unwrapping notes elsewhere.
        getDescriptor().setIsNativeConnectionRequired(true);
    }

    // Wraps the name in an ObjectRelationalDatabaseField, as this mapping
    // requires the object-relational field variant (see initialize).
    public void setFieldName(String fieldName) {
        this.setField(new ObjectRelationalDatabaseField(fieldName));
    }

    /**
     * PUBLIC:
     * Set the name of the structure.
     * This is the name of the user defined data type as defined on the database.
*/ public void setStructureName(String structureName) { this.structureName = structureName; } @Override protected Object buildCompositeObject(ClassDescriptor descriptor, AbstractRecord nestedRow, ObjectBuildingQuery query, CacheKey parentCacheKey, JoinedAttributeManager joinManager, AbstractSession targetSession) { Object element = descriptor.getObjectBuilder().buildNewInstance(); descriptor.getObjectBuilder().buildAttributesIntoObject(element, parentCacheKey, nestedRow, query, joinManager, query.getExecutionFetchGroup(descriptor), false, targetSession); return element; } @Override protected AbstractRecord buildCompositeRow(Object attributeValue, AbstractSession session, AbstractRecord parentRow, WriteType writeType) { return this.getObjectBuilder(attributeValue, session).buildRow(attributeValue, session, writeType); } /** * INTERNAL: */ @Override public boolean isRelationalMapping() { return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/ArrayMapping.java0000664000000000000000000000725712216173130025714 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.structures; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.mappings.foundation.AbstractCompositeDirectCollectionMapping; /** *

Purpose: * In an object-relational data model, structures can contain "Arrays" or collections of other data-types. * In Oracle 8i, a "VARRAY" is typically used to represent a collection of primitive data or aggregate structures. * These arrays are stored with their parent structure in the same table. * * @see StructureMapping * @see NestedTableMapping * @see ReferenceMapping */ public class ArrayMapping extends AbstractCompositeDirectCollectionMapping { /** * Default constructor. */ public ArrayMapping() { super(); } /** * PUBLIC: * Set the name of the field that holds the nested collection. */ public void setFieldName(String fieldName) { this.setField(new ObjectRelationalDatabaseField(fieldName)); } /** * PUBLIC: * Return the name of the structure. * This is the name of the user-defined data type as defined on the database. */ public String getStructureName() { return this.getElementDataTypeName(); } /** * PUBLIC: * Set the name of the structure. * This is the name of the user-defined data type as defined on the database. */ public void setStructureName(String structureName) { this.setElementDataTypeName(structureName); } /** * PUBLIC: * Return the "data type" associated with each element * in the nested collection. * Depending on the data store, this could be optional. */ public String getElementDataTypeName() { return elementDataTypeName; } /** * PUBLIC: * Set the "data type" associated with each element * in the nested collection. * Depending on the data store, this could be optional. */ public void setElementDataTypeName(String elementDataTypeName) { this.elementDataTypeName = elementDataTypeName; } /** * INTERNAL: */ @Override public boolean isRelationalMapping() { return true; } /** * INTERNAL: * Initialize the mapping. 
*/ @Override public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); if (this.getStructureName().length() == 0) { throw DescriptorException.structureNameNotSetInMapping(this); } // For bug 2730536 convert the field to be an ObjectRelationalDatabaseField. ObjectRelationalDatabaseField field = (ObjectRelationalDatabaseField)getField(); field.setSqlType(java.sql.Types.ARRAY); field.setSqlTypeName(getStructureName()); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/ArrayCollectionMappingHelper.java0000664000000000000000000005743112216173130031067 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.structures; import java.util.Enumeration; import java.util.Vector; import org.eclipse.persistence.eis.EISCollectionChangeRecord; import org.eclipse.persistence.eis.EISOrderedCollectionChangeRecord; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.ChangeRecord; import org.eclipse.persistence.internal.sessions.MergeManager; import org.eclipse.persistence.internal.sessions.ObjectChangeSet; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.DatabaseMapping; /** * Helper class to consolidate all the heinous comparing * and merging code for the Array collection mappings. * @see ArrayCollectionMapping */ public class ArrayCollectionMappingHelper { /** The mapping that needs help comparing and merging. */ private ArrayCollectionMapping mapping; private static Object XXX = new Object();// object used to marked cleared out slots when comparing /** * Constructor. */ public ArrayCollectionMappingHelper(ArrayCollectionMapping mapping) { super(); this.mapping = mapping; } /** * Convenience method. */ private boolean mapKeyHasChanged(Object element, AbstractSession session) { return this.getMapping().mapKeyHasChanged(element, session); } /** * Convenience method. */ private Object getRealCollectionAttributeValueFromObject(Object object, AbstractSession session) { return this.getMapping().getRealCollectionAttributeValueFromObject(object, session); } /** * Convenience method. 
*/ private Object buildAddedElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession) { return this.getMapping().buildAddedElementFromChangeSet(changeSet, mergeManager, targetSession); } /** * Convenience method. */ private Object buildChangeSet(Object element, ObjectChangeSet owner, AbstractSession session) { return this.getMapping().buildChangeSet(element, owner, session); } /** * Convenience method. */ private Object buildElementFromElement(Object element, MergeManager mergeManager, AbstractSession targetSession) { return this.getMapping().buildElementFromElement(element, mergeManager, targetSession); } /** * Convenience method. */ private Object buildRemovedElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession) { return this.getMapping().buildRemovedElementFromChangeSet(changeSet, mergeManager, targetSession); } /** * Convenience method. * Check for null values before delegating to the mapping. */ protected boolean compareElements(Object element1, Object element2, AbstractSession session) { if ((element1 == null) && (element2 == null)) { return true; } if ((element1 == null) || (element2 == null)) { return false; } if (element2 == XXX) {// if element2 was marked as cleared out, it is not a match return false; } return this.getMapping().compareElements(element1, element2, session); } /** * Convenience method. * Check for null values before delegating to the mapping. */ protected boolean compareElementsForChange(Object element1, Object element2, AbstractSession session) { if ((element1 == null) && (element2 == null)) { return true; } if ((element1 == null) || (element2 == null)) { return false; } if (element2 == XXX) {// if element2 was marked as cleared out, it is not a match return false; } return this.getMapping().compareElementsForChange(element1, element2, session); } /** * INTERNAL: * Return the mapping. 
*/ public ArrayCollectionMapping getMapping() { return mapping; } /** * INTERNAL: * Build and return the change record that results * from comparing the two collection attributes. */ public ChangeRecord compareForChange(Object clone, Object backup, ObjectChangeSet owner, AbstractSession session) { ContainerPolicy cp = this.getContainerPolicy(); Object cloneCollection = this.getRealCollectionAttributeValueFromObject(clone, session); Object backupCollection = null; if (owner.isNew()) { backupCollection = cp.containerInstance(1); } else { backupCollection = this.getRealCollectionAttributeValueFromObject(backup, session); } if (cp.hasOrder()) { return this.compareAttributeValuesForChangeWithOrder(cloneCollection, backupCollection, owner, session); } else { return this.compareAttributeValuesForChangeWithoutOrder(cloneCollection, backupCollection, owner, session); } } /** * Build and return the change record that results * from comparing the two collection attributes. * The order of the elements is significant. 
*/ private ChangeRecord compareAttributeValuesForChangeWithOrder(Object cloneCollection, Object backupCollection, ObjectChangeSet owner, AbstractSession session) { ContainerPolicy cp = this.getContainerPolicy(); Vector cloneVector = cp.vectorFor(cloneCollection, session);// convert it to a Vector so we can preserve the order and use indexes Vector backupVector = cp.vectorFor(backupCollection, session);// "clone" it so we can clear out the slots EISOrderedCollectionChangeRecord changeRecord = new EISOrderedCollectionChangeRecord(owner, this.getAttributeName(), this.getDatabaseMapping()); for (int i = 0; i < cloneVector.size(); i++) { Object cloneElement = cloneVector.elementAt(i); boolean found = false; for (int j = 0; j < backupVector.size(); j++) { if (this.compareElementsForChange(cloneElement, backupVector.elementAt(j), session)) { // the clone element was found in the backup collection found = true; backupVector.setElementAt(XXX, j);// clear out the matching backup element changeRecord.addMovedChangeSet(this.buildChangeSet(cloneElement, owner, session), j, i); break;// matching backup element found - skip the rest of them } } if (!found) { // the clone element was not found, so it must have been added changeRecord.addAddedChangeSet(this.buildChangeSet(cloneElement, owner, session), i); } } for (int i = 0; i < backupVector.size(); i++) { Object backupElement = backupVector.elementAt(i); if (backupElement != XXX) { // the backup element was not in the clone collection, so it must have been removed changeRecord.addRemovedChangeSet(this.buildChangeSet(backupElement, owner, session), i); } } if (changeRecord.hasChanges()) { return changeRecord; } else { return null; } } /** * Build and return the change record that results * from comparing the two collection attributes. * Ignore the order of the elements. 
*/ private ChangeRecord compareAttributeValuesForChangeWithoutOrder(Object cloneCollection, Object backupCollection, ObjectChangeSet owner, AbstractSession session) { ContainerPolicy cp = this.getContainerPolicy(); Vector backupVector = cp.vectorFor(backupCollection, session);// "clone" it so we can clear out the slots EISCollectionChangeRecord changeRecord = new EISCollectionChangeRecord(owner, this.getAttributeName(), this.getDatabaseMapping()); for (Object cloneIter = cp.iteratorFor(cloneCollection); cp.hasNext(cloneIter);) { Object cloneElement = cp.next(cloneIter, session); boolean found = false; for (int i = 0; i < backupVector.size(); i++) { if (this.compareElementsForChange(cloneElement, backupVector.elementAt(i), session)) { // the clone element was found in the backup collection found = true; backupVector.setElementAt(XXX, i);// clear out the matching backup element if (this.mapKeyHasChanged(cloneElement, session)) { changeRecord.addChangedMapKeyChangeSet(this.buildChangeSet(cloneElement, owner, session)); } break;// matching backup element found - skip the rest of them } } if (!found) { // the clone element was not found, so it must have been added changeRecord.addAddedChangeSet(this.buildChangeSet(cloneElement, owner, session)); } } for (int i = 0; i < backupVector.size(); i++) { Object backupElement = backupVector.elementAt(i); if (backupElement != XXX) { // the backup element was not in the clone collection, so it must have been removed changeRecord.addRemovedChangeSet(this.buildChangeSet(backupElement, owner, session)); } } if (changeRecord.hasChanges()) { return changeRecord; } else { return null; } } /** * INTERNAL: * Compare the attributes belonging to this mapping for the objects. 
*/ public boolean compareObjects(Object object1, Object object2, AbstractSession session) { return this.compareAttributeValues(this.getRealCollectionAttributeValueFromObject(object1, session), this.getRealCollectionAttributeValueFromObject(object2, session), session); } /** * Compare the attributes. Return true if they are alike. * Assume the passed-in attributes are non-null. */ private boolean compareAttributeValues(Object collection1, Object collection2, AbstractSession session) { ContainerPolicy cp = this.getContainerPolicy(); if (cp.sizeFor(collection1) != cp.sizeFor(collection2)) { return false; } // if they are both empty, go no further... if (cp.sizeFor(collection1) == 0) { return true; } if (cp.hasOrder()) { return this.compareAttributeValuesWithOrder(collection1, collection2, session); } else { return this.compareAttributeValuesWithoutOrder(collection1, collection2, session); } } /** * Compare the attributes. Return true if they are alike. * The order of the elements is significant. */ private boolean compareAttributeValuesWithOrder(Object collection1, Object collection2, AbstractSession session) { ContainerPolicy cp = this.getContainerPolicy(); Object iter1 = cp.iteratorFor(collection1); Object iter2 = cp.iteratorFor(collection2); while (cp.hasNext(iter1)) { if (!this.compareElements(cp.next(iter1, session), cp.next(iter2, session), session)) { return false; } } return true; } /** * Compare the attributes. Return true if they are alike. * Ignore the order of the elements. 
*/ private boolean compareAttributeValuesWithoutOrder(Object collection1, Object collection2, AbstractSession session) { ContainerPolicy cp = this.getContainerPolicy(); Vector vector2 = cp.vectorFor(collection2, session);// "clone" it so we can clear out the slots for (Object iter1 = cp.iteratorFor(collection1); cp.hasNext(iter1);) { Object element1 = cp.next(iter1, session); boolean found = false; for (int i = 0; i < vector2.size(); i++) { if (this.compareElements(element1, vector2.elementAt(i), session)) { found = true; vector2.setElementAt(XXX, i);// clear out the matching element break;// matching element found - skip the rest of them } } if (!found) { return false; } } // look for elements that were not in collection1 for (Enumeration stream = vector2.elements(); stream.hasMoreElements();) { if (stream.nextElement() != XXX) { return false; } } return true; } /** * INTERNAL: * Merge changes from the source to the target object. */ public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { if (this.getContainerPolicy().hasOrder()) { this.mergeChangesIntoObjectWithOrder(target, changeRecord, source, mergeManager, targetSession); } else { this.mergeChangesIntoObjectWithoutOrder(target, changeRecord, source, mergeManager, targetSession); } } /** * Merge changes from the source to the target object. * Simply replace the entire target collection. 
*/ private void mergeChangesIntoObjectWithOrder(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { ContainerPolicy cp = this.getContainerPolicy(); AbstractSession session = mergeManager.getSession(); Vector changes = ((EISOrderedCollectionChangeRecord)changeRecord).getNewCollection(); Object targetCollection = cp.containerInstance(changes.size()); for (Enumeration stream = changes.elements(); stream.hasMoreElements();) { Object targetElement = this.buildAddedElementFromChangeSet(stream.nextElement(), mergeManager, targetSession); cp.addInto(targetElement, targetCollection, session); } // reset the attribute to allow for set method to re-morph changes if the collection is not being stored directly this.setRealAttributeValueInObject(target, targetCollection); } /** * Merge changes from the source to the target object. * Make the necessary removals and adds and map key modifications. */ private void mergeChangesIntoObjectWithoutOrder(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager, AbstractSession targetSession) { EISCollectionChangeRecord sdkChangeRecord = (EISCollectionChangeRecord)changeRecord; ContainerPolicy cp = this.getContainerPolicy(); AbstractSession session = mergeManager.getSession(); Object targetCollection = null; if (sdkChangeRecord.getOwner().isNew()) { targetCollection = cp.containerInstance(sdkChangeRecord.getAdds().size()); } else { targetCollection = this.getRealCollectionAttributeValueFromObject(target, session); } Vector removes = sdkChangeRecord.getRemoves(); Vector adds = sdkChangeRecord.getAdds(); Vector changedMapKeys = sdkChangeRecord.getChangedMapKeys(); synchronized (targetCollection) { for (Enumeration stream = removes.elements(); stream.hasMoreElements();) { Object removeElement = this.buildRemovedElementFromChangeSet(stream.nextElement(), mergeManager, targetSession); Object targetElement = null; for (Object iter = 
cp.iteratorFor(targetCollection); cp.hasNext(iter);) { targetElement = cp.next(iter, session); if (this.compareElements(targetElement, removeElement, session)) { break;// matching element found - skip the rest of them } } if (targetElement != null) { // a matching element was found, remove it cp.removeFrom(targetElement, targetCollection, session); } } for (Enumeration stream = adds.elements(); stream.hasMoreElements();) { Object addElement = this.buildAddedElementFromChangeSet(stream.nextElement(), mergeManager, targetSession); cp.addInto(addElement, targetCollection, session); } for (Enumeration stream = changedMapKeys.elements(); stream.hasMoreElements();) { Object changedMapKeyElement = this.buildAddedElementFromChangeSet(stream.nextElement(), mergeManager, targetSession); Object originalElement = ((UnitOfWorkImpl)session).getOriginalVersionOfObject(changedMapKeyElement); cp.removeFrom(originalElement, targetCollection, session); cp.addInto(changedMapKeyElement, targetCollection, session); } } // reset the attribute to allow for set method to re-morph changes if the collection is not being stored directly this.setRealAttributeValueInObject(target, targetCollection); } /** * INTERNAL: * Merge changes from the source to the target object. * Simply replace the entire target collection. 
*/ public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager, AbstractSession targetSession) { ContainerPolicy cp = this.getContainerPolicy(); AbstractSession session = mergeManager.getSession(); Object sourceCollection = this.getRealCollectionAttributeValueFromObject(source, session); Object targetCollection = cp.containerInstance(cp.sizeFor(sourceCollection)); for (Object iter = cp.iteratorFor(sourceCollection); cp.hasNext(iter);) { Object targetElement = this.buildElementFromElement(cp.next(iter, session), mergeManager, targetSession); cp.addInto(targetElement, targetCollection, session); } // reset the attribute to allow for set method to re-morph changes if the collection is not being stored directly this.setRealAttributeValueInObject(target, targetCollection); } /** * ADVANCED: * This method is used to add an object to a collection once the changeSet is applied. * The referenceKey parameter should only be used for direct Maps. */ public void simpleAddToCollectionChangeRecord(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) { if (this.getContainerPolicy().hasOrder()) { this.simpleAddToCollectionChangeRecordWithOrder(referenceKey, changeSetToAdd, changeSet, session); } else { this.simpleAddToCollectionChangeRecordWithoutOrder(referenceKey, changeSetToAdd, changeSet, session); } } /** * Add stuff to an ordered collection. 
*/ private void simpleAddToCollectionChangeRecordWithOrder(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) { EISOrderedCollectionChangeRecord changeRecord = (EISOrderedCollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName()); if (changeRecord == null) { changeRecord = new EISOrderedCollectionChangeRecord(changeSet, this.getAttributeName(), this.getDatabaseMapping()); changeSet.addChange(changeRecord); } changeRecord.simpleAddChangeSet(changeSetToAdd); } /** * Add stuff to an unordered collection. */ private void simpleAddToCollectionChangeRecordWithoutOrder(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) { EISCollectionChangeRecord changeRecord = (EISCollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName()); if (changeRecord == null) { changeRecord = new EISCollectionChangeRecord(changeSet, this.getAttributeName(), this.getDatabaseMapping()); changeSet.addChange(changeRecord); } changeRecord.simpleAddChangeSet(changeSetToAdd); } /** * ADVANCED: * This method is used to remove an object from a collection once the changeSet is applied. * The referenceKey parameter should only be used for direct Maps. */ public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object changeSetToRemove, ObjectChangeSet changeSet, AbstractSession session) { if (this.getContainerPolicy().hasOrder()) { this.simpleRemoveFromCollectionChangeRecordWithOrder(referenceKey, changeSetToRemove, changeSet, session); } else { this.simpleRemoveFromCollectionChangeRecordWithoutOrder(referenceKey, changeSetToRemove, changeSet, session); } } /** * Remove stuff from an ordered collection. 
*/ private void simpleRemoveFromCollectionChangeRecordWithOrder(Object referenceKey, Object changeSetToRemove, ObjectChangeSet changeSet, AbstractSession session) { EISOrderedCollectionChangeRecord changeRecord = (EISOrderedCollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName()); if (changeRecord == null) { changeRecord = new EISOrderedCollectionChangeRecord(changeSet, this.getAttributeName(), this.getDatabaseMapping()); changeSet.addChange(changeRecord); } changeRecord.simpleRemoveChangeSet(changeSetToRemove); } /** * Remove stuff from an unordered collection. */ private void simpleRemoveFromCollectionChangeRecordWithoutOrder(Object referenceKey, Object changeSetToRemove, ObjectChangeSet changeSet, AbstractSession session) { EISCollectionChangeRecord changeRecord = (EISCollectionChangeRecord)changeSet.getChangesForAttributeNamed(this.getAttributeName()); if (changeRecord == null) { changeRecord = new EISCollectionChangeRecord(changeSet, this.getAttributeName(), this.getDatabaseMapping()); changeSet.addChange(changeRecord); } changeRecord.simpleRemoveChangeSet(changeSetToRemove); } /** * Convenience method. */ private void setRealAttributeValueInObject(Object object, Object attributeValue) { this.getMapping().setRealAttributeValueInObject(object, attributeValue); } /** * Convenience method. */ private String getAttributeName() { return this.getMapping().getAttributeName(); } /** * Convenience method. */ private ContainerPolicy getContainerPolicy() { return this.getMapping().getContainerPolicy(); } /** * INTERNAL: * Return the mapping, casted a bit more generally. 
*/ public DatabaseMapping getDatabaseMapping() { return (DatabaseMapping)this.getMapping(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/ArrayCollectionMapping.java0000664000000000000000000000641112216173130027717 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.structures; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.MergeManager; import org.eclipse.persistence.internal.sessions.ObjectChangeSet; import org.eclipse.persistence.mappings.ContainerMapping; /** * Interface used by the ArrayCollectionMappingHelper to interact * with the assorted array collection mappings. * @see ArrayCollectionMappingHelper */ public interface ArrayCollectionMapping extends ContainerMapping{ /** * Build and return a newly-added element based on the change set. */ Object buildAddedElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession); /** * Build and return a change set for the specified element. */ Object buildChangeSet(Object element, ObjectChangeSet owner, AbstractSession session); /** * Build and return a new element based on the specified element. 
*/ Object buildElementFromElement(Object element, MergeManager mergeManager, AbstractSession targetSession); /** * Build and return a recently-removed element based on the change set. */ Object buildRemovedElementFromChangeSet(Object changeSet, MergeManager mergeManager, AbstractSession targetSession); /** * Compare the non-null elements and return true if they are alike. */ boolean compareElements(Object element1, Object element2, AbstractSession session); /** * Compare the non-null elements and return true if they are alike. * This is used to build a change record. */ boolean compareElementsForChange(Object element1, Object element2, AbstractSession session); /** * Return the mapping's attribute name. */ String getAttributeName(); /** * Return the attribute value from the specified object, * unwrapping the value holder if necessary. * If the value is null, build a new container. */ Object getRealCollectionAttributeValueFromObject(Object object, AbstractSession session); /** * Return whether the element's user-defined Map key has changed * since it was cloned from the original version. */ boolean mapKeyHasChanged(Object element, AbstractSession session); /** * Set the attribute value for the specified object, * wrapping it in a value holder if necessary. */ void setRealAttributeValueInObject(Object object, Object attributeValue); } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/NestedTableMapping.java0000664000000000000000000004225412216173130027024 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.structures; import java.util.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.internal.databaseaccess.*; import org.eclipse.persistence.internal.expressions.ObjectExpression; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.internal.queries.*; /** *

Purpose: * Nested tables are similar to VARRAYs except internally they store their information in a separate table * from their parent structure's table. The advantage of nested tables is that they support querying and * joining much better than varrays that are inlined into the parent table. A nested table is typically * used to represent a one-to-many or many-to-many relationship of references to another independent * structure. TopLink supports storing a nested table of values into a single field. * *

NOTE: Only Oracle8i supports nested tables type. * * @since TOPLink/Java 2.5 */ public class NestedTableMapping extends CollectionMapping { protected DatabaseMapping nestedMapping; /** A ref is always stored in a single field. */ protected DatabaseField field; /** Arrays require a structure name, this is the ADT defined for the VARRAY. */ protected String structureName; /** * PUBLIC: * Default constructor. */ public NestedTableMapping() { super(); } /** * INTERNAL: * The mapping clones itself to create deep copy */ public Object clone() { NestedTableMapping clone = (NestedTableMapping)super.clone(); return clone; } /** * Returns all the aggregate fields. */ protected Vector collectFields() { Vector fields = new Vector(1); fields.addElement(getField()); return fields; } /** * INTERNAL: * Returns the field which this mapping represents. */ public DatabaseField getField() { return field; } /** * PUBLIC: * Return the name of the field this mapping represents. */ public String getFieldName() { return getField().getName(); } /** * INTERNAL: * Join criteria is created to read target records (nested table) from the table. */ @Override public Expression getJoinCriteria(ObjectExpression context, Expression base) { return context.ref().equal(base.value()); } /** * PUBLIC: * Return the structure name of the nestedTable. * This is the name of the user defined data type as defined on the database. */ public String getStructureName() { return structureName; } /** * INTERNAL: * The returns if the mapping has any constraint dependencies, such as foreign keys and join tables. */ @Override public boolean hasConstraintDependency() { return true; } /** * INTERNAL: * Initialize the mapping. */ @Override public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); if (getField() == null) { throw DescriptorException.fieldNameNotSetInMapping(this); } // For bug 2730536 convert the field to be an ObjectRelationalDatabaseField. 
ObjectRelationalDatabaseField field = (ObjectRelationalDatabaseField)getField(); field.setSqlType(java.sql.Types.ARRAY); field.setSqlTypeName(getStructureName()); setField(getDescriptor().buildField(getField())); } /** * INTERNAL: * Selection criteria is created to read target records (nested table) from the table. */ protected void initializeSelectionCriteria(AbstractSession session) { Expression exp1; Expression exp2; ExpressionBuilder builder = new ExpressionBuilder(); Expression queryKey = builder.getManualQueryKey(getAttributeName(), getDescriptor()); exp1 = builder.ref().equal(queryKey.get(getAttributeName()).value()); exp2 = getDescriptor().getObjectBuilder().getPrimaryKeyExpression().rebuildOn(queryKey); setSelectionCriteria(exp1.and(exp2)); } /** * INTERNAL: */ @Override public boolean isNestedTableMapping() { return true; } /** * INTERNAL: * Post Initialize the mapping. */ @Override public void postInitialize(AbstractSession session) throws DescriptorException { initializeSelectionCriteria(session); } /** * INTERNAL: * Delete privately owned parts */ @Override public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!shouldObjectModifyCascadeToParts(query)) { return; } Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); ContainerPolicy containerPolicy = getContainerPolicy(); Object objectsIterator = containerPolicy.iteratorFor(objects); // delete parts one by one while (containerPolicy.hasNext(objectsIterator)) { DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); deleteQuery.setIsExecutionClone(true); deleteQuery.setObject(containerPolicy.next(objectsIterator, query.getSession())); deleteQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(deleteQuery); } if (!query.getSession().isUnitOfWork()) { // This deletes any objects on the database, as the collection in memory may has been changed. 
// This is not required for unit of work, as the update would have already deleted these objects, // and the backup copy will include the same objects causing double deletes. verifyDeleteForUpdate(query); } } /** * INTERNAL: * Insert privately owned parts */ @Override public void preInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!shouldObjectModifyCascadeToParts(query)) { return; } Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); // insert each object one by one ContainerPolicy cp = getContainerPolicy(); for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) { Object object = cp.next(iter, query.getSession()); if (isPrivateOwned()) { InsertObjectQuery insertQuery = new InsertObjectQuery(); insertQuery.setIsExecutionClone(true); insertQuery.setObject(object); insertQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(insertQuery); } else { // Will happen in unit of work or cascaded query. // This is done only for persistence by reachablility and it not require if the targets are in the queue anyway // Avoid cycles by checking commit manager, this is allowed because there is no dependency. 
if (!query.getSession().getCommitManager().isCommitInPreModify(object)) { ObjectChangeSet changeSet = null; UnitOfWorkChangeSet uowChangeSet = null; if (query.getSession().isUnitOfWork() && (((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet() != null)) { uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet(); changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(object); } WriteObjectQuery writeQuery = new WriteObjectQuery(); writeQuery.setIsExecutionClone(true); writeQuery.setObject(object); writeQuery.setObjectChangeSet(changeSet); writeQuery.setCascadePolicy(query.getCascadePolicy()); query.getSession().executeQuery(writeQuery); } } } } /** * INTERNAL: * Update the privately owned parts */ @Override public void preUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { if (!shouldObjectModifyCascadeToParts(query)) { return; } // If objects are not instantiated that means they are not changed. if (!isAttributeValueInstantiatedOrChanged(query.getObject())) { return; } if (query.getObjectChangeSet() != null) { // UnitOfWork writeChanges(query.getObjectChangeSet(), query); } else { // OLD COMMIT compareObjectsAndWrite(query); } } /** * Set the field in the mapping. */ protected void setField(DatabaseField theField) { field = theField; } /** * PUBLIC: * Set the field name in the mapping. */ public void setFieldName(String FieldName) { setField(new ObjectRelationalDatabaseField(FieldName)); } /** * PUBLIC: * Set the name of the structure. * This is the name of the user defined nested table data type as defined on the database. */ public void setStructureName(String structureName) { this.structureName = structureName; } /** * INTERNAL: * Verifying deletes make sure that all the records privately owned by this mapping are * actually removed. If such records are found then those are all read and removed one * by one taking their privately owned parts into account. 
*/ protected void verifyDeleteForUpdate(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { Object objects = readPrivateOwnedForObject(query); // Delete all objects one by one. ContainerPolicy cp = getContainerPolicy(); for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) { query.getSession().deleteObject(cp.next(iter, query.getSession())); } } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. */ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord record, AbstractSession session, WriteType writeType) { if (isReadOnly()) { return; } Object values = getRealCollectionAttributeValueFromObject(object, session); ContainerPolicy cp = getContainerPolicy(); Object[] fields = new Object[cp.sizeFor(values)]; Object valuesIterator = cp.iteratorFor(values); for (int index = 0; index < cp.sizeFor(values); index++) { Object value = cp.next(valuesIterator, session); fields[index] = ((ObjectRelationalDataTypeDescriptor)getReferenceDescriptor()).getRef(value, session); } java.sql.Array array; try { ((DatabaseAccessor)session.getAccessor()).incrementCallCount(session); java.sql.Connection connection = ((DatabaseAccessor)session.getAccessor()).getConnection(); array = session.getPlatform().createArray(getStructureName(), fields, session,connection); } catch (java.sql.SQLException exception) { throw DatabaseException.sqlException(exception, session.getAccessor(), session, false); } finally { ((DatabaseAccessor)session.getAccessor()).decrementCallCount(); } record.put(getField(), array); } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. 
*/ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord record, AbstractSession session, WriteType writeType) { if (isReadOnly()) { return; } Object object = ((ObjectChangeSet)changeRecord.getOwner()).getUnitOfWorkClone(); Object values = getRealAttributeValueFromObject(object, session); ContainerPolicy containterPolicy = getContainerPolicy(); if (values == null) { values = containterPolicy.containerInstance(1); } Object[] fields = new Object[containterPolicy.sizeFor(values)]; Object valuesIterator = containterPolicy.iteratorFor(values); for (int index = 0; index < containterPolicy.sizeFor(values); index++) { Object value = containterPolicy.next(valuesIterator, session); fields[index] = ((ObjectRelationalDataTypeDescriptor)getReferenceDescriptor()).getRef(value, session); } java.sql.Array array; try { ((DatabaseAccessor)session.getAccessor()).incrementCallCount(session); java.sql.Connection connection = ((DatabaseAccessor)session.getAccessor()).getConnection(); array = session.getPlatform().createArray(getStructureName(), fields, session, connection); } catch (java.sql.SQLException exception) { throw DatabaseException.sqlException(exception, session.getAccessor(), session, false); } finally { ((DatabaseAccessor)session.getAccessor()).decrementCallCount(); } record.put(getField(), array); } /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. * The foreign keys must be set to null to avoid constraints. */ @Override public void writeFromObjectIntoRowForShallowInsert(Object object, AbstractRecord record, AbstractSession session) { if (isReadOnly()) { return; } if (getField().isNullable()) { record.put(getField(), null); } else { writeFromObjectIntoRow(object, record, session, WriteType.INSERT); } } /** * INTERNAL: * This row is built for update after shallow insert which happens in case of bidirectional inserts. 
* It contains the foreign keys with non null values that were set to null for shallow insert. */ @Override public void writeFromObjectIntoRowForUpdateAfterShallowInsert(Object object, AbstractRecord record, AbstractSession session, DatabaseTable table) { if (!getField().getTable().equals(table) || !getField().isNullable()) { return; } writeFromObjectIntoRow(object, record, session, WriteType.UPDATE); } /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. * The foreign keys must be set to null to avoid constraints. */ @Override public void writeFromObjectIntoRowForShallowInsertWithChangeRecord(ChangeRecord changeRecord, AbstractRecord record, AbstractSession session) { if (isReadOnly()) { return; } record.put(getField(), null); } /** * INTERNAL: * Write the entire structure into the row as a special type that prints as the constructor. * If any part of the structure has changed the whole thing is written. */ @Override public void writeFromObjectIntoRowForUpdate(WriteObjectQuery writeQuery, AbstractRecord record) throws DescriptorException { if (!isAttributeValueInstantiatedOrChanged(writeQuery.getObject())) { return; } if (writeQuery.getSession().isUnitOfWork()) { if (compareObjects(writeQuery.getObject(), writeQuery.getBackupClone(), writeQuery.getSession())) { return;// Nothing has changed, no work required } } writeFromObjectIntoRow(writeQuery.getObject(), record, writeQuery.getSession(), WriteType.UPDATE); } /** * INTERNAL: * Write fields needed for insert into the template for with null values. 
*/ @Override public void writeInsertFieldsIntoRow(AbstractRecord record, AbstractSession session) { if (isReadOnly()) { return; } record.put(getField(), null); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/structures/StructureMapping.java0000664000000000000000000001012412216173130026621 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.structures; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.descriptors.ObjectBuilder; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.queries.ObjectBuildingQuery; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.mappings.foundation.AbstractCompositeObjectMapping; /** *

Purpose: * A structure is an object-relational user-defined data-type or object-type. A structure is similar to a Java class * as it defines attributes or fields where each attribute is either a primitive data-type, another structure, an * array, or a reference to another structure. * The mapping is similar to an AggregateObjectMapping, as multiple objects are stored in a single table. */ public class StructureMapping extends AbstractCompositeObjectMapping { /** * Default constructor. */ public StructureMapping() { super(); } /** * INTERNAL: */ public boolean isStructureMapping() { return true; } /** * INTERNAL: * Return the name of the structure. * This is the name of the user-defined data type as defined on the database. */ public String getStructureName() { if (getReferenceDescriptor() instanceof ObjectRelationalDataTypeDescriptor) { return ((ObjectRelationalDataTypeDescriptor)getReferenceDescriptor()).getStructureName(); } else { return ""; } } /** * INTERNAL: * Initialize the mapping. */ public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); // For bug 2730536 convert the field to be an ObjectRelationalDatabaseField. 
ObjectRelationalDatabaseField field = (ObjectRelationalDatabaseField)getField(); field.setSqlType(java.sql.Types.STRUCT); field.setSqlTypeName(getStructureName()); } public void setFieldName(String fieldName) { this.setField(new ObjectRelationalDatabaseField(fieldName)); } @Override protected Object buildCompositeRow(Object attributeValue, AbstractSession session, AbstractRecord Record, WriteType writeType) { AbstractRecord nestedRow = this.getObjectBuilder(attributeValue, session).buildRow(attributeValue, session, writeType); return this.getReferenceDescriptor(attributeValue, session).buildFieldValueFromNestedRow(nestedRow, session); } @Override protected Object buildCompositeObject(ObjectBuilder objectBuilder, AbstractRecord nestedRow, ObjectBuildingQuery query, CacheKey parentCacheKey, JoinedAttributeManager joinManager, AbstractSession targetSession) { Object aggregateObject = objectBuilder.buildNewInstance(); objectBuilder.buildAttributesIntoObject(aggregateObject, parentCacheKey, nestedRow, query, joinManager, query.getExecutionFetchGroup(objectBuilder.getDescriptor()), false, targetSession); return aggregateObject; } /** * INTERNAL: */ @Override public boolean isRelationalMapping() { return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/querykeys/0000775000000000000000000000000012216174372022273 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/querykeys/QueryKey.java0000664000000000000000000001150112216173130024701 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.querykeys; import java.io.*; import org.eclipse.persistence.descriptors.ClassDescriptor; /** *

* Purpose: Define a Java appropriate alias to a database field or function. *

* Responsibilities: *

    *
  • Define the name of the alias. *
  • Define the descriptor of the alias. *
*/ public class QueryKey implements Cloneable, Serializable { protected String name; protected ClassDescriptor descriptor; /** * INTERNAL: * Clones itself. */ public Object clone() { Object object = null; try { object = super.clone(); } catch (Exception exception) { throw new InternalError(exception.toString()); } return object; } /** * INTERNAL: * Convert all the class-name-based settings in this QueryKey to actual class-based * settings * Will be overridded by subclasses * @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader){} /** * INTERNAL: * Return the descriptor. */ public ClassDescriptor getDescriptor() { return descriptor; } /** * PUBLIC: * Return the name for the query key. * This is the name that will be used in the expression. */ public String getName() { return name; } /** * INTERNAL: * Initialize any information in the receiver that requires its descriptor. * Set the receiver's descriptor back reference. * @param aDescriptor is the owner descriptor of the receiver. */ public void initialize(ClassDescriptor aDescriptor) { setDescriptor(aDescriptor); } /** * INTERNAL: * return whether this query key is abstract * @return boolean */ public boolean isAbstractQueryKey() { return (this.getClass().equals(org.eclipse.persistence.internal.helper.ClassConstants.QueryKey_Class)); } /** * PUBLIC:: * Related query key should implement this method to return true. */ public boolean isCollectionQueryKey() { return false; } /** * PUBLIC:: * Related query key should implement this method to return true. */ public boolean isDirectCollectionQueryKey() { return false; } /** * PUBLIC:: * Related query key should implement this method to return true. */ public boolean isDirectQueryKey() { return false; } /** * PUBLIC:: * Related query key should implement this method to return true. */ public boolean isForeignReferenceQueryKey() { return false; } /** * PUBLIC:: * Related query key should implement this method to return true. 
*/ public boolean isManyToManyQueryKey() { return false; } /** * PUBLIC:: * Related query key should implement this method to return true. */ public boolean isOneToManyQueryKey() { return false; } /** * PUBLIC:: * Related query key should implement this method to return true. */ public boolean isOneToOneQueryKey() { return false; } /** * INTERNAL: * This is a QueryKey. return true. * @return boolean */ public boolean isQueryKey() { return true; } /** * INTERNAL: * Set the descriptor. */ public void setDescriptor(ClassDescriptor descriptor) { this.descriptor = descriptor; } /** * PUBLIC: * Set the name for the query key. * This is the name that will be used in the expression. */ public void setName(String name) { this.name = name; } /** * INTERNAL: * return a string representation of this instance of QueryKey */ public String toString() { return org.eclipse.persistence.internal.helper.Helper.getShortClassName(this) + "(" + getName() + ")"; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/querykeys/OneToOneQueryKey.java0000664000000000000000000000236612216173130026321 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.querykeys; /** *

* Purpose: Define an alias to a foreign one to one object. *

*/ public class OneToOneQueryKey extends ForeignReferenceQueryKey { // CR#2466 removed joinCriteria because it is already in ForeignReferenceQueryKey - TW /** * INTERNAL: * override the isOneToOneQueryKey() method in the superclass to return true. * @return boolean */ public boolean isOneToOneQueryKey() { return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/querykeys/OneToManyQueryKey.java0000664000000000000000000000322612216173130026500 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.querykeys; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.internal.helper.DatabaseTable; /** *

* Purpose:Represents a 1-m join query. */ public class OneToManyQueryKey extends ForeignReferenceQueryKey { /** * INTERNAL: * override the isCollectionQueryKey() method in the superclass to return true. * @return boolean */ public boolean isCollectionQueryKey() { return true; } /** * INTERNAL: * override the isOneToManyQueryKey() method in the superclass to return true. * @return boolean */ public boolean isOneToManyQueryKey() { return true; } /** * PUBLIC: * Returns the reference table. */ public DatabaseTable getRelationTable(ClassDescriptor referenceDescriptor) { return null; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/querykeys/DirectCollectionQueryKey.java0000664000000000000000000000327112216173130030055 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.querykeys; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.internal.helper.DatabaseTable; /** *

* Purpose:Represents a direct collection join query. */ public class DirectCollectionQueryKey extends ForeignReferenceQueryKey { /** * INTERNAL: * override the isCollectionQueryKey() method in the superclass to return true. * @return boolean */ public boolean isCollectionQueryKey() { return true; } /** * INTERNAL: * override the isDirectCollectionQueryKey() method in the superclass to return true. * @return boolean */ public boolean isDirectCollectionQueryKey() { return true; } /** * PUBLIC: * Returns the reference table. */ public DatabaseTable getRelationTable(ClassDescriptor referenceDescriptor) { return null; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/querykeys/DirectQueryKey.java0000664000000000000000000000523712216173130026045 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.querykeys; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.internal.helper.DatabaseField; /** *

* Purpose: Define an alias to a database field. *

* Responsibilities: *

    *
  • Define the field that is being aliased. *
*/ public class DirectQueryKey extends QueryKey { DatabaseField field; /** * INTERNAL: * Return the field for the query key. */ public DatabaseField getField() { return field; } /** * PUBLIC: * Return the field name for the query key. */ public String getFieldName() { return getField().getName(); } /** * PUBLIC: * Return the qualified field name for the query key. */ public String getQualifiedFieldName() { return getField().getQualifiedName(); } /** * INTERNAL: * Initialize any information in the receiver that requires its descriptor. * Set the receiver's descriptor back reference. * @param descriptor is the owner descriptor of the receiver. */ public void initialize(ClassDescriptor descriptor) { super.initialize(descriptor); if (!getField().hasTableName()) { getField().setTable(descriptor.getDefaultTable()); } } /** * INTERNAL: * override the isDirectQueryKey() method in the superclass to return true. * @return boolean */ public boolean isDirectQueryKey() { return true; } /** * INTERNAL: * Set the field for the query key. */ public void setField(DatabaseField field) { this.field = field; } /** * PUBLIC: * Set the field name for the query key. */ public void setFieldName(String fieldName) { setField(new DatabaseField(fieldName)); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/querykeys/ManyToManyQueryKey.java0000664000000000000000000000373712216173130026672 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.querykeys; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.QueryException; import org.eclipse.persistence.internal.helper.DatabaseTable; /**

* Purpose:Represents a m-m join query. */ public class ManyToManyQueryKey extends ForeignReferenceQueryKey { /** * INTERNAL: * override the isCollectionQueryKey() method in the superclass to return true. * @return boolean */ public boolean isCollectionQueryKey() { return true; } /** * INTERNAL: * override the isManyToManyQueryKey() method in the superclass to return true. * @return boolean */ public boolean isManyToManyQueryKey() { return true; } /** * PUBLIC: * Returns the reference table. */ public DatabaseTable getRelationTable(ClassDescriptor referenceDescriptor) { DatabaseTable relationTable = super.getRelationTable(referenceDescriptor); if(relationTable != null) { return relationTable; } else { throw QueryException.noRelationTableInManyToManyQueryKey(this, this.joinCriteria); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/querykeys/ForeignReferenceQueryKey.java0000664000000000000000000001745012216173130030043 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings.querykeys; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.internal.expressions.DataExpression; import org.eclipse.persistence.internal.expressions.ExpressionIterator; import org.eclipse.persistence.internal.expressions.ParameterExpression; import org.eclipse.persistence.internal.expressions.TableExpression; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.helper.DatabaseTable; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; /** *

* Purpose: Define an alias to a foreign object. *

* Responsibilities: *

    *
  • Define the reference class of the foreign object. *
*/ public class ForeignReferenceQueryKey extends QueryKey { protected Class referenceClass; protected String referenceClassName; protected Expression joinCriteria; /** * INTERNAL: * Convert all the class-name-based settings in this project to actual class-based * settings * @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader){ Class referenceClass = null; try{ if (referenceClassName != null){ if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { referenceClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(referenceClassName, true, classLoader)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(referenceClassName, exception.getException()); } } else { referenceClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(referenceClassName, true, classLoader); } } setReferenceClass(referenceClass); } catch (ClassNotFoundException exc){ throw ValidationException.classNotFoundWhileConvertingClassNames(referenceClassName, exc); } } /** * PUBLIC: * Return the join expression for the relationship defined by the query key. */ public Expression getJoinCriteria() { return joinCriteria; } /** * PUBLIC: * Return the reference class of the relationship. */ public Class getReferenceClass() { return referenceClass; } /** * PUBLIC: * Return the reference class name of the relationship. */ public String getReferenceClassName() { if (referenceClassName == null && referenceClass != null){ referenceClassName = referenceClass.getName(); } return referenceClassName; } /** * INTERNAL: * override the isForeignReferenceQueryKey() method in the superclass to return true. * @return boolean */ public boolean isForeignReferenceQueryKey() { return true; } /** * PUBLIC: * Set the join expression for the relationship defined by the query key. *

Example: *

* builder.getField("ADDRESS.ADDRESS_ID").equal(builder.getParameter("EMPLOYEE.ADDR_ID"); *
*/ public void setJoinCriteria(Expression joinCriteria) { this.joinCriteria = joinCriteria; } /** * PUBLIC: * Set the reference class of the relationship. * This is not required for direct collection query keys. */ public void setReferenceClass(Class referenceClass) { this.referenceClass = referenceClass; } /** * PUBLIC: * Set the reference class name for this relationship * This is used when projects are built without using classes * @param referenceClassName */ public void setReferenceClassName(String referenceClassName) { this.referenceClassName = referenceClassName; } /** * PUBLIC: * Returns the source table. */ public DatabaseTable getSourceTable() { // TODO: Should extract the target table from joinCriteria (if it's not null), // like ManyToManyQueryKey.getRelationTable does. return this.descriptor.getTables().firstElement(); } /** * PUBLIC: * Returns the reference table. */ public DatabaseTable getReferenceTable(ClassDescriptor desc) { // TODO: This won't work for direct collection. // Should extract the target table from joinCriteria (if it's not null), // like ManyToManyQueryKey.getRelationTable does. return desc.getTables().firstElement(); } /** * PUBLIC: * Returns the relation table. * Currently only ManyToMany and OneToOne may have relation table. * The method is overridden to return null for other subclasses. * The returned relationTable still could be null. 
*/ public DatabaseTable getRelationTable(ClassDescriptor referenceDescriptor) { ExpressionIterator expIterator = new ExpressionIterator() { public void iterate(Expression each) { if(each.isTableExpression()) { ((Collection)this.getResult()).add(((TableExpression)each).getTable()); } else if(each.isDataExpression()) { DatabaseField field = ((DataExpression)each).getField(); if(field != null && field.hasTableName()) { ((Collection)this.getResult()).add(field.getTable()); } } else if(each.isParameterExpression()) { DatabaseField field = ((ParameterExpression)each).getField(); if(field != null && field.hasTableName()) { ((Collection)this.getResult()).add(field.getTable()); } } } }; expIterator.setResult(new HashSet()); expIterator.iterateOn(this.joinCriteria); HashSet tables = (HashSet)expIterator.getResult(); DatabaseTable relationTable = null; Iterator it = tables.iterator(); while(it.hasNext()) { DatabaseTable table = it.next(); // neither source nor reference descriptor contains table - must be relationTable if(!descriptor.getTables().contains(table) && !referenceDescriptor.getTables().contains(table)) { relationTable = table; break; } } return relationTable; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/ManyToManyMapping.java0000664000000000000000000014252412216173130024444 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 07/19/2011-2.2.1 Guy Pelletier * - 338812: ManyToMany mapping in aggregate object violate integrity constraint on deletion ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.history.*; import org.eclipse.persistence.internal.expressions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.mappings.foundation.MapComponentMapping; import org.eclipse.persistence.sessions.DatabaseRecord; import org.eclipse.persistence.queries.*; /** *

Purpose: Many to many mappings are used to represent the relationships
 * between a collection of source objects and a collection of target objects.
 * The mapping requires the creation of an intermediate table for managing the
 * associations between the source and target records.
 *
 * @author Sati
 * @since TOPLink/Java 1.0
 */
public class ManyToManyMapping extends CollectionMapping implements RelationalMapping, MapComponentMapping {

    /** Used for data modification events. */
    protected static final String PostInsert = "postInsert";
    protected static final String ObjectRemoved = "objectRemoved";
    protected static final String ObjectAdded = "objectAdded";

    /** Mechanism holds relationTable and all fields and queries associated with it. */
    protected RelationTableMechanism mechanism;

    // Optional policy enabling history tracking on the relation table.
    protected HistoryPolicy historyPolicy;

    /**
     * 266912: Since: EclipseLink 2.0 for the Metamodel API
     * For 1:1 and m:m mappings - track the original externally defined mapping if different
     * Note: This field will provide differentiation for the following
     * external to internal representations for mapping types
     * - A OneToManyMapping will be represented by a ManyToManyMapping if unidirectional
     * - A ManyToOneMapping will be represented by a OneToOneMapping (without a FK constraint)
     */
    protected boolean isDefinedAsOneToManyMapping = false;

    /**
     * PUBLIC:
     * Default constructor.
     */
    public ManyToManyMapping() {
        this.mechanism = new RelationTableMechanism();
        this.isListOrderFieldSupported = true;
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isOwned(){
        return !isReadOnly;
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isRelationalMapping() {
        return true;
    }

    /**
     * PUBLIC:
     * Add the fields in the intermediate table that corresponds to the primary
     * key in the source table. This method is used if the keys are composite.
     */
    public void addSourceRelationKeyField(DatabaseField sourceRelationKeyField, DatabaseField sourcePrimaryKeyField) {
        this.mechanism.addSourceRelationKeyField(sourceRelationKeyField, sourcePrimaryKeyField);
    }

    /**
     * PUBLIC:
     * Add the fields in the intermediate table that corresponds to the primary
     * key in the source table. This method is used if the keys are composite.
     */
    public void addSourceRelationKeyFieldName(String sourceRelationKeyFieldName, String sourcePrimaryKeyFieldName) {
        this.mechanism.addSourceRelationKeyFieldName(sourceRelationKeyFieldName, sourcePrimaryKeyFieldName);
    }

    /**
     * PUBLIC:
     * Add the fields in the intermediate table that corresponds to the primary
     * key in the target table. This method is used if the keys are composite.
     */
    public void addTargetRelationKeyField(DatabaseField targetRelationKeyField, DatabaseField targetPrimaryKeyField) {
        this.mechanism.addTargetRelationKeyField(targetRelationKeyField, targetPrimaryKeyField);
    }

    /**
     * PUBLIC:
     * Add the fields in the intermediate table that corresponds to the primary
     * key in the target table. This method is used if the keys are composite.
*/
    public void addTargetRelationKeyFieldName(String targetRelationKeyFieldName, String targetPrimaryKeyFieldName) {
        this.mechanism.addTargetRelationKeyFieldName(targetRelationKeyFieldName, targetPrimaryKeyFieldName);
    }

    /**
     * INTERNAL:
     * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings
     * the FK field values will be used to re-issue the query when cloning the shared cache entity
     */
    @Override
    public void collectQueryParameters(Set cacheFields){
        this.mechanism.collectQueryParameters(cacheFields);
    }

    /**
     * INTERNAL:
     * The mapping clones itself to create deep copy.
     */
    @Override
    public Object clone() {
        ManyToManyMapping clone = (ManyToManyMapping)super.clone();
        // The mechanism holds mutable query/field state, so it is deep-copied as well.
        clone.mechanism = (RelationTableMechanism)this.mechanism.clone();
        return clone;
    }

    /**
     * INTERNAL:
     * Delete join tables before the start of the deletion process to avoid constraint errors.
     */
    @Override
    public void earlyPreDelete(DeleteObjectQuery query, Object object) {
        AbstractSession querySession = query.getSession();
        if (!this.isCascadeOnDeleteSetOnDatabase) {
            prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), querySession);
            querySession.executeQuery(this.deleteAllQuery, query.getTranslationRow());
        }
        if ((this.historyPolicy != null) && this.historyPolicy.shouldHandleWrites()) {
            if (this.isCascadeOnDeleteSetOnDatabase) {
                // The translation row was not prepared above; history writes still need it.
                prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), querySession);
            }
            this.historyPolicy.mappingLogicalDelete(this.deleteAllQuery, query.getTranslationRow(), querySession);
        }
    }

    /**
     * INTERNAL
     * Called when a DatabaseMapping is used to map the key in a collection. Returns the key.
*/
    public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){
        return session.executeQuery(getSelectionQuery(), dbRow);
    }

    /**
     * INTERNAL:
     * Adds locking clause to the target query to extend pessimistic lock scope.
     */
    @Override
    protected void extendPessimisticLockScopeInTargetQuery(ObjectLevelReadQuery targetQuery, ObjectBuildingQuery sourceQuery) {
        this.mechanism.setRelationTableLockingClause(targetQuery, sourceQuery);
    }

    /**
     * INTERNAL:
     * Called only if both
     * shouldExtendPessimisticLockScope and shouldExtendPessimisticLockScopeInSourceQuery are true.
     * Adds fields to be locked to the where clause of the source query.
     * Note that the sourceQuery must be ObjectLevelReadQuery so that it has ExpressionBuilder.
     *
     * This method must be implemented in subclasses that allow
     * setting shouldExtendPessimisticLockScopeInSourceQuery to true.
     */
    @Override
    public void extendPessimisticLockScopeInSourceQuery(ObjectLevelReadQuery sourceQuery) {
        Expression exp = sourceQuery.getSelectionCriteria();
        exp = this.mechanism.joinRelationTableField(exp, sourceQuery.getExpressionBuilder());
        sourceQuery.setSelectionCriteria(exp);
    }

    /**
     * INTERNAL:
     * Extract the source primary key value from the relation row.
     * Used for batch reading, most following same order and fields as in the mapping.
     */
    @Override
    protected Object extractKeyFromTargetRow(AbstractRecord row, AbstractSession session) {
        return this.mechanism.extractKeyFromTargetRow(row, session);
    }

    /**
     * INTERNAL:
     * Extract the primary key value from the source row.
     * Used for batch reading, most following same order and fields as in the mapping.
     */
    @Override
    protected Object extractBatchKeyFromRow(AbstractRecord row, AbstractSession session) {
        return this.mechanism.extractBatchKeyFromRow(row, session);
    }

    /**
     * INTERNAL:
     * Return the selection criteria used to IN batch fetching.
*/
    @Override
    protected Expression buildBatchCriteria(ExpressionBuilder builder, ObjectLevelReadQuery query) {
        return this.mechanism.buildBatchCriteria(builder, query);
    }

    /**
     * INTERNAL:
     * Add additional fields and check for history.
     */
    @Override
    protected void postPrepareNestedBatchQuery(ReadQuery batchQuery, ObjectLevelReadQuery query) {
        super.postPrepareNestedBatchQuery(batchQuery, query);
        ReadAllQuery mappingBatchQuery = (ReadAllQuery)batchQuery;
        this.mechanism.postPrepareNestedBatchQuery(batchQuery, query);
        if (this.historyPolicy != null) {
            // Constrain the batch query to the correct point in time before
            // appending the history expression.
            ExpressionBuilder builder = mappingBatchQuery.getExpressionBuilder();
            Expression twisted = batchQuery.getSelectionCriteria();
            if (query.getSession().getAsOfClause() != null) {
                builder.asOf(query.getSession().getAsOfClause());
            } else if (builder.getAsOfClause() == null) {
                builder.asOf(AsOfClause.NO_CLAUSE);
            }
            twisted = twisted.and(this.historyPolicy.additionalHistoryExpression(builder, builder));
            mappingBatchQuery.setSelectionCriteria(twisted);
        }
    }

    /**
     * INTERNAL:
     * Return the base expression to use for adding fields to the query.
     * Normally this is the query's builder, but may be the join table for m-m.
     */
    @Override
    protected Expression getAdditionalFieldsBaseExpression(ReadQuery query) {
        return ((ReadAllQuery)query).getExpressionBuilder().getTable(getRelationTable());
    }

    // Delegates to the relation-table mechanism, which owns the delete query.
    protected DataModifyQuery getDeleteQuery() {
        return this.mechanism.getDeleteQuery();
    }

    /**
     * INTERNAL:
     * Should be overridden by subclass that allows setting
     * extendPessimisticLockScope to DEDICATED_QUERY.
*/
    @Override
    protected ReadQuery getExtendPessimisticLockScopeDedicatedQuery(AbstractSession session, short lockMode) {
        if(this.mechanism != null) {
            return this.mechanism.getLockRelationTableQueryClone(session, lockMode);
        } else {
            return super.getExtendPessimisticLockScopeDedicatedQuery(session, lockMode);
        }
    }

    /**
     * INTERNAL:
     * Return source key fields for translation by an AggregateObjectMapping
     */
    @Override
    public Collection getFieldsForTranslationInAggregate() {
        return getRelationTableMechanism().getSourceKeyFields();
    }

    // Delegates to the relation-table mechanism, which owns the insert query.
    protected DataModifyQuery getInsertQuery() {
        return this.mechanism.getInsertQuery();
    }

    /**
     * INTERNAL:
     * Returns the join criteria stored in the mapping selection query. This criteria
     * is used to read reference objects across the tables from the database.
     * When a history policy is set, the history expression is AND-ed in.
     */
    @Override
    public Expression getJoinCriteria(ObjectExpression context, Expression base) {
        if (getHistoryPolicy() != null) {
            Expression result = super.getJoinCriteria(context, base);
            Expression historyCriteria = getHistoryPolicy().additionalHistoryExpression(context, base);
            if (result != null) {
                return result.and(historyCriteria);
            } else if (historyCriteria != null) {
                return historyCriteria;
            } else {
                return null;
            }
        } else {
            return super.getJoinCriteria(context, base);
        }
    }

    /**
     * PUBLIC:
     * Allows history tracking on the m-m join table.
     */
    public HistoryPolicy getHistoryPolicy() {
        return historyPolicy;
    }

    /**
     * PUBLIC:
     * Returns RelationTableMechanism that may be owned by the mapping.
     * Note that all RelationTableMechanism methods are accessible
     * through the mapping directly.
     * The only reason this method is provided
     * is to allow a uniform approach to RelationTableMechanism
     * in both ManyToManyMapping and OneToOneMapping
     * that uses RelationTableMechanism.
     */
    public RelationTableMechanism getRelationTableMechanism() {
        return this.mechanism;
    }

    /**
     * INTERNAL:
     * Return the relation table associated with the mapping.
*/
    public DatabaseTable getRelationTable() {
        return this.mechanism.getRelationTable();
    }

    /**
     * PUBLIC:
     * Return the relation table name associated with the mapping.
     */
    public String getRelationTableName() {
        return this.mechanism.getRelationTableName();
    }

    //CR#2407 This method is added to include table qualifier.
    /**
     * PUBLIC:
     * Return the relation table qualified name associated with the mapping.
     */
    public String getRelationTableQualifiedName() {
        return this.mechanism.getRelationTableQualifiedName();
    }

    /**
     * PUBLIC:
     * Return the source key field names associated with the mapping.
     * These are in-order with the sourceRelationKeyFieldNames.
     */
    public Vector getSourceKeyFieldNames() {
        return this.mechanism.getSourceKeyFieldNames();
    }

    /**
     * INTERNAL:
     * Return all the source key fields associated with the mapping.
     */
    public Vector getSourceKeyFields() {
        return this.mechanism.getSourceKeyFields();
    }

    /**
     * PUBLIC:
     * Return the source relation key field names associated with the mapping.
     * These are in-order with the sourceKeyFieldNames.
     */
    public Vector getSourceRelationKeyFieldNames() {
        return this.mechanism.getSourceRelationKeyFieldNames();
    }

    /**
     * INTERNAL:
     * Return all the source relation key fields associated with the mapping.
     */
    public Vector getSourceRelationKeyFields() {
        return this.mechanism.getSourceRelationKeyFields();
    }

    /**
     * PUBLIC:
     * Return the target key field names associated with the mapping.
     * These are in-order with the targetRelationKeyFieldNames.
     */
    public Vector getTargetKeyFieldNames() {
        return this.mechanism.getTargetKeyFieldNames();
    }

    /**
     * INTERNAL:
     * Return all the target keys associated with the mapping.
     */
    public Vector getTargetKeyFields() {
        return this.mechanism.getTargetKeyFields();
    }

    /**
     * PUBLIC:
     * Return the target relation key field names associated with the mapping.
     * These are in-order with the targetKeyFieldNames.
*/
    public Vector getTargetRelationKeyFieldNames() {
        return this.mechanism.getTargetRelationKeyFieldNames();
    }

    /**
     * INTERNAL:
     * Return all the target relation key fields associated with the mapping.
     */
    public Vector getTargetRelationKeyFields() {
        return this.mechanism.getTargetRelationKeyFields();
    }

    // True when a custom (user-supplied) delete query was set on the mechanism.
    protected boolean hasCustomDeleteQuery() {
        return this.mechanism.hasCustomDeleteQuery();
    }

    // True when a custom (user-supplied) insert query was set on the mechanism.
    protected boolean hasCustomInsertQuery() {
        return this.mechanism.hasCustomInsertQuery();
    }

    /**
     * INTERNAL:
     * The join table is a dependency if not read-only.
     */
    @Override
    public boolean hasDependency() {
        return this.isPrivateOwned || (!this.isReadOnly);
    }

    /**
     * INTERNAL:
     * Initialize mappings
     */
    @Override
    public void initialize(AbstractSession session) throws DescriptorException {
        if (session.hasBroker()) {
            if (getReferenceClass() == null) {
                throw DescriptorException.referenceClassNotSpecified(this);
            }
            // substitute session that owns the mapping for the session that owns reference descriptor.
            session = session.getBroker().getSessionForClass(getReferenceClass());
        }
        super.initialize(session);
        getDescriptor().addPreDeleteMapping(this);
        if(this.mechanism != null) {
            this.mechanism.initialize(session, this);
        } else {
            throw DescriptorException.noRelationTableMechanism(this);
        }
        if (shouldInitializeSelectionCriteria()) {
            if (shouldForceInitializationOfSelectionCriteria()) {
                initializeSelectionCriteriaAndAddFieldsToQuery(null);
            } else {
                initializeSelectionCriteriaAndAddFieldsToQuery(getSelectionCriteria());
            }
        }
        if (!getSelectionQuery().hasSessionName()) {
            getSelectionQuery().setSessionName(session.getName());
        }
        initializeDeleteAllQuery(session);
        if (getHistoryPolicy() != null) {
            getHistoryPolicy().initialize(session);
        }
        if (getReferenceDescriptor() != null && getReferenceDescriptor().hasTablePerClassPolicy()) {
            // This will do nothing if we have already prepared for this
            // source mapping or if the source mapping does not require
            // any special prepare logic.
getReferenceDescriptor().getTablePerClassPolicy().prepareChildrenSelectionQuery(this, session);
        }
    }

    /**
     * INTERNAL:
     * Verifies listOrderField's table: it must be relation table.
     * Precondition: listOrderField != null.
     */
    @Override
    protected void buildListOrderField() {
        if(this.listOrderField.hasTableName()) {
            if(!getRelationTable().equals(this.listOrderField.getTable())) {
                throw DescriptorException.listOrderFieldTableIsWrong(this.getDescriptor(), this, this.listOrderField.getTable(), getRelationTable());
            }
        } else {
            listOrderField.setTable(getRelationTable());
        }
        this.listOrderField = getDescriptor().buildField(this.listOrderField, getRelationTable());
    }

    /**
     * INTERNAL:
     * Indicates whether getListOrderFieldExpression method should create field expression on table expression.
     */
    @Override
    public boolean shouldUseListOrderFieldTableExpression() {
        return true;
    }

    /**
     * INTERNAL:
     * Initialize changeOrderTargetQuery.
     * If a query was already supplied, only ensure it carries a session name;
     * otherwise build the default update statement against the relation table.
     */
    protected void initializeChangeOrderTargetQuery(AbstractSession session) {
        boolean hasChangeOrderTargetQuery = changeOrderTargetQuery != null;
        if (!hasChangeOrderTargetQuery) {
            changeOrderTargetQuery = new DataModifyQuery();
        }
        // FIX: removed an unconditional "changeOrderTargetQuery = new DataModifyQuery();"
        // that followed the guarded assignment above. It discarded any pre-existing
        // (custom) query and then returned an empty, statement-less query below.
        if (!changeOrderTargetQuery.hasSessionName()) {
            changeOrderTargetQuery.setSessionName(session.getName());
        }
        if (hasChangeOrderTargetQuery) {
            // A query was already supplied - do not overwrite it with the default statement.
            return;
        }

        // Build where clause expression.
        Expression whereClause = null;
        Expression builder = new ExpressionBuilder();

        List sourceRelationKeyFields = getSourceRelationKeyFields();
        int size = sourceRelationKeyFields.size();
        for (int index = 0; index < size; index++) {
            // Explicit cast: the key-field list is declared raw, so get returns Object.
            DatabaseField sourceRelationKeyField = (DatabaseField)sourceRelationKeyFields.get(index);
            Expression expression = builder.getField(sourceRelationKeyField).equal(builder.getParameter(sourceRelationKeyField));
            whereClause = expression.and(whereClause);
        }

        List targetRelationKeyFields = getTargetRelationKeyFields();
        size = targetRelationKeyFields.size();
        for (int index = 0; index < size; index++) {
            DatabaseField targetRelationKeyField = (DatabaseField)targetRelationKeyFields.get(index);
            Expression expression = builder.getField(targetRelationKeyField).equal(builder.getParameter(targetRelationKeyField));
            whereClause = expression.and(whereClause);
        }

        // The update touches only the order column.
        AbstractRecord modifyRow = new DatabaseRecord();
        modifyRow.add(listOrderField, null);

        SQLUpdateStatement statement = new SQLUpdateStatement();
        statement.setTable(listOrderField.getTable());
        statement.setWhereClause(whereClause);
        statement.setModifyRow(modifyRow);
        changeOrderTargetQuery.setSQLStatement(statement);
    }

    /**
     * Initialize delete all query. This query is used to all relevant rows from the
     * relation table.
     */
    protected void initializeDeleteAllQuery(AbstractSession session) {
        if (!getDeleteAllQuery().hasSessionName()) {
            getDeleteAllQuery().setSessionName(session.getName());
        }
        getDeleteAllQuery().setName(getAttributeName());
        if (getDeleteAllQuery().getPartitioningPolicy() == null) {
            getDeleteAllQuery().setPartitioningPolicy(getPartitioningPolicy());
        }
        if (hasCustomDeleteAllQuery()) {
            return;
        }
        Expression expression = null;
        Expression subExpression;
        Expression builder = new ExpressionBuilder();
        SQLDeleteStatement statement = new SQLDeleteStatement();

        // Construct an expression to delete from the relation table.
for (int index = 0; index < getSourceRelationKeyFields().size(); index++) { DatabaseField sourceRelationKey = getSourceRelationKeyFields().elementAt(index); DatabaseField sourceKey = getSourceKeyFields().elementAt(index); subExpression = builder.getField(sourceRelationKey).equal(builder.getParameter(sourceKey)); expression = subExpression.and(expression); } // All the entries are deleted in one shot. statement.setWhereClause(expression); statement.setTable(getRelationTable()); getDeleteAllQuery().setSQLStatement(statement); } /** * INTERNAL: * Initializes listOrderField's table. * Precondition: listOrderField != null. */ @Override protected void initializeListOrderFieldTable(AbstractSession session) { this.mechanism.initializeRelationTable(session, this); } /** * INTERNAL: * Selection criteria is created to read target records from the table. */ protected void initializeSelectionCriteriaAndAddFieldsToQuery(Expression startCriteria) { setSelectionCriteria(this.mechanism.buildSelectionCriteriaAndAddFieldsToQuery(this, startCriteria)); } /** * INTERNAL: * An object was added to the collection during an update, insert it. 
*/
    protected void insertAddedObjectEntry(ObjectLevelModifyQuery query, Object objectAdded, Map extraData) throws DatabaseException, OptimisticLockException {
        //cr 3819 added the line below to fix the translationtable to ensure that it
        // contains the required values
        prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession());
        AbstractRecord databaseRow = this.mechanism.buildRelationTableSourceAndTargetRow(query.getTranslationRow(), containerPolicy.unwrapIteratorResult(objectAdded), query.getSession(), this);
        ContainerPolicy.copyMapDataToRow(getContainerPolicy().getKeyMappingDataForWriteQuery(objectAdded, query.getSession()), databaseRow);
        if(listOrderField != null && extraData != null) {
            databaseRow.put(listOrderField, extraData.get(listOrderField));
        }
        query.getExecutionSession().executeQuery(this.mechanism.getInsertQuery(), databaseRow);
        if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) {
            getHistoryPolicy().mappingLogicalInsert(this.mechanism.getInsertQuery(), databaseRow, query.getSession());
        }
    }

    /**
     * INTERNAL:
     * Insert into relation table. This follows following steps.
     * - Extract primary key and its value from the source object.
     * - Extract target key and its value from the target object.
     * - Construct a insert statement with above fields and values for relation table.
     * - execute the statement.
     * - Repeat above three statements until all the target objects are done.
     */
    public void insertIntoRelationTable(WriteObjectQuery query) throws DatabaseException {
        if (isReadOnly()) {
            return;
        }
        ContainerPolicy cp = getContainerPolicy();
        Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
        if (cp.isEmpty(objects)) {
            return;
        }
        prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession());
        AbstractRecord databaseRow = this.mechanism.buildRelationTableSourceRow(query.getTranslationRow());
        int orderIndex = 0;
        // Extract target field and its value. Construct insert statement and execute it
        for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) {
            Object wrappedObject = cp.nextEntry(iter, query.getSession());
            Object object = cp.unwrapIteratorResult(wrappedObject);
            databaseRow = this.mechanism.addRelationTableTargetRow(object, query.getExecutionSession(), databaseRow, this);
            ContainerPolicy.copyMapDataToRow(cp.getKeyMappingDataForWriteQuery(wrappedObject, query.getSession()), databaseRow);
            if(listOrderField != null) {
                databaseRow.put(listOrderField, orderIndex++);
            }
            query.getExecutionSession().executeQuery(this.mechanism.getInsertQuery(), databaseRow);
            if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) {
                getHistoryPolicy().mappingLogicalInsert(this.mechanism.getInsertQuery(), databaseRow, query.getSession());
            }
        }
    }

    /**
     * INTERNAL:
     * Write the target objects if the cascade policy requires them to be written first.
     * They must be written within a unit of work to ensure that they exist.
     */
    public void insertTargetObjects(WriteObjectQuery query) throws DatabaseException, OptimisticLockException {
        if (!shouldObjectModifyCascadeToParts(query)) {
            return;
        }
        // Only cascade dependents writes in uow.
if (query.shouldCascadeOnlyDependentParts()) {
            return;
        }
        Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession());
        ContainerPolicy cp = getContainerPolicy();
        if (cp.isEmpty(objects)) {
            return;
        }

        // Write each of the target objects
        for (Object objectsIterator = cp.iteratorFor(objects); cp.hasNext(objectsIterator);) {
            Object wrappedObject = cp.next(objectsIterator, query.getSession());
            Object object = cp.unwrapIteratorResult(wrappedObject);
            if (isPrivateOwned()) {
                // no need to set changeset as insert is a straight copy anyway
                InsertObjectQuery insertQuery = new InsertObjectQuery();
                insertQuery.setIsExecutionClone(true);
                insertQuery.setObject(object);
                insertQuery.setCascadePolicy(query.getCascadePolicy());
                query.getSession().executeQuery(insertQuery);
            } else {
                // Look up the existing change set for the clone (if in a unit of work)
                // so the write query can reuse it.
                ObjectChangeSet changeSet = null;
                UnitOfWorkChangeSet uowChangeSet = null;
                if (query.getSession().isUnitOfWork() && (((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet() != null)) {
                    uowChangeSet = (UnitOfWorkChangeSet)((UnitOfWorkImpl)query.getSession()).getUnitOfWorkChangeSet();
                    changeSet = (ObjectChangeSet)uowChangeSet.getObjectChangeSetForClone(object);
                }
                WriteObjectQuery writeQuery = new WriteObjectQuery();
                writeQuery.setIsExecutionClone(true);
                writeQuery.setObject(object);
                writeQuery.setObjectChangeSet(changeSet);
                writeQuery.setCascadePolicy(query.getCascadePolicy());
                query.getSession().executeQuery(writeQuery);
            }
            cp.propogatePostInsert(query, wrappedObject);
        }
    }

    /**
     * INTERNAL:
     * Return whether this mapping was originally defined as a OneToMany.
     */
    public boolean isDefinedAsOneToManyMapping() {
        return isDefinedAsOneToManyMapping;
    }

    /**
     * INTERNAL:
     * Return if this mapping support joining.
*/
    @Override
    public boolean isJoiningSupported() {
        return true;
    }

    @Override
    public boolean isManyToManyMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Ensure the container policy is post initialized
     */
    @Override
    public void postInitialize(AbstractSession session) {
        super.postInitialize(session);
        this.mustDeleteReferenceObjectsOneByOne = true;
    }

    /**
     * INTERNAL:
     * An object was added to the collection during an update, insert it if private.
     */
    @Override
    protected void objectAddedDuringUpdate(ObjectLevelModifyQuery query, Object objectAdded, ObjectChangeSet changeSet, Map extraData) throws DatabaseException, OptimisticLockException {
        // First insert/update object.
        super.objectAddedDuringUpdate(query, objectAdded, changeSet, extraData);

        // In the uow data queries are cached until the end of the commit.
        if (query.shouldCascadeOnlyDependentParts()) {
            // Hey I might actually want to use an inner class here... ok array for now.
            Object[] event = new Object[4];
            event[0] = ObjectAdded;
            event[1] = query;
            event[2] = objectAdded;
            event[3] = extraData;
            query.getSession().getCommitManager().addDataModificationEvent(this, event);
        } else {
            insertAddedObjectEntry(query, objectAdded, extraData);
        }
    }

    /**
     * INTERNAL:
     * An object was removed to the collection during an update, delete it if private.
     */
    @Override
    protected void objectRemovedDuringUpdate(ObjectLevelModifyQuery query, Object objectDeleted, Map extraData) throws DatabaseException, OptimisticLockException {
        Object unwrappedObjectDeleted = getContainerPolicy().unwrapIteratorResult(objectDeleted);
        AbstractRecord databaseRow = this.mechanism.buildRelationTableSourceAndTargetRow(query.getTranslationRow(), unwrappedObjectDeleted, query.getSession(), this);

        // In the uow data queries are cached until the end of the commit.
        if (query.shouldCascadeOnlyDependentParts()) {
            // Hey I might actually want to use an inner class here... ok array for now.
Object[] event = new Object[3];
            event[0] = ObjectRemoved;
            event[1] = this.mechanism.getDeleteQuery();
            event[2] = databaseRow;
            query.getSession().getCommitManager().addDataModificationEvent(this, event);
        } else {
            query.getSession().executeQuery(this.mechanism.getDeleteQuery(), databaseRow);
            if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) {
                getHistoryPolicy().mappingLogicalDelete(this.mechanism.getDeleteQuery(), databaseRow, query.getSession());
            }
        }

        // Delete object after join entry is delete if private.
        super.objectRemovedDuringUpdate(query, objectDeleted, extraData);
    }

    // Re-writes the list-order value for an object whose position in the
    // collection changed during an update.
    @Override
    protected void objectOrderChangedDuringUpdate(WriteObjectQuery query, Object orderChangedObject, int orderIndex) {
        prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getDescriptor(), query.getSession());
        AbstractRecord databaseRow = this.mechanism.buildRelationTableSourceAndTargetRow(query.getTranslationRow(), orderChangedObject, query.getSession(), this);
        databaseRow.put(listOrderField, orderIndex);
        query.getSession().executeQuery(changeOrderTargetQuery, databaseRow);
    }

    /**
     * INTERNAL:
     * Perform the commit event.
     * This is used in the uow to delay data modifications.
     */
    @Override
    public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException {
        // Hey I might actually want to use an inner class here... ok array for now.
if (event[0] == PostInsert) {
            insertIntoRelationTable((WriteObjectQuery)event[1]);
        } else if (event[0] == ObjectRemoved) {
            session.executeQuery((DataModifyQuery)event[1], (AbstractRecord)event[2]);
            if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) {
                getHistoryPolicy().mappingLogicalDelete((DataModifyQuery)event[1], (AbstractRecord)event[2], session);
            }
        } else if (event[0] == ObjectAdded) {
            insertAddedObjectEntry((WriteObjectQuery)event[1], event[2], (Map)event[3]);
        } else {
            throw DescriptorException.invalidDataModificationEventCode(event[0], this);
        }
    }

    /**
     * INTERNAL:
     * Insert into relation table. This follows following steps.
     * - Extract primary key and its value from the source object.
     * - Extract target key and its value from the target object.
     * - Construct a insert statement with above fields and values for relation table.
     * - execute the statement.
     * - Repeat above three statements until all the target objects are done.
     */
    @Override
    public void postInsert(WriteObjectQuery query) throws DatabaseException {
        insertTargetObjects(query);
        // Batch data modification in the uow
        if (query.shouldCascadeOnlyDependentParts()) {
            // Hey I might actually want to use an inner class here... ok array for now.
            Object[] event = new Object[2];
            event[0] = PostInsert;
            event[1] = query;
            query.getSession().getCommitManager().addDataModificationEvent(this, event);
        } else {
            insertIntoRelationTable(query);
        }
    }

    /**
     * INTERNAL:
     * Update the relation table with the entries related to this mapping.
     * Delete entries removed, insert entries added.
     * If private also insert/delete/update target objects.
     */
    @Override
    public void postUpdate(WriteObjectQuery query) throws DatabaseException {
        if (this.isReadOnly) {
            return;
        }

        // If objects are not instantiated that means they are not changed.
        if (!isAttributeValueInstantiatedOrChanged(query.getObject())) {
            return;
        }

        if (query.getObjectChangeSet() != null) {
            // UnitOfWork
            writeChanges(query.getObjectChangeSet(), query);
        } else {
            // OLD COMMIT
            compareObjectsAndWrite(query);
        }
    }

    /**
     * INTERNAL:
     * Delete entries related to this mapping from the relation table.
     */
    @Override
    public void preDelete(DeleteObjectQuery query) throws DatabaseException {
        AbstractSession session = query.getSession();
        Object objectsIterator = null;
        ContainerPolicy containerPolicy = getContainerPolicy();

        if (this.isReadOnly) {
            return;
        }

        Object objects = null;
        boolean cascade = shouldObjectModifyCascadeToParts(query);
        if (containerPolicy.propagatesEventsToCollection() || cascade) {
            // if processed during UnitOfWork commit process the private owned delete will occur during change calculation
            objects = getRealCollectionAttributeValueFromObject(query.getObject(), session);

            //this must be done up here because the select must be done before the entry in the relation table is deleted.
// TODO: Hmm given the below code, the rows are already deleted, so this code is broken. // Assuming it was a cascade remove, it will have been instantiated, so may be ok? objectsIterator = containerPolicy.iteratorFor(objects); } // This has already been done in a unit of work. if (!session.isUnitOfWork()) { earlyPreDelete(query, query.getObject()); } // If privately owned delete the objects, this does not handle removed objects (i.e. verify delete, not req in uow). // Does not try to optimize delete all like 1-m, (rarely used and hard to do). if (containerPolicy.propagatesEventsToCollection() || cascade) { if (objects != null) { //objectsIterator will not be null because cascade check will still return true. while (containerPolicy.hasNext(objectsIterator)) { Object wrappedObject = containerPolicy.nextEntry(objectsIterator, session); Object object = containerPolicy.unwrapIteratorResult(wrappedObject); if (cascade){ // PERF: Avoid query execution if already deleted. if (!session.getCommitManager().isCommitCompletedInPostOrIgnore(object)) { DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); deleteQuery.setIsExecutionClone(true); deleteQuery.setObject(object); deleteQuery.setCascadePolicy(query.getCascadePolicy()); session.executeQuery(deleteQuery); } } containerPolicy.propogatePreDelete(query, wrappedObject); } } } } /** * INTERNAL: * The translation row may require additional fields than the primary key if the mapping in not on the primary key. */ @Override protected void prepareTranslationRow(AbstractRecord translationRow, Object object, ClassDescriptor descriptor, AbstractSession session) { // Make sure that each source key field is in the translation row. 
for (Enumeration sourceFieldsEnum = getSourceKeyFields().elements(); sourceFieldsEnum.hasMoreElements();) {
        DatabaseField sourceKey = (DatabaseField)sourceFieldsEnum.nextElement();
        if (!translationRow.containsKey(sourceKey)) {
            // Extract the missing source key value directly from the source object.
            Object value = descriptor.getObjectBuilder().extractValueFromObjectForField(object, sourceKey, session);
            translationRow.put(sourceKey, value);
        }
    }
}

/**
 * PUBLIC:
 * The default delete query for mapping can be overridden by specifying the new query.
 * This query must delete the row from the M-M join table.
 */
public void setCustomDeleteQuery(DataModifyQuery query) {
    this.mechanism.setCustomDeleteQuery(query);
}

/**
 * PUBLIC:
 * The default insert query for mapping can be overridden by specifying the new query.
 * This query must insert the row into the M-M join table.
 */
public void setCustomInsertQuery(DataModifyQuery query) {
    this.mechanism.setCustomInsertQuery(query);
}

// INTERNAL: delegates to the relation table mechanism.
protected void setDeleteQuery(DataModifyQuery deleteQuery) {
    this.mechanism.setDeleteQuery(deleteQuery);
}

/**
 * PUBLIC:
 * Set the receiver's delete SQL string. This allows the user to override the SQL
 * generated by TOPLink, with their own SQL or procedure call. The arguments are
 * translated from the fields of the source row, through replacing the field names
 * marked by '#' with the values for those fields.
 * This is used to delete a single entry from the M-M join table.
 * Example, 'delete from PROJ_EMP where PROJ_ID = #PROJ_ID AND EMP_ID = #EMP_ID'.
 */
public void setDeleteSQLString(String sqlString) {
    this.mechanism.setDeleteSQLString(sqlString);
}

/**
 * INTERNAL:
 * Set whether this mapping was originally defined as a OneToMany.
 * @param isDefinedAsOneToManyMapping
 */
public void setDefinedAsOneToManyMapping(boolean isDefinedAsOneToManyMapping) {
    this.isDefinedAsOneToManyMapping = isDefinedAsOneToManyMapping;
}

/**
 * PUBLIC:
 * Set the receiver's delete Call. This allows the user to override the SQL
 * generated by TOPLink, with their own SQL or procedure call. The arguments are
 * translated from the fields of the source row.
 * This is used to delete a single entry from the M-M join table.
 * Example, 'new SQLCall("delete from PROJ_EMP where PROJ_ID = #PROJ_ID AND EMP_ID = #EMP_ID")'.
 */
public void setDeleteCall(Call call) {
    this.mechanism.setDeleteCall(call);
}

// INTERNAL: delegates to the relation table mechanism.
protected void setInsertQuery(DataModifyQuery insertQuery) {
    this.mechanism.setInsertQuery(insertQuery);
}

/**
 * PUBLIC:
 * Set the receiver's insert SQL string. This allows the user to override the SQL
 * generated by TOPLink, with their own SQL or procedure call. The arguments are
 * translated from the fields of the source row, through replacing the field names
 * marked by '#' with the values for those fields.
 * This is used to insert an entry into the M-M join table.
 * Example, 'insert into PROJ_EMP (EMP_ID, PROJ_ID) values (#EMP_ID, #PROJ_ID)'.
 */
public void setInsertSQLString(String sqlString) {
    this.mechanism.setInsertSQLString(sqlString);
}

/**
 * PUBLIC:
 * Set the receiver's insert Call. This allows the user to override the SQL
 * generated by TOPLink, with their own SQL or procedure call. The arguments are
 * translated from the fields of the source row.
 * This is used to insert an entry into the M-M join table.
 * Example, 'new SQLCall("insert into PROJ_EMP (EMP_ID, PROJ_ID) values (#EMP_ID, #PROJ_ID)")'.
 */
public void setInsertCall(Call call) {
    this.mechanism.setInsertCall(call);
}

/**
 * PUBLIC:
 * Allows to set RelationTableMechanism to be owned by the mapping.
 * It's not necessary to explicitly set the mechanism:
 * one is created by mapping's constructor.
 * The only reason this method is provided
 * is to allow a uniform approach to RelationTableMechanism
 * in both ManyToManyMapping and OneToOneMapping
 * that uses RelationTableMechanism.
 * ManyToManyMapping must have RelationTableMechanism,
 * never set it to null.
 */
void setRelationTableMechanism(RelationTableMechanism mechanism) {
    this.mechanism = mechanism;
}

/**
 * PUBLIC:
 * Set the relational table.
* This is the join table that stores both the source and target primary keys.
 */
public void setRelationTable(DatabaseTable relationTable) {
    this.mechanism.setRelationTable(relationTable);
}

/**
 * PUBLIC:
 * Enable history tracking on the m-m join table.
 */
public void setHistoryPolicy(HistoryPolicy policy) {
    this.historyPolicy = policy;
    if (policy != null) {
        // Let the policy know which mapping it tracks history for.
        policy.setMapping(this);
    }
}

/**
 * PUBLIC:
 * Set the name of the relational table.
 * This is the join table that stores both the source and target primary keys.
 */
public void setRelationTableName(String tableName) {
    this.mechanism.setRelationTableName(tableName);
}

/**
 * PUBLIC:
 * Set the name of the session to execute the mapping's queries under.
 * This can be used by the session broker to override the default session
 * to be used for the target class.
 */
@Override
public void setSessionName(String name) {
    super.setSessionName(name);
    this.mechanism.setSessionName(name);
}

/**
 * PUBLIC:
 * Set the source key field names associated with the mapping.
 * These must be in-order with the sourceRelationKeyFieldNames.
 */
public void setSourceKeyFieldNames(Vector fieldNames) {
    this.mechanism.setSourceKeyFieldNames(fieldNames);
}

/**
 * INTERNAL:
 * Set the source fields.
 */
public void setSourceKeyFields(Vector sourceKeyFields) {
    this.mechanism.setSourceKeyFields(sourceKeyFields);
}

/**
 * PUBLIC:
 * Set the source key field in the relation table.
 * This is the name of the foreign key in the relation table to the source's primary key field.
 * This method is used if the source primary key is a singleton only.
 */
public void setSourceRelationKeyFieldName(String sourceRelationKeyFieldName) {
    this.mechanism.setSourceRelationKeyFieldName(sourceRelationKeyFieldName);
}

/**
 * PUBLIC:
 * Set the source relation key field names associated with the mapping.
 * These must be in-order with the sourceKeyFieldNames.
 */
public void setSourceRelationKeyFieldNames(Vector fieldNames) {
    this.mechanism.setSourceRelationKeyFieldNames(fieldNames);
}

/**
 * INTERNAL:
 * Set the source fields.
 */
public void setSourceRelationKeyFields(Vector sourceRelationKeyFields) {
    this.mechanism.setSourceRelationKeyFields(sourceRelationKeyFields);
}

/**
 * INTERNAL:
 * Set the target key field names associated with the mapping.
 * These must be in-order with the targetRelationKeyFieldNames.
 */
public void setTargetKeyFieldNames(Vector fieldNames) {
    this.mechanism.setTargetKeyFieldNames(fieldNames);
}

/**
 * INTERNAL:
 * Set the target fields.
 */
public void setTargetKeyFields(Vector targetKeyFields) {
    this.mechanism.setTargetKeyFields(targetKeyFields);
}

/**
 * PUBLIC:
 * Set the target key field in the relation table.
 * This is the name of the foreign key in the relation table to the target's primary key field.
 * This method is used if the target's primary key is a singleton only.
 */
public void setTargetRelationKeyFieldName(String targetRelationKeyFieldName) {
    this.mechanism.setTargetRelationKeyFieldName(targetRelationKeyFieldName);
}

/**
 * INTERNAL:
 * Set the target relation key field names associated with the mapping.
 * These must be in-order with the targetKeyFieldNames.
 */
public void setTargetRelationKeyFieldNames(Vector fieldNames) {
    this.mechanism.setTargetRelationKeyFieldNames(fieldNames);
}

/**
 * INTERNAL:
 * Set the target fields.
 */
public void setTargetRelationKeyFields(Vector targetRelationKeyFields) {
    this.mechanism.setTargetRelationKeyFields(targetRelationKeyFields);
}

/**
 * INTERNAL:
 * Append the temporal selection to the query selection criteria.
*/ @Override protected ReadQuery prepareHistoricalQuery(ReadQuery targetQuery, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) { if (getHistoryPolicy() != null) { /* Clone the shared selection query and its criteria before mutating them. */ if (targetQuery == getSelectionQuery()) { targetQuery = (ObjectLevelReadQuery)targetQuery.clone(); targetQuery.setIsExecutionClone(true); } if (targetQuery.getSelectionCriteria() == getSelectionQuery().getSelectionCriteria()) { targetQuery.setSelectionCriteria((Expression)targetQuery.getSelectionCriteria().clone()); } /* Prefer the session's as-of clause; otherwise keep the query's own, defaulting to NO_CLAUSE. */ if (sourceQuery.getSession().getAsOfClause() != null) { ((ObjectLevelReadQuery)targetQuery).setAsOfClause(sourceQuery.getSession().getAsOfClause()); } else if (((ObjectLevelReadQuery)targetQuery).getAsOfClause() == null) { ((ObjectLevelReadQuery)targetQuery).setAsOfClause(AsOfClause.NO_CLAUSE); } /* AND the history policy's temporal filter into the selection criteria. */ Expression temporalExpression = (this).getHistoryPolicy().additionalHistoryExpression(targetQuery.getSelectionCriteria().getBuilder(), targetQuery.getSelectionCriteria().getBuilder()); targetQuery.setSelectionCriteria(targetQuery.getSelectionCriteria().and(temporalExpression)); } return targetQuery; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/OneToOneMapping.java0000664000000000000000000030544512216173130024101 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php.
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 05/14/2012-2.4 Guy Pelletier * - 376603: Provide for table per tenant support for multitenant applications * 02/11/2013-2.5 Guy Pelletier * - 365931: @JoinColumn(name="FK_DEPT",insertable = false, updatable = true) causes INSERT statement to include this data value that it is associated with ******************************************************************************/ package org.eclipse.persistence.mappings; import java.util.*; import org.eclipse.persistence.annotations.CacheKeyType; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.expressions.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.*; import org.eclipse.persistence.internal.indirection.ProxyIndirectionPolicy; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.internal.queries.MappedKeyMapContainerPolicy; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.sessions.DatabaseRecord; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.internal.descriptors.CascadeLockingPolicy; import org.eclipse.persistence.internal.descriptors.DescriptorIterator; import org.eclipse.persistence.internal.descriptors.ObjectBuilder; import org.eclipse.persistence.internal.expressions.ConstantExpression; import org.eclipse.persistence.internal.expressions.ObjectExpression; import org.eclipse.persistence.internal.expressions.QueryKeyExpression; import org.eclipse.persistence.internal.expressions.SQLSelectStatement; import org.eclipse.persistence.mappings.foundation.MapKeyMapping; import org.eclipse.persistence.mappings.querykeys.OneToOneQueryKey; import org.eclipse.persistence.mappings.querykeys.QueryKey; /** *

Purpose: One to one mappings are used to represent pointer references
 * between two java objects. This mapping is usually represented by a single pointer
 * (stored in an instance variable) between the source and target objects. In the relational
 * database tables, these mappings are normally implemented using foreign keys.
 *
 * @author Sati
 * @since TOPLink/Java 1.0
 */
public class OneToOneMapping extends ObjectReferenceMapping implements RelationalMapping, MapKeyMapping {

    // NOTE(review): generic type parameters on these maps appear stripped by extraction
    // (likely Map<DatabaseField, DatabaseField>, given the uncast get() calls in clone()) —
    // verify against the upstream source.
    /** Maps the source foreign/primary key fields to the target primary/foreign key fields. */
    protected Map sourceToTargetKeyFields;

    /** Maps the target primary/foreign key fields to the source foreign/primary key fields. */
    protected Map targetToSourceKeyFields;

    /** Keeps track of which fields are foreign keys on a per field basis (can have mixed foreign key relationships). */
    /** These are used for non-unit of work modification to check if the value of the 1-1 was changed and a deletion is required. */
    protected boolean shouldVerifyDelete;
    // Cached criteria for privately-owned deletes; rebuilt lazily, not serialized.
    protected transient Expression privateOwnedCriteria;
    public DatabaseTable keyTableForMapKey = null;
    protected static final String setObject = "setObject";

    /** Mechanism holds relationTable and all fields and queries associated with it. */
    protected RelationTableMechanism mechanism;

    /**
     * Define if this mapping is really for a OneToOne relationship.
     * This is a backward compatibility issue, in that before the ManyToOneMapping
     * was created OneToOneMapping was used for both.
     */
    protected boolean isOneToOneRelationship = false;

    /**
     * Defines if this mapping was built using primary key join columns.
     */
    protected boolean isOneToOnePrimaryKeyRelationship = false;

    /**
     * Keep track of which fields are insertable and updatable.
     */
    protected HashSet insertableFields = new HashSet();
    protected HashSet updatableFields = new HashSet();

    /**
     * Mode for writeFromObjectIntoRowInternal method
     */
    protected static enum ShallowMode {
        Insert, UpdateAfterInsert, UpdateBeforeDelete
    }

    /**
     * PUBLIC:
     * Default constructor.
     */
    public OneToOneMapping() {
        this.selectionQuery = new ReadObjectQuery();
        this.sourceToTargetKeyFields = new HashMap(2);
        this.targetToSourceKeyFields = new HashMap(2);
        this.foreignKeyFields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(1);
        this.isForeignKeyRelationship = false;
        this.shouldVerifyDelete = true;
    }

    /**
     * INTERNAL:
     */
    @Override
    public boolean isRelationalMapping() {
        return true;
    }

    /**
     * INTERNAL:
     * Used when initializing queries for mappings that use a Map.
     * Called when the selection query is being initialized to add the fields for the map key to the query.
     */
    public void addAdditionalFieldsToQuery(ReadQuery selectionQuery, Expression baseExpression){
        for (DatabaseField field : getForeignKeyFields()) {
            if (selectionQuery.isObjectLevelReadQuery()){
                ((ObjectLevelReadQuery)selectionQuery).addAdditionalField(baseExpression.getField(field));
            } else if (selectionQuery.isDataReadQuery()){
                ((SQLSelectStatement)((DataReadQuery)selectionQuery).getSQLStatement()).addField(field);
            }
        }
    }

    /**
     * INTERNAL:
     * Used when initializing queries for mappings that use a Map.
     * Called when the insert query is being initialized to ensure the fields for the map key are in the insert query.
     */
    public void addFieldsForMapKey(AbstractRecord joinRow){
        Iterator i = getForeignKeyFields().iterator();
        while (i.hasNext()){
            // Placeholders only; real values are filled in when the row is written.
            joinRow.put((DatabaseField)i.next(), null);
        }
    }

    /**
     * PUBLIC:
     * Define the foreign key relationship in the 1-1 mapping.
     * This method is used for composite foreign key relationships,
     * that is the source object's table has multiple foreign key fields to
     * the target object's primary key fields.
* Both the source foreign key field and the target foreign key field must
     * be specified.
     * When a foreign key is specified TopLink will automatically populate the
     * value for that field from the target object when the object is written to
     * the database. If the foreign key is also mapped through a direct-to-field
     * then the direct-to-field must be set read-only.
     */
    @Override
    public void addForeignKeyField(DatabaseField sourceForeignKeyField, DatabaseField targetPrimaryKeyField) {
        setIsForeignKeyRelationship(true);
        getForeignKeyFields().addElement(sourceForeignKeyField);
        // Maintain both directions of the key-field mapping.
        getSourceToTargetKeyFields().put(sourceForeignKeyField, targetPrimaryKeyField);
        getTargetToSourceKeyFields().put(targetPrimaryKeyField, sourceForeignKeyField);
    }

    /**
     * PUBLIC:
     * Define the foreign key relationship in the 1-1 mapping.
     * This method is used for composite foreign key relationships,
     * that is the source object's table has multiple foreign key fields to
     * the target object's primary key fields.
     * Both the source foreign key field name and the target foreign key field
     * name must be specified.
     * When a foreign key is specified TopLink will automatically populate the
     * value for that field from the target object when the object is written to
     * the database. If the foreign key is also mapped through a direct-to-field
     * then the direct-to-field must be set read-only.
     */
    public void addForeignKeyFieldName(String sourceForeignKeyFieldName, String targetPrimaryKeyFieldName) {
        addForeignKeyField(new DatabaseField(sourceForeignKeyFieldName), new DatabaseField(targetPrimaryKeyFieldName));
    }

    /**
     * PUBLIC:
     * Define the target foreign key relationship in the 1-1 mapping.
     * This method is used for composite target foreign key relationships,
     * that is the target object's table has multiple foreign key fields to
     * the source object's primary key fields.
     * Both the target foreign key field and the source primary key field must
     * be specified.
     * The distinction between a foreign key and target foreign key is that the
     * 1-1 mapping will not populate the target foreign key value when written
     * (because it is in the target table). Normally 1-1's are through foreign
     * keys but in bi-directional 1-1's the back reference will be a target
     * foreign key. In obscure composite legacy data models a 1-1 may consist of
     * a foreign key part and a target foreign key part, in this case both
     * methods will be called with the correct parts.
     */
    @Override
    public void addTargetForeignKeyField(DatabaseField targetForeignKeyField, DatabaseField sourcePrimaryKeyField) {
        getSourceToTargetKeyFields().put(sourcePrimaryKeyField, targetForeignKeyField);
        getTargetToSourceKeyFields().put(targetForeignKeyField, sourcePrimaryKeyField);
    }

    /**
     * PUBLIC:
     * Define the target foreign key relationship in the 1-1 mapping.
     * This method is used for composite target foreign key relationships,
     * that is the target object's table has multiple foreign key fields to
     * the source object's primary key fields.
     * Both the target foreign key field name and the source primary key field
     * name must be specified.
     * The distinction between a foreign key and target foreign key is that the
     * 1-1 mapping will not populate the target foreign key value when written
     * (because it is in the target table). Normally 1-1's are through foreign
     * keys but in bi-directional 1-1's the back reference will be a target
     * foreign key. In obscure composite legacy data models a 1-1 may consist of
     * a foreign key part and a target foreign key part, in this case both
     * methods will be called with the correct parts.
     */
    public void addTargetForeignKeyFieldName(String targetForeignKeyFieldName, String sourcePrimaryKeyFieldName) {
        addTargetForeignKeyField(new DatabaseField(targetForeignKeyFieldName), new DatabaseField(sourcePrimaryKeyFieldName));
    }

    /**
     * INTERNAL:
     * For mappings used as MapKeys in MappedKeyContainerPolicy. Add the target of this mapping to the deleted
     * objects list if necessary.
     *
     * This method is used for removal of private owned relationships.
     */
    public void addKeyToDeletedObjectsList(Object object, Map deletedObjects){
        deletedObjects.put(object, object);
    }

    /**
     * Build a clone of the given element in a unitOfWork.
     */
    public Object buildElementClone(Object attributeValue, Object parent, CacheKey cacheKey, Integer refreshCascade, AbstractSession cloningSession, boolean isExisting, boolean isFromSharedCache){
        return buildCloneForPartObject(attributeValue, null, cacheKey, parent, cloningSession, refreshCascade, isExisting, isFromSharedCache);
    }

    /**
     * INTERNAL:
     * Used to allow object level comparisons.
     * Builds the SQL join/comparison expression equating this mapping's key
     * fields with the primary key extracted from the given value object
     * (or null comparisons when value is null).
     */
    public Expression buildObjectJoinExpression(Expression expression, Object value, AbstractSession session) {
        Expression base = ((ObjectExpression)expression).getBaseExpression();
        Expression foreignKeyJoin = null;
        if(this.mechanism == null) {
            // Allow for equal null.
            if (value == null) {
                if (!isForeignKeyRelationship()) {
                    // ELBug#331352
                    // Need to do a join and compare target foreign key to null.
                    for (DatabaseField field : getSourceToTargetKeyFields().values()) {
                        Expression join = null;
                        join = expression.getField(field).equal(null);
                        if (foreignKeyJoin == null) {
                            foreignKeyJoin = join;
                        } else {
                            foreignKeyJoin = foreignKeyJoin.and(join);
                        }
                    }
                } else {
                    for (DatabaseField field : getSourceToTargetKeyFields().keySet()) {
                        Expression join = null;
                        join = base.getField(field).equal(null);
                        if (foreignKeyJoin == null) {
                            foreignKeyJoin = join;
                        } else {
                            foreignKeyJoin = foreignKeyJoin.and(join);
                        }
                    }
                }
            } else {
                if (!getReferenceDescriptor().getJavaClass().isInstance(value)) {
                    // Bug 3894351 - ensure any proxys are triggered so we can do a proper class comparison
                    value = ProxyIndirectionPolicy.getValueFromProxy(value);
                    if (!getReferenceDescriptor().getJavaClass().isInstance(value)) {
                        throw QueryException.incorrectClassForObjectComparison(base, value, this);
                    }
                }
                // Compare each source key field against the corresponding primary key component.
                Iterator keyIterator = Arrays.asList(((CacheId)extractKeyFromReferenceObject(value, session)).getPrimaryKey()).iterator();
                for (DatabaseField field : getSourceToTargetKeyFields().keySet()) {
                    Expression join = null;
                    join = base.getField(field).equal(keyIterator.next());
                    if (foreignKeyJoin == null) {
                        foreignKeyJoin = join;
                    } else {
                        foreignKeyJoin = foreignKeyJoin.and(join);
                    }
                }
            }
        } else {
            // Relation-table variant: compare through the relation table mechanism's key fields.
            int size = this.mechanism.sourceKeyFields.size();
            Object key = null;
            if (value != null) {
                if (!getReferenceDescriptor().getJavaClass().isInstance(value)) {
                    // Bug 3894351 - ensure any proxys are triggered so we can do a proper class comparison
                    value = ProxyIndirectionPolicy.getValueFromProxy(value);
                    if (!getReferenceDescriptor().getJavaClass().isInstance(value)) {
                        throw QueryException.incorrectClassForObjectComparison(base, value, this);
                    }
                }
                key = extractKeyFromReferenceObject(value, session);
                boolean allNulls = true;
                for (int i=0; i < size; i++) {
                    if (((CacheId)key).getPrimaryKey()[i] != null) {
                        allNulls = false;
                        break;
                    }
                }
                // An all-null key is treated the same as a null value.
                if (allNulls) {
                    value = null;
                }
            }
            if (value != null) {
                for(int i=0; i < size; i++) {
                    DatabaseField field = this.mechanism.sourceKeyFields.get(i);
                    Expression join = null;
                    join = base.getField(field).equal(((CacheId)key).getPrimaryKey()[i]);
                    if (foreignKeyJoin == null) {
                        foreignKeyJoin = join;
                    } else {
                        foreignKeyJoin = foreignKeyJoin.and(join);
                    }
                }
            } else {
                // Null comparison through a relation table: NOT EXISTS subquery on the join table.
                ReportQuery subQuery = new ReportQuery(this.descriptor.getJavaClass(), new ExpressionBuilder());
                Expression relationTableExp = subQuery.getExpressionBuilder().getTable(this.mechanism.relationTable);
                Expression subSelectExp = null;
                for(int i=0; i < size; i++) {
                    subSelectExp = relationTableExp.getField(this.mechanism.sourceRelationKeyFields.get(i)).equal(base.getField(this.mechanism.sourceKeyFields.get(i))).and(subSelectExp);
                }
                subQuery.setSelectionCriteria(subSelectExp);
                subQuery.dontRetrievePrimaryKeys();
                subQuery.addAttribute("", subQuery.getExpressionBuilder().getField(this.mechanism.sourceKeyFields.get(0)));
                foreignKeyJoin = base.notExists(subQuery);
            }
        }
        return foreignKeyJoin;
    }

    /**
     * INTERNAL:
     * Used to allow object level comparisons.
*/
    public Expression buildObjectJoinExpression(Expression expression, Expression argument, AbstractSession session) {
        Expression base = ((org.eclipse.persistence.internal.expressions.ObjectExpression)expression).getBaseExpression();
        Expression foreignKeyJoin = null;
        if(this.mechanism == null) {
            if (expression==argument){
                // Self-comparison: equate each source key field with itself.
                for (Iterator sourceFieldsEnum = getSourceToTargetKeyFields().keySet().iterator(); sourceFieldsEnum.hasNext();) {
                    DatabaseField field = (DatabaseField)sourceFieldsEnum.next();
                    Expression join = base.getField(field);
                    join = join.equal(join);
                    if (foreignKeyJoin == null) {
                        foreignKeyJoin = join;
                    } else {
                        foreignKeyJoin = foreignKeyJoin.and(join);
                    }
                }
            }else{
                // Pair up source and target key fields (the two iterators stay in step).
                Iterator targetFieldsEnum = getSourceToTargetKeyFields().values().iterator();
                for (Iterator sourceFieldsEnum = getSourceToTargetKeyFields().keySet().iterator(); sourceFieldsEnum.hasNext();) {
                    DatabaseField sourceField = (DatabaseField)sourceFieldsEnum.next();
                    DatabaseField targetField = (DatabaseField)targetFieldsEnum.next();
                    Expression join = null;
                    join = base.getField(sourceField).equal(argument.getField(targetField));
                    if (foreignKeyJoin == null) {
                        foreignKeyJoin = join;
                    } else {
                        foreignKeyJoin = foreignKeyJoin.and(join);
                    }
                }
            }
        } else {
            if (expression==argument){
                // Self-comparison with a relation table: trivially true constant comparison.
                foreignKeyJoin = (new ConstantExpression(0, base)).equal(new ConstantExpression(0, base));
            }else{
                // Join source -> relation table -> target through the mechanism's key fields.
                int size = this.mechanism.sourceKeyFields.size();
                Expression relTable = base.getTable(this.mechanism.getRelationTable());
                for(int i=0; i < size; i++) {
                    Expression source = base.getField(this.mechanism.sourceKeyFields.get(i));
                    Expression sourceRel = relTable.getField(this.mechanism.sourceRelationKeyFields.get(i));
                    Expression targetRel = relTable.getField(this.mechanism.targetRelationKeyFields.get(i));
                    Expression target = argument.getField(this.mechanism.targetKeyFields.get(i));
                    foreignKeyJoin = source.equal(sourceRel).and(targetRel.equal(target)).and(foreignKeyJoin);
                }
            }
        }
        return foreignKeyJoin;
    }

    /**
     * INTERNAL:
     * Certain key mappings favor different types of selection query. Return the appropriate
     * type of selectionQuery.
     * @return a DataReadQuery configured with the given container policy
     */
    public ReadQuery buildSelectionQueryForDirectCollectionKeyMapping(ContainerPolicy containerPolicy){
        DataReadQuery query = new DataReadQuery();
        query.setSQLStatement(new SQLSelectStatement());
        query.setContainerPolicy(containerPolicy);
        return query;
    }

    /**
     * INTERNAL:
     * This methods clones all the fields and ensures that each collection refers to
     * the same clones.
     */
    @Override
    public Object clone() {
        OneToOneMapping clone = (OneToOneMapping)super.clone();
        if(this.mechanism == null) {
            clone.setForeignKeyFields(org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(getForeignKeyFields().size()));
            clone.setSourceToTargetKeyFields(new HashMap(getSourceToTargetKeyFields().size()));
            clone.setTargetToSourceKeyFields(new HashMap(getTargetToSourceKeyFields().size()));
            // setOfFields maps each original field to its single clone so both key maps
            // and the foreign key list share identical clone instances.
            Hashtable setOfFields = new Hashtable(getTargetToSourceKeyFields().size());
            //clone foreign keys and save the clones in a set
            for (Enumeration enumtr = getForeignKeyFields().elements(); enumtr.hasMoreElements();) {
                DatabaseField field = (DatabaseField)enumtr.nextElement();
                DatabaseField fieldClone = field.clone();
                setOfFields.put(field, fieldClone);
                clone.getForeignKeyFields().addElement(fieldClone);
            }
            //get clones from set for source hashtable. If they do not exist, create a new one.
            for (Iterator sourceEnum = getSourceToTargetKeyFields().keySet().iterator(); sourceEnum.hasNext();) {
                DatabaseField sourceField = (DatabaseField)sourceEnum.next();
                DatabaseField targetField = getSourceToTargetKeyFields().get(sourceField);
                DatabaseField targetClone;
                DatabaseField sourceClone;
                targetClone = (DatabaseField)setOfFields.get(targetField);
                if (targetClone == null) {
                    targetClone = targetField.clone();
                    setOfFields.put(targetField, targetClone);
                }
                sourceClone = (DatabaseField)setOfFields.get(sourceField);
                if (sourceClone == null) {
                    sourceClone = sourceField.clone();
                    setOfFields.put(sourceField, sourceClone);
                }
                clone.getSourceToTargetKeyFields().put(sourceClone, targetClone);
            }
            //get clones from set for target hashtable. If they do not exist, create a new one.
            for (Iterator targetEnum = getTargetToSourceKeyFields().keySet().iterator(); targetEnum.hasNext();) {
                DatabaseField targetField = (DatabaseField)targetEnum.next();
                DatabaseField sourceField = getTargetToSourceKeyFields().get(targetField);
                DatabaseField targetClone;
                DatabaseField sourceClone;
                targetClone = (DatabaseField)setOfFields.get(targetField);
                if (targetClone == null) {
                    targetClone = targetField.clone();
                    setOfFields.put(targetField, targetClone);
                }
                sourceClone = (DatabaseField)setOfFields.get(sourceField);
                if (sourceClone == null) {
                    sourceClone = sourceField.clone();
                    setOfFields.put(sourceField, sourceClone);
                }
                clone.getTargetToSourceKeyFields().put(targetClone, sourceClone);
            }
        } else {
            clone.mechanism = (RelationTableMechanism)this.mechanism.clone();
        }
        return clone;
    }

    @Override
    public void collectQueryParameters(Set cacheFields){
        for (DatabaseField field : sourceToTargetKeyFields.keySet()) {
            cacheFields.add(field);
        }
    }

    /**
     * INTERNAL
     * Called when a DatabaseMapping is used to map the key in a collection. Returns the key.
*/ public Object createMapComponentFromRow(AbstractRecord dbRow, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){ return session.executeQuery(getSelectionQuery(), dbRow); } /** * INTERNAL: * Creates the Array of simple types used to recreate this map. */ public Object createSerializableMapKeyInfo(Object key, AbstractSession session){ return referenceDescriptor.getObjectBuilder().extractPrimaryKeyFromObject(key, session); } /** * INTERNAL: * Create an instance of the Key object from the key information extracted from the map. * This may return the value directly in case of a simple key or will be used as the FK to load a related entity. */ public List createMapComponentsFromSerializableKeyInfo(Object[] keyInfo, AbstractSession session){ List orderedResult = new ArrayList(keyInfo.length); Map fromCache = session.getIdentityMapAccessorInstance().getAllFromIdentityMapWithEntityPK(keyInfo, referenceDescriptor); DatabaseRecord translationRow = new DatabaseRecord(); List foreignKeyValues = new ArrayList(keyInfo.length - fromCache.size()); CacheKeyType cacheKeyType = referenceDescriptor.getCachePolicy().getCacheKeyType(); for (int index = 0; index < keyInfo.length; ++index){ Object pk = keyInfo[index]; if (!fromCache.containsKey(pk)){ if (cacheKeyType == CacheKeyType.CACHE_ID){ foreignKeyValues.add(Arrays.asList(((CacheId)pk).getPrimaryKey())); }else{ foreignKeyValues.add(pk); } } } if (!foreignKeyValues.isEmpty()){ translationRow.put(ForeignReferenceMapping.QUERY_BATCH_PARAMETER, foreignKeyValues); ReadAllQuery query = new ReadAllQuery(referenceDescriptor.getJavaClass()); query.setIsExecutionClone(true); query.setTranslationRow(translationRow); query.setSession(session); query.setSelectionCriteria(referenceDescriptor.buildBatchCriteriaByPK(query.getExpressionBuilder(), query)); Collection temp = (Collection) session.executeQuery(query); for (Object element: temp){ Object pk = 
referenceDescriptor.getObjectBuilder().extractPrimaryKeyFromObject(element, session); fromCache.put(pk, element); } } for(Object key : keyInfo){ orderedResult.add(fromCache.get(key)); } return orderedResult; } /** * INTERNAL: * Create an instance of the Key object from the key information extracted from the map. * This key object may be a shallow stub of the actual object if the key is an Entity type. */ public Object createStubbedMapComponentFromSerializableKeyInfo(Object keyInfo, AbstractSession session) { ObjectBuilder builder = this.referenceDescriptor.getObjectBuilder(); ObjectBuildingQuery clonedQuery = (ObjectBuildingQuery) getSelectionQuery().clone(); clonedQuery.setSession(session); Object newObject = referenceDescriptor.getInstantiationPolicy().buildNewInstance(); builder.buildPrimaryKeyAttributesIntoObject(newObject, builder.buildRowFromPrimaryKeyValues(keyInfo, session), clonedQuery, session); return newObject; } /** * INTERNAL * Called when a DatabaseMapping is used to map the key in a collection. Returns the key. */ public Object createMapComponentFromJoinedRow(AbstractRecord dbRow, JoinedAttributeManager joinManager, ObjectBuildingQuery query, CacheKey parentCacheKey, AbstractSession session, boolean isTargetProtected){ return valueFromRowInternalWithJoin(dbRow, joinManager, query, parentCacheKey, session, isTargetProtected); } /** * INTERNAL: * Create a query key that links to the map key * @return */ public QueryKey createQueryKeyForMapKey(){ OneToOneQueryKey key = new OneToOneQueryKey(); key.setDescriptor(getReferenceDescriptor()); key.setReferenceClass(getReferenceClass()); key.setJoinCriteria(getAdditionalSelectionCriteriaForMapKey()); return key; } /** * INTERNAL: * For mappings used as MapKeys in MappedKeyContainerPolicy, Delete the passed object if necessary. 
* * This method is used for removal of private owned relationships * * @param objectDeleted * @param session */ public void deleteMapKey(Object objectDeleted, AbstractSession session){ session.deleteObject(objectDeleted); } /** * INTERNAL: * Adds locking clause to the target query to extend pessimistic lock scope. */ @Override protected void extendPessimisticLockScopeInTargetQuery(ObjectLevelReadQuery targetQuery, ObjectBuildingQuery sourceQuery) { if(this.mechanism == null) { super.extendPessimisticLockScopeInTargetQuery(targetQuery, sourceQuery); } else { this.mechanism.setRelationTableLockingClause(targetQuery, sourceQuery); } } /** * INTERNAL: * Called only if both * shouldExtendPessimisticLockScope and shouldExtendPessimisticLockScopeInSourceQuery are true. * Adds fields to be locked to the where clause of the source query. * Note that the sourceQuery must be ObjectLevelReadQuery so that it has ExpressionBuilder. * * This method must be implemented in subclasses that allow * setting shouldExtendPessimisticLockScopeInSourceQuery to true. */ @Override public void extendPessimisticLockScopeInSourceQuery(ObjectLevelReadQuery sourceQuery) { Expression exp = sourceQuery.getSelectionCriteria(); if(this.mechanism == null) { ExpressionBuilder builder = sourceQuery.getExpressionBuilder(); Iterator> it = this.getSourceToTargetKeyFields().entrySet().iterator(); Map.Entry entry = it.next(); exp = builder.getField(entry.getKey()).equal(builder.get(this.getAttributeName()).getField(entry.getValue())).and(exp); } else { exp = this.mechanism.joinRelationTableField(exp, sourceQuery.getExpressionBuilder()); } sourceQuery.setSelectionCriteria(exp); } /** * INTERNAL: * Extract the foreign key value from the source row. 
*/ @Override protected Object extractBatchKeyFromRow(AbstractRecord row, AbstractSession session) { if (this.mechanism != null) { return this.mechanism.extractBatchKeyFromRow(row, session); } Object[] key; ConversionManager conversionManager = session.getDatasourcePlatform().getConversionManager(); key = new Object[this.sourceToTargetKeyFields.size()]; int index = 0; for (DatabaseField field : this.sourceToTargetKeyFields.keySet()) { Object value = row.get(field); if (value == null) { return null; } // Must ensure the classification gets a cache hit. try { value = conversionManager.convertObject(value, field.getType()); } catch (ConversionException exception) { throw ConversionException.couldNotBeConverted(this, this.descriptor, exception); } key[index] = value; index++; } return new CacheId(key); } /** * INTERNAL: * Extract the fields for the Map key from the object to use in a query */ public Map extractIdentityFieldsForQuery(Object object, AbstractSession session){ Map keyFields = new HashMap(); for (int index = 0; index < getForeignKeyFields().size(); index++) { DatabaseField targetRelationField = getForeignKeyFields().elementAt(index); DatabaseField targetKey = getSourceToTargetKeyFields().get(targetRelationField); Object value = getReferenceDescriptor().getObjectBuilder().extractValueFromObjectForField(object, targetKey, session); keyFields.put(targetRelationField, value); } return keyFields; } /** * INTERNAL: * Extract the key value from the reference object. 
 */
protected Object extractKeyFromReferenceObject(Object object, AbstractSession session) {
    ObjectBuilder objectBuilder = getReferenceDescriptor().getObjectBuilder();
    Object[] key;
    if (this.mechanism == null) {
        key = new Object[getSourceToTargetKeyFields().size()];
        int index = 0;
        for (DatabaseField field : getSourceToTargetKeyFields().values()) {
            if (object == null) {
                key[index] = null;
            } else {
                key[index] = objectBuilder.extractValueFromObjectForField(object, field, session);
            }
            index++;
        }
    } else {
        // Relation-table variant: key components come from the mechanism's target key fields.
        int size = this.mechanism.targetKeyFields.size();
        key = new Object[size];
        for (int i = 0; i < size; i++) {
            if (object == null) {
                key[i] = null;
            } else {
                DatabaseField field = this.mechanism.targetKeyFields.get(i);
                key[i] = objectBuilder.extractValueFromObjectForField(object, field, session);
            }
        }
    }
    return new CacheId(key);
}

/**
 * INTERNAL:
 * Return the primary key for the reference object (i.e. the object
 * object referenced by domainObject and specified by mapping).
 * This key will be used by a RemoteValueHolder.
 */
@Override
public Object extractPrimaryKeysForReferenceObjectFromRow(AbstractRecord row) {
    List primaryKeyFields = getReferenceDescriptor().getPrimaryKeyFields();
    Object[] result = new Object[primaryKeyFields.size()];
    for (int index = 0; index < primaryKeyFields.size(); index++) {
        DatabaseField targetKeyField = (DatabaseField)primaryKeyFields.get(index);
        DatabaseField sourceKeyField = getTargetToSourceKeyFields().get(targetKeyField);
        if (sourceKeyField == null) {
            // A target PK field not mapped by this mapping: cannot form the key from this row.
            return null;
        }
        result[index] = row.get(sourceKeyField);
        if (getReferenceDescriptor().getCachePolicy().getCacheKeyType() == CacheKeyType.ID_VALUE) {
            // Single-value cache keys are returned directly rather than wrapped in CacheId.
            return result[index];
        }
    }
    return new CacheId(result);
}

/**
 * INTERNAL:
 * Allow the mapping the do any further batch preparation.
 */
@Override
protected void postPrepareNestedBatchQuery(ReadQuery batchQuery, ObjectLevelReadQuery query) {
    super.postPrepareNestedBatchQuery(batchQuery, query);
    // Force a distinct to filter out m-1 duplicates.
    // Only set if really a m-1, not a 1-1
    if (!isOneToOneRelationship()) {
        // Avoid DISTINCT when LOB columns are selected and the platform cannot combine the two.
        if (!((ObjectLevelReadQuery)batchQuery).isDistinctComputed() && (batchQuery.getSession().getPlatform().isLobCompatibleWithDistinct() || !Helper.hasLob(batchQuery.getDescriptor().getSelectionFields((ObjectLevelReadQuery)batchQuery)))) {
            ((ObjectLevelReadQuery)batchQuery).useDistinct();
        }
    }
    if (this.mechanism != null) {
        this.mechanism.postPrepareNestedBatchQuery(batchQuery, query);
    }
}

/**
 * INTERNAL:
 * Return the selection criteria used to IN batch fetching.
 */
@Override
protected Expression buildBatchCriteria(ExpressionBuilder builder, ObjectLevelReadQuery query) {
    if (this.mechanism == null) {
        int size = this.sourceToTargetKeyFields.size();
        if (size > 1) {
            // Support composite keys using nested IN.
            List fields = new ArrayList(size);
            for (DatabaseField targetForeignKeyField : this.sourceToTargetKeyFields.values()) {
                fields.add(builder.getField(targetForeignKeyField));
            }
            return query.getSession().getPlatform().buildBatchCriteriaForComplexId(builder, fields);
        } else {
            return query.getSession().getPlatform().buildBatchCriteria(builder, builder.getField(this.sourceToTargetKeyFields.values().iterator().next()));
        }
    } else {
        return this.mechanism.buildBatchCriteria(builder, query);
    }
}

/**
 * INTERNAL:
 * Prepare and execute the batch query and store the
 * results for each source object in a map keyed by the
 * mappings source keys of the source objects.
 */
@Override
protected void executeBatchQuery(DatabaseQuery query, CacheKey parentCacheKey, Map referenceObjectsByKey, AbstractSession session, AbstractRecord translationRow) {
    // Execute query and index resulting objects by key.
    List results;
    ObjectBuilder builder = query.getDescriptor().getObjectBuilder();
    if (this.mechanism == null) {
        results = (List)session.executeQuery(query, translationRow);
        for (Object eachReferenceObject : results) {
            Object eachReferenceKey = extractKeyFromReferenceObject(eachReferenceObject, session);
            referenceObjectsByKey.put(eachReferenceKey, builder.wrapObject(eachReferenceObject, session));
        }
    } else {
        // Relation-table variant returns rows alongside objects; the key is in the row.
        ComplexQueryResult complexResult = (ComplexQueryResult)session.executeQuery(query, translationRow);
        results = (List)complexResult.getResult();
        List rows = (List)complexResult.getData();
        int size = results.size();
        for (int index = 0; index < size; index++) {
            AbstractRecord row = rows.get(index);
            Object key = this.mechanism.extractKeyFromTargetRow(row, session);
            referenceObjectsByKey.put(key, builder.wrapObject(results.get(index), session));
        }
    }
}

/**
 * INTERNAL:
 * Check if the target object is in the cache if possible based on the target key value.
 * Return null if the target key is not the primary key, or if the query is refreshing.
 */
@Override
protected Object checkCacheForBatchKey(AbstractRecord sourceRow, Object foreignKey, Map batchObjects, ReadQuery batchQuery, ObjectLevelReadQuery originalQuery, AbstractSession session) {
    if (((ReadAllQuery)batchQuery).shouldRefreshIdentityMapResult() || (!((ReadAllQuery)batchQuery).shouldMaintainCache())) {
        return null;
    }
    // Check the cache using the source row and selection query.
    Object cachedObject = this.selectionQuery.checkEarlyReturn(session, sourceRow);
    if ((cachedObject != null) && (batchObjects != null)) {
        batchObjects.put(foreignKey, cachedObject);
    }
    return cachedObject;
}

/**
 * INTERNAL:
 * Return the selection criteria necessary to select the target object when this mapping
 * is a map key.
* @return */ public Expression getAdditionalSelectionCriteriaForMapKey(){ return buildSelectionCriteria(false, false); } /** * INTERNAL: * Return any tables that will be required when this mapping is used as part of a join query */ public List getAdditionalTablesForJoinQuery(){ List tables = new ArrayList(getReferenceDescriptor().getTables().size() + 1); tables.addAll(getReferenceDescriptor().getTables()); if (keyTableForMapKey != null){ tables.add(keyTableForMapKey); } return tables; } /** * INTERNAL: * Should be overridden by subclass that allows setting * extendPessimisticLockScope to DEDICATED_QUERY. */ @Override protected ReadQuery getExtendPessimisticLockScopeDedicatedQuery(AbstractSession session, short lockMode) { if(this.mechanism != null) { return this.mechanism.getLockRelationTableQueryClone(session, lockMode); } else { return super.getExtendPessimisticLockScopeDedicatedQuery(session, lockMode); } } /** * INTERNAL: * Return the classification for the field contained in the mapping. * This is used to convert the row value to a consistent java value. */ @Override public Class getFieldClassification(DatabaseField fieldToClassify) throws DescriptorException { DatabaseField fieldInTarget = getSourceToTargetKeyFields().get(fieldToClassify); if (fieldInTarget == null) { return null;// Can be registered as multiple table secondary field mapping } DatabaseMapping mapping = getReferenceDescriptor().getObjectBuilder().getMappingForField(fieldInTarget); if (mapping == null) { return null;// Means that the mapping is read-only } return mapping.getFieldClassification(fieldInTarget); } /** * PUBLIC: * Return the foreign key field names associated with the mapping. * These are only the source fields that are writable. 
*/ public Vector getForeignKeyFieldNames() { Vector fieldNames = new Vector(getForeignKeyFields().size()); for (Enumeration fieldsEnum = getForeignKeyFields().elements(); fieldsEnum.hasMoreElements();) { fieldNames.addElement(((DatabaseField)fieldsEnum.nextElement()).getQualifiedName()); } return fieldNames; } /** * INTERNAL: * Return source key fields for translation by an AggregateObjectMapping */ @Override public Collection getFieldsForTranslationInAggregate() { return getSourceToTargetKeyFields().keySet(); } /** * Return the appropriate map that maps the "foreign keys" * to the "primary keys". */ protected Map getForeignKeysToPrimaryKeys() { if (this.isForeignKeyRelationship()) { return this.getSourceToTargetKeyFields(); } else { return this.getTargetToSourceKeyFields(); } } /** * INTERNAL: * Return a Map of any foreign keys defined within the the MapKey * @return */ public Map getForeignKeyFieldsForMapKey(){ return getSourceToTargetKeyFields(); } /** * INTERNAL: * Return the fields that make up the identity of the mapped object. For mappings with * a primary key, it will be the set of fields in the primary key. 
For mappings without * a primary key it will likely be all the fields * @return */ public List getIdentityFieldsForMapKey(){ return getForeignKeyFields(); } /** * INTERNAL: * Return the query that is used when this mapping is part of a joined relationship * * This method is used when this mapping is used to map the key in a Map */ public ObjectLevelReadQuery getNestedJoinQuery(JoinedAttributeManager joinManager, ObjectLevelReadQuery query, AbstractSession session){ return prepareNestedJoins(joinManager, query, session); } /** * INTERNAL: * Get all the fields for the map key */ public List getAllFieldsForMapKey(){ List fields = new ArrayList(getReferenceDescriptor().getAllSelectionFields().size() + getForeignKeyFields().size()); fields.addAll(getReferenceDescriptor().getAllSelectionFields()); fields.addAll(getForeignKeyFields()); return fields; } /** * INTERNAL: * Return a vector of the foreign key fields in the same order * as the corresponding primary key fields are in their descriptor. */ public Vector getOrderedForeignKeyFields() { List primaryKeyFields = getPrimaryKeyDescriptor().getPrimaryKeyFields(); Vector result = new Vector(primaryKeyFields.size()); for (int index = 0; index < primaryKeyFields.size(); index++) { DatabaseField pkField = (DatabaseField)primaryKeyFields.get(index); boolean found = false; for (Iterator fkStream = this.getForeignKeysToPrimaryKeys().keySet().iterator(); fkStream.hasNext();) { DatabaseField fkField = (DatabaseField)fkStream.next(); if (this.getForeignKeysToPrimaryKeys().get(fkField).equals(pkField)) { found = true; result.addElement(fkField); break; } } if (!found) { throw DescriptorException.missingForeignKeyTranslation(this, pkField); } } return result; } /** * Return the descriptor for whichever side of the * relation has the "primary key". 
*/ protected ClassDescriptor getPrimaryKeyDescriptor() { if (this.isForeignKeyRelationship()) { return this.getReferenceDescriptor(); } else { return this.getDescriptor(); } } /** * INTERNAL: * The private owned criteria is only used outside of the unit of work to compare the previous value of the reference. */ public Expression getPrivateOwnedCriteria() { if (privateOwnedCriteria == null) { initializePrivateOwnedCriteria(); } return privateOwnedCriteria; } /** * INTERNAL: * Return a collection of the source to target field value associations. */ public Vector getSourceToTargetKeyFieldAssociations() { Vector associations = new Vector(getSourceToTargetKeyFields().size()); Iterator sourceFieldEnum = getSourceToTargetKeyFields().keySet().iterator(); Iterator targetFieldEnum = getSourceToTargetKeyFields().values().iterator(); while (sourceFieldEnum.hasNext()) { Object fieldValue = ((DatabaseField)sourceFieldEnum.next()).getQualifiedName(); Object attributeValue = ((DatabaseField)targetFieldEnum.next()).getQualifiedName(); associations.addElement(new Association(fieldValue, attributeValue)); } return associations; } /** * INTERNAL: * Returns the source keys to target keys fields association. */ public Map getSourceToTargetKeyFields() { return sourceToTargetKeyFields; } /** * INTERNAL: * Returns the target keys to source keys fields association. 
*/ public Map getTargetToSourceKeyFields() { return targetToSourceKeyFields; } /** * INTERNAL: * If required, get the targetVersion of the source object from the merge manager * * Used with MapKeyContainerPolicy to abstract getting the target version of a source key * @return */ public Object getTargetVersionOfSourceObject(Object object, Object parent, MergeManager mergeManager, AbstractSession targetSession){ return mergeManager.getTargetVersionOfSourceObject(object, referenceDescriptor, targetSession); } /** * INTERNAL: * Return the class this key mapping maps or the descriptor for it * @return */ public Class getMapKeyTargetType(){ return getReferenceClass(); } /** * INTERNAL: * Initialize the mapping. */ @Override public void initialize(AbstractSession session) throws DescriptorException { if (session.hasBroker()) { if (getReferenceClass() == null) { throw DescriptorException.referenceClassNotSpecified(this); } // substitute session that owns the mapping for the session that owns reference descriptor. session = session.getBroker().getSessionForClass(getReferenceClass()); } super.initialize(session); if (isForeignKeyRelationship() && !isMapKeyMapping()) { getDescriptor().addPreDeleteMapping(this); } // Capture our foreign key field specifications here. We need to build // the fields first to ensure they have a table associated with them. // Also must be careful to not set the flags based on a previously // built field (multiple mappings to the same field) since we need to // capture the flags from the field set directly on this mapping. for (DatabaseField field : getForeignKeyFields()) { DatabaseField builtField = getDescriptor().buildField(field, keyTableForMapKey); if (builtField == field || builtField.isTranslated()) { // same instance or translated, look at the built field. updateInsertableAndUpdatableFields(builtField); } else { // previously built field and not translated, look at the original field. 
updateInsertableAndUpdatableFields(field); } } if (this.mechanism != null) { if (this.mechanism.hasRelationTable()) { if(!this.foreignKeyFields.isEmpty() || !this.sourceToTargetKeyFields.isEmpty() || !this.targetToSourceKeyFields.isEmpty()) { throw DescriptorException.oneToOneMappingConflict(this.getDescriptor(), this); } this.foreignKeyFields = null; this.sourceToTargetKeyFields = null; this.targetToSourceKeyFields = null; this.mechanism.initialize(session, this); } else { this.mechanism = null; } } if (this.mechanism == null) { // Must set table of foreign keys. for (int index = 0; index < getForeignKeyFields().size(); index++) { DatabaseField foreignKeyField = getForeignKeyFields().get(index); foreignKeyField = getDescriptor().buildField(foreignKeyField, keyTableForMapKey); getForeignKeyFields().set(index, foreignKeyField); } // If only a selection criteria is specified then the foreign keys do not have to be initialized. if (!(getTargetToSourceKeyFields().isEmpty() && getSourceToTargetKeyFields().isEmpty())) { if (getTargetToSourceKeyFields().isEmpty() || getSourceToTargetKeyFields().isEmpty()) { initializeForeignKeysWithDefaults(session); } else { initializeForeignKeys(session); } } // Check if any foreign keys reference a secondary table. if (getReferenceDescriptor().getTables().size() > 1) { DatabaseTable firstTable = getReferenceDescriptor().getTables().get(0); for (DatabaseField field : getSourceToTargetKeyFields().values()) { if (!field.getTable().equals(firstTable)) { getReferenceDescriptor().setHasMultipleTableConstraintDependecy(true); } } } // Check if any foreign keys reference a secondary table. 
if (getDescriptor().getTables().size() > 1) { DatabaseTable firstTable = getDescriptor().getTables().get(0); for (DatabaseField field : getSourceToTargetKeyFields().keySet()) { if (!field.getTable().equals(firstTable)) { getDescriptor().setHasMultipleTableConstraintDependecy(true); } } } } if (shouldInitializeSelectionCriteria()) { if (shouldForceInitializationOfSelectionCriteria()) { setSelectionCriteria(buildSelectionCriteria()); } else { setSelectionCriteria(buildSelectionCriteria(true, true)); } } else { setShouldVerifyDelete(false); } setFields(collectFields()); if (getReferenceDescriptor().hasTablePerClassPolicy()) { // This will do nothing if we have already prepared for this // source mapping or if the source mapping does not require // any special prepare logic. getReferenceDescriptor().getTablePerClassPolicy().prepareChildrenSelectionQuery(this, session); } } /** * INTERNAL: * The foreign keys primary keys are stored as database fields in the map. */ protected void initializeForeignKeys(AbstractSession session) { HashMap newSourceToTargetKeyFields = new HashMap(getSourceToTargetKeyFields().size()); HashMap newTargetToSourceKeyFields = new HashMap(getTargetToSourceKeyFields().size()); Iterator> iterator = getSourceToTargetKeyFields().entrySet().iterator(); while (iterator.hasNext()) { Map.Entry entry = iterator.next(); DatabaseField sourceField = entry.getKey(); sourceField = getDescriptor().buildField(sourceField, keyTableForMapKey); if (usesIndirection()) { sourceField.setKeepInRow(true); } DatabaseField targetField = entry.getValue(); targetField = getReferenceDescriptor().buildField(targetField, keyTableForMapKey); newSourceToTargetKeyFields.put(sourceField, targetField); newTargetToSourceKeyFields.put(targetField, sourceField); } setSourceToTargetKeyFields(newSourceToTargetKeyFields); setTargetToSourceKeyFields(newTargetToSourceKeyFields); } /** * INTERNAL: * The foreign keys primary keys are stored as database fields in the map. 
*/ protected void initializeForeignKeysWithDefaults(AbstractSession session) { if (isForeignKeyRelationship()) { if (getSourceToTargetKeyFields().size() != 1) { throw DescriptorException.foreignKeysDefinedIncorrectly(this); } List targetKeys = getReferenceDescriptor().getPrimaryKeyFields(); if (targetKeys.size() != 1) { //target and source keys are not the same size. throw DescriptorException.sizeMismatchOfForeignKeys(this); } //grab the only element out of the map DatabaseField sourceField = getSourceToTargetKeyFields().keySet().iterator().next(); sourceField = getDescriptor().buildField(sourceField); if (usesIndirection()) { sourceField.setKeepInRow(true); } getSourceToTargetKeyFields().clear(); getTargetToSourceKeyFields().clear(); getSourceToTargetKeyFields().put(sourceField, targetKeys.get(0)); getTargetToSourceKeyFields().put(targetKeys.get(0), sourceField); } else { if (getTargetToSourceKeyFields().size() != 1) { throw DescriptorException.foreignKeysDefinedIncorrectly(this); } List sourceKeys = getDescriptor().getPrimaryKeyFields(); if (sourceKeys.size() != 1) { //target and source keys are not the same size. throw DescriptorException.sizeMismatchOfForeignKeys(this); } //grab the only element out of the map DatabaseField targetField = getTargetToSourceKeyFields().keySet().iterator().next(); targetField = getReferenceDescriptor().buildField(targetField); getSourceToTargetKeyFields().clear(); getTargetToSourceKeyFields().clear(); getTargetToSourceKeyFields().put(targetField, sourceKeys.get(0)); getSourceToTargetKeyFields().put(sourceKeys.get(0), targetField); } } /** * INTERNAL: * Selection criteria is created with source foreign keys and target keys. 
 */
protected void initializePrivateOwnedCriteria() {
    if (!isForeignKeyRelationship()) {
        // Target-FK case: the selection criteria already identifies the previous target.
        setPrivateOwnedCriteria(getSelectionCriteria());
    } else {
        // Source-FK case: join source PK (via a manual back-ref query key) to the target.
        Expression pkCriteria = getDescriptor().getObjectBuilder().getPrimaryKeyExpression();
        ExpressionBuilder builder = new ExpressionBuilder();
        Expression backRef = builder.getManualQueryKey(getAttributeName() + "-back-ref", getDescriptor());
        Expression newPKCriteria = pkCriteria.rebuildOn(backRef);
        Expression twistedSelection = backRef.twist(getSelectionCriteria(), builder);
        if (getDescriptor().getQueryManager().getAdditionalJoinExpression() != null) {
            // We don't have to twist the additional join because it's all against the same node, which is our base
            // but we do have to rebuild it onto the manual query key
            Expression rebuiltAdditional = getDescriptor().getQueryManager().getAdditionalJoinExpression().rebuildOn(backRef);
            if (twistedSelection == null) {
                twistedSelection = rebuiltAdditional;
            } else {
                twistedSelection = twistedSelection.and(rebuiltAdditional);
            }
        }
        setPrivateOwnedCriteria(newPKCriteria.and(twistedSelection));
    }
}

/**
 * INTERNAL:
 * Making any mapping changes necessary to use a the mapping as a map key prior to initializing the mapping.
 */
public void preinitializeMapKey(DatabaseTable table) throws DescriptorException {
    keyTableForMapKey = table;
}

/**
 * INTERNAL:
 * Need to set the field type for the foreign key fields for a map key, as the fields are not contained in any descriptor.
 */
public void postInitializeMapKey(MappedKeyMapContainerPolicy policy) {
    for (DatabaseField foreignKey : getSourceToTargetKeyFields().keySet()) {
        if (foreignKey.getType() == null) {
            foreignKey.setType(getFieldClassification(foreignKey));
        }
    }
}

/**
 * INTERNAL:
 * Prepare a cascade locking policy.
 */
@Override
public void prepareCascadeLockingPolicy() {
    CascadeLockingPolicy policy = new CascadeLockingPolicy(getDescriptor(), getReferenceDescriptor());
    policy.setQueryKeyFields(getSourceToTargetKeyFields(), !isForeignKeyRelationship());
    getReferenceDescriptor().addCascadeLockingPolicy(policy);
}

/**
 * This method would allow customers to get the potential selection criteria for a mapping
 * prior to initialization. This would allow them to more easily create an amendment method
 * that would amend the SQL for the join.
 */
public Expression buildSelectionCriteria() {
    return buildSelectionCriteria(true, false);
}

/**
 * INTERNAL:
 * Build the selection criteria for this mapping. Allows several variations.
 *
 * Either a parameter can be used for the join or simply the database field.
 *
 * The existing selection criteria can be built upon or a whole new criteria can be built.
 */
public Expression buildSelectionCriteria(boolean useParameter, boolean usePreviousSelectionCriteria){
    Expression criteria = null;
    if (usePreviousSelectionCriteria){
        criteria = getSelectionCriteria();
    }
    if (this.mechanism == null) {
        Expression builder = new ExpressionBuilder();
        // CR3922
        if (getSourceToTargetKeyFields().isEmpty()) {
            throw DescriptorException.noForeignKeysAreSpecified(this);
        }
        for (Iterator keys = getSourceToTargetKeyFields().keySet().iterator(); keys.hasNext();) {
            DatabaseField foreignKey = (DatabaseField)keys.next();
            DatabaseField targetKey = getSourceToTargetKeyFields().get(foreignKey);
            Expression expression = null;
            if (useParameter){
                // Parameter form: target field = :foreignKey (bound at execution time).
                expression = builder.getField(targetKey).equal(builder.getParameter(foreignKey));
            } else {
                // Field form: target field = source field (used for joins).
                expression = builder.getField(targetKey).equal(builder.getField(foreignKey));
            }
            if (criteria == null) {
                criteria = expression;
            } else {
                criteria = expression.and(criteria);
            }
        }
    } else {
        criteria = this.mechanism.buildSelectionCriteria(this, criteria);
    }
    return criteria;
}

/**
 * INTERNAL:
 * Builds a shallow original object. Only direct attributes and primary
 * keys are populated. In this way the minimum original required for
 * instantiating a working copy clone can be built without placing it in
 * the shared cache (no concern over cycles).
*/ @Override public void buildShallowOriginalFromRow(AbstractRecord databaseRow, Object original, JoinedAttributeManager joinManager, ObjectBuildingQuery query, AbstractSession executionSession) { // Now we are only building this original so we can extract the primary // key out of it. If the primary key is stored across a 1-1 a value // holder needs to be built/triggered to get at it. // In this case recursively build the shallow original across the 1-1. // We only need the primary key for that object, and we know // what that primary key is: it is the foreign key in our row. ClassDescriptor descriptor = getReferenceDescriptor(); AbstractRecord targetRow = new DatabaseRecord(); for (Iterator keys = getSourceToTargetKeyFields().keySet().iterator(); keys.hasNext();) { DatabaseField foreignKey = (DatabaseField)keys.next(); DatabaseField targetKey = getSourceToTargetKeyFields().get(foreignKey); targetRow.put(targetKey, databaseRow.get(foreignKey)); } Object targetObject = descriptor.getObjectBuilder().buildNewInstance(); descriptor.getObjectBuilder().buildAttributesIntoShallowObject(targetObject, databaseRow, query); targetObject = getIndirectionPolicy().valueFromRow(targetObject); setAttributeValueInObject(original, targetObject); } /** * INTERNAL: */ @Override public boolean isOneToOneMapping() { return true; } /** * INTERNAL: */ @Override public boolean isOwned(){ return this.hasRelationTable() && ! this.isReadOnly; } /** * INTERNAL: * Reads the private owned object. 
*/ @Override protected Object readPrivateOwnedForObject(ObjectLevelModifyQuery modifyQuery) throws DatabaseException { if (modifyQuery.getSession().isUnitOfWork()) { return super.readPrivateOwnedForObject(modifyQuery); } else { if (!shouldVerifyDelete()) { return null; } ReadObjectQuery readQuery = (ReadObjectQuery)getSelectionQuery().clone(); readQuery.setSelectionCriteria(getPrivateOwnedCriteria()); return modifyQuery.getSession().executeQuery(readQuery, modifyQuery.getTranslationRow()); } } /** * INTERNAL: * Rehash any map based on fields. * This is used to clone descriptors for aggregates, which hammer field names, * it is probably better not to hammer the field name and this should be refactored. */ @Override public void rehashFieldDependancies(AbstractSession session) { setSourceToTargetKeyFields(Helper.rehashMap(getSourceToTargetKeyFields())); // Go through the fks again and make updates for any translated fields. for (DatabaseField field : getSourceToTargetKeyFields().keySet()) { if (field.isTranslated()) { updateInsertableAndUpdatableFields(field); } } } /** * INTERNAL: * Return whether this mapping requires extra queries to update the rows if it is * used as a key in a map. This will typically be true if there are any parts to this mapping * that are not read-only. */ public boolean requiresDataModificationEventsForMapKey() { return true; } /** * Return if this mapping is really for a OneToOne relationship. * This is a backward compatibility issue, in that before the ManyToOneMapping * was created OneToOneMapping was used for both. * false means it may be a OneToOne or a ManyToOne (unknown). */ public boolean isOneToOneRelationship() { return isOneToOneRelationship; } /** * Return if this mapping is mapped using primary key join columns. */ public boolean isOneToOnePrimaryKeyRelationship() { return isOneToOnePrimaryKeyRelationship; } /** * Define if this mapping is really for a OneToOne relationship. 
* This is a backward compatibility issue, in that before the ManyToOneMapping * was created OneToOneMapping was used for both. */ public void setIsOneToOneRelationship(boolean isOneToOneRelationship) { this.isOneToOneRelationship = isOneToOneRelationship; } /** * Set if this mapping is defined using primary key join columns. */ public void setIsOneToOnePrimaryKeyRelationship(boolean isOneToOnePrimaryKeyRelationship) { this.isOneToOnePrimaryKeyRelationship = isOneToOnePrimaryKeyRelationship; } /** * PUBLIC: * Define the foreign key relationship in the 1-1 mapping. * This method is used for singleton foreign key relationships only, * that is the source object's table has a foreign key field to * the target object's primary key field. * Only the source foreign key field name is specified. * When a foreign key is specified TopLink will automatically populate the value * for that field from the target object when the object is written to the database. * If the foreign key is also mapped through a direct-to-field then the direct-to-field must * be set read-only. */ public void setForeignKeyFieldName(String sourceForeignKeyFieldName) { DatabaseField sourceField = new DatabaseField(sourceForeignKeyFieldName); setIsForeignKeyRelationship(true); getForeignKeyFields().addElement(sourceField); getSourceToTargetKeyFields().put(sourceField, new DatabaseField()); } /** * PUBLIC: * Return the foreign key field names associated with the mapping. * These are only the source fields that are writable. */ public void setForeignKeyFieldNames(Vector fieldNames) { Vector fields = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(fieldNames.size()); for (Enumeration fieldNamesEnum = fieldNames.elements(); fieldNamesEnum.hasMoreElements();) { fields.addElement(new DatabaseField((String)fieldNamesEnum.nextElement())); } setForeignKeyFields(fields); } /** * INTERNAL: * Private owned criteria is used to verify the deletion of the target. 
* It joins from the source table on the foreign key to the target table, * with a parameterization of the primary key of the source object. */ protected void setPrivateOwnedCriteria(Expression expression) { privateOwnedCriteria = expression; } /** * PUBLIC: * Verify delete is used during delete and update on private 1:1's outside of a unit of work only. * It checks for the previous value of the target object through joining the source and target tables. * By default it is always done, but may be disabled for performance on distributed database reasons. * In the unit of work the previous value is obtained from the backup-clone so it is never used. */ public void setShouldVerifyDelete(boolean shouldVerifyDelete) { this.shouldVerifyDelete = shouldVerifyDelete; } /** * INTERNAL: * Set a collection of the source to target field associations. */ public void setSourceToTargetKeyFieldAssociations(Vector sourceToTargetKeyFieldAssociations) { setSourceToTargetKeyFields(new HashMap(sourceToTargetKeyFieldAssociations.size() + 1)); setTargetToSourceKeyFields(new HashMap(sourceToTargetKeyFieldAssociations.size() + 1)); for (Enumeration associationsEnum = sourceToTargetKeyFieldAssociations.elements(); associationsEnum.hasMoreElements();) { Association association = (Association)associationsEnum.nextElement(); DatabaseField sourceField = new DatabaseField((String)association.getKey()); DatabaseField targetField = new DatabaseField((String)association.getValue()); getSourceToTargetKeyFields().put(sourceField, targetField); getTargetToSourceKeyFields().put(targetField, sourceField); } } /** * INTERNAL: * Set the source keys to target keys fields association. */ public void setSourceToTargetKeyFields(Map sourceToTargetKeyFields) { this.sourceToTargetKeyFields = sourceToTargetKeyFields; } /** * PUBLIC: * Define the target foreign key relationship in the 1-1 mapping. 
* This method is used for singleton target foreign key relationships only, * that is the target object's table has a foreign key field to * the source object's primary key field. * The target foreign key field name is specified. * The distinction between a foreign key and target foreign key is that the 1-1 * mapping will not populate the target foreign key value when written (because it is in the target table). * Normally 1-1's are through foreign keys but in bi-directional 1-1's * the back reference will be a target foreign key. */ public void setTargetForeignKeyFieldName(String targetForeignKeyFieldName) { DatabaseField targetField = new DatabaseField(targetForeignKeyFieldName); getTargetToSourceKeyFields().put(targetField, new DatabaseField()); } /** * INTERNAL: * Set the target keys to source keys fields association. */ public void setTargetToSourceKeyFields(Map targetToSourceKeyFields) { this.targetToSourceKeyFields = targetToSourceKeyFields; } /** * PUBLIC: * Verify delete is used during delete and update outside of a unit of work only. * It checks for the previous value of the target object through joining the source and target tables. */ public boolean shouldVerifyDelete() { return shouldVerifyDelete; } /** * INTERNAL: * By default returns true. Will also return true if: * 1 - WriteType is INSERT and the field is insertable. * 2 - WriteType is UPDATE and the field is updatable. */ protected boolean shouldWriteField(DatabaseField field, WriteType writeType) { if (writeType.equals(WriteType.INSERT)) { return insertableFields.contains(field); } else if (writeType.equals(WriteType.UPDATE)) { return updatableFields.contains(field); } else { return true; // UNDEFINED, default is to write. } } /** * INTERNAL * Return true if this mapping supports cascaded version optimistic locking. */ @Override public boolean isCascadedLockingSupported() { return true; } /** * INTERNAL: * Return if this mapping support joining. 
 */
@Override
public boolean isJoiningSupported() {
    return true;
}

/**
 * INTERNAL:
 * Called when iterating through descriptors to handle iteration on this mapping when it is used as a MapKey.
 */
public void iterateOnMapKey(DescriptorIterator iterator, Object element){
    this.getIndirectionPolicy().iterateOnAttributeValue(iterator, element);
}

/**
 * INTERNAL:
 * Allow the key mapping to unwrap the object.
 */
public Object unwrapKey(Object key, AbstractSession session){
    return getDescriptor().getObjectBuilder().unwrapObject(key, session);
}

/**
 * INTERNAL:
 * Add the field to the updatable and/or insertable list. Remove any
 * previous field under the same name, otherwise shouldn't matter if we
 * leave an old name (before translation) in the list as it should 'never'
 * be used anyway.
 */
protected void updateInsertableAndUpdatableFields(DatabaseField field) {
    // Remove first so a re-registered field reflects its current flags only.
    insertableFields.remove(field);
    updatableFields.remove(field);
    if (field.isInsertable()) {
        insertableFields.add(field);
    }
    if (field.isUpdatable()) {
        updatableFields.add(field);
    }
}

/**
 * INTERNAL:
 * Allow the key mapping to wrap the object.
 */
public Object wrapKey(Object key, AbstractSession session){
    return getDescriptor().getObjectBuilder().wrapObject(key, session);
}

/**
 * INTERNAL:
 * A subclass should implement this method if it wants different behavior.
 * Write the foreign key values from the attribute to the row.
 */
@Override
public void writeFromAttributeIntoRow(Object attribute, AbstractRecord row, AbstractSession session) {
    for (Enumeration fieldsEnum = getForeignKeyFields().elements(); fieldsEnum.hasMoreElements();) {
        DatabaseField sourceKey = (DatabaseField) fieldsEnum.nextElement();
        DatabaseField targetKey = getSourceToTargetKeyFields().get(sourceKey);
        Object referenceValue = null;
        // If privately owned part is null then method cannot be invoked.
        if (attribute != null) {
            referenceValue = getReferenceDescriptor().getObjectBuilder().extractValueFromObjectForField(attribute, targetKey, session);
        }
        row.add(sourceKey, referenceValue);
    }
}

/**
 * INTERNAL:
 * Get a value from the object and set that in the respective field of the row.
 */
@Override
public Object valueFromObject(Object object, DatabaseField field, AbstractSession session) {
    // First check if the value can be obtained from the value holder's row.
    Object attributeValue = getAttributeValueFromObject(object);
    AbstractRecord referenceRow = this.indirectionPolicy.extractReferenceRow(attributeValue);
    if (referenceRow != null) {
        Object value = referenceRow.get(field);
        Class type = getFieldClassification(field);
        if ((value == null) || (value.getClass() != type)) {
            // Must ensure the classification to get a cache hit.
            try {
                value = session.getDatasourcePlatform().convertObject(value, type);
            } catch (ConversionException exception) {
                throw ConversionException.couldNotBeConverted(this, getDescriptor(), exception);
            }
        }
        return value;
    }
    // Otherwise instantiate the reference and extract the value from it.
    Object referenceObject = getRealAttributeValueFromAttribute(attributeValue, object, session);
    if (referenceObject == null) {
        return null;
    }
    DatabaseField targetField;
    if(this.mechanism == null) {
        targetField = this.sourceToTargetKeyFields.get(field);
    } else {
        // Relation-table variant keeps parallel source/target key lists.
        targetField = this.mechanism.targetKeyFields.get(this.mechanism.sourceKeyFields.indexOf(field));
    }
    return this.referenceDescriptor.getObjectBuilder().extractValueFromObjectForField(referenceObject, targetField, session);
}

/**
 * INTERNAL:
 * Return the value of the field from the row or a value holder on the query to obtain the object.
 * Check for batch + aggregation reading.
 */
@Override
protected Object valueFromRowInternalWithJoin(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey parentCacheKey, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException {
    // PERF: Direct variable access.
    Object referenceObject;
    // CR #... the field for many objects may be in the row,
    // so build the subpartion of the row through the computed values in the query,
    // this also helps the field indexing match.
    AbstractRecord targetRow = trimRowForJoin(row, joinManager, executionSession);
    // PERF: Only check for null row if an outer-join was used.
    if (((joinManager != null) && joinManager.hasOuterJoinedAttributeQuery()) && !sourceQuery.hasPartialAttributeExpressions()) {
        Object key = this.referenceDescriptor.getObjectBuilder().extractPrimaryKeyFromRow(targetRow, executionSession);
        if (key == null) {
            // Outer join produced no target row: treat as null reference.
            return this.indirectionPolicy.nullValueFromRow();
        }
    }
    // A nested query must be built to pass to the descriptor that looks like the real query execution would,
    // these should be cached on the query during prepare.
    ObjectLevelReadQuery nestedQuery = prepareNestedJoinQueryClone(row, null, joinManager, sourceQuery, executionSession);
    nestedQuery.setTranslationRow(targetRow);
    nestedQuery.setRequiresDeferredLocks(sourceQuery.requiresDeferredLocks());
    nestedQuery.setPrefetchedCacheKeys(sourceQuery.getPrefetchedCacheKeys());
    referenceObject = this.referenceDescriptor.getObjectBuilder().buildObject(nestedQuery, targetRow);
    // For bug 3641713 buildObject doesn't wrap if called on a UnitOfWork for performance reasons,
    // must wrap here as this is the last time we can look at the query and tell whether to wrap or not.
    if (nestedQuery.shouldUseWrapperPolicy() && executionSession.isUnitOfWork()) {
        referenceObject = this.referenceDescriptor.getObjectBuilder().wrapObject(referenceObject, executionSession);
    }
    return this.indirectionPolicy.valueFromRow(referenceObject);
}

/**
 * INTERNAL:
 * Return the value of the field from the row or a value holder on the query to obtain the object.
 * Check for batch + aggregation reading.
*/ @Override protected Object valueFromRowInternal(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession, boolean shouldUseSopObject) throws DatabaseException { // If any field in the foreign key is null then it means there are no referenced objects // Skip for partial objects as fk may not be present. if (!shouldUseSopObject) { int size = this.fields.size(); for (int index = 0; index < size; index++) { DatabaseField field = this.fields.get(index); if (row.get(field) == null) { return this.indirectionPolicy.nullValueFromRow(); } } } // Call the default which executes the selection query, // or wraps the query with a value holder. return super.valueFromRowInternal(row, joinManager, sourceQuery, executionSession, shouldUseSopObject); } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. */ @Override public void writeFromObjectIntoRow(Object object, AbstractRecord databaseRow, AbstractSession session, WriteType writeType) { if (this.isReadOnly || (!this.isForeignKeyRelationship)) { return; } writeFromObjectIntoRowInternal(object, databaseRow, session, null, writeType); } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. * The fields and the values added to the row depend on ShallowMode mode: * null - all fields with their values from object; * Insert - nullable fields added with value null, non nullable fields added with their values from object; * UpdateAfterInsert - nullable fields added with with their non-null values from object, non nullable fields (and nullable with null values) are ignored; * UpdateBeforeDelete - the same fields as for UpdateAfterShallowInsert - but all values are nulls. 
 */
protected void writeFromObjectIntoRowInternal(Object object, AbstractRecord databaseRow, AbstractSession session, ShallowMode mode, WriteType writeType) {
    List foreignKeyFields = getForeignKeyFields();
    // Shallow modes first narrow the field list by nullability (see javadoc above).
    if (mode != null) {
        List nonNullableFields = null;
        for (DatabaseField field : foreignKeyFields) {
            if (field.isNullable()) {
                if (mode == ShallowMode.Insert && shouldWriteField(field, writeType)) {
                    // add a nullable field with a null value
                    databaseRow.add(field, null);
                }
            } else {
                if (nonNullableFields == null) {
                    nonNullableFields = new ArrayList();
                }
                nonNullableFields.add(field);
            }
        }
        if (nonNullableFields == null) {
            // all foreignKeyFields are nullable
            if (mode == ShallowMode.Insert) {
                // nothing else to do
                return;
            }
            // UpdateAfterInsert or UpdateBeforeDelete: all nullable foreignKeyFields will be processed
        } else {
            if (mode == ShallowMode.Insert) {
                // all non nullable foreignKeyFields will be processed
                foreignKeyFields = nonNullableFields;
            } else {
                // UpdateAfterInsert or UpdateBeforeDelete
                if (foreignKeyFields.size() == nonNullableFields.size()) {
                    // all fields are non nullable - nothing else to do
                    return;
                } else {
                    // all nullable foreignKeyFields will be processed
                    foreignKeyFields = new ArrayList(foreignKeyFields);
                    foreignKeyFields.removeAll(nonNullableFields);
                }
            }
        }
    }
    Object attributeValue = getAttributeValueFromObject(object);
    // If the value holder has the row, avoid instantiation and just use it.
    AbstractRecord referenceRow = this.indirectionPolicy.extractReferenceRow(attributeValue);
    if (referenceRow == null) {
        // Extract from object.
        Object referenceObject = getRealAttributeValueFromAttribute(attributeValue, object, session);
        for (DatabaseField sourceKey : foreignKeyFields) {
            Object referenceValue = null;
            // If privately owned part is null then method cannot be invoked.
            if (referenceObject != null) {
                DatabaseField targetKey = this.sourceToTargetKeyFields.get(sourceKey);
                referenceValue = this.referenceDescriptor.getObjectBuilder().extractValueFromObjectForField(referenceObject, targetKey, session);
            }
            if (mode == null) {
                // EL Bug 319759 - if a field is null, then the update call cache should not be used
                if (referenceValue == null) {
                    databaseRow.setNullValueInFields(true);
                }
            } else {
                if (referenceValue == null) {
                    if (mode != ShallowMode.Insert) {
                        // both UpdateAfterInsert and UpdateBeforeDelete ignore null values
                        continue;
                    }
                } else {
                    if (mode == ShallowMode.UpdateBeforeDelete) {
                        // UpdateBeforeDelete adds nulls instead of non nulls
                        referenceValue = null;
                    }
                }
            }
            // Check updatable and insertable based on the write type.
            if (shouldWriteField(sourceKey, writeType)) {
                databaseRow.add(sourceKey, referenceValue);
            }
        }
    } else {
        // Value holder still holds the original row: read the FK values from it
        // instead of instantiating the reference object.
        for (DatabaseField sourceKey : foreignKeyFields) {
            Object referenceValue = referenceRow.get(sourceKey);
            if (mode == null) {
                // EL Bug 319759 - if a field is null, then the update call cache should not be used
                if (referenceValue == null) {
                    databaseRow.setNullValueInFields(true);
                }
            } else {
                if (referenceValue == null) {
                    if (mode != ShallowMode.Insert) {
                        // both UpdateAfterInsert and UpdateBeforeDelete ignore null values
                        continue;
                    }
                } else {
                    if (mode == ShallowMode.UpdateBeforeDelete) {
                        // UpdateBeforeDelete adds nulls instead of non nulls
                        referenceValue = null;
                    }
                }
            }
            // Check updatable and insertable based on the write type.
            if (shouldWriteField(sourceKey, writeType)) {
                databaseRow.add(sourceKey, referenceValue);
            }
        }
    }
}

/**
 * INTERNAL:
 * This row is built for shallow insert which happens in case of bidirectional inserts.
 * The foreign keys must be set to null to avoid constraints.
*/ @Override public void writeFromObjectIntoRowForShallowInsert(Object object, AbstractRecord databaseRow, AbstractSession session) { if (this.isReadOnly || (!this.isForeignKeyRelationship)) { return; } writeFromObjectIntoRowInternal(object, databaseRow, session, ShallowMode.Insert, WriteType.INSERT); } /** * INTERNAL: * This row is built for update after shallow insert which happens in case of bidirectional inserts. * It contains the foreign keys with non null values that were set to null for shallow insert. */ @Override public void writeFromObjectIntoRowForUpdateAfterShallowInsert(Object object, AbstractRecord databaseRow, AbstractSession session, DatabaseTable table) { if (this.isReadOnly || (!this.isForeignKeyRelationship) || !getFields().get(0).getTable().equals(table) || isPrimaryKeyMapping()) { return; } writeFromObjectIntoRowInternal(object, databaseRow, session, ShallowMode.UpdateAfterInsert, WriteType.UNDEFINED); } /** * INTERNAL: * This row is built for update before shallow delete which happens in case of bidirectional inserts. * It contains the same fields as the row built by writeFromObjectIntoRowForUpdateAfterShallowInsert, but all the values are null. */ @Override public void writeFromObjectIntoRowForUpdateBeforeShallowDelete(Object object, AbstractRecord databaseRow, AbstractSession session, DatabaseTable table) { if (this.isReadOnly || (!this.isForeignKeyRelationship) || !getFields().get(0).getTable().equals(table) || isPrimaryKeyMapping()) { return; } writeFromObjectIntoRowInternal(object, databaseRow, session, ShallowMode.UpdateBeforeDelete, WriteType.UNDEFINED); } /** * INTERNAL: * Get a value from the object and set that in the respective field of the row. * Validation preventing primary key updates is implemented here. 
*/ @Override public void writeFromObjectIntoRowWithChangeRecord(ChangeRecord changeRecord, AbstractRecord databaseRow, AbstractSession session, WriteType writeType) { if ((!this.isReadOnly) && this.isPrimaryKeyMapping && (!changeRecord.getOwner().isNew())) { throw ValidationException.primaryKeyUpdateDisallowed(changeRecord.getOwner().getClassName(), changeRecord.getAttribute()); } // The object must be used here as the foreign key may include more than just the // primary key of the referenced object and the changeSet may not have the required information. Object object = ((ObjectChangeSet)changeRecord.getOwner()).getUnitOfWorkClone(); writeFromObjectIntoRow(object, databaseRow, session, writeType); } /** * INTERNAL: * This row is built for shallow insert which happens in case of bidirectional inserts. * The foreign keys must be set to null to avoid constraints. */ @Override public void writeFromObjectIntoRowForShallowInsertWithChangeRecord(ChangeRecord ChangeRecord, AbstractRecord databaseRow, AbstractSession session) { if (isReadOnly() || (!isForeignKeyRelationship())) { return; } for (Enumeration fieldsEnum = getForeignKeyFields().elements(); fieldsEnum.hasMoreElements();) { DatabaseField sourceKey = (DatabaseField)fieldsEnum.nextElement(); databaseRow.add(sourceKey, null); } } /** * INTERNAL: * Write fields needed for insert into the template for with null values. */ @Override public void writeInsertFieldsIntoRow(AbstractRecord databaseRow, AbstractSession session) { if (isReadOnly() || (!isForeignKeyRelationship())) { return; } for (Enumeration fieldsEnum = getForeignKeyFields().elements(); fieldsEnum.hasMoreElements();) { DatabaseField sourceKey = (DatabaseField)fieldsEnum.nextElement(); if (shouldWriteField(sourceKey, WriteType.INSERT)) { databaseRow.add(sourceKey, null); } } } /** * PUBLIC: * Indicates whether the mapping has RelationTableMechanism. 
*/ public boolean hasRelationTableMechanism() { return this.mechanism != null; } /** * PUBLIC: * Indicates whether the mapping has RelationTable. */ public boolean hasRelationTable() { return this.mechanism != null && this.mechanism.hasRelationTable(); } /** * PUBLIC: * Returns RelationTableMechanism that may be owned by the mapping, * that allows to configure the mapping to use relation table (just like ManyToManyMapping). * By default its null, should be created and set into the mapping before use. */ public RelationTableMechanism getRelationTableMechanism() { return this.mechanism; } /** * PUBLIC: * Set the relational table. * This is the join table that store both the source and target primary keys. */ public void setRelationTable(DatabaseTable relationTable) { this.mechanism.setRelationTable(relationTable); } /** * PUBLIC: * Set RelationTableMechanism into the mapping, * that allows to configure the mapping to use relation table (just like ManyToManyMapping). */ public void setRelationTableMechanism(RelationTableMechanism mechanism) { this.mechanism = mechanism; } /** * PUBLIC: * Return RelationTable. */ public DatabaseTable getRelationTable() { if(this.mechanism != null) { return this.mechanism.getRelationTable(); } else { return null; } } /** * INTERNAL: * Delete privately owned parts */ @Override public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { if ((this.mechanism != null) && !this.isReadOnly && !this.isCascadeOnDeleteSetOnDatabase) { AbstractRecord sourceRow = this.mechanism.buildRelationTableSourceRow(query.getObject(), query.getSession(), this); query.getSession().executeQuery(this.mechanism.deleteQuery, sourceRow); } super.preDelete(query); } /** * INTERNAL: * Insert into relation table. This follows following steps. *
 * <ul>
 * <li> Extract primary key and its value from the source object.
 * <li> Extract target key and its value from the target object.
 * <li> Construct an insert statement with above fields and values for relation table.
 * <li> Execute the statement.
 * </ul>
 */
@Override
public void postInsert(WriteObjectQuery query) throws DatabaseException {
    super.postInsert(query);
    if(this.mechanism != null && !isReadOnly()) {
        Object targetObject = getRealAttributeValueFromObject(query.getObject(), query.getSession());
        if (targetObject == null) {
            return;
        }
        // Batch data modification in the uow
        if (query.shouldCascadeOnlyDependentParts()) {
            // Hey I might actually want to use an inner class here... ok array for now.
            Object[] event = new Object[3];
            event[0] = setObject;
            event[1] = this.mechanism.buildRelationTableSourceRow(query.getObject(), query.getSession(), this);
            // targetObject may not have pk yet - wait to extract targetRow until the event is processed
            event[2] = targetObject;
            query.getSession().getCommitManager().addDataModificationEvent(this, event);
        } else {
            AbstractRecord sourceAndTargetRow = this.mechanism.buildRelationTableSourceAndTargetRow(query.getObject(), targetObject, query.getSession(), this);
            query.getSession().executeQuery(this.mechanism.insertQuery, sourceAndTargetRow);
        }
    }
}

/**
 * INTERNAL:
 * Update the relation table with the entries related to this mapping.
 * Delete entries removed, insert entries added.
 * If private also insert/delete/update target objects.
 */
@Override
public void postUpdate(WriteObjectQuery query) throws DatabaseException {
    if(this.mechanism == null) {
        super.postUpdate(query);
    } else {
        // If object is not instantiated then it's not changed.
        if (!isAttributeValueInstantiated(query.getObject())) {
            return;
        }
        // Delete-then-reinsert: clear the old relation-table row first.
        AbstractRecord sourceRow = null;
        if(!isReadOnly()) {
            sourceRow = this.mechanism.buildRelationTableSourceRow(query.getObject(), query.getSession(), this);
            query.getSession().executeQuery(this.mechanism.deleteQuery, sourceRow);
        }
        super.postUpdate(query);
        if(sourceRow != null) {
            Object targetObject = getRealAttributeValueFromObject(query.getObject(), query.getSession());
            if (targetObject == null) {
                return;
            }
            // Batch data modification in the uow
            if (query.shouldCascadeOnlyDependentParts()) {
                // Hey I might actually want to use an inner class here... ok array for now.
                Object[] event = new Object[3];
                event[0] = setObject;
                event[1] = sourceRow;
                // targetObject may not have pk yet - wait to extract targetRow until the event is processed
                event[2] = targetObject;
                query.getSession().getCommitManager().addDataModificationEvent(this, event);
            } else {
                AbstractRecord sourceAndTargetRow = this.mechanism.addRelationTableTargetRow(targetObject, query.getExecutionSession(), sourceRow, this);
                query.getSession().executeQuery(this.mechanism.insertQuery, sourceAndTargetRow);
            }
        }
    }
}

/**
 * INTERNAL:
 * Perform the commit event.
 * This is used in the uow to delay data modifications.
 */
@Override
public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException {
    // Hey I might actually want to use an inner class here... ok array for now.
    // event layout: [0]=event code, [1]=source row, [2]=target object (see postInsert/postUpdate).
    if (event[0] == setObject) {
        AbstractRecord sourceAndTargetRow = this.mechanism.addRelationTableTargetRow(event[2], session, (AbstractRecord)event[1], this);
        session.executeQuery(this.mechanism.insertQuery, sourceAndTargetRow);
    } else {
        throw DescriptorException.invalidDataModificationEventCode(event[0], this);
    }
}

/**
 * INTERNAL:
 * Return all the fields populated by this mapping, these are foreign keys only.
*/ @Override protected Vector collectFields() { if(this.mechanism != null) { return new Vector(0); } else { return super.collectFields(); } } /** * INTERNAL: * Order by foreign key fields if a foreign key mapping (avoids joins). */ @Override public List getOrderByNormalizedExpressions(Expression base) { if (this.foreignKeyFields.size() > 0) { List orderBys = new ArrayList(this.foreignKeyFields.size()); for (DatabaseField field : this.foreignKeyFields) { orderBys.add(((QueryKeyExpression)base).getBaseExpression().getField(field)); } return orderBys; } return super.getOrderByNormalizedExpressions(base); } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/ForeignReferenceMapping.java0000664000000000000000000034041512216173130025617 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 08/23/2010-2.2 Michael O'Brien * - 323043: application.xml module ordering may cause weaving not to occur causing an NPE. * warn if expected "_persistence_*_vh" method not found * instead of throwing NPE during deploy validation. 
* 11/19/2012-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support (foreign key metadata support) * 12/07/2012-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support (foreign key metadata support) ******************************************************************************/ package org.eclipse.persistence.mappings; import java.security.AccessController; import java.security.PrivilegedActionException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Vector; import org.eclipse.persistence.annotations.BatchFetchType; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.partitioning.PartitioningPolicy; import org.eclipse.persistence.exceptions.DatabaseException; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.exceptions.QueryException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.expressions.Expression; import org.eclipse.persistence.expressions.ExpressionBuilder; import org.eclipse.persistence.history.AsOfClause; import org.eclipse.persistence.indirection.ValueHolder; import org.eclipse.persistence.indirection.ValueHolderInterface; import org.eclipse.persistence.internal.descriptors.DescriptorIterator; import org.eclipse.persistence.internal.descriptors.InstanceVariableAttributeAccessor; import org.eclipse.persistence.internal.descriptors.MethodAttributeAccessor; import org.eclipse.persistence.internal.expressions.ForUpdateOfClause; import org.eclipse.persistence.internal.expressions.ObjectExpression; import org.eclipse.persistence.internal.helper.ClassConstants; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.helper.Helper; import 
org.eclipse.persistence.internal.helper.NonSynchronizedSubVector; import org.eclipse.persistence.internal.helper.NonSynchronizedVector; import org.eclipse.persistence.internal.identitymaps.CacheId; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.indirection.BasicIndirectionPolicy; import org.eclipse.persistence.internal.indirection.ContainerIndirectionPolicy; import org.eclipse.persistence.internal.indirection.DatabaseValueHolder; import org.eclipse.persistence.internal.indirection.IndirectionPolicy; import org.eclipse.persistence.internal.indirection.NoIndirectionPolicy; import org.eclipse.persistence.internal.indirection.WeavedObjectBasicIndirectionPolicy; import org.eclipse.persistence.internal.queries.AttributeItem; import org.eclipse.persistence.internal.queries.JoinedAttributeManager; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.ChangeRecord; import org.eclipse.persistence.internal.sessions.MergeManager; import org.eclipse.persistence.internal.sessions.UnitOfWorkChangeSet; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.internal.sessions.remote.RemoteSessionController; import org.eclipse.persistence.internal.sessions.remote.RemoteValueHolder; import org.eclipse.persistence.logging.SessionLog; import org.eclipse.persistence.queries.BatchFetchPolicy; import org.eclipse.persistence.queries.Call; import org.eclipse.persistence.queries.DatabaseQuery; import org.eclipse.persistence.queries.FetchGroup; import org.eclipse.persistence.queries.ObjectBuildingQuery; import org.eclipse.persistence.queries.ObjectLevelModifyQuery; import org.eclipse.persistence.queries.ObjectLevelReadQuery; import 
org.eclipse.persistence.queries.ReadAllQuery;
import org.eclipse.persistence.queries.ReadObjectQuery;
import org.eclipse.persistence.queries.ReadQuery;
import org.eclipse.persistence.queries.ReportQuery;
import org.eclipse.persistence.sessions.DatabaseRecord;
import org.eclipse.persistence.sessions.remote.DistributedSession;

/**
 * <p><b>Purpose</b>: Abstract class for relationship mappings.
 */
public abstract class ForeignReferenceMapping extends DatabaseMapping {

    /** Query parameter name used for IN batch ids. */
    public static final String QUERY_BATCH_PARAMETER = "query-batch-parameter";

    /** This is used only in descriptor proxy in remote session */
    protected Class referenceClass;
    // Class name form of referenceClass (used before the class is resolved).
    protected String referenceClassName;

    /** The session is temporarily used for initialization. Once used, it is set to null */
    protected transient AbstractSession tempInitSession;

    /** The descriptor of the reference class. */
    protected transient ClassDescriptor referenceDescriptor;

    /** This query is used to read referenced objects for this mapping. */
    protected ReadQuery selectionQuery;

    /** Indicates whether the referenced object is privately owned or not. */
    protected boolean isPrivateOwned;

    /**
     * Indicates whether the referenced object should always be batch read on read all queries,
     * and defines the type of batch fetch to use.
     */
    protected BatchFetchType batchFetchType;

    /** Implements indirection behavior */
    protected IndirectionPolicy indirectionPolicy;

    /** Indicates whether the selection query is TopLink generated or defined by the user. */
    protected transient boolean hasCustomSelectionQuery;

    /** Used to reference the other half of a bi-directional relationship.
 */
    protected DatabaseMapping relationshipPartner;

    /** Set by users, used to retrieve the backpointer for this mapping */
    protected String relationshipPartnerAttributeName;

    /** Cascading flags used by the EntityManager */
    protected boolean cascadePersist;
    protected boolean cascadeMerge;
    protected boolean cascadeRefresh;
    protected boolean cascadeRemove;
    protected boolean cascadeDetach;

    /** Flag used to determine if we need to weave the transient annotation on weaved fields. */
    protected boolean requiresTransientWeavedFields;

    /** Define if the relationship should always be join fetched. */
    protected int joinFetch = NONE;
    /** Specify any INNER join on a join fetch. */
    public static final int INNER_JOIN = 1;
    /** Specify any OUTER join on a join fetch. */
    public static final int OUTER_JOIN = 2;
    /** Specify no join fetch, this is the default. */
    public static final int NONE = 0;

    /** This is a way (after cloning) to force the initialization of the selection criteria */
    protected boolean forceInitializationOfSelectionCriteria;

    /**
     * Indicates whether and how pessimistic lock scope should be extended
     */
    enum ExtendPessimisticLockScope {
        // should not be extended.
        NONE,
        // should be extended in mapping's selectQuery.
        TARGET_QUERY,
        // should be extended in the source query.
        SOURCE_QUERY,
        // should be extended in a dedicated query (which doesn't do anything else).
        DEDICATED_QUERY
    }

    ExtendPessimisticLockScope extendPessimisticLockScope;

    /** Support delete cascading on the database relationship constraint. */
    protected boolean isCascadeOnDeleteSetOnDatabase;

    /** Allow the mapping's queries to be targeted at specific connection pools. */
    protected PartitioningPolicy partitioningPolicy;

    /** Name of a PartitioningPolicy (looked up on the Project) targeting the mapping's queries at specific connection pools. */
    protected String partitioningPolicyName;

    /** Stores JPA metadata about whether another mapping is the owning mapping.
        Only populated for JPA models. */
    protected String mappedBy;

    // Default constructor: basic value-holder indirection, transient weaved
    // fields required, all cascade flags off.
    protected ForeignReferenceMapping() {
        this.isPrivateOwned = false;
        this.hasCustomSelectionQuery = false;
        this.useBasicIndirection();
        this.cascadePersist = false;
        this.cascadeMerge = false;
        this.cascadeRefresh = false;
        this.cascadeRemove = false;
        this.requiresTransientWeavedFields = true;
        this.forceInitializationOfSelectionCriteria = false;
        this.extendPessimisticLockScope = ExtendPessimisticLockScope.NONE;
    }

    /**
     * ADVANCED: Allows the retrieval of the owning mapping for a particular
     * mapping. Note: This will only be set for JPA models
     *
     * @return the JPA mappedBy value naming the owning mapping, or null when not set
     */
    public String getMappedBy() {
        return mappedBy;
    }

    /**
     * PUBLIC:
     * Return the mapping's partitioning policy.
     */
    public PartitioningPolicy getPartitioningPolicy() {
        return partitioningPolicy;
    }

    /**
     * PUBLIC:
     * Set the mapping's partitioning policy.
     * A PartitioningPolicy is used to partition, load-balance or replicate data across multiple different databases
     * or across a database cluster such as Oracle RAC.
     * Partitioning can provide improved scalability by allowing multiple database machines to service requests.
     * Setting a policy on a mapping will set the policy on all of its mappings.
     */
    public void setPartitioningPolicy(PartitioningPolicy partitioningPolicy) {
        this.partitioningPolicy = partitioningPolicy;
    }

    /**
     * PUBLIC:
     * Return the name of the mapping's partitioning policy.
     * A PartitioningPolicy with the same name must be defined on the Project.
     * A PartitioningPolicy is used to partition the data for a class across multiple different databases
     * or across a database cluster such as Oracle RAC.
     * Partitioning can provide improved scalability by allowing multiple database machines to service requests.
     */
    public String getPartitioningPolicyName() {
        return partitioningPolicyName;
    }

    /**
     * PUBLIC:
     * Set the name of the mapping's partitioning policy.
     * A PartitioningPolicy with the same name must be defined on the Project.
* A PartitioningPolicy is used to partition the data for a class across multiple different databases
     * or across a database cluster such as Oracle RAC.
     * Partitioning can provide improved scalability by allowing multiple database machines to service requests.
     */
    public void setPartitioningPolicyName(String partitioningPolicyName) {
        this.partitioningPolicyName = partitioningPolicyName;
    }

    /**
     * INTERNAL:
     * Retrieve the value through using batch reading.
     * This executes a single query to read the target for all of the objects and stores the
     * result of the batch query in the original query to allow the other objects to share the results.
     */
    protected Object batchedValueFromRow(AbstractRecord row, ObjectLevelReadQuery query, CacheKey parentCacheKey) {
        // The batch query is cached as a property (keyed by this mapping) on the source
        // query so that every row of the source result set shares a single execution.
        ReadQuery batchQuery = (ReadQuery)query.getProperty(this);
        if (batchQuery == null) {
            if (query.hasBatchReadAttributes()) {
                // NOTE(review): raw Map — generic type parameters appear to have been
                // stripped from this file; verify against the upstream source.
                Map queries = query.getBatchFetchPolicy().getMappingQueries();
                if (queries != null) {
                    batchQuery = queries.get(this);
                }
            }
            if (batchQuery == null) {
                // No pre-computed query for this mapping: build a nested batch query from the source query.
                batchQuery = prepareNestedBatchQuery(query);
                batchQuery.setIsExecutionClone(true);
            } else {
                // Clone the pre-computed query so the shared instance is never mutated during execution.
                batchQuery = (ReadQuery)batchQuery.clone();
                batchQuery.setIsExecutionClone(true);
            }
            query.setProperty(this, batchQuery);
        }
        return this.indirectionPolicy.valueFromBatchQuery(batchQuery, row, query, parentCacheKey);
    }

    /**
     * INTERNAL:
     * Clone the attribute from the clone and assign it to the backup.
     */
    @Override
    public void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork) {
        Object attributeValue = getAttributeValueFromObject(clone);
        Object clonedAttributeValue = this.indirectionPolicy.backupCloneAttribute(attributeValue, clone, backup, unitOfWork);
        setAttributeValueInObject(backup, clonedAttributeValue);
    }

    /**
     * INTERNAL:
     * Used during building the backup shallow copy to copy the
     * target object without re-registering it.
*/
    @Override
    public abstract Object buildBackupCloneForPartObject(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork);

    /**
     * INTERNAL:
     * Clone the attribute from the original and assign it to the clone.
     */
    @Override
    public void buildClone(Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession) {
        Object attributeValue = null;
        // For a non-cacheable (protected) mapping backed by a shared cache entry, the value must be
        // re-read via the stored foreign keys rather than copied from the cached original.
        if (!this.isCacheable && (cacheKey != null && !cacheKey.isIsolated())){
            ReadObjectQuery query = new ReadObjectQuery(descriptor.getJavaClass());
            query.setSession(cloningSession);
            attributeValue = valueFromRow(cacheKey.getProtectedForeignKeys(), null, query, cacheKey, cloningSession, true, null);
        } else {
            attributeValue = getAttributeValueFromObject(original);
        }
        attributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, original, cacheKey, clone, refreshCascade, cloningSession, false); // building clone from an original not a row.
        //GFBug#404 - fix moved to ObjectBuildingQuery.registerIndividualResult
        setAttributeValueInObject(clone, attributeValue);
    }

    /**
     * INTERNAL:
     * A combination of readFromRowIntoObject and buildClone.
     * <p>
     * buildClone assumes the attribute value exists on the original and can
     * simply be copied.
     * <p>
     * readFromRowIntoObject assumes that one is building an original.
     * <p>
     * Both of the above assumptions are false in this method, and it actually
     * attempts to do both at the same time.
     * <p>
     * Extract value from the row and set the attribute to this value in the
     * working copy clone.
     * In order to bypass the shared cache when in transaction a UnitOfWork must
     * be able to populate working copies directly from the row.
     */
    @Override
    public void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, CacheKey sharedCacheKey, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) {
        // wasCacheUsed is an out-parameter: valueFromRow sets element 0 to TRUE when the value came from the cache.
        Boolean[] wasCacheUsed = new Boolean[]{Boolean.FALSE};
        Object attributeValue = valueFromRow(databaseRow, joinManager, sourceQuery, sharedCacheKey, executionSession, true, wasCacheUsed);
        Object clonedAttributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, null, sharedCacheKey, clone, null, unitOfWork, !wasCacheUsed[0]); // building clone directly from row.
        if (executionSession.isUnitOfWork() && sourceQuery.shouldRefreshIdentityMapResult()){
            // check whether the attribute is fully built before calling getAttributeValueFromObject because that
            // call may fully build the attribute
            boolean wasAttributeValueFullyBuilt = isAttributeValueFullyBuilt(clone);
            Object oldAttribute = this.getAttributeValueFromObject(clone);
            setAttributeValueInObject(clone, clonedAttributeValue); // set this first to prevent infinite recursion
            if (wasAttributeValueFullyBuilt && this.indirectionPolicy.objectIsInstantiatedOrChanged(oldAttribute)){
                // The old value had been touched, so the refreshed value must be triggered as well.
                this.indirectionPolicy.instantiateObject(clone, clonedAttributeValue);
            }
        } else {
            setAttributeValueInObject(clone, clonedAttributeValue);
        }
        if ((joinManager != null && joinManager.isAttributeJoined(this.descriptor, this)) || (isExtendingPessimisticLockScope(sourceQuery) && extendPessimisticLockScope == ExtendPessimisticLockScope.TARGET_QUERY) || databaseRow.hasSopObject()) {
            // need to instantiate to extend the lock beyond the source object table(s).
            this.indirectionPolicy.instantiateObject(clone, clonedAttributeValue);
        }
    }

    /**
     * INTERNAL:
     * Required for cloning, the part must be cloned.
     */
    public abstract Object buildCloneForPartObject(Object attributeValue, Object original, CacheKey cacheKey, Object clone, AbstractSession cloningSession, Integer refreshCascade, boolean isExisting, boolean isFromSharedCache);

    /**
     * INTERNAL:
     * The mapping clones itself to create deep copy.
     */
    @Override
    public Object clone() {
        ForeignReferenceMapping clone = (ForeignReferenceMapping)super.clone();
        clone.setIndirectionPolicy((IndirectionPolicy)indirectionPolicy.clone());
        clone.setSelectionQuery((ReadQuery)getSelectionQuery().clone());
        return clone;
    }

    /**
     * INTERNAL:
     * This method will access the target relationship and create a list of information to rebuild the relationship.
     * This method is used in combination with the CachedValueHolder to store references to PK's to be loaded from
     * a cache instead of a query.
     */
    public abstract Object[] buildReferencesPKList(Object entity, Object attribute, AbstractSession session);

    /**
     * INTERNAL:
     * Compare the attributes belonging to this mapping for the objects.
     */
    @Override
    public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) {
        if (isPrivateOwned()) {
            return compareObjectsWithPrivateOwned(firstObject, secondObject, session);
        } else {
            return compareObjectsWithoutPrivateOwned(firstObject, secondObject, session);
        }
    }

    /**
     * Compare two objects if their parts are not private owned.
     */
    protected abstract boolean compareObjectsWithoutPrivateOwned(Object first, Object second, AbstractSession session);

    /**
     * Compare two objects if their parts are private owned.
     */
    protected abstract boolean compareObjectsWithPrivateOwned(Object first, Object second, AbstractSession session);

    /**
     * INTERNAL:
     * Convert all the class-name-based settings in this mapping to actual class-based
     * settings.
This method is used when converting a project that has been built
     * with class names to a project with classes.
     */
    @Override
    public void convertClassNamesToClasses(ClassLoader classLoader){
        super.convertClassNamesToClasses(classLoader);
        // DirectCollection mappings don't require a reference class.
        if (getReferenceClassName() != null) {
            Class referenceClass = null;
            try {
                if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
                    try {
                        referenceClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(getReferenceClassName(), true, classLoader));
                    } catch (PrivilegedActionException exception) {
                        // Unwrap the real ClassNotFoundException from the privileged action wrapper.
                        throw ValidationException.classNotFoundWhileConvertingClassNames(getReferenceClassName(), exception.getException());
                    }
                } else {
                    referenceClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(getReferenceClassName(), true, classLoader);
                }
            } catch (ClassNotFoundException exc){
                throw ValidationException.classNotFoundWhileConvertingClassNames(getReferenceClassName(), exc);
            }
            setReferenceClass(referenceClass);
        }
        if (getSelectionQuery() != null) {
            getSelectionQuery().convertClassNamesToClasses(classLoader);
        }
    }

    /**
     * INTERNAL:
     * Build the unit of work value holder.
     * Ignore the original object.
     * @param buildDirectlyFromRow indicates that we are building the clone directly
     * from a row as opposed to building the original from the row, putting it in
     * the shared cache, and then cloning the original.
     */
    public DatabaseValueHolder createCloneValueHolder(ValueHolderInterface attributeValue, Object original, Object clone, AbstractRecord row, AbstractSession cloningSession, boolean buildDirectlyFromRow) {
        return cloningSession.createCloneQueryValueHolder(attributeValue, clone, row, this);
    }

    /**
     * INTERNAL:
     * Return true if the merge should be bypassed. This would be the case for several reasons, depending on
     * the kind of merge taking place.
     */
    protected boolean dontDoMerge(Object target, Object source, MergeManager mergeManager) {
        if (!shouldMergeCascadeReference(mergeManager)) {
            return true;
        }
        if (mergeManager.isForRefresh()) {
            // For reverts we are more concerned about the target than the source.
            if (!isAttributeValueInstantiated(target)) {
                return true;
            }
        } else {
            if (mergeManager.shouldRefreshRemoteObject() && shouldMergeCascadeParts(mergeManager) && usesIndirection()) {
                return true;
            } else {
                // Nothing to merge if the source side was never instantiated.
                if (!isAttributeValueInstantiated(source)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * PUBLIC:
     * Indicates whether the referenced object should always be batch read on read all queries.
     * Batch reading will read all of the related objects in a single query when accessed from an originating read all.
     * This should only be used if it is known that the related objects are always required with the source object, or indirection is not used.
     */
    public void dontUseBatchReading() {
        setUsesBatchReading(false);
    }

    /**
     * PUBLIC:
     * Indirection means that a ValueHolder will be put in-between the attribute and the real object.
     * This allows for the reading of the target from the database to be delayed until accessed.
     * This defaults to true and is strongly suggested as it gives a huge performance gain.
     */
    public void dontUseIndirection() {
        setIndirectionPolicy(new NoIndirectionPolicy());
    }

    /**
     * INTERNAL:
     * Called if shouldExtendPessimisticLockScopeInTargetQuery() is true.
     * Adds locking clause to the target query to extend pessimistic lock scope.
     */
    protected void extendPessimisticLockScopeInTargetQuery(ObjectLevelReadQuery targetQuery, ObjectBuildingQuery sourceQuery) {
        targetQuery.setLockMode(sourceQuery.getLockMode());
    }

    /**
     * INTERNAL:
     * Called if shouldExtendPessimisticLockScopeInSourceQuery is true.
     * Adds fields to be locked to the where clause of the source query.
     * Note that the sourceQuery must be ObjectLevelReadQuery so that it has ExpressionBuilder.
*
     * This method must be implemented in subclasses that allow
     * setting shouldExtendPessimisticLockScopeInSourceQuery to true.
     */
    public void extendPessimisticLockScopeInSourceQuery(ObjectLevelReadQuery sourceQuery) {
        // No-op by default; overridden by subclasses that support source-query lock scope extension.
    }

    /**
     * INTERNAL:
     * Extract the value from the batch optimized query, this should be supported by most query types.
     */
    public Object extractResultFromBatchQuery(ReadQuery batchQuery, CacheKey parentCacheKey, AbstractRecord sourceRow, AbstractSession session, ObjectLevelReadQuery originalQuery) throws QueryException {
        // NOTE(review): raw Map/List/Set below — generic type parameters appear to have been
        // stripped from this file; verify against the upstream source.
        Map batchedObjects = null;
        Object result = null;
        Object sourceKey = extractBatchKeyFromRow(sourceRow, session);
        if (sourceKey == null) {
            // If the foreign key was null, then just return null.
            return null;
        }
        Object cachedObject = checkCacheForBatchKey(sourceRow, sourceKey, batchedObjects, batchQuery, originalQuery, session);
        if (cachedObject != null) {
            // If the object is already in the cache, then just return it.
            return cachedObject;
        }
        // Ensure the query is only executed once.
        synchronized (batchQuery) {
            // Check if query was already executed.
            batchedObjects = batchQuery.getBatchObjects();
            BatchFetchPolicy originalPolicy = originalQuery.getBatchFetchPolicy();
            if (batchedObjects == null) {
                batchedObjects = new Hashtable();
                batchQuery.setBatchObjects(batchedObjects);
            } else {
                result = batchedObjects.get(sourceKey);
                if (result == Helper.NULL_VALUE) {
                    // Sentinel: the key was fetched and had no related object.
                    return null;
                // If IN may not have that batch yet, or it may have been null.
                } else if ((result != null) || (!originalPolicy.isIN())) {
                    return result;
                }
            }
            // In case of IN the batch including this row may not have been executed yet.
            if (result == null) {
                AbstractRecord translationRow = originalQuery.getTranslationRow();
                // Execute query and index resulting object sets by key.
                if (originalPolicy.isIN()) {
                    // Need to extract all foreign key values from all parent rows for IN parameter.
                    List parentRows = originalPolicy.getDataResults(this);
                    // Execute queries by batch if too many rows.
                    int rowsSize = parentRows.size();
                    int size = Math.min(rowsSize, originalPolicy.getSize());
                    if (size == 0) {
                        return null;
                    }
                    int startIndex = 0;
                    if (size != rowsSize) {
                        // If only fetching a page, need to make sure the row we want is in the page.
                        startIndex = parentRows.indexOf(sourceRow);
                    }
                    List foreignKeyValues = new ArrayList(size);
                    Set foreignKeys = new HashSet(size);
                    int index = 0;
                    int offset = startIndex;
                    for (int count = 0; count < size; count++) {
                        if (index >= rowsSize) {
                            // Processed all rows, done.
                            break;
                        } else if ((offset + index) >= rowsSize) {
                            // If passed the end, go back to start.
                            offset = index * -1;
                        }
                        AbstractRecord row = parentRows.get(offset + index);
                        // Handle duplicate rows in the ComplexQueryResult being replaced with null, as a
                        // result of duplicate filtering being true for constructing the ComplexQueryResult
                        if (row != null) {
                            Object foreignKey = extractBatchKeyFromRow(row, session);
                            if (foreignKey == null) {
                                // Ignore null foreign keys.
                                count--;
                            } else {
                                cachedObject = checkCacheForBatchKey(row, foreignKey, batchedObjects, batchQuery, originalQuery, session);
                                if (cachedObject != null) {
                                    // Avoid fetching things a cache hit occurs for.
                                    count--;
                                } else {
                                    // Ensure the same id is not selected twice.
                                    if (foreignKeys.contains(foreignKey)) {
                                        count--;
                                    } else {
                                        Object[] key = ((CacheId)foreignKey).getPrimaryKey();
                                        Object foreignKeyValue = key[0];
                                        // Support composite keys using nested IN.
                                        if (key.length > 1) {
                                            foreignKeyValue = Arrays.asList(key);
                                        }
                                        foreignKeyValues.add(foreignKeyValue);
                                        foreignKeys.add(foreignKey);
                                    }
                                }
                            }
                        }
                        index++;
                    }
                    // Need to compute remaining rows, this is tricky because a page in the middle could have been processed.
                    List remainingParentRows = null;
                    if (startIndex == 0) {
                        // Tail
                        remainingParentRows = new ArrayList(parentRows.subList(index, rowsSize));
                    } else if (startIndex == offset) {
                        // Head and tail.
                        remainingParentRows = new ArrayList(parentRows.subList(0, startIndex - 1));
                        remainingParentRows.addAll(parentRows.subList(index, rowsSize));
                    } else {
                        // Middle
                        // Check if empty,
                        if ((offset + index) >= (startIndex - 1)) {
                            remainingParentRows = new ArrayList(0);
                        } else {
                            remainingParentRows = new ArrayList(parentRows.subList(offset + index, startIndex - 1));
                        }
                    }
                    originalPolicy.setDataResults(this, remainingParentRows);
                    // Clone the translation row so the shared original is not mutated.
                    translationRow = translationRow.clone();
                    translationRow.put(QUERY_BATCH_PARAMETER, foreignKeyValues);
                    // Register each id as null, in case it has no relationship.
                    for (Object foreignKey : foreignKeys) {
                        batchedObjects.put(foreignKey, Helper.NULL_VALUE);
                    }
                } else if (batchQuery.isReadAllQuery() && ((ReadAllQuery)batchQuery).getBatchFetchPolicy().isIN()) {
                    throw QueryException.originalQueryMustUseBatchIN(this, originalQuery);
                }
                executeBatchQuery(batchQuery, parentCacheKey, batchedObjects, session, translationRow);
                batchQuery.setSession(null);
            }
        }
        result = batchedObjects.get(sourceKey);
        if (result == Helper.NULL_VALUE) {
            return null;
        } else {
            return result;
        }
    }

    /**
     * INTERNAL:
     * Extract the batch key value from the source row.
     * Used for batch reading, most following same order and fields as in the mapping.
     * The method should be overridden by classes that support batch reading.
     */
    protected Object extractBatchKeyFromRow(AbstractRecord targetRow, AbstractSession session) {
        throw QueryException.batchReadingNotSupported(this, null);
    }

    /**
     * INTERNAL:
     * This method is used to store the FK fields that can be cached that correspond to noncacheable mappings;
     * the FK field values will be used to re-issue the query when cloning the shared cache entity.
     */
    public abstract void collectQueryParameters(Set cacheFields);

    /**
     * INTERNAL:
     * Check if the target object is in the cache if possible based on the source row.
     * If in the cache, add the object to the batch results.
     * Return null if not possible or not in the cache.
*/
    protected Object checkCacheForBatchKey(AbstractRecord sourceRow, Object foreignKey, Map batchObjects, ReadQuery batchQuery, ObjectLevelReadQuery originalQuery, AbstractSession session) {
        // Default: cache lookup not supported; subclasses may override.
        return null;
    }

    /**
     * INTERNAL:
     * Prepare and execute the batch query and store the
     * results for each source object in a map keyed by the
     * mappings source keys of the source objects.
     */
    protected void executeBatchQuery(DatabaseQuery query, CacheKey parentCacheKey, Map referenceObjectsByKey, AbstractSession session, AbstractRecord row) {
        throw QueryException.batchReadingNotSupported(this, query);
    }

    /**
     * INTERNAL:
     * Clone and prepare the JoinedAttributeManager nested JoinedAttributeManager.
     * This is used for nested joining as the JoinedAttributeManager passed to the joined build object.
     */
    public ObjectLevelReadQuery prepareNestedJoins(JoinedAttributeManager joinManager, ObjectBuildingQuery baseQuery, AbstractSession session) {
        // A nested query must be built to pass to the descriptor that looks like the real query execution would.
        ObjectLevelReadQuery nestedQuery = (ObjectLevelReadQuery)((ObjectLevelReadQuery)getSelectionQuery()).deepClone();
        nestedQuery.setSession(session);
        nestedQuery.setShouldUseSerializedObjectPolicy(baseQuery.shouldUseSerializedObjectPolicy());
        // Must cascade for nested partial/join attributes, the expressions must be filtered to only the nested ones.
        if (baseQuery.hasPartialAttributeExpressions()) {
            nestedQuery.setPartialAttributeExpressions(extractNestedExpressions(((ObjectLevelReadQuery)baseQuery).getPartialAttributeExpressions(), nestedQuery.getExpressionBuilder(), false));
            // bug 5501751: USING GETALLOWINGNULL() WITH ADDPARTIALATTRIBUTE() BROKEN IN 10.1.3
            // The query against Employee with
            //   query.addPartialAttribute(builder.getAllowingNull("address"));
            // in case there's no address returns null instead of Address object.
            // Note that in case
            //   query.addPartialAttribute(builder.getAllowingNull("address").get("city"));
            // in case there's no address an empty Address object (all attributes are nulls) is returned.
            if (nestedQuery.getPartialAttributeExpressions().isEmpty()) {
                if (hasRootExpressionThatShouldUseOuterJoin(((ObjectLevelReadQuery)baseQuery).getPartialAttributeExpressions())) {
                    nestedQuery.setShouldBuildNullForNullPk(true);
                }
            }
        } else {
            // Propagate the nested fetch group, if any, before computing nested joins.
            if (nestedQuery.getDescriptor().hasFetchGroupManager()) {
                FetchGroup sourceFG = baseQuery.getExecutionFetchGroup();
                if (sourceFG != null) {
                    FetchGroup targetFetchGroup = sourceFG.getGroup(getAttributeName());
                    if (targetFetchGroup != null) {
                        nestedQuery.setFetchGroup(targetFetchGroup);
                        nestedQuery.prepareFetchGroup();
                    }
                }
            }
            List nestedJoins = extractNestedExpressions(joinManager.getJoinedAttributeExpressions(), nestedQuery.getExpressionBuilder(), false);
            if (nestedJoins.size() > 0) {
                // Recompute the joined indexes based on the nested join expressions.
                nestedQuery.getJoinedAttributeManager().clear();
                nestedQuery.getJoinedAttributeManager().setJoinedAttributeExpressions_(nestedJoins);
                // the next line sets isToManyJoinQuery flag
                nestedQuery.getJoinedAttributeManager().prepareJoinExpressions(session);
                nestedQuery.getJoinedAttributeManager().computeJoiningMappingQueries(session);
                nestedQuery.getJoinedAttributeManager().computeJoiningMappingIndexes(true, session, 0);
            } else if (nestedQuery.hasJoining()) {
                // Clear any mapping level joins.
                nestedQuery.setJoinedAttributeManager(null);
            }
            // Configure nested locking clause.
            if (baseQuery.isLockQuery()) {
                if (((ObjectLevelReadQuery)baseQuery).getLockingClause().isForUpdateOfClause()) {
                    ForUpdateOfClause clause = (ForUpdateOfClause)((ObjectLevelReadQuery)baseQuery).getLockingClause().clone();
                    clause.setLockedExpressions(extractNestedExpressions(clause.getLockedExpressions(), nestedQuery.getExpressionBuilder(), true));
                    nestedQuery.setLockingClause(clause);
                } else {
                    nestedQuery.setLockingClause(((ObjectLevelReadQuery)baseQuery).getLockingClause());
                }
            }
        }
        nestedQuery.setShouldMaintainCache(baseQuery.shouldMaintainCache());
        nestedQuery.setShouldRefreshIdentityMapResult(baseQuery.shouldRefreshIdentityMapResult());
        // For flashback: Must still propagate all properties, as the
        // attributes of this joined attribute may be read later too.
        if (baseQuery.isObjectLevelReadQuery() && ((ObjectLevelReadQuery)baseQuery).hasAsOfClause()) {
            nestedQuery.setAsOfClause(((ObjectLevelReadQuery)baseQuery).getAsOfClause());
        }
        nestedQuery.setCascadePolicy(baseQuery.getCascadePolicy());
        if (nestedQuery.hasJoining()) {
            nestedQuery.getJoinedAttributeManager().computeJoiningMappingQueries(session);
        }
        nestedQuery.setSession(null);
        nestedQuery.setRequiresDeferredLocks(baseQuery.requiresDeferredLocks());
        return nestedQuery;
    }

    /**
     * INTERNAL:
     * Allow the mapping the do any further batch preparation.
     */
    protected void postPrepareNestedBatchQuery(ReadQuery batchQuery, ObjectLevelReadQuery query) {
        // Do nothing.
    }

    /**
     * INTERNAL:
     * Return the selection criteria used to IN batch fetching.
     */
    protected Expression buildBatchCriteria(ExpressionBuilder builder, ObjectLevelReadQuery query) {
        throw QueryException.batchReadingNotSupported(this, null);
    }

    /**
     * INTERNAL:
     * Clone and prepare the selection query as a nested batch read query.
     * This is used for nested batch reading.
     */
    public ReadQuery prepareNestedBatchQuery(ObjectLevelReadQuery query) {
        // For CR#2646-S.M. In case of inheritance the descriptor to use may not be that
        // of the source query (the base class descriptor), but that of the subclass, if the
        // attribute is only of the subclass. Thus in this case use the descriptor from the mapping.
        // Also: for Bug 5478648 - Do not switch the descriptor if the query's descriptor is an aggregate
        ClassDescriptor descriptorToUse = query.getDescriptor();
        if ((descriptorToUse != this.descriptor) && (!descriptorToUse.getMappings().contains(this)) && (!this.descriptor.isDescriptorTypeAggregate())) {
            descriptorToUse = this.descriptor;
        }
        ExpressionBuilder builder = new ExpressionBuilder(this.referenceClass);
        builder.setQueryClassAndDescriptor(this.referenceClass, getReferenceDescriptor());
        ReadAllQuery batchQuery = new ReadAllQuery(this.referenceClass, builder);
        batchQuery.setName(getAttributeName());
        batchQuery.setDescriptor(getReferenceDescriptor());
        batchQuery.setSession(query.getSession());
        batchQuery.setShouldUseSerializedObjectPolicy(query.shouldUseSerializedObjectPolicy());
        //bug 3965568
        // we should not wrap the results as this is an internal query
        batchQuery.setShouldUseWrapperPolicy(false);
        if (query.shouldCascadeAllParts() || (query.shouldCascadePrivateParts() && isPrivateOwned()) || (query.shouldCascadeByMapping() && this.cascadeRefresh)) {
            batchQuery.setShouldRefreshIdentityMapResult(query.shouldRefreshIdentityMapResult());
            batchQuery.setCascadePolicy(query.getCascadePolicy());
            batchQuery.setShouldMaintainCache(query.shouldMaintainCache());
            if (query.hasAsOfClause()) {
                batchQuery.setAsOfClause(query.getAsOfClause());
            }
            //bug 3802197 - cascade binding and prepare settings
            batchQuery.setShouldBindAllParameters(query.getShouldBindAllParameters());
            batchQuery.setShouldPrepare(query.shouldPrepare());
        }
        batchQuery.setShouldOuterJoinSubclasses(query.shouldOuterJoinSubclasses()); //CR #4365
        batchQuery.setQueryId(query.getQueryId());
        Expression batchSelectionCriteria = null;
        // Build the batch query, either using joining, or an exists sub-select.
        BatchFetchType batchType = query.getBatchFetchPolicy().getType();
        if (this.batchFetchType != null) {
            batchType = this.batchFetchType;
        }
        if (batchType == BatchFetchType.EXISTS) {
            // Using an EXISTS sub-select: WHERE EXISTS (sub-select joining source selection criteria and mapping criteria)
            // NOTE(review): the original comment here was garbled by markup stripping; wording reconstructed.
            ExpressionBuilder subBuilder = new ExpressionBuilder(descriptorToUse.getJavaClass());
            subBuilder.setQueryClassAndDescriptor(descriptorToUse.getJavaClass(), descriptorToUse);
            ReportQuery subQuery = new ReportQuery(descriptorToUse.getJavaClass(), subBuilder);
            subQuery.setDescriptor(descriptorToUse);
            subQuery.setShouldRetrieveFirstPrimaryKey(true);
            Expression subCriteria = subBuilder.twist(getSelectionCriteria(), builder);
            if (query.getSelectionCriteria() != null) {
                // For bug 2612567, any query can have batch attributes, so the
                // original selection criteria can be quite complex, with multiple
                // builders (i.e. for parallel selects).
                // Now uses cloneUsing(newBase) instead of rebuildOn(newBase).
                subCriteria = query.getSelectionCriteria().cloneUsing(subBuilder).and(subCriteria);
            }
            // Check for history and set asOf.
            if (descriptorToUse.getHistoryPolicy() != null) {
                if (query.getSession().getAsOfClause() != null) {
                    subBuilder.asOf(query.getSession().getAsOfClause());
                } else if (batchQuery.getAsOfClause() == null) {
                    subBuilder.asOf(AsOfClause.NO_CLAUSE);
                } else {
                    subBuilder.asOf(batchQuery.getAsOfClause());
                }
            }
            subQuery.setSelectionCriteria(subCriteria);
            batchSelectionCriteria = builder.exists(subQuery);
        } else if (batchType == BatchFetchType.IN) {
            // Using an IN with foreign key values (WHERE FK IN :QUERY_BATCH_PARAMETER)
            batchSelectionCriteria = buildBatchCriteria(builder, query);
        } else {
            // Using a join: WHERE (mapping criteria) AND (original selection criteria).
            // Join the query where clause with the mapping's,
            // this will cause a join that should bring in all of the target objects.
            Expression backRef = builder.getManualQueryKey(getAttributeName() + "-back-ref", descriptorToUse);
            batchSelectionCriteria = backRef.twist(getSelectionCriteria(), builder);
            if (query.getSelectionCriteria() != null) {
                // For bug 2612567, any query can have batch attributes, so the
                // original selection criteria can be quite complex, with multiple
                // builders (i.e. for parallel selects).
                // Now uses cloneUsing(newBase) instead of rebuildOn(newBase).
                batchSelectionCriteria = batchSelectionCriteria.and(query.getSelectionCriteria().cloneUsing(backRef));
            }
            // Since a manual query key expression does not really get normalized,
            // it must get its additional expressions added in here. Probably best
            // to somehow keep all this code inside QueryKeyExpression.normalize.
            if (descriptorToUse.getQueryManager().getAdditionalJoinExpression() != null) {
                batchSelectionCriteria = batchSelectionCriteria.and(descriptorToUse.getQueryManager().getAdditionalJoinExpression().rebuildOn(backRef));
            }
            // Check for history and add history expression.
            if (descriptorToUse.getHistoryPolicy() != null) {
                if (query.getSession().getAsOfClause() != null) {
                    backRef.asOf(query.getSession().getAsOfClause());
                } else if (batchQuery.getAsOfClause() == null) {
                    backRef.asOf(AsOfClause.NO_CLAUSE);
                } else {
                    backRef.asOf(batchQuery.getAsOfClause());
                }
                batchSelectionCriteria = batchSelectionCriteria.and(descriptorToUse.getHistoryPolicy().additionalHistoryExpression(backRef, backRef));
            }
        }
        batchQuery.setSelectionCriteria(batchSelectionCriteria);
        if (query.isDistinctComputed()) {
            // Only recompute if it has not already been set by the user
            batchQuery.setDistinctState(query.getDistinctState());
        }
        // Add batch reading attributes contained in the mapping's query.
        ReadQuery mappingQuery = this.selectionQuery;
        if (mappingQuery.isReadAllQuery()) {
            // CR#3238 clone these vectors so they will not grow with each call to the query. -TW
            batchQuery.setOrderByExpressions(new ArrayList(((ReadAllQuery)mappingQuery).getOrderByExpressions()));
            if (((ReadAllQuery)mappingQuery).hasBatchReadAttributes()) {
                for (Expression expression : ((ReadAllQuery)mappingQuery).getBatchReadAttributeExpressions()) {
                    batchQuery.addBatchReadAttribute(expression);
                }
            }
        }
        // Bug 385700 - Populate session & query class if not initialized by
        // ObjectLevelReadQuery.computeBatchReadMappingQueries() in case batch query
        // has been using inheritance and child descriptors can have different mappings.
        if (query.hasBatchReadAttributes()) {
            for (Expression expression : query.getBatchReadAttributeExpressions()) {
                ObjectExpression batchReadExpression = (ObjectExpression) expression;
                // Batch Read Attribute Expressions may not have initialized.
                ExpressionBuilder expressionBuilder = batchReadExpression.getBuilder();
                if (expressionBuilder.getQueryClass() == null) {
                    expressionBuilder.setSession(query.getSession().getRootSession(null));
                    expressionBuilder.setQueryClass(query.getReferenceClass());
                }
            }
            // Compute nested batch attribute expressions, and add them to batch query.
            List nestedExpressions = extractNestedExpressions(query.getBatchReadAttributeExpressions(), batchQuery.getExpressionBuilder(), false);
            batchQuery.getBatchReadAttributeExpressions().addAll(nestedExpressions);
        }
        batchQuery.setBatchFetchType(batchType);
        batchQuery.setBatchFetchSize(query.getBatchFetchPolicy().getSize());
        // Allow subclasses to further prepare.
        postPrepareNestedBatchQuery(batchQuery, query);
        // Set nested fetch group.
        if (batchQuery.getDescriptor().hasFetchGroupManager()) {
            // NOTE(review): local name "sourceFetchGruop" is a typo for "sourceFetchGroup" (kept to preserve tokens).
            FetchGroup sourceFetchGruop = query.getExecutionFetchGroup();
            if (sourceFetchGruop != null) {
                FetchGroup targetFetchGroup = sourceFetchGruop.getGroup(getAttributeName());
                if (targetFetchGroup != null) {
                    ((ObjectLevelReadQuery)batchQuery).setFetchGroup(targetFetchGroup);
                }
            }
        }
        if (batchQuery.shouldPrepare()) {
            batchQuery.checkPrepare(query.getSession(), query.getTranslationRow());
        }
        batchQuery.setSession(null);
        return batchQuery;
    }

    /**
     * INTERNAL:
     * An object has been serialized from the server to the client.
     * Replace the transient attributes of the remote value holders
     * with client-side objects.
     */
    @Override
    public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) {
        this.indirectionPolicy.fixObjectReferences(object, objectDescriptors, processedObjects, query, session);
    }

    /**
     * INTERNAL:
     * Return the value of an attribute which this mapping represents for an object.
     */
    @Override
    public Object getAttributeValueFromObject(Object object) throws DescriptorException {
        Object attributeValue = super.getAttributeValueFromObject(object);
        Object indirectionValue = this.indirectionPolicy.validateAttributeOfInstantiatedObject(attributeValue);
        // PERF: Allow the indirection policy to initialize null attribute values,
        // this allows the indirection objects to not be initialized in the constructor.
        if (indirectionValue != attributeValue) {
            setAttributeValueInObject(object, indirectionValue);
            attributeValue = indirectionValue;
        }
        return attributeValue;
    }

    /**
     * INTERNAL:
     * Returns the attribute value from the reference object.
     * If the attribute is using indirection the value of the value-holder is returned.
     * If the value holder is not instantiated then it is instantiated.
*/
    public Object getAttributeValueWithClonedValueHolders(Object object) {
        Object attributeValue = getAttributeValueFromObject(object);
        if (attributeValue instanceof DatabaseValueHolder){
            return ((DatabaseValueHolder)attributeValue).clone();
        } else if (attributeValue instanceof ValueHolder){
            return ((ValueHolder)attributeValue).clone();
        }
        return attributeValue;
    }

    /**
     * INTERNAL:
     * Return source key fields for translation by an AggregateObjectMapping.
     * By default, return an empty NonSynchronizedVector.
     */
    public Collection getFieldsForTranslationInAggregate() {
        return new NonSynchronizedVector(0);
    }

    /**
     * INTERNAL:
     * Should be overridden by subclass that allows setting
     * extendPessimisticLockScope to DEDICATED_QUERY.
     */
    protected ReadQuery getExtendPessimisticLockScopeDedicatedQuery(AbstractSession session, short lockMode) {
        return null;
    }

    /**
     * INTERNAL:
     * Return the mapping's indirection policy.
     */
    public IndirectionPolicy getIndirectionPolicy() {
        return indirectionPolicy;
    }

    /**
     * INTERNAL:
     * Return whether the specified object is instantiated.
     */
    @Override
    public boolean isAttributeValueFromObjectInstantiated(Object object) {
        return this.indirectionPolicy.objectIsInstantiated(getAttributeValueFromObject(object));
    }

    /**
     * INTERNAL:
     * Returns the join criteria stored in the mapping selection query. This criteria
     * is used to read reference objects across the tables from the database.
     */
    public Expression getJoinCriteria(ObjectExpression context, Expression base) {
        Expression selectionCriteria = getSelectionCriteria();
        return context.getBaseExpression().twist(selectionCriteria, base);
    }

    /**
     * INTERNAL:
     * Return the object on the client corresponding to the specified object.
     * ForeignReferenceMappings have to worry about
     * maintaining object identity.
*/ @Override public Object getObjectCorrespondingTo(Object object, DistributedSession session, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query) { return session.getObjectCorrespondingTo(object, objectDescriptors, processedObjects, query); } /** * INTERNAL: * Returns the attribute value from the reference object. * If the attribute is using indirection the value of the value-holder is returned. * If the value holder is not instantiated then it is instantiated. */ @Override public Object getRealAttributeValueFromAttribute(Object attributeValue, Object object, AbstractSession session) { return this.indirectionPolicy.getRealAttributeValueFromObject(object, attributeValue); } /** * Return if this mapping is lazy. * For relationship mappings this should normally be the same value as indirection, * however for eager relationships this can be used with indirection to allow * indirection locking and change tracking, but still always force instantiation. */ @Override public boolean isLazy() { if (isLazy == null) { // False by default for mappings without indirection. isLazy = usesIndirection(); } return isLazy; } /** * INTERNAL: * Return whether this mapping should be traversed when we are locking. */ @Override public boolean isLockableMapping(){ return !(this.usesIndirection()) && !referenceDescriptor.getCachePolicy().isIsolated(); } /** * INTERNAL: * Trigger the instantiation of the attribute if lazy. */ @Override public void instantiateAttribute(Object object, AbstractSession session) { this.indirectionPolicy.instantiateObject(object, getAttributeValueFromObject(object)); } /** * PUBLIC: * Returns the reference class. */ public Class getReferenceClass() { return referenceClass; } /** * INTERNAL: * Returns the reference class name. 
*/ public String getReferenceClassName() { if ((referenceClassName == null) && (referenceClass != null)) { referenceClassName = referenceClass.getName(); } return referenceClassName; } /** * INTERNAL: * Return the referenceDescriptor. This is a descriptor which is associated with * the reference class. */ public ClassDescriptor getReferenceDescriptor() { if (referenceDescriptor == null) { if (getTempSession() == null) { return null; } else { referenceDescriptor = getTempSession().getDescriptor(getReferenceClass()); } } return referenceDescriptor; } /** * INTERNAL: * Return the relationshipPartner mapping for this bi-directional mapping. If the relationshipPartner is null then * this is a uni-directional mapping. */ public DatabaseMapping getRelationshipPartner() { if ((this.relationshipPartner == null) && (this.relationshipPartnerAttributeName != null)) { setRelationshipPartner(getReferenceDescriptor().getObjectBuilder().getMappingForAttributeName(getRelationshipPartnerAttributeName())); } return this.relationshipPartner; } /** * PUBLIC: * Use this method retrieve the relationship partner attribute name of this bidirectional Mapping. */ public String getRelationshipPartnerAttributeName() { return this.relationshipPartnerAttributeName; } /** * INTERNAL: * Returns the selection criteria stored in the mapping selection query. This criteria * is used to read reference objects from the database. It will return null before * initialization. To obtain the selection criteria before initialization (e.g., in a * customizer) you can use the buildSelectionCriteria() method defined by some subclasses. * * @see org.eclipse.persistence.mappings.OneToOneMapping#buildSelectionCriteria() * @see org.eclipse.persistence.mappings.OneToManyMapping#buildSelectionCriteria() */ public Expression getSelectionCriteria() { return getSelectionQuery().getSelectionCriteria(); } /** * INTERNAL: * Returns the read query associated with the mapping. 
 */
public ReadQuery getSelectionQuery() {
    return selectionQuery;
}

// Temporary session used to resolve the reference descriptor before full
// initialization (e.g. during remote initialization).
protected AbstractSession getTempSession() {
    return tempInitSession;
}

/**
 * INTERNAL:
 * Extract and return the appropriate value from the
 * specified remote value holder.
 */
@Override
public Object getValueFromRemoteValueHolder(RemoteValueHolder remoteValueHolder) {
    return this.indirectionPolicy.getValueFromRemoteValueHolder(remoteValueHolder);
}

/**
 * INTERNAL:
 * Indicates whether the selection query is TopLink generated or defined by
 * the user.
 */
public boolean hasCustomSelectionQuery() {
    return hasCustomSelectionQuery;
}

/**
 * INTERNAL:
 * Indicates whether the mapping (or at least one of its nested mappings, at any nested depth)
 * references an entity.
 * To return true the mapping (or nested mapping) should be ForeignReferenceMapping with non-null and non-aggregate reference descriptor.
 */
@Override
public boolean hasNestedIdentityReference() {
    return true;
}

/**
 * INTERNAL:
 * Initialize the state of mapping.
 */
@Override
public void preInitialize(AbstractSession session) throws DescriptorException {
    super.preInitialize(session);
    // If weaving was used the mapping must be configured to use the weaved get/set methods.
    if ((this.indirectionPolicy instanceof BasicIndirectionPolicy) && ClassConstants.PersistenceWeavedLazy_Class.isAssignableFrom(getDescriptor().getJavaClass())) {
        Class attributeType = getAttributeAccessor().getAttributeClass();
        // Check that not already weaved or coded.
        if (!(ClassConstants.ValueHolderInterface_Class.isAssignableFrom(attributeType))) {
            if (!indirectionPolicy.isWeavedObjectBasicIndirectionPolicy()){
                if(getAttributeAccessor().isMethodAttributeAccessor()) {
                    useWeavedIndirection(getGetMethodName(), getSetMethodName(), true);
                } else if(getAttributeAccessor().isInstanceVariableAttributeAccessor()) {
                    useWeavedIndirection(Helper.getWeavedGetMethodName(getAttributeName()), Helper.getWeavedSetMethodName(getAttributeName()), false);
                }
            }
            // Redirect this mapping to the weaved value-holder accessor methods.
            setGetMethodName(Helper.getWeavedValueHolderGetMethodName(getAttributeName()));
            setSetMethodName(Helper.getWeavedValueHolderSetMethodName(getAttributeName()));
            // Must re-initialize the attribute accessor (accessor method names changed above).
            super.preInitialize(session);
        }
    }
    if (getPartitioningPolicyName() != null) {
        PartitioningPolicy policy = session.getProject().getPartitioningPolicy(getPartitioningPolicyName());
        if (policy == null) {
            session.getIntegrityChecker().handleError(DescriptorException.missingPartitioningPolicy(getPartitioningPolicyName(), null, this));
        }
        setPartitioningPolicy(policy);
    }
    // Cascade-on-delete only applies if the platform supports it.
    if (this.isCascadeOnDeleteSetOnDatabase && !session.getPlatform().supportsDeleteOnCascade()) {
        this.isCascadeOnDeleteSetOnDatabase = false;
    }
}

/**
 * INTERNAL:
 * Initialize the state of mapping.
 */
@Override
public void initialize(AbstractSession session) throws DescriptorException {
    super.initialize(session);
    if (this.isPrivateOwned && (this.descriptor != null)) {
        this.descriptor.addMappingsPostCalculateChanges(this);
    }
    initializeReferenceDescriptor(session);
    initializeSelectionQuery(session);
    this.indirectionPolicy.initialize();
    // An isolated target cannot be cached with the source.
    if ((this.referenceDescriptor != null) && this.referenceDescriptor.getCachePolicy().isIsolated()) {
        this.isCacheable = false;
    }
}

/**
 * Initialize and set the descriptor for the referenced class in this mapping.
 */
protected void initializeReferenceDescriptor(AbstractSession session) throws DescriptorException {
    if (getReferenceClass() == null) {
        throw DescriptorException.referenceClassNotSpecified(this);
    }
    ClassDescriptor refDescriptor = session.getDescriptor(getReferenceClass());
    if (refDescriptor == null) {
        throw DescriptorException.descriptorIsMissing(getReferenceClass().getName(), this);
    }
    if (refDescriptor.isAggregateDescriptor() && (!isAggregateCollectionMapping())) {
        throw DescriptorException.referenceDescriptorCannotBeAggregate(this);
    }
    // can not be isolated if it is null. Seems that only aggregates do not set
    // the owning descriptor on the mapping.
    setReferenceDescriptor(refDescriptor);
}

/**
 * INTERNAL:
 * The method validateAttributeOfInstantiatedObject(Object attributeValue) fixes the value of the attributeValue
 * in cases where it is null and indirection requires that it contain some specific data structure. Return whether this will happen.
 * This method is used to help determine if indirection has been triggered.
 * @param object the source object holding the attribute
 * @return whether the raw attribute value is already in its final form
 * @see #getAttributeValueFromObject(Object)
 */
public boolean isAttributeValueFullyBuilt(Object object){
    // Use super to read the raw attribute without triggering the fix-up.
    Object attributeValue = super.getAttributeValueFromObject(object);
    return this.indirectionPolicy.isAttributeValueFullyBuilt(attributeValue);
}

/**
 * A subclass should implement this method if it wants non default behavior.
 */
protected void initializeSelectionQuery(AbstractSession session) throws DescriptorException {
    if (((ObjectLevelReadQuery)getSelectionQuery()).getReferenceClass() == null) {
        throw DescriptorException.referenceClassNotSpecified(this);
    }
    getSelectionQuery().setName(getAttributeName());
    getSelectionQuery().setDescriptor(getReferenceDescriptor());
    getSelectionQuery().setSourceMapping(this);
    // Inherit the mapping's partitioning policy unless the query has its own.
    if (getSelectionQuery().getPartitioningPolicy() == null) {
        getSelectionQuery().setPartitioningPolicy(getPartitioningPolicy());
    }
}

/**
 * INTERNAL:
 * The referenced object is checked if it is instantiated or not.
 */
public boolean isAttributeValueInstantiated(Object object) {
    return this.indirectionPolicy.objectIsInstantiated(getAttributeValueFromObject(object));
}

/**
 * PUBLIC:
 * Check cascading value for the detach operation.
 */
public boolean isCascadeDetach() {
    return this.cascadeDetach;
}

/**
 * PUBLIC:
 * Check cascading value for the CREATE operation.
 */
public boolean isCascadePersist() {
    return this.cascadePersist;
}

/**
 * PUBLIC:
 * Check cascading value for the MERGE operation.
 */
public boolean isCascadeMerge() {
    return this.cascadeMerge;
}

/**
 * PUBLIC:
 * Check cascading value for the REFRESH operation.
 */
public boolean isCascadeRefresh() {
    return this.cascadeRefresh;
}

/**
 * PUBLIC:
 * Check cascading value for the REMOVE operation.
 */
public boolean isCascadeRemove() {
    return this.cascadeRemove;
}

/**
 * INTERNAL:
 * Return if the mapping has any ownership or other dependency over its target object(s).
 */
@Override
public boolean hasDependency() {
    return isPrivateOwned() || isCascadeRemove();
}

/**
 * INTERNAL:
 */
@Override
public boolean isForeignReferenceMapping() {
    return true;
}

/**
 * INTERNAL:
 * Return if this mapping supports joining.
 */
public boolean isJoiningSupported() {
    return false;
}

/**
 * PUBLIC:
 * Return true if referenced objects are privately owned else false.
 */
@Override
public boolean isPrivateOwned() {
    return isPrivateOwned;
}

/**
 * INTERNAL:
 * Iterate on the iterator's current object's attribute defined by this mapping.
 * The iterator's settings for cascading and value holders determine how the
 * iteration continues from here.
 */
@Override
public void iterate(DescriptorIterator iterator) {
    Object attributeValue = this.getAttributeValueFromObject(iterator.getVisitedParent());
    this.indirectionPolicy.iterateOnAttributeValue(iterator, attributeValue);
}

/**
 * INTERNAL:
 * Iterate on the attribute value.
 * The value holder has already been processed.
 */
@Override
public abstract void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue);

/**
 * Force instantiation of the load group.
 */
@Override
public void load(final Object object, AttributeItem item, final AbstractSession session, final boolean fromFetchGroup) {
    instantiateAttribute(object, session);
    if (item.getGroup() != null && (!fromFetchGroup || session.isUnitOfWork())) {
        // if fromFetchGroup then the above instantiate already loaded the elements unless this is in UOW
        // in which case the clones must be loaded as well.
        Object value = getRealAttributeValueFromObject(object, session);
        session.load(value, item.getGroup(), getReferenceDescriptor(), fromFetchGroup);
    }
}

/**
 * INTERNAL:
 * Replace the client value holder with the server value holder,
 * after copying some of the settings from the client value holder.
 */
public void mergeRemoteValueHolder(Object clientSideDomainObject, Object serverSideDomainObject, MergeManager mergeManager) {
    this.indirectionPolicy.mergeRemoteValueHolder(clientSideDomainObject, serverSideDomainObject, mergeManager);
}

/**
 * PUBLIC:
 * Sets the reference object to be privately owned.
 * The default behavior is non private owned, or independent.
 * @see #setIsPrivateOwned(boolean)
 */
public void privateOwnedRelationship() {
    setIsPrivateOwned(true);
}

/**
 * INTERNAL:
 * Extract value from the row and set the attribute to this value in the object.
 * Return the value, as it will have been converted to the appropriate type for
 * the object.
 */
@Override
public Object readFromRowIntoObject(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object targetObject, CacheKey parentCacheKey, ObjectBuildingQuery sourceQuery, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException {
    // Out-parameter: valueFromRow flags whether the cached value was used.
    Boolean[] wasCacheUsed = new Boolean[]{Boolean.FALSE};
    Object attributeValue = valueFromRow(databaseRow, joinManager, sourceQuery, parentCacheKey, executionSession, isTargetProtected, wasCacheUsed);
    if (wasCacheUsed[0]){
        //must clone here as certain mappings require the clone object to clone the attribute.
        Integer refreshCascade = null;
        if (sourceQuery != null && sourceQuery.isObjectBuildingQuery() && sourceQuery.shouldRefreshIdentityMapResult()) {
            refreshCascade = sourceQuery.getCascadePolicy();
        }
        attributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, parentCacheKey.getObject(), parentCacheKey, targetObject, refreshCascade, executionSession, false);
    }
    if (executionSession.isUnitOfWork() && sourceQuery.shouldRefreshIdentityMapResult() || databaseRow.hasSopObject()){
        // check whether the attribute is fully built before calling getAttributeValueFromObject because that
        // call may fully build the attribute
        boolean wasAttributeValueFullyBuilt = isAttributeValueFullyBuilt(targetObject);
        Object oldAttribute = this.getAttributeValueFromObject(targetObject);
        setAttributeValueInObject(targetObject, attributeValue); // set this first to prevent infinite recursion
        if (wasAttributeValueFullyBuilt && this.indirectionPolicy.objectIsInstantiatedOrChanged(oldAttribute)){
            this.indirectionPolicy.instantiateObject(targetObject, attributeValue);
        }
    }else{
        setAttributeValueInObject(targetObject, attributeValue);
    }
    if (parentCacheKey != null){
        this.indirectionPolicy.setSourceObject(parentCacheKey.getObject(), attributeValue);
    }
    return attributeValue;
}

/**
 * INTERNAL:
 * Once descriptors are serialized to the remote session. All its mappings and reference descriptors are traversed. Usually
 * mappings are initialized and serialized reference descriptors are replaced with local descriptors if they already exist on the
 * remote session.
 */
@Override
public void remoteInitialization(DistributedSession session) {
    super.remoteInitialization(session);
    setTempSession(session);
}

/**
 * INTERNAL:
 * Replace the value holders in the specified reference object(s).
 */
@Override
public Map replaceValueHoldersIn(Object object, RemoteSessionController controller) {
    return controller.replaceValueHoldersIn(object);
}

/**
 * Returns true if this mapping's associated weaved field requires a
 * transient setting to avoid metadata processing.
 */
public boolean requiresTransientWeavedFields() {
    return requiresTransientWeavedFields;
}

/**
 * PUBLIC:
 * Sets the cascading for all JPA operations.
 */
public void setCascadeAll(boolean value) {
    setCascadePersist(value);
    setCascadeMerge(value);
    setCascadeRefresh(value);
    setCascadeRemove(value);
    setCascadeDetach(value);
}

/**
 * PUBLIC:
 * Sets the cascading for the JPA detach operation.
 */
public void setCascadeDetach(boolean value) {
    this.cascadeDetach = value;
}

/**
 * PUBLIC:
 * Sets the cascading for the JPA CREATE operation.
 */
public void setCascadePersist(boolean value) {
    this.cascadePersist = value;
}

/**
 * PUBLIC:
 * Sets the cascading for the JPA MERGE operation.
 */
public void setCascadeMerge(boolean value) {
    this.cascadeMerge = value;
}

/**
 * PUBLIC:
 * Sets the cascading for the JPA REFRESH operation.
 */
public void setCascadeRefresh(boolean value) {
    this.cascadeRefresh = value;
}

/**
 * PUBLIC:
 * Sets the cascading for the JPA REMOVE operation.
 */
public void setCascadeRemove(boolean value) {
    this.cascadeRemove = value;
}

/**
 * PUBLIC:
 * Relationship mappings create a read query to read reference objects. If this default
 * query needs to be customized then the user can specify their own read query to do the reading
 * of reference objects. One must pass an instance of ReadQuery or a subclass of ReadQuery.
 */
public void setCustomSelectionQuery(ReadQuery query) {
    setSelectionQuery(query);
    setHasCustomSelectionQuery(true);
}

protected void setHasCustomSelectionQuery(boolean bool) {
    hasCustomSelectionQuery = bool;
}

/**
 * INTERNAL:
 * A way of forcing the selection criteria to be rebuilt.
 */
public void setForceInitializationOfSelectionCriteria(boolean bool) {
    forceInitializationOfSelectionCriteria = bool;
}

/**
 * ADVANCED:
 * Set the indirection policy.
 */
public void setIndirectionPolicy(IndirectionPolicy indirectionPolicy) {
    this.indirectionPolicy = indirectionPolicy;
    // Keep the back-reference from the policy to this mapping in sync.
    indirectionPolicy.setMapping(this);
}

/**
 * PUBLIC:
 * Set if the relationship is privately owned.
 * A privately owned relationship means the target object is a dependent part of the source
 * object and is not referenced by any other object and cannot exist on its own.
 * Private ownership causes many operations to be cascaded across the relationship,
 * including, deletion, insertion, refreshing, locking (when cascaded).
 * It also ensures that private objects removed from collections are deleted and objects added are inserted.
 */
public void setIsPrivateOwned(boolean isPrivateOwned) {
    if (this.descriptor != null && !this.isMapKeyMapping()){ // initialized
        // Keep the descriptor's (and all child descriptors') post-calculate-changes
        // mapping list in sync with the private-owned flag transition.
        if (isPrivateOwned && !this.isPrivateOwned){
            this.descriptor.addMappingsPostCalculateChanges(this);
            if (getDescriptor().hasInheritance()){
                for (ClassDescriptor descriptor: getDescriptor().getInheritancePolicy().getAllChildDescriptors()) {
                    descriptor.addMappingsPostCalculateChanges(this);
                }
            }
        }else if (!isPrivateOwned && this.isPrivateOwned){
            this.descriptor.getMappingsPostCalculateChanges().remove(this);
            if (getDescriptor().hasInheritance()){
                for (ClassDescriptor descriptor: getDescriptor().getInheritancePolicy().getAllChildDescriptors()) {
                    descriptor.getMappingsPostCalculateChanges().remove(this);
                }
            }
        }
    }
    this.isPrivateOwned = isPrivateOwned;
}

/**
 * INTERNAL:
 * Set the value of the attribute mapped by this mapping,
 * placing it inside a value holder if necessary.
 * If the value holder is not instantiated then it is instantiated.
 */
@Override
public void setRealAttributeValueInObject(Object object, Object value) throws DescriptorException {
    this.indirectionPolicy.setRealAttributeValueInObject(object, value);
}

/**
 * PUBLIC:
 * Set the referenced class.
 */
public void setReferenceClass(Class referenceClass) {
    this.referenceClass = referenceClass;
    if (referenceClass != null) {
        setReferenceClassName(referenceClass.getName());
        // Make sure the reference class of the selectionQuery is set.
        setSelectionQuery(getSelectionQuery());
    }
}

/**
 * INTERNAL:
 * Used by MW.
 */
public void setReferenceClassName(String referenceClassName) {
    this.referenceClassName = referenceClassName;
}

/**
 * Set the referenceDescriptor. This is a descriptor which is associated with
 * the reference class.
 */
protected void setReferenceDescriptor(ClassDescriptor aDescriptor) {
    referenceDescriptor = aDescriptor;
}

/**
 * INTERNAL:
 * Sets the relationshipPartner mapping for this bi-directional mapping. If the relationshipPartner is null then
 * this is a uni-directional mapping.
 */
public void setRelationshipPartner(DatabaseMapping mapping) {
    this.relationshipPartner = mapping;
}

/**
 * PUBLIC:
 * Use this method to specify the relationship partner attribute name of a bidirectional Mapping.
 * TopLink will use the attribute name to find the back pointer mapping to maintain referential integrity of
 * the bi-directional mappings.
 */
public void setRelationshipPartnerAttributeName(String attributeName) {
    this.relationshipPartnerAttributeName = attributeName;
}

/**
 * Set this flag if this mapping's associated weaved field requires a
 * transient setting to avoid metadata processing.
 */
public void setRequiresTransientWeavedFields(boolean requiresTransientWeavedFields) {
    this.requiresTransientWeavedFields = requiresTransientWeavedFields;
}

/**
 * PUBLIC:
 * Sets the selection criteria to be used as a where clause to read
 * reference objects. This criteria is automatically generated by
 * TopLink if not explicitly specified by the user.
 */
public void setSelectionCriteria(Expression anExpression) {
    getSelectionQuery().setSelectionCriteria(anExpression);
}

/**
 * Sets the query.
 */
protected void setSelectionQuery(ReadQuery aQuery) {
    selectionQuery = aQuery;
    // Make sure the reference class of the selectionQuery is set.
    if ((selectionQuery != null) && selectionQuery.isObjectLevelReadQuery() && (selectionQuery.getReferenceClassName() == null)) {
        ((ObjectLevelReadQuery)selectionQuery).setReferenceClass(getReferenceClass());
    }
}

/**
 * PUBLIC:
 * This is a property on the mapping which will allow custom SQL to be
 * substituted for reading a reference object.
 */
public void setSelectionSQLString(String sqlString) {
    getSelectionQuery().setSQLString(sqlString);
    setCustomSelectionQuery(getSelectionQuery());
}

/**
 * PUBLIC:
 * This is a property on the mapping which will allow a custom call to be
 * substituted for reading a reference object.
 */
public void setSelectionCall(Call call) {
    getSelectionQuery().setCall(call);
    setCustomSelectionQuery(getSelectionQuery());
}

/**
 * ADVANCED:
 * Indicates whether pessimistic lock of ObjectLevelReadQuery with isPessimisticLockScopeExtended set to true
 * should be applied through this mapping beyond the tables mapped to the source object.
 */
public void setShouldExtendPessimisticLockScope(boolean shouldExtend) {
    extendPessimisticLockScope = shouldExtend ? ExtendPessimisticLockScope.TARGET_QUERY : ExtendPessimisticLockScope.NONE;
}

protected void setTempSession(AbstractSession session) {
    this.tempInitSession = session;
}

/**
 * PUBLIC:
 * Indicates whether the referenced object should always be batch read on read all queries.
 * Batch reading will read all of the related objects in a single query when accessed from an originating read all.
 * This should only be used if it is known that the related objects are always required with the source object, or indirection is not used.
 * @see #setBatchFetchType(BatchFetchType)
 */
public void setUsesBatchReading(boolean usesBatchReading) {
    if (usesBatchReading) {
        setBatchFetchType(BatchFetchType.JOIN);
    } else {
        setBatchFetchType(null);
    }
}

/**
 * PUBLIC:
 * Indirection means that a ValueHolder will be put in-between the attribute and the real object.
 * This allows for the reading of the target from the database to be delayed until accessed.
 * This defaults to true and is strongly suggested as it gives a huge performance gain.
 * @see #useBasicIndirection()
 * @see #dontUseIndirection()
 */
public void setUsesIndirection(boolean usesIndirection) {
    if (usesIndirection) {
        useBasicIndirection();
    } else {
        dontUseIndirection();
    }
}

/**
 * INTERNAL:
 * Indicates whether pessimistic lock of ObjectLevelReadQuery with isPessimisticLockScopeExtended set to true
 * should be applied through this mapping beyond the tables mapped to the source object.
 */
public boolean shouldExtendPessimisticLockScope() {
    return extendPessimisticLockScope != ExtendPessimisticLockScope.NONE;
}

public boolean shouldExtendPessimisticLockScopeInSourceQuery() {
    return extendPessimisticLockScope == ExtendPessimisticLockScope.SOURCE_QUERY;
}

public boolean shouldExtendPessimisticLockScopeInTargetQuery() {
    return extendPessimisticLockScope == ExtendPessimisticLockScope.TARGET_QUERY;
}

public boolean shouldExtendPessimisticLockScopeInDedicatedQuery() {
    return extendPessimisticLockScope == ExtendPessimisticLockScope.DEDICATED_QUERY;
}

/**
 * INTERNAL:
 */
protected boolean shouldForceInitializationOfSelectionCriteria() {
    return forceInitializationOfSelectionCriteria;
}

// Selection criteria are (re)built unless a custom query was supplied or
// criteria already exist, or a rebuild is explicitly forced.
protected boolean shouldInitializeSelectionCriteria() {
    if (shouldForceInitializationOfSelectionCriteria()) {
        return true;
    }
    if (hasCustomSelectionQuery()) {
        return false;
    }
    if (getSelectionCriteria() == null) {
        return true;
    }
    return false;
}

/**
 * INTERNAL:
 * Returns true if the merge should cascade to the mapping's reference's parts.
 */
public boolean shouldMergeCascadeParts(MergeManager mergeManager) {
    return (mergeManager.shouldCascadeByMapping() && ((this.isCascadeMerge() && !mergeManager.isForRefresh())
            || (this.isCascadeRefresh() && mergeManager.isForRefresh())))
            || mergeManager.shouldCascadeAllParts()
            || (mergeManager.shouldCascadePrivateParts() && isPrivateOwned());
}

/**
 * INTERNAL:
 * Returns true if the refresh should cascade to the mapping's reference's parts.
 */
public boolean shouldRefreshCascadeParts(MergeManager mergeManager) {
    return (mergeManager.shouldCascadeByMapping() && this.isCascadeRefresh())
            || mergeManager.shouldCascadeAllParts()
            || (mergeManager.shouldCascadePrivateParts() && isPrivateOwned());
}

/**
 * Returns true if the merge should cascade to the mapping's reference.
 */
protected boolean shouldMergeCascadeReference(MergeManager mergeManager) {
    if (mergeManager.shouldCascadeReferences()) {
        return true;
    }
    // P2.0.1.3: Was merging references on non-privately owned parts
    // Same logic in:
    return shouldMergeCascadeParts(mergeManager);
}

/**
 * Returns true if any process leading to object modification should also affect its parts.
 * Usually used by write, insert, update and delete.
 */
protected boolean shouldObjectModifyCascadeToParts(ObjectLevelModifyQuery query) {
    if (this.isReadOnly) {
        return false;
    }
    // Only cascade dependents writes in uow.
    if (query.shouldCascadeOnlyDependentParts()) {
        return hasConstraintDependency();
    }
    if (this.isPrivateOwned) {
        return true;
    }
    return query.shouldCascadeAllParts();
}

/**
 * PUBLIC:
 * Indicates whether the referenced object should always be batch read on read all queries.
 * Batch reading will read all of the related objects in a single query when accessed from an originating read all.
 * This should only be used if it is known that the related objects are always required with the source object, or indirection is not used.
 */
public boolean shouldUseBatchReading() {
    return this.batchFetchType != null;
}

/**
 * PUBLIC:
 * Indirection means that a ValueHolder will be put in-between the attribute and the real object.
 * This allows for the reading of the target from the database to be delayed until accessed.
 * This defaults to true and is strongly suggested as it gives a huge performance gain.
 */
public void useBasicIndirection() {
    setIndirectionPolicy(new BasicIndirectionPolicy());
}

/**
 * PUBLIC:
 * Indicates whether the referenced object should always be batch read on read all queries.
 * Batch reading will read all of the related objects in a single query when accessed from an originating read all.
 * This should only be used if it is known that the related objects are always required with the source object, or indirection is not used.
 */
public void useBatchReading() {
    setBatchFetchType(BatchFetchType.JOIN);
}

/**
 * INTERNAL:
 * Configures the mapping to use weaved indirection.
 * This requires that the toplink-agent be used to weave indirection into the class.
 * This policy is only required for method access.
 * @param getMethodName is the name of the original (or weaved in field access case) get method for the mapping.
 * @param setMethodName is the name of the original (or weaved in field access case) set method for the mapping.
 * @param hasUsedMethodAccess indicates whether method or field access was originally used.
 */
public void useWeavedIndirection(String getMethodName, String setMethodName, boolean hasUsedMethodAccess){
    setIndirectionPolicy(new WeavedObjectBasicIndirectionPolicy(getMethodName, setMethodName, null, hasUsedMethodAccess));
}

/**
 * PUBLIC:
 * Indirection means that an IndirectContainer (wrapping a ValueHolder) will be put in-between the attribute and the real object.
 * This allows for an application specific class to be used which wraps the value holder.
 * The purpose of this is that the domain objects will not be required to import the ValueHolderInterface class.
 * Refer also to transparent indirection for a transparent solution to indirection.
 */
public void useContainerIndirection(Class containerClass) {
    ContainerIndirectionPolicy policy = new ContainerIndirectionPolicy();
    policy.setContainerClass(containerClass);
    setIndirectionPolicy(policy);
}

/**
 * PUBLIC:
 * Indirection means that some sort of indirection object will be put in-between the attribute and the real object.
 * This allows for the reading of the target from the database to be delayed until accessed.
 * This defaults to true and is strongly suggested as it gives a huge performance gain.
 */
public boolean usesIndirection() {
    return this.indirectionPolicy.usesIndirection();
}

/**
 * INTERNAL:
 * Update a ChangeRecord to replace the ChangeSet for the old entity with the changeSet for the new Entity. This is
 * used when an Entity is merged into itself and the Entity references new or detached entities.
 */
public abstract void updateChangeRecordForSelfMerge(ChangeRecord changeRecord, Object source, Object target, UnitOfWorkChangeSet parentUOWChangeSet, UnitOfWorkImpl unitOfWork);

/**
 * PUBLIC:
 * Indicates whether the referenced object(s) should always be joined on read queries.
 * Joining will join the two classes' tables to read all of the data in a single query.
 * This should only be used if it is known that the related objects are always required with the source object,
 * or indirection is not used.
 * A join-fetch can either use an INNER_JOIN or OUTER_JOIN;
 * if the relationship may reference null or an empty collection an outer join should be used to avoid filtering the source objects from the queries.
 * Join fetch can also be specified on the query, and it is normally more efficient to do so as some queries may not require the related objects.
 * Typically batch reading is more efficient than join fetching and should be considered, especially for collection relationships.
 * @see org.eclipse.persistence.queries.ObjectLevelReadQuery#addJoinedAttribute(String)
 * @see org.eclipse.persistence.queries.ReadAllQuery#addBatchReadAttribute(String)
 */
public void setJoinFetch(int joinFetch) {
    this.joinFetch = joinFetch;
}

/**
 * PUBLIC:
 * Return if this relationship should always be join fetched.
 */
public int getJoinFetch() {
    return joinFetch;
}

/**
 * INTERNAL: Called by JPA metadata processing to store the owning mapping
 * for this mapping.
 *
 * @param mappedBy the attribute name of the owning side of the relationship
 */
public void setMappedBy(String mappedBy) {
    this.mappedBy = mappedBy;
}

/**
 * PUBLIC:
 * Return if this relationship should always be join fetched.
 */
public boolean isJoinFetched() {
    return getJoinFetch() != NONE;
}

/**
 * PUBLIC:
 * Return if this relationship should always be INNER join fetched.
*/ public boolean isInnerJoinFetched() { return getJoinFetch() == INNER_JOIN; } /** * PUBLIC: * Return if this relationship should always be OUTER join fetched. */ public boolean isOuterJoinFetched() { return getJoinFetch() == OUTER_JOIN; } /** * PUBLIC: * Specify this relationship to always be join fetched using an INNER join. */ public void useInnerJoinFetch() { setJoinFetch(INNER_JOIN); } /** * PUBLIC: * Specify this relationship to always be join fetched using an OUTER join. */ public void useOuterJoinFetch() { setJoinFetch(OUTER_JOIN); } /** * ADVANCED: * Return if delete cascading has been set on the database for the * mapping's foreign key constraint. */ public boolean isCascadeOnDeleteSetOnDatabase() { return isCascadeOnDeleteSetOnDatabase; } /** * ADVANCED: * Set if delete cascading has been set on the database for the * mapping's foreign key constraint. * The behavior is dependent on the mapping. *

 * <ul>
 * <li> OneToOne (target foreign key) - deletes target object (private owned)
 * <li> OneToMany, AggregateCollection - deletes target objects (private owned)
 * <li> ManyToMany - deletes from join table (only)
 * <li>
DirectCollection - delete from direct table */ public void setIsCascadeOnDeleteSetOnDatabase(boolean isCascadeOnDeleteSetOnDatabase) { this.isCascadeOnDeleteSetOnDatabase = isCascadeOnDeleteSetOnDatabase; } /** * Used to signal that this mapping references a protected/isolated entity and requires * special merge/object building behaviour. */ public void setIsCacheable(boolean cacheable) { this.isCacheable = cacheable; } /** * INTERNAL: * To validate mappings declaration */ @Override public void validateBeforeInitialization(AbstractSession session) throws DescriptorException { super.validateBeforeInitialization(session); // If a lazy mapping required weaving for lazy, and weaving did not occur, // then the mapping must be reverted to no use indirection. if ((this.indirectionPolicy instanceof WeavedObjectBasicIndirectionPolicy) && !ClassConstants.PersistenceWeavedLazy_Class.isAssignableFrom(getDescriptor().getJavaClass())) { Object[] args = new Object[2]; args[0] = getAttributeName(); args[1] = getDescriptor().getJavaClass(); session.log(SessionLog.WARNING, SessionLog.METADATA, "metadata_warning_ignore_lazy", args); setIndirectionPolicy(new NoIndirectionPolicy()); } if (getAttributeAccessor() instanceof InstanceVariableAttributeAccessor) { Class attributeType = ((InstanceVariableAttributeAccessor)getAttributeAccessor()).getAttributeType(); this.indirectionPolicy.validateDeclaredAttributeType(attributeType, session.getIntegrityChecker()); } else if (getAttributeAccessor().isMethodAttributeAccessor()) { // 323148 Class returnType = ((MethodAttributeAccessor)getAttributeAccessor()).getGetMethodReturnType(); this.indirectionPolicy.validateGetMethodReturnType(returnType, session.getIntegrityChecker()); Class parameterType = ((MethodAttributeAccessor)getAttributeAccessor()).getSetMethodParameterType(); this.indirectionPolicy.validateSetMethodParameterType(parameterType, session.getIntegrityChecker()); } } /** * This method is used to load a relationship from a list of PKs. 
This list * may be available if the relationship has been cached. */ public abstract Object valueFromPKList(Object[] pks, AbstractRecord foreignKeys, AbstractSession session); /** * INTERNAL: Return the value of the reference attribute or a value holder. * Check whether the mapping's attribute should be optimized through batch * and joining. */ @Override public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey cacheKey, AbstractSession executionSession, boolean isTargetProtected, Boolean[] wasCacheUsed) throws DatabaseException { if (this.descriptor.getCachePolicy().isProtectedIsolation()) { if (this.isCacheable && isTargetProtected && cacheKey != null) { //cachekey will be null when isolating to uow //used cached collection Object cached = cacheKey.getObject(); if (cached != null) { if (wasCacheUsed != null){ wasCacheUsed[0] = Boolean.TRUE; } //this will just clone the indirection. //the indirection object is responsible for cloning the value. return getAttributeValueFromObject(cached); } } else if (!this.isCacheable && !isTargetProtected && cacheKey != null) { return this.indirectionPolicy.buildIndirectObject(new ValueHolder(null)); } } if (row.hasSopObject()) { // DirectCollection or AggregateCollection that doesn't reference entities: no need to build members back into cache - just return the whole collection from sopObject. if (!hasNestedIdentityReference()) { return getAttributeValueFromObject(row.getSopObject()); } else { return valueFromRowInternal(row, null, sourceQuery, executionSession, true); } } // PERF: Direct variable access. // If the query uses batch reading, return a special value holder // or retrieve the object from the query property. 
if (sourceQuery.isObjectLevelReadQuery() && (((ObjectLevelReadQuery)sourceQuery).isAttributeBatchRead(this.descriptor, getAttributeName()) || (sourceQuery.isReadAllQuery() && shouldUseBatchReading()))) { return batchedValueFromRow(row, (ObjectLevelReadQuery)sourceQuery, cacheKey); } if (shouldUseValueFromRowWithJoin(joinManager, sourceQuery)) { return valueFromRowInternalWithJoin(row, joinManager, sourceQuery, cacheKey, executionSession, isTargetProtected); } else { return valueFromRowInternal(row, joinManager, sourceQuery, executionSession, false); } } /** * INTERNAL: * Indicates whether valueFromRow should call valueFromRowInternalWithJoin (true) * or valueFromRowInternal (false) */ protected boolean shouldUseValueFromRowWithJoin(JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery) { return ((joinManager != null) && (joinManager.isAttributeJoined(this.descriptor, this))) || sourceQuery.hasPartialAttributeExpressions(); } /** * INTERNAL: * If the query used joining or partial attributes, build the target object directly. * If isJoiningSupported()==true then this method must be overridden. */ protected Object valueFromRowInternalWithJoin(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, CacheKey parentCacheKey, AbstractSession executionSession, boolean isTargetProtected) throws DatabaseException { throw ValidationException.mappingDoesNotOverrideValueFromRowInternalWithJoin(Helper.getShortClassName(this.getClass())); } /** * INTERNAL: * Return the value of the reference attribute or a value holder. * Check whether the mapping's attribute should be optimized through batch and joining. 
*/ protected Object valueFromRowInternal(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) throws DatabaseException { return valueFromRowInternal(row, joinManager, sourceQuery, executionSession, false); } /** * INTERNAL: * Return the value of the reference attribute or a value holder. * Check whether the mapping's attribute should be optimized through batch and joining. * @param shouldUseSopObject indicates whether sopObject stored in the row should be used to extract the value (and fields/values stored in the row ignored). */ protected Object valueFromRowInternal(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession, boolean shouldUseSopObject) throws DatabaseException { // PERF: Direct variable access. ReadQuery targetQuery = this.selectionQuery; if (shouldUseSopObject) { Object sopAttribute = getAttributeValueFromObject(row.getSopObject()); Object sopRealAttribute; if (isCollectionMapping()) { if (sopAttribute == null) { return getContainerPolicy().containerInstance(); } sopRealAttribute = getIndirectionPolicy().getRealAttributeValueFromObject(row.getSopObject(), sopAttribute); if (getContainerPolicy().isEmpty(sopRealAttribute)) { return sopAttribute; } } else { if (sopAttribute == null) { return this.indirectionPolicy.nullValueFromRow(); } // As part of SOP object the indirection should be already triggered sopRealAttribute = getIndirectionPolicy().getRealAttributeValueFromObject(row.getSopObject(), sopAttribute); if (sopRealAttribute == null) { return sopAttribute; } } DatabaseRecord sopRow = new DatabaseRecord(0); sopRow.setSopObject(sopRealAttribute); row = sopRow; } // Copy nested fetch group from the source query if (targetQuery.isObjectLevelReadQuery() && targetQuery.getDescriptor().hasFetchGroupManager()) { FetchGroup sourceFG = sourceQuery.getExecutionFetchGroup(this.getDescriptor()); if (sourceFG != null) { 
FetchGroup targetFetchGroup = sourceFG.getGroup(getAttributeName()); if(targetFetchGroup != null) { // perf: bug#4751950, first prepare the query before cloning. if (targetQuery.shouldPrepare()) { targetQuery.checkPrepare(executionSession, row); } targetQuery = (ObjectLevelReadQuery)targetQuery.clone(); targetQuery.setIsExecutionClone(true); ((ObjectLevelReadQuery)targetQuery).setFetchGroup(targetFetchGroup); } } } // CR #4365, 3610825 - moved up from the block below, needs to be set with // indirection off. Clone the query and set its id. // All indirections are triggered in sopObject, therefore if sopObject is used then indirection on targetQuery to be triggered, too. if (!this.indirectionPolicy.usesIndirection() || shouldUseSopObject) { if (targetQuery == this.selectionQuery) { // perf: bug#4751950, first prepare the query before cloning. if (targetQuery.shouldPrepare()) { targetQuery.checkPrepare(executionSession, row); } targetQuery = (ObjectLevelReadQuery)targetQuery.clone(); targetQuery.setIsExecutionClone(true); } targetQuery.setQueryId(sourceQuery.getQueryId()); if (sourceQuery.usesResultSetAccessOptimization()) { targetQuery.setAccessors(sourceQuery.getAccessors()); } ((ObjectLevelReadQuery)targetQuery).setRequiresDeferredLocks(sourceQuery.requiresDeferredLocks()); } // If the source query is cascading then the target query must use the same settings. if (targetQuery.isObjectLevelReadQuery()) { if (sourceQuery.shouldCascadeAllParts() || (this.isPrivateOwned && sourceQuery.shouldCascadePrivateParts()) || (this.cascadeRefresh && sourceQuery.shouldCascadeByMapping())) { // If the target query has already been cloned (we're refreshing) avoid // re-cloning the query again. if (targetQuery == this.selectionQuery) { // perf: bug#4751950, first prepare the query before cloning. 
if (targetQuery.shouldPrepare()) { targetQuery.checkPrepare(executionSession, row); } targetQuery = (ObjectLevelReadQuery)targetQuery.clone(); targetQuery.setIsExecutionClone(true); } ((ObjectLevelReadQuery)targetQuery).setShouldRefreshIdentityMapResult(sourceQuery.shouldRefreshIdentityMapResult()); targetQuery.setCascadePolicy(sourceQuery.getCascadePolicy()); // For queries that have turned caching off, such as aggregate collection, leave it off. if (targetQuery.shouldMaintainCache()) { targetQuery.setShouldMaintainCache(sourceQuery.shouldMaintainCache()); } // For flashback: Read attributes as of the same time if required. if (((ObjectLevelReadQuery)sourceQuery).hasAsOfClause()) { targetQuery.setSelectionCriteria((Expression)targetQuery.getSelectionCriteria().clone()); ((ObjectLevelReadQuery)targetQuery).setAsOfClause(((ObjectLevelReadQuery)sourceQuery).getAsOfClause()); } } if (isExtendingPessimisticLockScope(sourceQuery)) { if (this.extendPessimisticLockScope == ExtendPessimisticLockScope.TARGET_QUERY) { if (targetQuery == this.selectionQuery) { // perf: bug#4751950, first prepare the query before cloning. if (targetQuery.shouldPrepare()) { targetQuery.checkPrepare(executionSession, row); } targetQuery = (ObjectLevelReadQuery)targetQuery.clone(); targetQuery.setIsExecutionClone(true); } extendPessimisticLockScopeInTargetQuery((ObjectLevelReadQuery)targetQuery, sourceQuery); } else if (this.extendPessimisticLockScope == ExtendPessimisticLockScope.DEDICATED_QUERY) { ReadQuery dedicatedQuery = getExtendPessimisticLockScopeDedicatedQuery(executionSession, sourceQuery.getLockMode()); executionSession.executeQuery(dedicatedQuery, row); } } } targetQuery = prepareHistoricalQuery(targetQuery, sourceQuery, executionSession); return this.indirectionPolicy.valueFromQuery(targetQuery, row, executionSession); } /** * INTERNAL: * Indicates whether the source query's pessimistic lock scope should be extended in the target query.
*/ protected boolean isExtendingPessimisticLockScope(ObjectBuildingQuery sourceQuery) { // TODO: What if sourceQuery is NOT ObjectLevelReadQuery? Should we somehow handle this? // Or alternatively define ObjectBuildingQuery.shouldExtendPessimisticLockScope() to always return false? return sourceQuery.isLockQuery() && sourceQuery.isObjectLevelReadQuery() && ((ObjectLevelReadQuery)sourceQuery).shouldExtendPessimisticLockScope(); } /** * INTERNAL: * Allow for the mapping to perform any historical query additions. * Return the new target query. */ protected ReadQuery prepareHistoricalQuery(ReadQuery targetQuery, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) { return targetQuery; } /** * INTERNAL: * Return a sub-partition of the row starting at the index for the mapping. */ public AbstractRecord trimRowForJoin(AbstractRecord row, JoinedAttributeManager joinManager, AbstractSession executionSession) { // The field for many objects may be in the row, // so build the subpartion of the row through the computed values in the query, // this also helps the field indexing match. if ((joinManager != null) && (joinManager.getJoinedMappingIndexes_() != null)) { Object value = joinManager.getJoinedMappingIndexes_().get(this); if (value != null) { return trimRowForJoin(row, value, executionSession); } } return row; } /** * INTERNAL: * Return a sub-partition of the row starting at the index. */ public AbstractRecord trimRowForJoin(AbstractRecord row, Object value, AbstractSession executionSession) { // CR #... the field for many objects may be in the row, // so build the subpartion of the row through the computed values in the query, // this also helps the field indexing match. 
int fieldStartIndex; if (value instanceof Integer) { fieldStartIndex = ((Integer)value).intValue(); } else { // must be Map of classes to Integers Map map = (Map)value; Class cls; if (getDescriptor().hasInheritance() && getDescriptor().getInheritancePolicy().shouldReadSubclasses()) { cls = getDescriptor().getInheritancePolicy().classFromRow(row, executionSession); } else { cls = getDescriptor().getJavaClass(); } fieldStartIndex = ((Integer)map.get(cls)).intValue(); } Vector trimedFields = new NonSynchronizedSubVector(row.getFields(), fieldStartIndex, row.size()); Vector trimedValues = new NonSynchronizedSubVector(row.getValues(), fieldStartIndex, row.size()); return new DatabaseRecord(trimedFields, trimedValues); } /** * INTERNAL: * Prepare the clone of the nested query for joining. * The nested query clones are stored on the execution (clone) joinManager to avoid cloning per row. */ protected ObjectLevelReadQuery prepareNestedJoinQueryClone(AbstractRecord row, List dataResults, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) { // A nested query must be built to pass to the descriptor that looks like the real query execution would, // these should be cached on the query during prepare. ObjectLevelReadQuery nestedQuery = null; // This is also call for partial object reading. if (joinManager == null) { nestedQuery = prepareNestedJoins(joinManager, sourceQuery, executionSession); nestedQuery.setSession(executionSession); nestedQuery.setPrefetchedCacheKeys(sourceQuery.getPrefetchedCacheKeys()); return nestedQuery; } // PERF: Also store the clone of the nested query on the execution query to avoid // cloning per row. 
if (joinManager.getJoinedMappingQueryClones() == null) { joinManager.setJoinedMappingQueryClones(new HashMap(5)); } nestedQuery = joinManager.getJoinedMappingQueryClones().get(this); if (nestedQuery == null) { if (joinManager.getJoinedMappingQueries_() != null) { nestedQuery = joinManager.getJoinedMappingQueries_().get(this); nestedQuery = (ObjectLevelReadQuery)nestedQuery.clone(); } else { nestedQuery = prepareNestedJoins(joinManager, sourceQuery, executionSession); } nestedQuery.setSession(executionSession); //CR #4365 - used to prevent infinite recursion on refresh object cascade all nestedQuery.setQueryId(joinManager.getBaseQuery().getQueryId()); nestedQuery.setExecutionTime(joinManager.getBaseQuery().getExecutionTime()); joinManager.getJoinedMappingQueryClones().put(this, nestedQuery); } nestedQuery.setPrefetchedCacheKeys(sourceQuery.getPrefetchedCacheKeys()); // Must also set data results to the nested query if it uses to-many joining. if (nestedQuery.hasJoining() && nestedQuery.getJoinedAttributeManager().isToManyJoin()) { // The data results only of the child object are required, they must also be trimmed. List nestedDataResults = dataResults; if (nestedDataResults == null) { // Extract the primary key of the source object, to filter only the joined rows for that object. Object sourceKey = this.descriptor.getObjectBuilder().extractPrimaryKeyFromRow(row, executionSession); nestedDataResults = joinManager.getDataResultsByPrimaryKey().get(sourceKey); } nestedDataResults = new ArrayList(nestedDataResults); Object indexObject = joinManager.getJoinedMappingIndexes_().get(this); // Trim results to start at nested row index. 
for (int index = 0; index < nestedDataResults.size(); index++) { AbstractRecord sourceRow = (AbstractRecord)nestedDataResults.get(index); nestedDataResults.set(index, trimRowForJoin(sourceRow, indexObject, executionSession)); } nestedQuery.getJoinedAttributeManager().setDataResults(nestedDataResults, executionSession); } nestedQuery.setRequiresDeferredLocks(sourceQuery.requiresDeferredLocks()); return nestedQuery; } /** * PUBLIC: * Return the type of batch fetching to use for all queries for this class if configured. */ public BatchFetchType getBatchFetchType() { return batchFetchType; } /** * PUBLIC: * Set the type of batch fetching to use for all queries for this class. */ public void setBatchFetchType(BatchFetchType batchFetchType) { this.batchFetchType = batchFetchType; } /** * INTERNAL: * Allow subclass to define a foreign key in the target's table. */ public void addTargetForeignKeyField(DatabaseField targetForeignKeyField, DatabaseField sourcePrimaryKeyField) { throw new UnsupportedOperationException("addTargetForeignKeyField"); } /** * INTERNAL: * Allow subclass to define a foreign key in the source's table. */ public void addForeignKeyField(DatabaseField sourceForeignKeyField, DatabaseField targetPrimaryKeyField) { throw new UnsupportedOperationException("addForeignKeyField"); } /** * INTERNAL: * Relationships order by their target primary key fields by default. */ @Override public List getOrderByNormalizedExpressions(Expression base) { List orderBys = new ArrayList(this.referenceDescriptor.getPrimaryKeyFields().size()); for (DatabaseField field : this.referenceDescriptor.getPrimaryKeyFields()) { orderBys.add(base.getField(field)); } return orderBys; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/TransformationMapping.java0000664000000000000000000000277712216173130025423 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; /** *

Purpose: A transformation mapping is used for a specialized translation between how * a value is represented in Java and its representation on the database. Transformation mappings * should only be used when other mappings are inadequate. * * @author Sati * @since TOPLink/Java 1.0 */ public class TransformationMapping extends AbstractTransformationMapping implements RelationalMapping { /** * PUBLIC: * Default constructor. */ public TransformationMapping() { super(); } /** * INTERNAL: * Always true: this mapping targets a relational database (implements RelationalMapping). */ public boolean isRelationalMapping() { return true; } } eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/EmbeddableMapping.java0000664000000000000000000000406312216173130024407 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * 01/28/2009-2.0 Guy Pelletier * - 248293: JPA 2.0 Element Collections (part 1) * 02/06/2009-2.0 Guy Pelletier * - 248293: JPA 2.0 Element Collections (part 2) * 10/25/2012-2.5 Guy Pelletier * - 374688: JPA 2.1 Converter support ******************************************************************************/ package org.eclipse.persistence.mappings; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.mappings.converters.Converter; /** * INTERNAL * Common interface to those mappings that are used to map JPA Embedded objects.
* - ElementCollection -> AggregateCollectionMapping * - Embedded -> AggregateObjectMapping * * This interface was built to ease the metadata processing, namely to avoid * costly casting between the mappings above since their common parent is * DatabaseMapping. * * @author Guy Pelletier * @since EclipseLink 1.2 */ public interface EmbeddableMapping { /** Return the name of the attribute mapped by this mapping. */ public String getAttributeName(); /** Register a converter to be applied to the attribute named attributeName. */ public void addConverter(Converter converter, String attributeName); /** NOTE(review): presumably applies a JPA association override for a many-to-many mapping nested in the embeddable - confirm against implementors. */ public void addOverrideManyToManyMapping(ManyToManyMapping mapping); /** NOTE(review): presumably applies a JPA association override for a unidirectional one-to-many mapping nested in the embeddable - confirm against implementors. */ public void addOverrideUnidirectionalOneToManyMapping(UnidirectionalOneToManyMapping mapping); /** Add a field translation (attribute override) mapping the source field onto the aggregate field name. */ public void addFieldTranslation(DatabaseField sourceFieldName, String aggregateFieldName); /** Add a field translation for an attribute nested within the aggregate, keyed by attributeName. */ public void addNestedFieldTranslation(String attributeName, DatabaseField sourceField, String aggregateField); }eclipselink-2.5.1.orig/org/eclipse/persistence/mappings/AttributeAccessor.java0000664000000000000000000001037412216173130024517 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.mappings; import java.io.*; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.helper.ClassConstants; /** *

Purpose: This provides an abstract class for setting and retrieving * the attribute value for the mapping from an object. * It can be used in advanced situations if the attribute * requires advanced conversion of the mapping value, or a real attribute does not exist. * * @author James * @since OracleAS TopLink 10g (10.0.3) */ public abstract class AttributeAccessor implements CoreAttributeAccessor, Cloneable, Serializable { /** Stores the name of the attribute */ protected String attributeName; protected boolean isWriteOnly = false; protected boolean isReadOnly = false; /** * INTERNAL: * Clones itself. */ public Object clone() { try { return super.clone(); } catch (CloneNotSupportedException e) { throw new InternalError(); } } /** * INTERNAL: * Return the attribute name. */ public String getAttributeName() { return attributeName; } /** * INTERNAL: * Set the attribute name. */ public void setAttributeName(String attributeName) { this.attributeName = attributeName; } /** * Return the class type of the attribute. */ public Class getAttributeClass() { return ClassConstants.OBJECT; } /** * Allow any initialization to be performed with the descriptor class. */ public void initializeAttributes(Class descriptorClass) throws DescriptorException { if (getAttributeName() == null) { throw DescriptorException.attributeNameNotSpecified(); } } /** * Returns true if this attribute accessor has been initialized and now stores a reference to the * class's attribute. An attribute accessor can become uninitialized on serialization. 
*/ public boolean isInitialized(){ return true; } /** Type-check hook: always false in this base class; presumably overridden by the instance-variable accessor subclass - confirm. */ public boolean isInstanceVariableAttributeAccessor() { return false; } /** Type-check hook: always false in this base class. */ public boolean isMapValueAttributeAccessor(){ return false; } /** Type-check hook: always false in this base class. */ public boolean isMethodAttributeAccessor() { return false; } /** Type-check hook: always false in this base class. */ public boolean isValuesAccessor() { return false; } /** Type-check hook: always false in this base class. */ public boolean isVirtualAttributeAccessor(){ return false; } /** * INTERNAL: * @return whether this accessor is write-only (its value can be set but not read). */ public boolean isWriteOnly() { return isWriteOnly; } /** * INTERNAL: * @param aBoolean the new value of the write-only flag. */ public void setIsWriteOnly(boolean aBoolean) { this.isWriteOnly = aBoolean; } /** * INTERNAL * @return whether this accessor is read-only (its value can be read but not set). */ public boolean isReadOnly() { return isReadOnly; } /** * INTERNAL * @param aBoolean the new value of the read-only flag. */ public void setIsReadOnly(boolean aBoolean) { this.isReadOnly = aBoolean; } /** * Return the attribute value from the object. */ @Override public abstract Object getAttributeValueFromObject(Object object) throws DescriptorException; /** * Set the attribute value into the object. */ @Override public abstract void setAttributeValueInObject(Object object, Object value) throws DescriptorException; } eclipselink-2.5.1.orig/org/eclipse/persistence/Version.j0000664000000000000000000001105612216173544020217 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * egwin - Changed buildNumber to buildDate.
Added buildRevision, * buildType, getBuildDate(), getBuildRevision(), getBuildType(), * getVersionString(), printVersionString(), and main() ******************************************************************************/ package org.eclipse.persistence; /** * This class stores variables for the version and build numbers that are used in printouts and exceptions. * * @author Eric Gwin * @since 1.0, */ public class Version { // The current copyright info for EclipseLink. private static final String CopyrightString = "Copyright (c) 1998, 2013 Oracle. All rights reserved."; // The current version of EclipseLink. // This will be used by all product components and included in exceptions. private static String product = "Eclipse Persistence Services"; // A three part version number (major.minor.service) private static final String version = "@VERSION@"; // A string that describes this build i.e.( vYYYYMMDD-HHMM, etc.) private static final String qualifier = "@QUALIFIER@"; // Should be in the format YYYYMMDD private static final String buildDate = "@BUILD_DATE@"; // Should be in the format HHMM private static final String buildTime = "@BUILD_TIME@"; // revision of source from the repository private static final String buildRevision = "@BUILD_REVISION@"; // Typically SNAPSHOT, Milestone name (M1,M2,etc), or RELEASE private static final String buildType = "@BUILD_TYPE@"; /** Keep track of JDK version in order to make some decisions about data structures. **/ public static final int JDK_VERSION_NOT_SET = 0; public static final int JDK_1_5 = 1; public static final int JDK_1_6 = 2; public static int JDK_VERSION = JDK_VERSION_NOT_SET; public static String getVersionString ( ) { String verString; verString = getVersion() + "." 
+ getQualifier(); return( verString ); } public static String getProduct() { return product; } public static void setProduct(String ProductName) { product = ProductName; } public static String getVersion() { return version; } public static String getQualifier() { return qualifier; } public static String getBuildNumber() { return getBuildDate(); } public static String getBuildDate() { return buildDate; } public static String getBuildTime() { return buildTime; } public static String getBuildRevision() { return buildRevision; } public static String getBuildType() { return buildType; } /** * INTERNAL: * Return the JDK version we are using. */ public static int getJDKVersion() { if (JDK_VERSION == JDK_VERSION_NOT_SET) { String version = System.getProperty("java.version"); if ((version != null) && version.startsWith("1.5")) { useJDK15(); } else { useJDK16(); } } return JDK_VERSION; } public static void useJDK15() { JDK_VERSION = JDK_1_5; } public static void useJDK16() { JDK_VERSION = JDK_1_6; } public static boolean isJDK15() { return getJDKVersion() == JDK_1_5; } public static boolean isJDK16() { return getJDKVersion() == JDK_1_6; } public static void printVersion ( ) { System.out.println( getVersionString() ); } public static void main ( String[] args ) { System.out.println( "\n" + getProduct() + " (EclipseLink)" + "\n Build Version: " + getVersionString() + "\n Build Qualifier: " + getQualifier() + "\n Build Date: " + getBuildDate() + "\n Build Time: " + getBuildTime() + "\n SVN Revision: " + getBuildRevision() ); } } eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/0000775000000000000000000000000012216174372020555 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/DefaultSequence.java0000664000000000000000000001672512216173130024477 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.sequencing; import java.util.Vector; import org.eclipse.persistence.internal.databaseaccess.Accessor; import org.eclipse.persistence.internal.sessions.AbstractSession; /** *

 * <p><b>Purpose</b>: Reference to the default sequence.

* Description * This sequence can be used to provide a sequence using the session's * default sequencing mechanism but override the pre-allocation size. */ public class DefaultSequence extends Sequence { protected Sequence defaultSequence; public DefaultSequence() { super(); } /** * Create a new sequence with the name. */ public DefaultSequence(String name) { super(name, 0); } /** * Create a new sequence with the name and sequence pre-allocation size. */ public DefaultSequence(String name, int size) { super(name, size); } public DefaultSequence(String name, int size, int initialValue) { super(name, size, initialValue); } /** * INTERNAL: * Return the platform's default sequence. */ public Sequence getDefaultSequence() { return getDatasourcePlatform().getDefaultSequence(); } public boolean hasPreallocationSize() { return size != 0; } public int getPreallocationSize() { if ((size != 0) || (getDefaultSequence() == null)) { return size; } else { return getDefaultSequence().getPreallocationSize(); } } public int getInitialValue() { if ((initialValue != 0) || (getDefaultSequence() == null)) { return initialValue; } else { return getDefaultSequence().getInitialValue(); } } public boolean equals(Object obj) { if (obj instanceof DefaultSequence) { return equalNameAndSize(this, (DefaultSequence)obj); } else { return false; } } /** * INTERNAL: * Indicates whether sequencing value should be acquired after INSERT. * Note that preallocation could be used only in case sequencing values * should be acquired before insert (this method returns false). * In default implementation, it is true for table sequencing and native * sequencing on Oracle platform, false for native sequencing on other platforms. */ public boolean shouldAcquireValueAfterInsert() { return getDefaultSequence().shouldAcquireValueAfterInsert(); } /** * INTERNAL: * Indicates whether the existing pk value should always be overridden by the sequence. 
*/ public boolean shouldAlwaysOverrideExistingValue(String seqName) { return this.shouldAlwaysOverrideExistingValue || getDefaultSequence().shouldAlwaysOverrideExistingValue(seqName); } /** * INTERNAL: * Indicates whether several sequencing values should be acquired at a time * and be kept by TopLink. This in only possible in case sequencing numbers should * be acquired before insert (shouldAcquireValueAfterInsert()==false). * In default implementation, it is true for table sequencing and native * sequencing on Oracle platform, false for native sequencing on other platforms. */ public boolean shouldUsePreallocation() { return getDefaultSequence().shouldUsePreallocation(); } /** * INTERNAL: * Indicates whether EclipseLink should internally call beginTransaction() before * getGeneratedValue/Vector, and commitTransaction after. * In default implementation, it is true for table sequencing and * false for native sequencing. */ public boolean shouldUseTransaction() { return getDefaultSequence().shouldUseTransaction(); } /** * INTERNAL: * Return the newly-generated sequencing value. * Used only in case preallocation is not used (shouldUsePreallocation()==false). * Accessor may be non-null only in case shouldUseSeparateConnection()==true. * Even in this case accessor could be null - if SequencingControl().shouldUseSeparateConnection()==false; * Therefore in case shouldUseSeparateConnection()==true, implementation should handle * both cases: use a separate connection if provided (accessor != null), or get by * without it (accessor == null). 
* @param accessor Accessor is a separate sequencing accessor (may be null); * @param writeSession Session is a Session used for writing (either ClientSession or DatabaseSession); * @param seqName String is sequencing number field name */ public Object getGeneratedValue(Accessor accessor, AbstractSession writeSession, String seqName) { return getDefaultSequence().getGeneratedValue(accessor, writeSession, seqName); } /** * INTERNAL: * Return a Vector of newly-generated sequencing values. * Used only in case preallocation is used (shouldUsePreallocation()==true). * Accessor may be non-null only in case shouldUseSeparateConnection()==true. * Even in this case accessor could be null - if SequencingControl().shouldUseSeparateConnection()==false; * Therefore in case shouldUseSeparateConnection()==true, implementation should handle * both cases: use a separate connection if provided (accessor != null), or get by * without it (accessor == null). * @param accessor Accessor is a separate sequencing accessor (may be null); * @param writeSession Session is a Session used for writing (either ClientSession or DatabaseSession); * @param seqName String is sequencing number field name * @param size int number of values to preallocate (output Vector size). */ public Vector getGeneratedVector(Accessor accessor, AbstractSession writeSession, String seqName, int size) { return getDefaultSequence().getGeneratedVector(accessor, writeSession, seqName, size); } /** * INTERNAL: * This method is called when Sequencing object is created. * It's a chance to do initialization. */ public void onConnect() { qualifier = getDefaultSequence().getQualifier(); } /** * INTERNAL: * This method is called when Sequencing object is destroyed.. * It's a chance to do deinitialization. */ public void onDisconnect() { qualifier = ""; } /** * PUBLIC: * Indicates that Sequence is connected. 
*/ public boolean isConnected() { return getDefaultSequence().isConnected(); } /** * INTERNAL: * Ignored, getDefaultSequence().getQualifier() used instead. */ public void setQualifier(String qualifier) { } } eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/Sequence.java0000664000000000000000000003417612216173130023172 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.sequencing; import java.util.Vector; import java.io.Serializable; import org.eclipse.persistence.internal.databaseaccess.Platform; import org.eclipse.persistence.internal.databaseaccess.DatasourcePlatform; import org.eclipse.persistence.internal.databaseaccess.Accessor; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.exceptions.ValidationException; /** *

* Purpose: Abstract class to define sequencing. *

* Description * A sequence defines how generated ids are obtained. * The main sequence types are TableSequence and NativeSequence. * Descriptors using sequencing will use the sequence object defined in their session's * DatabaseLogin with the name matching their sequence name. If a specific sequence is * not defined for the name the DatabaseLogin's default sequence will be used. * @see TableSequence * @see NativeSequence */ public abstract class Sequence implements Serializable, Cloneable { // name protected String name = ""; // preallocation size protected int size = 50; // owner platform protected Platform platform; protected int initialValue = 1; // number of times onConnect was called - number of times onDisconnect was called protected int depth; protected String qualifier = ""; // true indicates that qualifier was set through setQualifier method, // false - copied from platform (or not set at all). protected boolean isCustomQualifier; // indicates whether the existing pk value should always be overridden by the sequence. // note that even if set to false sequence always overrides if shouldAcquireValueAfterInsert returns true. protected boolean shouldAlwaysOverrideExistingValue; public Sequence() { super(); setName("SEQUENCE"); } /** * Create a new sequence with the name. */ public Sequence(String name) { this(); setName(name); } /** * Create a new sequence with the name and sequence pre-allocation size. 
*/ public Sequence(String name, int size) { this(); setName(name); setPreallocationSize(size); } public Sequence(String name, int size, int initialValue) { this(); setName(name); setPreallocationSize(size); setInitialValue(initialValue); } public boolean isNative() { return false; } public boolean isTable() { return false; } public boolean isUnaryTable() { return false; } public String getName() { return name; } public void setName(String name) { this.name = name; } public int getPreallocationSize() { return size; } public void setPreallocationSize(int size) { this.size = size; } public int getInitialValue() { return initialValue; } public void setInitialValue(int initialValue) { this.initialValue = initialValue; } public Object clone() { try { Sequence clone = (Sequence)super.clone(); if (isConnected()) { clone.depth = 1; clone.onDisconnect(); clone.setDatasourcePlatform(null); } return clone; } catch (Exception exception) { throw new InternalError("Clone failed"); } } public boolean equals(Object obj) { if (obj instanceof Sequence) { return equalNameAndSize(this, (Sequence)obj); } else { return false; } } /** * INTERNAL: * Used in equals. */ public static boolean equalNameAndSize(Sequence seq1, Sequence seq2) { if (seq1 == seq2) { return true; } return seq1.getName().equals(seq2.getName()) && (seq1.getPreallocationSize() == seq2.getPreallocationSize()); } protected void setDatasourcePlatform(Platform platform) { this.platform = platform; } public Platform getDatasourcePlatform() { return platform; } /** * INTERNAL: * Indicates whether sequencing value should be acquired after INSERT. * Note that preallocation could be used only in case sequencing values * should be acquired before insert (this method returns false). * In default implementation, it is true for table sequencing and native * sequencing on Oracle platform, false for native sequencing on other platforms. 
*/ public abstract boolean shouldAcquireValueAfterInsert(); /** * INTERNAL: * Indicates whether several sequencing values should be acquired at a time * and be kept by TopLink. This in only possible in case sequencing numbers should * be acquired before insert (shouldAcquireValueAfterInsert()==false). * In default implementation, it is true for table sequencing and native * sequencing on Oracle platform, false for native sequencing on other platforms. */ public boolean shouldUsePreallocation() { return !shouldAcquireValueAfterInsert(); } /** * INTERNAL: * Indicates whether TopLink should internally call beginTransaction() before * getGeneratedValue/Vector, and commitTransaction after. * In default implementation, it is true for table sequencing and * false for native sequencing. */ public abstract boolean shouldUseTransaction(); /** * INTERNAL: * Return the newly-generated sequencing value. * Used only in case preallocation is not used (shouldUsePreallocation()==false). * Accessor may be non-null only in case shouldUseSeparateConnection()==true. * Even in this case accessor could be null - if SequencingControl().shouldUseSeparateConnection()==false; * Therefore in case shouldUseSeparateConnection()==true, implementation should handle * both cases: use a separate connection if provided (accessor != null), or get by * without it (accessor == null). * @param accessor Accessor is a separate sequencing accessor (may be null); * @param writeSession Session is a Session used for writing (either ClientSession or DatabaseSession); * @param seqName String is sequencing number field name */ public abstract Object getGeneratedValue(Accessor accessor, AbstractSession writeSession, String seqName); /** * INTERNAL: * Return the newly-generated sequencing value. * Used only in case preallocation is not used (shouldUsePreallocation()==false). * Accessor may be non-null only in case shouldUseSeparateConnection()==true. 
* Even in this case accessor could be null - if SequencingControl().shouldUseSeparateConnection()==false; * Therefore in case shouldUseSeparateConnection()==true, implementation should handle * both cases: use a separate connection if provided (accessor != null), or get by * without it (accessor == null). * @param accessor Accessor is a separate sequencing accessor (may be null); * @param writeSession Session is a Session used for writing (either ClientSession or DatabaseSession); */ public Object getGeneratedValue(Accessor accessor, AbstractSession writeSession) { return getGeneratedValue(accessor, writeSession, getName()); } /** * INTERNAL: * Return a Vector of newly-generated sequencing values. * Used only in case preallocation is used (shouldUsePreallocation()==true). * Accessor may be non-null only in case shouldUseSeparateConnection()==true. * Even in this case accessor could be null - if SequencingControl().shouldUseSeparateConnection()==false; * Therefore in case shouldUseSeparateConnection()==true, implementation should handle * both cases: use a separate connection if provided (accessor != null), or get by * without it (accessor == null). * @param accessor Accessor is a separate sequencing accessor (may be null); * @param writeSession Session is a Session used for writing (either ClientSession or DatabaseSession); * @param seqName String is sequencing number field name * @param size int number of values to preallocate (output Vector size). */ public abstract Vector getGeneratedVector(Accessor accessor, AbstractSession writeSession, String seqName, int size); /** * INTERNAL: * Return a Vector of newly-generated sequencing values. * Used only in case preallocation is used (shouldUsePreallocation()==true). * Accessor may be non-null only in case shouldUseSeparateConnection()==true. 
* Even in this case accessor could be null - if SequencingControl().shouldUseSeparateConnection()==false; * Therefore in case shouldUseSeparateConnection()==true, implementation should handle * both cases: use a separate connection if provided (accessor != null), or get by * without it (accessor == null). * @param accessor Accessor is a separate sequencing accessor (may be null); * @param writeSession Session is a Session used for writing (either ClientSession or DatabaseSession); */ public Vector getGeneratedVector(Accessor accessor, AbstractSession writeSession) { return getGeneratedVector(accessor, writeSession, getName(), getPreallocationSize()); } /** * INTERNAL: * This method is called when Sequencing object is created. * Don't override this method. */ public void onConnect(Platform platform) { setDatasourcePlatform(platform); if(!isCustomQualifier) { qualifier = getDatasourcePlatform().getTableQualifier(); } onConnect(); depth++; } /** * INTERNAL: * This method is called when Sequencing object is created. * If it requires initialization, subclass should override this method. */ public abstract void onConnect(); /** * INTERNAL: * This method is called when Sequencing object is destroyed. * Don't override this method. */ public void onDisconnect(Platform platform) { if (isConnected()) { depth--; if(depth==0 && !isCustomQualifier) { qualifier = ""; } // Can no longer disconnect sequences, as they are part of descriptor shared meta-data. } } /** * INTERNAL: * This method is called when Sequencing object is destroyed. * If it requires deinitialization, subclass should override this method. */ public abstract void onDisconnect(); /** * PUBLIC: * Indicates that Sequence is connected. */ public boolean isConnected() { return platform != null; } /** * INTERNAL: * Make sure that the sequence is not used by more than one platform. 
*/ protected void verifyPlatform(Platform otherPlatform) { if (getDatasourcePlatform() != otherPlatform) { String hashCode1 = Integer.toString(System.identityHashCode(getDatasourcePlatform())); String name1 = ((DatasourcePlatform)getDatasourcePlatform()).toString() + '(' + hashCode1 + ')'; String hashCode2 = Integer.toString(System.identityHashCode(otherPlatform)); String name2 = ((DatasourcePlatform)otherPlatform).toString() + '(' + hashCode2 + ')'; throw ValidationException.sequenceCannotBeConnectedToTwoPlatforms(getName(), name1, name2); } } /** * INTERNAL: */ public void setQualifier(String qualifier) { if(qualifier == null) { qualifier = ""; } this.isCustomQualifier = qualifier.length() > 0; this.qualifier = qualifier; } /** * INTERNAL: */ public boolean isCustomQualifier() { return isCustomQualifier; } /** * INTERNAL: */ public String getQualifier() { return qualifier; } /** * INTERNAL: */ public String getQualified(String str) { if (qualifier.equals("")) { return str; } else { return qualifier + "." + str; } } /** * ADVANCED: * Set that to true if the sequence should always override the existing pk value. */ public void setShouldAlwaysOverrideExistingValue(boolean shouldAlwaysOverrideExistingValue) { this.shouldAlwaysOverrideExistingValue = shouldAlwaysOverrideExistingValue; } /** * INTERNAL: * Indicates whether the existing pk value should always be overridden by the sequence. * As always the version of the method taking seqName is provided for the benefit * of DefaultSequence. */ public boolean shouldAlwaysOverrideExistingValue() { return shouldAlwaysOverrideExistingValue(getName()); } /** * INTERNAL: * Indicates whether the existing pk value should always be overridden by the sequence. 
*/ public boolean shouldAlwaysOverrideExistingValue(String seqName) { return this.shouldAlwaysOverrideExistingValue || shouldAcquireValueAfterInsert(); } public String toString() { return getClass().getSimpleName() + "(" + getName() + ")"; } } eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/NativeSequence.java0000664000000000000000000002105212216173130024326 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.sequencing; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform; import org.eclipse.persistence.internal.databaseaccess.DatasourcePlatform; /** *

* Purpose: Define a database's native sequencing mechanism. *

* Description * Many databases have built in support for sequencing. * This can be a SEQUENCE object such as in Oracle, * or a auto-incrementing column such as the IDENTITY field in Sybase. * For an auto-incrementing column the preallocation size is always 1. * For a SEQUENCE object the preallocation size must match the SEQUENCE objects "increment by". */ public class NativeSequence extends QuerySequence { /** * true indicates that identity should be used - if the platform supports identity. * false indicates that sequence objects should be used - if the platform supports sequence objects. */ protected boolean shouldUseIdentityIfPlatformSupports = true; /** * Allow sequencing to be delegated to another sequence if native sequencing is not supported. */ protected QuerySequence delegateSequence; public NativeSequence() { super(); setShouldSkipUpdate(true); } public NativeSequence(boolean shouldUseIdentityIfPlatformSupports) { super(); setShouldSkipUpdate(true); setShouldUseIdentityIfPlatformSupports(shouldUseIdentityIfPlatformSupports); } /** * Create a new sequence with the name. */ public NativeSequence(String name) { super(name); setShouldSkipUpdate(true); } public NativeSequence(String name, boolean shouldUseIdentityIfPlatformSupports) { super(name); setShouldSkipUpdate(true); setShouldUseIdentityIfPlatformSupports(shouldUseIdentityIfPlatformSupports); } /** * Create a new sequence with the name and sequence pre-allocation size. 
*/ public NativeSequence(String name, int size) { super(name, size); setShouldSkipUpdate(true); } public NativeSequence(String name, int size, boolean shouldUseIdentityIfPlatformSupports) { super(name, size); setShouldSkipUpdate(true); setShouldUseIdentityIfPlatformSupports(shouldUseIdentityIfPlatformSupports); } public NativeSequence(String name, int size, int initialValue) { super(name, size, initialValue); setShouldSkipUpdate(true); } public NativeSequence(String name, int size, int initialValue, boolean shouldUseIdentityIfPlatformSupports) { super(name, size, initialValue); setShouldSkipUpdate(true); setShouldUseIdentityIfPlatformSupports(shouldUseIdentityIfPlatformSupports); } public boolean isNative() { if (this.delegateSequence != null) { return this.delegateSequence.isNative(); } return true; } public void setShouldUseIdentityIfPlatformSupports(boolean shouldUseIdentityIfPlatformSupports) { this.shouldUseIdentityIfPlatformSupports = shouldUseIdentityIfPlatformSupports; } public boolean shouldUseIdentityIfPlatformSupports() { return shouldUseIdentityIfPlatformSupports; } public boolean equals(Object obj) { if (obj instanceof NativeSequence) { return equalNameAndSize(this, (NativeSequence)obj); } else { return false; } } /** * INTERNAL: */ @Override protected ValueReadQuery buildSelectQuery() { if (this.delegateSequence != null) { return this.delegateSequence.buildSelectQuery(); } else if (shouldAcquireValueAfterInsert()) { return ((DatasourcePlatform)getDatasourcePlatform()).buildSelectQueryForIdentity(); } else { return ((DatasourcePlatform)getDatasourcePlatform()).buildSelectQueryForSequenceObject(); } } /** * INTERNAL: */ @Override protected ValueReadQuery buildSelectQuery(String seqName, Integer size) { if (this.delegateSequence != null) { return this.delegateSequence.buildSelectQuery(seqName, size); } else if (shouldAcquireValueAfterInsert()) { return ((DatabasePlatform)getDatasourcePlatform()).buildSelectQueryForIdentity(getQualified(seqName), size); } 
else { return ((DatasourcePlatform)getDatasourcePlatform()).buildSelectQueryForSequenceObject(getQualified(seqName), size); } } /** * Return if the sequence should be replaced by another sequence implementation. * This is used when the platform does not support the native sequence type. */ public boolean hasDelegateSequence() { return delegateSequence != null; } /** * Return the sequence delegate. * This is used when the platform does not support the native sequence type. */ public QuerySequence getDelegateSequence() { return delegateSequence; } /** * Set the sequence delegate. * This is used when the platform does not support the native sequence type. */ public void setDelegateSequence(QuerySequence delegateSequence) { this.delegateSequence = delegateSequence; } /** * INTERNAL: */ @Override public void onConnect() { DatasourcePlatform platform = (DatasourcePlatform)getDatasourcePlatform(); // Set shouldAcquireValueAfterInsert flag: identity -> true; sequence objects -> false. if (platform.supportsIdentity() && shouldUseIdentityIfPlatformSupports()) { // identity is both supported by platform and desired by the NativeSequence setShouldAcquireValueAfterInsert(true); } else if (platform.supportsSequenceObjects() && !shouldUseIdentityIfPlatformSupports()) { // sequence objects is both supported by platform and desired by the NativeSequence setShouldAcquireValueAfterInsert(false); } else { if (platform.getDefaultNativeSequenceToTable() || !platform.supportsNativeSequenceNumbers()) { // If native sequencing is not supported, or IDENTITY not desire, use TABLE. 
this.delegateSequence = new TableSequence(); this.delegateSequence.setName(getName()); this.delegateSequence.onConnect(platform); setShouldUseTransaction(this.delegateSequence.shouldUseTransaction()); setShouldAcquireValueAfterInsert(this.delegateSequence.shouldAcquireValueAfterInsert()); setShouldSkipUpdate(this.delegateSequence.shouldSkipUpdate()); setShouldSelectBeforeUpdate(this.delegateSequence.shouldSelectBeforeUpdate()); setUpdateQuery(this.delegateSequence.getUpdateQuery()); super.onConnect(); return; } else { // platform support contradicts to NativeSequence setting - go with platform supported choice. // platform must support either identity or sequence objects (otherwise ValidationException would've been thrown earlier), // therefore here dbPlatform.supportsIdentity() == !dbPlatform.supportsSequenceObjects(). setShouldAcquireValueAfterInsert(platform.supportsIdentity()); } } setShouldUseTransaction(platform.shouldNativeSequenceUseTransaction()); super.onConnect(); } /** * INTERNAL: */ @Override public void onDisconnect() { this.delegateSequence = null; setShouldAcquireValueAfterInsert(false); setShouldUseTransaction(false); super.onDisconnect(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/UnaryTableSequence.java0000664000000000000000000001300112216173130025141 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.sequencing; import java.io.StringWriter; import org.eclipse.persistence.queries.*; /** *

* Purpose: Defines sequencing through using a singleton sequence table. *

* Description * This is similar to the TableSequence but the sequence table only stores a single * row defining a single sequence. */ public class UnaryTableSequence extends QuerySequence { /** Hold the name of the only column in multiple single-column tables */ protected String counterFieldName = "SEQUENCE"; protected String updateString1; protected String updateString2; protected String selectString1; protected String selectString2; protected int updateStringBufferSize; protected int selectStringBufferSize; public UnaryTableSequence() { super(false, true); } /** * Create a new sequence with the name. */ public UnaryTableSequence(String name) { super(name, false, true); } /** * Create a new sequence with the name and sequence pre-allocation size. */ public UnaryTableSequence(String name, int size) { super(name, size, false, true); } public UnaryTableSequence(String name, String counterFieldName) { this(name); setCounterFieldName(counterFieldName); } public UnaryTableSequence(String name, int size, String counterFieldName) { this(name, size); setCounterFieldName(counterFieldName); } public boolean isUnaryTable() { return true; } public boolean equals(Object obj) { if (obj instanceof UnaryTableSequence) { UnaryTableSequence other = (UnaryTableSequence)obj; if (equalNameAndSize(this, other)) { return getCounterFieldName().equals(other.getCounterFieldName()); } else { return false; } } else { return false; } } public void setCounterFieldName(String name) { this.counterFieldName = name; } public String getCounterFieldName() { return counterFieldName; } /** * INTERNAL: */ public void onConnect() { super.onConnect(); initialize(); } /** * INTERNAL: */ public void onDisconnect() { clear(); super.onDisconnect(); } protected ValueReadQuery buildSelectQuery(String seqName, Integer size) { ValueReadQuery selectQuery = new ValueReadQuery(); selectQuery.dontBindAllParameters(); StringWriter writer = new StringWriter(selectStringBufferSize + seqName.length()); 
writer.write(selectString1); writer.write(seqName); selectQuery.setSQLString(writer.toString()); return selectQuery; } protected DataModifyQuery buildUpdateQuery(String seqName, Number size) { DataModifyQuery updateQuery = new DataModifyQuery(); updateQuery.dontBindAllParameters(); String sizeString = size.toString(); StringWriter writer = new StringWriter(updateStringBufferSize + seqName.length() + sizeString.length()); writer.write(updateString1); writer.write(seqName); writer.write(updateString2); writer.write(sizeString); updateQuery.setSQLString(writer.toString()); return updateQuery; } protected void initialize() { if (getSelectQuery() == null) { buildSelectString1(); selectStringBufferSize = selectString1.length(); } if ((getUpdateQuery() == null) && !shouldSkipUpdate()) { buildUpdateString1(); buildUpdateString2(); updateStringBufferSize = updateString1.length() + updateString2.length(); } } protected void buildUpdateString1() { updateString1 = "UPDATE "; if (qualifier != "") { updateString1 = updateString1 + qualifier + '.'; } } protected void buildUpdateString2() { StringWriter writer = new StringWriter(); writer.write(" SET "); writer.write(getCounterFieldName()); writer.write(" = "); writer.write(getCounterFieldName()); writer.write(" + "); updateString2 = writer.toString(); } protected void buildSelectString1() { selectString1 = "SELECT * FROM "; if (qualifier != "") { selectString1 = selectString1 + qualifier + '.'; } } protected void clear() { updateString1 = null; updateString2 = null; selectString1 = null; selectString2 = null; updateStringBufferSize = 0; selectStringBufferSize = 0; } } eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/StandardSequence.java0000664000000000000000000000774412216173130024654 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.sequencing; import java.util.Vector; import org.eclipse.persistence.internal.databaseaccess.Accessor; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.exceptions.DatabaseException; /** *

* Purpose: An abstract class providing default sequence behavior. *

*/ public abstract class StandardSequence extends Sequence { public StandardSequence() { super(); } public StandardSequence(String name) { super(name); } public StandardSequence(String name, int size) { super(name, size); } public StandardSequence(String name, int size, int initialValue) { super(name, size, initialValue); } public void onConnect() { // does nothing } public void onDisconnect() { // does nothing } protected abstract Number updateAndSelectSequence(Accessor accessor, AbstractSession writeSession, String seqName, int size); public abstract boolean shouldAcquireValueAfterInsert(); public abstract boolean shouldUseTransaction(); public Object getGeneratedValue(Accessor accessor, AbstractSession writeSession, String seqName) { if (shouldUsePreallocation()) { return null; } else { Number value = updateAndSelectSequence(accessor, writeSession, seqName, 1); if (value == null) { throw DatabaseException.errorPreallocatingSequenceNumbers(); } return value; } } public Vector getGeneratedVector(Accessor accessor, AbstractSession writeSession, String seqName, int size) { if (shouldUsePreallocation()) { Number value = updateAndSelectSequence(accessor, writeSession, seqName, size); if (value == null) { throw DatabaseException.errorPreallocatingSequenceNumbers(); } return createVector(value, seqName, size); } else { return null; } } /** * INTERNAL: * given sequence = 10, size = 5 will create Vector (6,7,8,9,10) * @param seqName String is sequencing number field name * @param size int size of Vector to create. */ protected Vector createVector(Number sequence, String seqName, int size) { long nextSequence = sequence.longValue(); Vector sequencesForName = new Vector(size); nextSequence = nextSequence - size; // Check for incorrect values return to validate that the sequence is setup correctly. 
// PRS 36451 intvalue would wrap if (nextSequence < -1L) { throw ValidationException.sequenceSetupIncorrectly(seqName); } for (int index = size; index > 0; index--) { nextSequence = nextSequence + 1L; sequencesForName.add(nextSequence); } return sequencesForName; } public void setInitialValue(int initialValue) { // sequence value should be positive if (initialValue <= 0) { initialValue = 1; } super.setInitialValue(initialValue); } } eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/QuerySequence.java0000664000000000000000000002764112216173130024217 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 06/30/2011-2.3.1 Guy Pelletier * - 341940: Add disable/enable allowing native queries ******************************************************************************/ package org.eclipse.persistence.sequencing; import java.util.Vector; import java.math.BigDecimal; import org.eclipse.persistence.sessions.Record; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.internal.databaseaccess.Accessor; import org.eclipse.persistence.internal.sessions.AbstractSession; /** *

 * <p>
 * <b>Purpose</b>: A generic query sequence mechanism.
 * <p>
 * <b>Description</b>
 * This sequence allows the sequence operations to be customized through user defined queries.
 * A select and update query can be set which can use custom SQL or stored procedures to define the sequencing mechanism.
 * If a single stored procedure is used that does the update and select only the select query needs to be set.
 */
public class QuerySequence extends StandardSequence {
    // Query that reads the current/next sequence value.
    protected ValueReadQuery selectQuery;
    // Query that advances the sequence value.
    protected DataModifyQuery updateQuery;
    protected boolean shouldAcquireValueAfterInsert;
    protected boolean shouldUseTransaction;
    protected boolean shouldSkipUpdate;
    protected boolean shouldSelectBeforeUpdate;
    // Track whether the queries were built internally (so onDisconnect can discard them).
    protected boolean wasSelectQueryCreated;
    protected boolean wasUpdateQueryCreated;

    public QuerySequence() {
        super();
    }

    /**
     * Create a new sequence with the name.
     */
    public QuerySequence(String name) {
        super(name);
    }

    /**
     * Create a new sequence with the name and sequence pre-allocation size.
     */
    public QuerySequence(String name, int size) {
        super(name, size);
    }

    public QuerySequence(String name, int size, int initialValue) {
        super(name, size, initialValue);
    }

    public QuerySequence(boolean shouldAcquireValueAfterInsert, boolean shouldUseTransaction) {
        super();
        setShouldAcquireValueAfterInsert(shouldAcquireValueAfterInsert);
        setShouldUseTransaction(shouldUseTransaction);
    }

    public QuerySequence(String name, boolean shouldAcquireValueAfterInsert, boolean shouldUseTransaction) {
        super(name);
        setShouldAcquireValueAfterInsert(shouldAcquireValueAfterInsert);
        setShouldUseTransaction(shouldUseTransaction);
    }

    public QuerySequence(String name, int size, boolean shouldAcquireValueAfterInsert, boolean shouldUseTransaction) {
        super(name, size);
        setShouldAcquireValueAfterInsert(shouldAcquireValueAfterInsert);
        setShouldUseTransaction(shouldUseTransaction);
    }

    public QuerySequence(String name, int size, int initialValue, boolean shouldAcquireValueAfterInsert, boolean shouldUseTransaction) {
        super(name, size, initialValue);
        setShouldAcquireValueAfterInsert(shouldAcquireValueAfterInsert);
        setShouldUseTransaction(shouldUseTransaction);
    }

    // NOTE(review): the queries are compared by reference (==), not equals();
    // two sequences with equal-but-distinct query objects compare unequal.
    public boolean equals(Object obj) {
        if (obj instanceof QuerySequence && super.equals(obj)) {
            QuerySequence other = (QuerySequence)obj;
            return (getSelectQuery() == other.getSelectQuery()) && (getUpdateQuery() == other.getUpdateQuery()) && (shouldAcquireValueAfterInsert() == other.shouldAcquireValueAfterInsert()) && (shouldUseTransaction() == other.shouldUseTransaction()) && (shouldSkipUpdate() == other.shouldSkipUpdate()) && (shouldSelectBeforeUpdate() == other.shouldSelectBeforeUpdate());
        } else {
            return false;
        }
    }

    /**
     * PUBLIC:
     */
    public boolean shouldAcquireValueAfterInsert() {
        return shouldAcquireValueAfterInsert;
    }

    /**
     * PUBLIC:
     */
    public void setShouldAcquireValueAfterInsert(boolean shouldAcquireValueAfterInsert) {
        this.shouldAcquireValueAfterInsert = shouldAcquireValueAfterInsert;
    }

    /**
     * PUBLIC:
     */
    public boolean shouldUseTransaction() {
        return shouldUseTransaction;
    }

    /**
     * PUBLIC:
     */
    public void setShouldUseTransaction(boolean shouldUseTransaction) {
        this.shouldUseTransaction = shouldUseTransaction;
    }

    /**
     * PUBLIC:
     */
    public void setSelectQuery(ValueReadQuery query) {
        selectQuery = query;
    }

    /**
     * PUBLIC:
     */
    public ValueReadQuery getSelectQuery() {
        return selectQuery;
    }

    /**
     * PUBLIC:
     */
    public void setUpdateQuery(DataModifyQuery query) {
        updateQuery = query;
    }

    /**
     * PUBLIC:
     */
    public DataModifyQuery getUpdateQuery() {
        return updateQuery;
    }

    /**
     * PUBLIC:
     */
    public void setShouldSkipUpdate(boolean shouldSkipUpdate) {
        this.shouldSkipUpdate = shouldSkipUpdate;
    }

    /**
     * PUBLIC:
     */
    public boolean shouldSkipUpdate() {
        return shouldSkipUpdate;
    }

    /**
     * PUBLIC:
     */
    public void setShouldSelectBeforeUpdate(boolean shouldSelectBeforeUpdate) {
        this.shouldSelectBeforeUpdate = shouldSelectBeforeUpdate;
    }

    /**
     * PUBLIC:
     */
    public boolean shouldSelectBeforeUpdate() {
        return shouldSelectBeforeUpdate;
    }

    /**
     * INTERNAL:
     * Hook for subclasses to build a default select query; returns null here.
     */
    protected ValueReadQuery buildSelectQuery() {
        return null;
    }

    /**
     * INTERNAL:
     * Hook for subclasses to build a default update query; returns null here.
     */
    protected DataModifyQuery buildUpdateQuery() {
        return null;
    }

    /**
     * INTERNAL:
     * Hook for subclasses to build a per-call select query; returns null here.
     */
    protected ValueReadQuery buildSelectQuery(String seqName, Integer size) {
        return null;
    }

    /**
     * INTERNAL:
     * Hook for subclasses to build a per-call update query; returns null here.
     */
    protected DataModifyQuery buildUpdateQuery(String seqName, Number sizeOrNewValue) {
        return null;
    }

    /**
     * INTERNAL:
     * Build and name any missing default queries, remembering which ones were
     * created internally so onDisconnect can discard only those.
     */
    public void onConnect() {
        super.onConnect();
        if (getSelectQuery() == null) {
            setSelectQuery(buildSelectQuery());
            wasSelectQueryCreated = getSelectQuery() != null;
            if (wasSelectQueryCreated) {
                getSelectQuery().setName(getName());
            }
        }
        if ((getUpdateQuery() == null) && !shouldSkipUpdate()) {
            setUpdateQuery(buildUpdateQuery());
            wasUpdateQueryCreated = getUpdateQuery() != null;
            if (wasUpdateQueryCreated) {
                getUpdateQuery().setName(getName());
            }
        }
    }

    /**
     * INTERNAL:
     * Discard queries that were created internally in onConnect; user-supplied
     * queries are left untouched.
     */
    public void onDisconnect() {
        if (wasSelectQueryCreated) {
            setSelectQuery(null);
            wasSelectQueryCreated = false;
        }
        if (wasUpdateQueryCreated) {
            setUpdateQuery(null);
            wasUpdateQueryCreated = false;
        }
        super.onDisconnect();
    }

    /**
     * INTERNAL:
     * Core sequencing step: depending on configuration either select only,
     * select-then-update (computing the new value locally), or update-then-select.
     * Returns null on an unusable select result (the caller converts that into
     * DatabaseException.errorPreallocatingSequenceNumbers()).
     */
    protected Number updateAndSelectSequence(Accessor accessor, AbstractSession writeSession, String seqName, int size) {
        Integer sizeInteger = Integer.valueOf(size);
        if (shouldSkipUpdate()) {
            return (Number)select(accessor, writeSession, seqName, sizeInteger);
        } else {
            if (shouldSelectBeforeUpdate()) {
                Object result = select(accessor, writeSession, seqName, sizeInteger);
                BigDecimal currentValue;
                if (result instanceof Number) {
                    currentValue = BigDecimal.valueOf(((Number)result).longValue());
                } else if (result instanceof String) {
                    currentValue = new BigDecimal((String)result);
                } else if (result instanceof Record) {
                    Object val = ((Record)result).get("text()");
                    currentValue = new BigDecimal((String)val);
                } else {
                    // DatabaseException.errorPreallocatingSequenceNumbers() is thrown by the superclass
                    return null;
                }
                // Increment value
                BigDecimal newValue = currentValue.add(new BigDecimal(size));
                update(accessor, writeSession, seqName, newValue);
                return newValue;
            } else {
                update(accessor, writeSession, seqName, sizeInteger);
                return (Number)select(accessor, writeSession, seqName, sizeInteger);
            }
        }
    }

    /**
     * INTERNAL:
     * Execute the select query (cloned per call when a dedicated accessor is
     * used) and return its raw result.
     */
    protected Object select(Accessor accessor, AbstractSession writeSession, String seqName, Integer size) {
        ValueReadQuery query = getSelectQuery();
        if (query != null) {
            if (accessor != null) {
                // PERF: Prepare the query before being cloned.
                // Also BUG: SQLCall could not be prepared concurrently by different queries.
                // Setting user define allow custom SQL query to be prepared without translation row.
                query.setIsUserDefined(true);
                query.checkPrepare(writeSession, null);
                query = (ValueReadQuery)query.clone();
                query.setAccessor(accessor);
            }
        } else {
            query = buildSelectQuery(seqName, size);
            if (accessor != null) {
                query.setAccessor(accessor);
            }
        }
        Vector args = createArguments(query, seqName, size);
        query.setIsUserDefinedSQLCall(false);
        if (args != null) {
            return writeSession.executeQuery(query, args);
        } else {
            return writeSession.executeQuery(query);
        }
    }

    /**
     * INTERNAL:
     * Execute the update query (cloned per call when a dedicated accessor is
     * used); a no-op when no update query exists or can be built.
     */
    protected void update(Accessor accessor, AbstractSession writeSession, String seqName, Number sizeOrNewValue) {
        DataModifyQuery query = getUpdateQuery();
        if (query != null) {
            if (accessor != null) {
                // PERF: Prepare the query before being cloned.
                // Also BUG: SQLCall could not be prepared concurrently by different queries.
                // Setting user define allow custom SQL query to be prepared without translation row.
                query.setIsUserDefined(true);
                query.checkPrepare(writeSession, null);
                query = (DataModifyQuery)query.clone();
                query.setAccessor(accessor);
            }
        } else {
            query = buildUpdateQuery(seqName, sizeOrNewValue);
            if (query == null) {
                return;
            }
            if (accessor != null) {
                query.setAccessor(accessor);
            }
        }
        Vector args = createArguments(query, seqName, sizeOrNewValue);
        query.setIsUserDefinedSQLCall(false);
        if (args != null) {
            writeSession.executeQuery(query, args);
        } else {
            writeSession.executeQuery(query);
        }
    }

    /**
     * INTERNAL:
     * Build the argument Vector for a query: first the sequence name, then
     * (if the query declares a second argument) the size or new value.
     * Returns null when the query declares no arguments.
     */
    protected Vector createArguments(DatabaseQuery query, String seqName, Number sizeOrNewValue) {
        int nArgs = query.getArguments().size();
        if (nArgs > 0) {
            Vector args = new Vector(nArgs);
            args.addElement(seqName);
            if (nArgs > 1) {
                args.addElement(sizeOrNewValue);
            }
            return args;
        } else {
            return null;
        }
    }
}
* * Contributors: * Oracle - initial API and implementation ******************************************/ package org.eclipse.persistence.sequencing; import java.util.UUID; import java.util.Vector; import org.eclipse.persistence.internal.databaseaccess.Accessor; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.sequencing.Sequence; public class UUIDSequence extends Sequence { public UUIDSequence() { super(); } public UUIDSequence(String name) { super(name); } @Override public Object getGeneratedValue(Accessor accessor, AbstractSession writeSession, String seqName) { return UUID.randomUUID().toString().toUpperCase(); } @Override public Vector getGeneratedVector(Accessor accessor, AbstractSession writeSession, String seqName, int size) { return null; } @Override public void onConnect() { } @Override public void onDisconnect() { } @Override public boolean shouldAcquireValueAfterInsert() { return false; } @Override public boolean shouldUseTransaction() { return false; } @Override public boolean shouldUsePreallocation() { return false; } }eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/TableSequence.java0000664000000000000000000001542612216173130024137 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 11/22/2012-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support (index metadata support) ******************************************************************************/ package org.eclipse.persistence.sequencing; import java.io.StringWriter; import java.util.List; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.tools.schemaframework.IndexDefinition; import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform; import org.eclipse.persistence.internal.helper.DatabaseTable; /** *

* Purpose: Defines sequencing through using a SEQUENCE table. *

* Description * This is the default sequencing mechanism. * A table defaulting to SEQUENCE is used to generate unique ids. * The table has a name field (SEQ_NAME) storing each sequences name, * and a counter (SEQ_COUNT) storing the last sequence id generated. * There will be a row in the table for each sequence object. */ public class TableSequence extends QuerySequence { /** Default sequence table name * @deprecated * Use an empty string as a default sequence table name instead, * that triggers usage of platform.getDefaultSequenceTableName() when the sequence is connected. */ public static final String defaultTableName = "SEQUENCE"; /** Hold the database table */ protected DatabaseTable table; /** Hold the name of the column in the sequence table which specifies the sequence numeric value */ protected String counterFieldName = "SEQ_COUNT"; /** Hold the name of the column in the sequence table which specifies the sequence name */ protected String nameFieldName = "SEQ_NAME"; public TableSequence() { super(false, true); setTableName(""); } /** * Create a new sequence with the name. */ public TableSequence(String name) { super(name, false, true); setTableName(""); } /** * Create a new sequence with the name and sequence pre-allocation size. */ public TableSequence(String name, int size) { super(name, size, false, true); setTableName(""); } public TableSequence(String name, int size, int initialValue) { super(name, size, initialValue, false, true); setTableName(""); } /** * Create a new sequence with the name, and the sequence table name. */ public TableSequence(String name, String tableName) { this(name); setTableName(tableName); } /** * Create a new sequence with the name, and the sequence table information. 
*/ public TableSequence(String name, String tableName, String nameFieldName, String counterFieldName) { this(name); setTableName(tableName); setNameFieldName(nameFieldName); setCounterFieldName(counterFieldName); } public TableSequence(String name, int size, String tableName) { this(name, size); setTableName(tableName); } public TableSequence(String name, int size, String tableName, String nameFieldName, String counterFieldName) { this(name, size); setTableName(tableName); setNameFieldName(nameFieldName); setCounterFieldName(counterFieldName); } public boolean isTable() { return true; } public boolean equals(Object obj) { if (obj instanceof TableSequence) { TableSequence other = (TableSequence)obj; if (equalNameAndSize(this, other)) { return getTableName().equals(other.getTableName()) && getCounterFieldName().equals(other.getCounterFieldName()) && getNameFieldName().equals(other.getNameFieldName()); } else { return false; } } else { return false; } } public String getCounterFieldName() { return counterFieldName; } public void setCounterFieldName(String name) { counterFieldName = name; } public String getNameFieldName() { return nameFieldName; } public void setNameFieldName(String name) { nameFieldName = name; } public DatabaseTable getTable() { return table; } public List getTableIndexes() { return getTable().getIndexes(); } public String getTableName() { return getTable().getQualifiedName(); } public String getQualifiedTableName() { return getQualified(getTableName()); } public void setTable(DatabaseTable table) { this.table = table; } public void setTableName(String name) { table = new DatabaseTable(name); } public void onConnect() { if(this.table.getName().length() == 0) { this.table.setName(((DatabasePlatform)getDatasourcePlatform()).getDefaultSequenceTableName()); } super.onConnect(); } protected ValueReadQuery buildSelectQuery() { ValueReadQuery query = new ValueReadQuery(); query.addArgument(getNameFieldName()); StringWriter writer = new StringWriter(); 
writer.write("SELECT " + getCounterFieldName()); writer.write(" FROM " + getQualifiedTableName()); writer.write(" WHERE " + getNameFieldName()); writer.write(" = #" + getNameFieldName()); query.setSQLString(writer.toString()); return query; } protected DataModifyQuery buildUpdateQuery() { DataModifyQuery query = new DataModifyQuery(); query.addArgument(getNameFieldName()); query.addArgument("PREALLOC_SIZE"); StringWriter writer = new StringWriter(); writer.write("UPDATE " + getQualifiedTableName()); writer.write(" SET " + getCounterFieldName()); writer.write(" = " + getCounterFieldName()); writer.write(" + #PREALLOC_SIZE"); writer.write(" WHERE " + getNameFieldName() + " = #" + getNameFieldName()); query.setSQLString(writer.toString()); return query; } } eclipselink-2.5.1.orig/org/eclipse/persistence/sequencing/SequencingControl.java0000664000000000000000000002007312216173130025053 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.sequencing; import org.eclipse.persistence.sessions.Login; import org.eclipse.persistence.sessions.server.ConnectionPool; /** *

* Purpose: Define an interface to control sequencing functionality. *

* Description: This interface is accessed through DatabaseSession.getSequencingControl(). * It allows to create, re-create, customize Sequencing object * which is available through DatabaseSession.getSequencing() * and provides sequencing values for all descriptors that use sequencing. * * Here's the lifecycle of Sequencing object used by DatabaseSession: * 1. DatabaseSession created - sequencing object doesn't yet exist; * 2. DatabaseSession.login() causes creation of Sequencing object; * 3. DatabaseSession.logout() causes destruction of Sequencing object. * * In case sequencing object doesn't yet exist all the set parameters' values will be used * during its creation. * * In case sequencing object already exists: * 1. The following methods don't alter sequencing object - the corresponding parameters will only * be used in case a new sequencing object is created: * setShouldUseSeparateConnection; * setLogin; * setMinPoolSize; * setMaxPoolSize. * 2. The following methods cause immediate destruction of the sequencing object and creation of a new one: * setValueGenerationPolicy; * setShouldUseNativeSequencing; * setShouldUseTableSequencing; * resetSequencing; * 3. The following methods cause change immediately: * setPreallocationSize (next sequencing preallocation will use the set parameter's value). *

* Responsibilities: *

    *
  • Define the APIs for controlling sequencing. *
* @see Sequence * @see org.eclipse.persistence.sessions.DatabaseSession */ public interface SequencingControl { /** * ADVANCED: * Immediately re-create sequencing object. * The only reason to use this method is to pick up all parameters' * values that were changed after the original sequencing object has been created. */ public void resetSequencing(); /** * PUBLIC: * Indicate whether separate connection(s) for sequencing could be used * (by default it couldn't). * If this flag is set to true then separate connection(s) for sequencing * will be used in case getSequence().shouldUseSeparateConnection() * returns true. * @see Sequence */ public boolean shouldUseSeparateConnection(); /** * PUBLIC: * Set whether separate connection(s) for sequencing could be used * (by default it couldn't). * If this flag is set to true then separate connection(s) for sequencing * will be used in case getSequence().shouldUseSeparateConnection() * returns true. * @see Sequence */ public void setShouldUseSeparateConnection(boolean shouldUseSeparateConnection); /** * PUBLIC: * Indicates whether sequencing actually uses separate connection(s). * Returns true if sequencing is connected and uses separate connection(s). * Returns false if sequencing is not connected (getSequencing()==null). * Note that if shouldUseSeparateConnection() returns false this method also returns false. * However if shouldUseSeparateConnection() returns true this method * returns false in the following two cases: *
sequencing is not connected; *
getSequence().shouldUseSeparateConnection() == false. * @see Sequence */ boolean isConnectedUsingSeparateConnection(); /** * ADVANCED: * Return a DatabaseLogin to be used by separate sequencing connection(s). * @see org.eclipse.persistence.sessions.DatabaseLogin */ Login getLogin(); /** * ADVANCED: * Returns a DatabaseLogin to be used by separate sequencing connection(s) * The set value is ignored if shouldUseSeparateConnection() returns false. * The DatabaseLogin *MUST*: * 1. specify *NON-JTS* connections (such as NON_JTS driver or read-only datasource); * 2. sequenceLogin.shouldUseExternalTransactionController()==false * In case this method is not called, but separate connection should be used, * sequencing will use a clone of login owned by the DatabaseSession, * or a clone of read login owned by ServerSession. * @see org.eclipse.persistence.sessions.DatabaseLogin */ void setLogin(Login login); /** * ADVANCED: * Return the connection pool to use for sequencing. */ ConnectionPool getConnectionPool(); /** * ADVANCED: * Set the connection pool to use for sequencing. */ void setConnectionPool(ConnectionPool pool); /** * PUBLIC: * Returns a minimum number of connections in sequencing connection pool. * @see org.eclipse.persistence.sessions.server.ConnectionPool * @see org.eclipse.persistence.sessions.server.ServerSession */ int getMinPoolSize(); /** * PUBLIC: * Sets a minimum number of connections in sequencing connection pool * The set value is ignored if shouldUseSeparateConnection() returns false. * The set value is ignored if SequencingControl has been obtained not from ServerSession. * By default is 2. 
* @see org.eclipse.persistence.sessions.server.ConnectionPool * @see org.eclipse.persistence.sessions.server.ServerSession */ void setMinPoolSize(int size); /** * PUBLIC: * Returns a maximum number of connections in sequencing connection pool * @see org.eclipse.persistence.sessions.server.ConnectionPool * @see org.eclipse.persistence.sessions.server.ServerSession */ int getMaxPoolSize(); /** * PUBLIC: * Sets a maximum number of connections in sequencing connection pool * The set value is ignored if shouldUseSeparateConnection() returns false. * The set value is ignored if SequencingControl has been obtained not from ServerSession. * By default is 2. * @see org.eclipse.persistence.sessions.server.ConnectionPool * @see org.eclipse.persistence.sessions.server.ServerSession */ void setMaxPoolSize(int size); /** * PUBLIC: * Sets a initial number of connections in sequencing connection pool * The set value is ignored if shouldUseSeparateConnection() returns false. * The set value is ignored if SequencingControl has been obtained not from ServerSession. * By default is 1. * @see org.eclipse.persistence.sessions.server.ConnectionPool * @see org.eclipse.persistence.sessions.server.ServerSession */ void setInitialPoolSize(int size); /** * ADVANCED: * Removes all preallocated sequencing objects. * Ignored if getSequencingValueGenarationPolicy().shouldUsePreallocation() returns false. * This method is called internally after Sequencing object is destructed. * @see Sequence */ void initializePreallocated(); /** * ADVANCED: * Removes all preallocated sequencing objects for the given sequence name. * Ignored if getSequencingValueGenarationPolicy().shouldUsePreallocation() returns false. 
* @see Sequence */ void initializePreallocated(String seqName); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/0000755000000000000000000000000012216174372020226 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/0000775000000000000000000000000012216174372021634 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/ReflectiveAttributeDefinition.java0000664000000000000000000000312712216173126030463 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; /** * INTERNAL: *

Purpose: Model an attribute for code generation purposes, * using a java.lang.Class for the attribute type. * * @since TopLink 5.0 * @author Paul Fullbright */ public class ReflectiveAttributeDefinition extends AttributeDefinition { protected Class type; public ReflectiveAttributeDefinition() { this.type = null; } public Class getType() { return type; } protected String getTypeName() { //fixed for CR#4228 if (getType().isArray()) { String componentType = getType().getComponentType().getName(); return componentType + "[]"; } else { return getType().getName(); } } public void setType(Class type) { this.type = type; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/AccessLevel.java0000664000000000000000000001244112216173126024666 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; /** * INTERNAL: *

Purpose: Model an access level, i.e. private/protected/final/static/etc. * * @since TopLink 3.0 * @author James Sutherland */ public class AccessLevel { protected int level; public static int PUBLIC = 1; public static int PROTECTED = 2; public static int PACKAGE = 3; public static int PRIVATE = 4; protected boolean isAbstract; protected boolean isFinal; protected boolean isNative; protected boolean isStatic; protected boolean isSynchronized; protected boolean isTransient; protected boolean isVolatile; public AccessLevel() { this.level = PUBLIC; this.isStatic = false; this.isFinal = false; this.isTransient = false; } public AccessLevel(int level) { this.level = level; this.isStatic = false; this.isFinal = false; this.isTransient = false; } public boolean equals(Object object) { if (this == object) { return true; } if (!(object instanceof AccessLevel)) { return false; } AccessLevel accessLevel = (AccessLevel)object; return ((this.level == accessLevel.level) && (this.isStatic == accessLevel.isStatic) && (this.isFinal == accessLevel.isFinal) && (this.isTransient == accessLevel.isTransient)); } public int getLevel() { return level; } public boolean isAbstract() { return isAbstract; } public boolean isFinal() { return isFinal; } public boolean isNative() { return isNative; } public boolean isStatic() { return isStatic; } public boolean isSynchronized() { return isSynchronized; } public boolean isTransient() { return isTransient; } public boolean isVolatile() { return isVolatile; } public void setIsAbstract(boolean isAbstract) { this.isAbstract = isAbstract; } public void setIsFinal(boolean isFinal) { this.isFinal = isFinal; } public void setIsNative(boolean isNative) { this.isNative = isNative; } public void setIsStatic(boolean isStatic) { this.isStatic = isStatic; } public void setIsSynchronized(boolean isSynchronized) { this.isSynchronized = isSynchronized; } public void setIsTransient(boolean isTransient) { this.isTransient = isTransient; } public void 
setIsVolatile(boolean isVolatile) { this.isVolatile = isVolatile; } public void setLevel(int level) { this.level = level; } public void write(CodeGenerator generator) { boolean needsSpace = true; if (getLevel() == PUBLIC) { generator.write("public"); } else if (getLevel() == PROTECTED) { generator.write("protected"); } else if (getLevel() == PACKAGE) { // Nothing required/default. needsSpace = false; } else if (getLevel() == PRIVATE) { generator.write("private"); } if (isAbstract()) { if (needsSpace) { generator.write(" "); } generator.write("abstract"); needsSpace = true; } if (isStatic()) { if (needsSpace) { generator.write(" "); } generator.write("static"); needsSpace = true; } if (isFinal()) { if (needsSpace) { generator.write(" "); } generator.write("final"); needsSpace = true; } if (isTransient()) { if (needsSpace) { generator.write(" "); } generator.write("transient"); needsSpace = true; } if (isVolatile()) { if (needsSpace) { generator.write(" "); } generator.write("volatile"); needsSpace = true; } if (isNative()) { if (needsSpace) { generator.write(" "); } generator.write("native"); needsSpace = true; } if (isSynchronized()) { if (needsSpace) { generator.write(" "); } generator.write("synchronized"); needsSpace = true; } } } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/NonreflectiveAttributeDefinition.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/NonreflectiveAttributeDefinition.jav0000664000000000000000000000333512216173126031036 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.util.Map; /** * INTERNAL: *

Purpose: Model an attribute for code generation purposes, * using a java.lang.String for the attribute type. * * @since TopLink 5.0 * @author Paul Fullbright */ public class NonreflectiveAttributeDefinition extends AttributeDefinition { protected String type; public NonreflectiveAttributeDefinition() { this.type = ""; } private void adjustType(Map typeNameMap) { String adjustedTypeName = adjustTypeName(getTypeName(), typeNameMap); if (!getTypeName().equals(adjustedTypeName)) { setType(adjustedTypeName); } } protected void adjustTypeNames(Map typeNameMap) { adjustType(typeNameMap); super.adjustTypeNames(typeNameMap); } protected String getTypeName() { return type; } public void setType(String typeName) { this.type = typeName; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/ClassDefinition.java0000664000000000000000000002652212216173126025560 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.util.*; import org.eclipse.persistence.internal.helper.*; /** * INTERNAL: *

Purpose: Model a class for code generation purposes. * * @since TopLink 3.0 * @author James Sutherland */ public class ClassDefinition extends CodeDefinition { protected String packageName; protected Vector imports; protected int type; public static final int CLASS_TYPE = 1; public static final int INTERFACE_TYPE = 2; protected String superClass; protected Vector interfaces; protected Vector attributes; protected Vector methods; protected Vector innerClasses; public ClassDefinition() { this.packageName = ""; this.imports = new Vector(3); this.type = CLASS_TYPE; this.interfaces = new Vector(3); this.attributes = new Vector(); this.methods = new Vector(); this.innerClasses = new Vector(3); } public void addAttribute(AttributeDefinition attribute) { getAttributes().addElement(attribute); } /** * The importStatement should be of the form * "{packageName}.{shortName or '*'}" */ public void addImport(String importStatement) { if (!getImports().contains(importStatement)) { getImports().addElement(importStatement); } } private void addImports(Map typeNameMap) { for (Iterator shortNameIt = typeNameMap.keySet().iterator(); shortNameIt.hasNext();) { String shortName = (String)shortNameIt.next(); Set packageNames = (Set)typeNameMap.get(shortName); if (packageNames.size() > 1) { continue; } for (Iterator packageNameIt = ((Set)typeNameMap.get(shortName)).iterator(); packageNameIt.hasNext();) { String packageName = (String)packageNameIt.next(); if (!packageName.equals(JAVA_LANG_PACKAGE_NAME) && !packageName.equals(getPackageName()) && !packageName.equals("")) { addImport(packageName + "." 
+ shortName); } } } sortImports(); } public void addInnerClass(ClassDefinition classDefinition) { getInnerClasses().add(classDefinition); } public void addInterface(String interfaceClassName) { getInterfaces().addElement(interfaceClassName); } public void addMethod(MethodDefinition method) { getMethods().addElement(method); } private void addTypeNamesToMap(HashMap typeNameMap) { putTypeNameInMap(getSuperClass(), typeNameMap); for (Iterator i = getInterfaces().iterator(); i.hasNext();) { putTypeNameInMap((String)i.next(), typeNameMap); } for (Iterator i = getAttributes().iterator(); i.hasNext();) { ((AttributeDefinition)i.next()).putTypeNamesInMap(typeNameMap); } for (Iterator i = getMethods().iterator(); i.hasNext();) { ((MethodDefinition)i.next()).putTypeNamesInMap(typeNameMap); } } private void adjustTypeNames(HashMap typeNameMap) { setSuperClass(adjustTypeName(getSuperClass(), typeNameMap)); for (Iterator i = new Vector(getInterfaces()).iterator(); i.hasNext();) { String interfaceName = (String)i.next(); replaceInterface(interfaceName, adjustTypeName(interfaceName, typeNameMap)); } for (Iterator i = getAttributes().iterator(); i.hasNext();) { ((AttributeDefinition)i.next()).adjustTypeNames(typeNameMap); } for (Iterator i = getMethods().iterator(); i.hasNext();) { ((MethodDefinition)i.next()).adjustTypeNames(typeNameMap); } } /** * Parses the class definition, pulls out fully qualified class names, * adds imports for them, and un-fully qualifies the class names. * - Assumes that no imports have been previously added. * - Assumes that all types have been fully qualified to start. * - Will not unqualify ambiguous classes (java.util.Date and java.sql.Date). * - Will not add imports for java.lang.* * - Will not add imports for classes in the same package. * - Will not parse method bodies, but will unqualify types it finds. * * ?? - Should unqualification occur during writing? That way, reflective definitions could take advantage. 
* */ public void calculateImports() { // Calculate type name map for class definition. // Key - short type name, Value - Set of package names for that type name HashMap typeNameMap = new HashMap(); addTypeNamesToMap(typeNameMap); // Go back through class def, pulling out imports and removing package names from // non-repeated short type names. adjustTypeNames(typeNameMap); // Finally, add the imports addImports(typeNameMap); } public boolean containsMethod(MethodDefinition method) { return getMethods().contains(method); } protected Vector getAttributes() { return attributes; } protected Vector getImports() { return imports; } protected Vector getInnerClasses() { return innerClasses; } protected Vector getInterfaces() { return interfaces; } protected Vector getMethods() { return methods; } public String getPackageName() { return packageName; } public String getSuperClass() { return superClass; } public int getType() { return type; } public boolean isInterface() { return getType() == INTERFACE_TYPE; } protected void replaceInterface(String oldInterfaceName, String newInterfaceName) { // Don't bother sorting if (!oldInterfaceName.equals(newInterfaceName)) { this.interfaces.remove(oldInterfaceName); this.interfaces.add(newInterfaceName); } } private void setImports(Vector imports) { this.imports = imports; } private void setMethods(Vector methods) { this.methods = methods; } public void setPackageName(String packageName) { this.packageName = packageName; } /** * If the class to be generated is an interface, do not use this method. * Instead, use addInterface(String) for each interface superclass. 
*/ public void setSuperClass(String superClass) { this.superClass = superClass; } public void setType(int type) { this.type = type; } protected void sortImports() { setImports(new Vector(new TreeSet(getImports()))); } protected void sortMethods() { //Object methodArray[] = getMethods().toArray(); Object[] methodArray = Helper.arrayFromVector(getMethods()); Comparator comparison = new Comparator() { public int compare(Object first, Object second) { if (((MethodDefinition)first).isConstructor()) { return -1; } else if (((MethodDefinition)second).isConstructor()) { return 1; } else { return ((MethodDefinition)first).getName().compareTo(((MethodDefinition)second).getName()); } } }; Arrays.sort(methodArray, comparison); Vector sortedMethods = new Vector(getMethods().size()); for (int index = 0; index < methodArray.length; index++) { sortedMethods.addElement(methodArray[index]); } setMethods(sortedMethods); } /** * Write the code out to the generator's stream. */ public void write(CodeGenerator generator) { if (getPackageName().length() > 0) { generator.write("package "); generator.write(getPackageName()); generator.writeln(";"); generator.cr(); } for (Enumeration importsEnum = getImports().elements(); importsEnum.hasMoreElements();) { String importLine = (String)importsEnum.nextElement(); generator.write("import "); generator.write(importLine); generator.writeln(";"); } if (!getImports().isEmpty()) { generator.cr(); } super.write(generator); } /** * Write the code out to the generator's stream. 
*/ public void writeBody(CodeGenerator generator) { sortMethods(); if (isInterface()) { generator.write("interface "); } else { generator.write("class "); } generator.write(getName()); if (!isInterface() && (getSuperClass() != null)) { generator.write(" extends "); generator.writeType(getSuperClass()); } boolean isFirst = true; for (Enumeration interfacesEnum = getInterfaces().elements(); interfacesEnum.hasMoreElements();) { String interfaceName = (String)interfacesEnum.nextElement(); if (isFirst) { if (isInterface()) { generator.write(" extends"); } else { generator.write(" implements"); } isFirst = false; } else { generator.write(","); } generator.write(" "); generator.write(interfaceName); } generator.writeln(" {"); generator.cr(); for (Enumeration attributesEnum = getAttributes().elements(); attributesEnum.hasMoreElements();) { generator.tab(); ((AttributeDefinition)attributesEnum.nextElement()).write(generator); generator.cr(); } if (!getAttributes().isEmpty()) { generator.cr(); } for (Enumeration methodsEnum = getMethods().elements(); methodsEnum.hasMoreElements();) { ((MethodDefinition)methodsEnum.nextElement()).write(generator); generator.cr(); generator.cr(); } //used for Oc4j code gen for (Enumeration innerClassesEnum = getInnerClasses().elements(); innerClassesEnum.hasMoreElements();) { ((ClassDefinition)innerClassesEnum.nextElement()).write(generator); generator.cr(); generator.cr(); } generator.writeln("}"); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/AttributeDefinition.java0000664000000000000000000000650112216173126026451 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.util.*; /** * INTERNAL: *

Purpose: Model an attribute for code generation purposes. * * @since TopLink 3.0 * @author James Sutherland */ public abstract class AttributeDefinition extends CodeDefinition { protected String initialValue; public AttributeDefinition() { } /** * Parses the initial value, removing the package name for each type * (and adding the appropriate import) if the type is * unambiguous. */ private void adjustInitialValue(Map typeNameMap) { if (getInitialValue() == null) { return; } StringBuffer initialValue = new StringBuffer(getInitialValue()); Set typeNames = parseForTypeNames(initialValue.toString()); for (Iterator i = typeNames.iterator(); i.hasNext();) { String typeName = (String)i.next(); String adjustedTypeName = adjustTypeName(typeName, typeNameMap); if (!typeName.equals(adjustedTypeName)) { int typeNameStartIndex = initialValue.toString().indexOf(typeName); while (typeNameStartIndex != -1) { initialValue.replace(typeNameStartIndex, typeNameStartIndex + typeName.length(), adjustedTypeName); typeNameStartIndex = initialValue.toString().indexOf(typeName); } } } setInitialValue(initialValue.toString()); } protected void adjustTypeNames(Map typeNameMap) { adjustInitialValue(typeNameMap); } public String getInitialValue() { return initialValue; } protected abstract String getTypeName(); /** * Used for calculating imports. 
@see org.eclipse.persistence.internal.codegen.ClassDefinition#calculateImports() */ protected void putTypeNamesInMap(Map typeNameMap) { putTypeNameInMap(getTypeName(), typeNameMap); for (Iterator i = parseForTypeNames(getInitialValue()).iterator(); i.hasNext();) { putTypeNameInMap((String)i.next(), typeNameMap); } } public void setInitialValue(String initialValue) { this.initialValue = initialValue; } public void writeBody(CodeGenerator generator) { generator.writeType(getTypeName()); generator.writeType(" "); generator.write(getName()); if (getInitialValue() != null) { generator.write(" = "); generator.write(getInitialValue()); } generator.write(";"); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/InheritanceHierarchyBuilder.java0000664000000000000000000000475712216173126030107 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import org.eclipse.persistence.sessions.Project; import org.eclipse.persistence.descriptors.ClassDescriptor; import java.util.*; /** * INTERNAL: */ public class InheritanceHierarchyBuilder { /** * INTERNAL: * Based on a class name either return a pre-existing node from the hierarchyTree or build one and * add it to the tree. 
*/ public static HierarchyNode getNodeForClass(String className, Hashtable hierarchyTree) { HierarchyNode node = (HierarchyNode)hierarchyTree.get(className); if (node == null) { node = new HierarchyNode(className); hierarchyTree.put(className, node); } return node; } public static Hashtable buildInheritanceHierarchyTree(Project project) { Map descriptors = project.getDescriptors(); Hashtable hierarchyTree = new Hashtable(descriptors.size()); for (Iterator descriptorIterator = descriptors.values().iterator(); descriptorIterator.hasNext();) { ClassDescriptor descriptor = (ClassDescriptor)descriptorIterator.next(); String className = descriptor.getJavaClassName(); if (className == null) { className = descriptor.getJavaClass().getName(); } HierarchyNode node = getNodeForClass(className, hierarchyTree); if (descriptor.hasInheritance() && (descriptor.getInheritancePolicy().getParentClassName() != null)) { HierarchyNode parentNode = getNodeForClass(descriptor.getInheritancePolicy().getParentClassName(), hierarchyTree); node.setParent(parentNode); } } return hierarchyTree; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/CodeDefinition.java0000664000000000000000000002055512216173126025365 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.util.*; /** * INTERNAL: *

Purpose: Model a element of code generation purposes. * * @since TopLink 3.0 * @author James Sutherland */ public abstract class CodeDefinition { protected AccessLevel accessLevel; protected String name; protected String comment; protected static final String JAVA_LANG_PACKAGE_NAME = "java.lang"; protected static final String JAVA_UTIL_PACKAGE_NAME = "java.util"; protected static final String TOPLINK_INDIRECTION_PACKAGE_NAME = "org.eclipse.persistence.indirection"; public CodeDefinition() { this.accessLevel = new AccessLevel(); this.name = ""; this.comment = ""; } private static boolean adjustmentNeededForType(String typeName, Map typeNameMap) { if ((typeName == null) || typeName.equals("")) { return false; } if (packageName(typeName).length() == 0) { return false; } Set packages = (Set)typeNameMap.get(shortName(typeName)); return (packages == null) || (packages.size() <= 1); } /** * Compares the typeName to those stored in the typeNameMap. * If the short name of the typeName is unambiguous (only one package for * that short name in the Map), removes the package name and returns the * short name, else returns the whole thing. * * Assumes that typeName contains only a package name (optional) and a short name, * potentially with subtended brackets. * * (e.g. int -> int, java.util.Vector -> Vector, java.lang.Boolean[] -> Boolean[], etc.) */ protected static String adjustTypeName(String typeName, Map typeNameMap) { if (adjustmentNeededForType(typeName, typeNameMap)) { putTypeNameInMap(typeName, typeNameMap); return typeName.substring(packageName(typeName).length() + 1); } else { return typeName; } } /** * Returns a set of java.lang.String type names included in longString. * Will only look for ValueHolder, java.util collection types, and TopLink * indirect collection types. * All other searches too intractable at this point. 
*/ protected static Set parseForTypeNames(String longString) { Set typeNames = new HashSet(); if (longString != null) { typeNames.addAll(parseForTypeNamesInPackage(longString, JAVA_LANG_PACKAGE_NAME)); typeNames.addAll(parseForTypeNamesInPackage(longString, JAVA_UTIL_PACKAGE_NAME)); typeNames.addAll(parseForTypeNamesInPackage(longString, TOPLINK_INDIRECTION_PACKAGE_NAME)); } return typeNames; } private static Set parseForTypeNamesInPackage(String longString, String packageName) { Set typeNames = new HashSet(); int packageStartIndex = longString.indexOf(packageName); while (packageStartIndex != -1) { boolean lookingForEndOfTypeName = true; int searchIndex = packageStartIndex + packageName.length() + 1; while (lookingForEndOfTypeName) { if (Character.isJavaIdentifierPart(longString.charAt(searchIndex))) { searchIndex++; } else { lookingForEndOfTypeName = false; } } typeNames.add(longString.substring(packageStartIndex, searchIndex)); packageStartIndex = longString.indexOf(packageName, searchIndex); } return typeNames; } /** * Used for calculating imports. @see org.eclipse.persistence.internal.codegen.ClassDefinition#calculateImports() */ protected static void putTypeNameInMap(String typeName, Map typeNameMap) { if ((typeName == null) || typeName.equals("")) { return; } String shortName = shortName(typeName); String packageName = packageName(typeName); if (packageName.length() > 0) { Set packageNames; if (typeNameMap.get(shortName) == null) { packageNames = new HashSet(); typeNameMap.put(shortName, packageNames); } else { packageNames = (Set)typeNameMap.get(shortName); } // There is no package name. The package is the default package. // Do nothing, as neither an import is needed, nor does the class need to be unqualified. 
if (!packageNames.contains(packageName)) { packageNames.add(packageName); } } } private static String packageName(String typeName) { int lastPeriod = typeName.lastIndexOf("."); if (lastPeriod == -1) { return ""; } else { return typeName.substring(0, lastPeriod); } } /** * Removes the package name, if there is one. Also removes any trailing brackets. * * Assumes that typeName contains only a package name (optional) and a short name, * potentially with subtended brackets. * * (e.g. int -> int, java.util.Vector -> Vector, java.lang.Boolean[] -> Boolean, etc.) */ private static String shortName(String typeName) { int shortNameStartIndex = typeName.lastIndexOf(".") + 1; int searchIndex = shortNameStartIndex; boolean stillLookingForEnd = true; while (stillLookingForEnd) { if (Character.isJavaIdentifierPart(typeName.charAt(searchIndex))) { searchIndex++; stillLookingForEnd = searchIndex < typeName.length(); } else { stillLookingForEnd = false; } } return typeName.substring(shortNameStartIndex, searchIndex); } public AccessLevel getAccessLevel() { return accessLevel; } public String getComment() { return comment; } public String getName() { return name; } public void setAccessLevel(AccessLevel accessLevel) { this.accessLevel = accessLevel; } public void setComment(String comment) { this.comment = comment; } public void setName(String name) { this.name = name; } public String toString() { CodeGenerator generator = new CodeGenerator(); write(generator); return generator.toString(); } /** * Write the code out to the generator's stream. 
*/ public void write(CodeGenerator generator) { if (getComment().length() > 0) { generator.writeln("/**"); String comment = getComment(); String cr = org.eclipse.persistence.internal.helper.Helper.cr(); int lastLineIndex = 0; int nextLineIndex = comment.indexOf(cr); while (nextLineIndex != -1) { generator.write(" * "); generator.write(comment.substring(lastLineIndex, nextLineIndex + cr.length())); lastLineIndex = nextLineIndex + cr.length(); nextLineIndex = comment.indexOf(cr, lastLineIndex); } generator.write(" * "); generator.writeln(comment.substring(lastLineIndex, comment.length())); generator.writeln(" */"); generator.cr(); } getAccessLevel().write(generator); generator.write(" "); writeBody(generator); } /** * Write the code out to the generator's stream. */ public abstract void writeBody(CodeGenerator generator); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/CodeGenerator.java0000664000000000000000000001075512216173126025224 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.io.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.exceptions.*; /** * INTERNAL: *

Purpose: Used to generate code * * @since TopLink 3.0 * @author James Sutherland */ public class CodeGenerator { protected Writer output; protected ClassDefinition currentClass; /*Bug#3388703 useUnicode is added to provide user with an option to escape non-ASCII characters or not */ protected boolean useUnicode = true; public CodeGenerator() { this.output = new StringWriter(); } public CodeGenerator(boolean useUnicode) { this(); this.useUnicode = useUnicode; } public void cr() { write(Helper.cr()); } public ClassDefinition getCurrentClass() { return currentClass; } public Writer getOutput() { return output; } public void setCurrentClass(ClassDefinition currentClass) { this.currentClass = currentClass; } public void setOutput(Writer output) { this.output = output; } public void tab() { write("\t"); } public void tab(int indent) { for (int index = 0; index < indent; index++) { tab(); } } public String toString() { return getOutput().toString(); } public void write(Object value) { try { /*Bug#3388703 useUnicode is added to provide user with an option to escape non-ASCII characters or not */ if (!useUnicode) { getOutput().write(String.valueOf(value)); } else { //Bug2906180 \\uxxxx escaped characters are used for non-ASCII characters String test = String.valueOf(value); StringBuffer escapedStr = new StringBuffer(test.length() * 4); for (int i = 0; i < test.length(); i++) { char c = test.charAt(i); if (c < 127) { escapedStr.append(c); } else { String escapedChar = Long.toHexString((c)).toUpperCase(); switch (escapedChar.length()) { case 1: escapedStr.append("\\u000" + escapedChar); break; case 2: escapedStr.append("\\u00" + escapedChar); break; case 3: escapedStr.append("\\u0" + escapedChar); break; default: escapedStr.append("\\u" + escapedChar); break; } } } getOutput().write(escapedStr.toString()); } } catch (IOException exception) { throw ValidationException.fileError(exception); } } public void writeln(Object value) { write(value); cr(); } /** * Write the type 
checking if its package is required. */ public void writeType(String typeName) { String localTypeName = typeName; if (getCurrentClass() != null) { int index = typeName.lastIndexOf('.'); if (index != -1) { String packageName = typeName.substring(index); if (getCurrentClass().getImports().contains(packageName)) { localTypeName = typeName.substring(index, typeName.length()); } } } write(localTypeName); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/ReflectiveMethodDefinition.java0000664000000000000000000001020312216173126027731 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.util.Iterator; import java.util.Vector; /** * INTERNAL: *

Purpose: Model a method for code generation purposes, * using java.lang.Class for the parameter types. * * @since TopLink 5.0 * @author Paul Fullbright */ public class ReflectiveMethodDefinition extends MethodDefinition { protected Vector argumentTypes; protected Class type; public ReflectiveMethodDefinition() { super(); this.argumentTypes = new Vector(5); this.type = null; } public void addArgument(Class argumentType, String argumentName) { getArgumentNames().addElement(argumentName); getArgumentTypes().addElement(argumentType); } protected boolean argumentsEqual(MethodDefinition methodDefinition) { Object[] args1 = this.getArgumentTypes().toArray(); Object[] args2 = methodDefinition.getArgumentTypes().toArray(); if (args1.length == args2.length) { for (int i = 0; i < args1.length; i++) { if (args1[i] != args2[i]) { return false; } } } else { return false; } return true; } protected Vector getArgumentTypeNames() { Vector argumentTypeNames = new Vector(); for (Iterator i = getArgumentTypes().iterator(); i.hasNext();) { argumentTypeNames.add(((Class)i.next()).getName()); } return argumentTypeNames; } public Vector getArgumentTypes() { return this.argumentTypes; } protected void writeArguments(CodeGenerator generator) { boolean isFirst = true; for (int index = 0; index < getArgumentTypes().size(); ++index) { Class argument = (Class)getArgumentTypes().elementAt(index); if (isFirst) { isFirst = false; } else { generator.write(", "); } //fixed for CR#4228 //Bug# 4587853, if argument type is an inner class, convert name //from $ notation to . notation. 
if (argument.isArray()) { String componentType = argument.getComponentType().getName(); if(componentType.indexOf('$') != -1) { componentType = componentType.replace('$', '.'); } String componentTypeArrays = componentType + "[]"; generator.write(componentTypeArrays); } else { String name = argument.getName(); if(name.indexOf('$') != -1) { name = name.replace('$', '.'); } generator.write(name); } generator.write(" "); generator.write(getArgumentNames().elementAt(index)); } } public Class getReturnTypeClass() { return type; } public void setReturnTypeClass(Class type) { this.type = type; } public String getReturnType() { if (getReturnTypeClass() != null) { if (getReturnTypeClass().isArray()) { return this.getReturnTypeClass().getComponentType().getName() + "[]"; } else { return this.getReturnTypeClass().getName(); } } return returnType; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/NonreflectiveMethodDefinition.java0000664000000000000000000000716112216173126030455 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.util.*; /** * INTERNAL: *

Purpose: Model a method for code generation purposes, * using java.lang.String for the parameter types. * * @since TopLink 5.0 * @author Paul Fullbright */ public class NonreflectiveMethodDefinition extends MethodDefinition { protected Vector argumentTypeNames; public NonreflectiveMethodDefinition() { super(); this.argumentTypeNames = new Vector(5); } public void addArgument(String argumentType, String argumentName) { getArgumentNames().addElement(argumentName); getArgumentTypes().addElement(argumentType); } private void adjustArgumentTypeNames(Map typeNameMap) { for (Iterator i = new Vector(getArgumentTypeNames()).iterator(); i.hasNext();) { String argumentTypeName = (String)i.next(); String adjustedArgumentTypeName = adjustTypeName(argumentTypeName, typeNameMap); if (!argumentTypeName.equals(adjustedArgumentTypeName)) { replaceArgumentTypeName(argumentTypeName, adjustedArgumentTypeName); } } } protected void adjustTypeNames(Map typeNameMap) { super.adjustTypeNames(typeNameMap); adjustArgumentTypeNames(typeNameMap); } protected boolean argumentsEqual(MethodDefinition methodDefinition) { Object[] args1 = this.getArgumentTypes().toArray(); Object[] args2 = methodDefinition.getArgumentTypes().toArray(); if (args1.length == args2.length) { for (int i = 0; i < args1.length; i++) { if (((args1[i] == null) && (args1[i] != args2[i])) || (!args1[i].equals(args2[i]))) { return false; } } return true; } return false; } protected Vector getArgumentTypeNames() { return getArgumentTypes(); } public Vector getArgumentTypes() { return this.argumentTypeNames; } protected void replaceArgumentTypeName(String oldArgumentTypeName, String newArgumentTypeName) { int index = getArgumentTypeNames().indexOf(oldArgumentTypeName); getArgumentTypeNames().remove(oldArgumentTypeName); getArgumentTypeNames().insertElementAt(newArgumentTypeName, index); } protected void writeArguments(CodeGenerator generator) { boolean isFirst = true; for (int index = 0; index < getArgumentTypes().size(); 
++index) { String argument = (String)getArgumentTypes().elementAt(index); if (isFirst) { isFirst = false; } else { generator.write(", "); } generator.write(argument); generator.write(" "); generator.write(getArgumentNames().elementAt(index)); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/HierarchyNode.java0000664000000000000000000000405512216173126025223 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.util.*; /** * INTERNAL: */ public class HierarchyNode { // the class that this node represents public String className; public HierarchyNode parent; public ArrayList children; /** * This member will hold the different definition types that should be implemented by the code generated children * Used mostly in CMP code generation */ public ArrayList definitions; public HierarchyNode(String className) { this.className = className; this.children = new ArrayList(); this.definitions = new ArrayList(); } public void setParent(HierarchyNode parent) { this.parent = parent; this.parent.addChild(this); } public void addChild(HierarchyNode child) { if (!this.children.contains(child)) { this.children.add(child); } } public List getChildren() { return this.children; } public HierarchyNode getParent() { return this.parent; } public String 
getClassName() { return this.className; } public String toString() { String result = "HierarchyNode:\n\t" + className + "\n" + children + "\n end HierarchyNode\n"; return result; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/codegen/MethodDefinition.java0000664000000000000000000002350312216173126025727 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.codegen; import java.util.*; /** * INTERNAL: *

Purpose: Model a method for code generation purposes. * * @since TopLink 3.0 * @author James Sutherland */ public abstract class MethodDefinition extends CodeDefinition { protected boolean isAbstract; protected boolean isConstructor; protected String returnType; protected Vector argumentNames; protected Vector lines; protected Vector exceptions; protected StringBuffer storedBuffer; public MethodDefinition() { this.isConstructor = false; this.returnType = "void"; this.lines = new Vector(); this.exceptions = new Vector(); this.storedBuffer = new StringBuffer(); } public void addException(String exceptionTypeName) { this.exceptions.add(exceptionTypeName); } public void addLine(String line) { this.storedBuffer.append(line); getLines().addElement(this.storedBuffer.toString()); this.storedBuffer = new StringBuffer(); } /** * This method can be used to store a string that will be prepended to the very next line of code entered */ public void addToBuffer(String partOfLine) { this.storedBuffer.append(partOfLine); } private void adjustExceptions(Map typeNameMap) { for (Iterator i = new Vector(getExceptions()).iterator(); i.hasNext();) { String exceptionName = (String)i.next(); String adjustedExceptionName = adjustTypeName(exceptionName, typeNameMap); if (!exceptionName.equals(adjustedExceptionName)) { replaceException(exceptionName, adjustedExceptionName); } } } /** * Parses the line, removing the package name for each type * (and adding the appropriate import) if the type is * unambiguous. 
*/ private void adjustLine(String line, Map typeNameMap) { StringBuffer lineInProgress = new StringBuffer(line); Set typeNames = parseForTypeNames(lineInProgress.toString()); for (Iterator i = typeNames.iterator(); i.hasNext();) { String typeName = (String)i.next(); String adjustedTypeName = adjustTypeName(typeName, typeNameMap); if (!typeName.equals(adjustedTypeName)) { int typeNameStartIndex = lineInProgress.toString().indexOf(typeName); while (typeNameStartIndex != -1) { lineInProgress.replace(typeNameStartIndex, typeNameStartIndex + typeName.length(), adjustedTypeName); typeNameStartIndex = lineInProgress.toString().indexOf(typeName); } } } replaceLine(line, lineInProgress.toString()); } private void adjustLines(Map typeNameMap) { for (Iterator i = new Vector(getLines()).iterator(); i.hasNext();) { adjustLine((String)i.next(), typeNameMap); } } private void adjustReturnType(Map typeNameMap) { String adjustedReturnType = adjustTypeName(getReturnType(), typeNameMap); if (!getReturnType().equals(adjustedReturnType)) { setReturnType(adjustedReturnType); } } protected void adjustTypeNames(Map typeNameMap) { adjustReturnType(typeNameMap); adjustExceptions(typeNameMap); adjustLines(typeNameMap); } protected abstract boolean argumentsEqual(MethodDefinition methodDefinition); public boolean equals(Object object) { if (this == object) { return true; } if (!(object instanceof MethodDefinition)) { return false; } MethodDefinition methodDefinition = (MethodDefinition)object; if ((this.name == null) && (methodDefinition.getName() != null)) { return false; } if ((this.name != null) && !this.name.equals(methodDefinition.getName())) { return false; } if (!this.accessLevel.equals(methodDefinition.getAccessLevel())) { return false; } if (!this.returnType.equals(methodDefinition.getReturnType())) { return false; } if (!argumentsEqual(methodDefinition)) { return false; } if (!exceptionsEqual(methodDefinition)) { return false; } return true; } protected boolean 
exceptionsEqual(MethodDefinition methodDefinition) { Object[] exceptions1 = this.getExceptions().toArray(); Object[] exceptions2 = methodDefinition.getExceptions().toArray(); if (exceptions1.length == exceptions2.length) { for (int i = 0; i < exceptions1.length; i++) { if (((exceptions1[i] == null) && (exceptions1[i] != exceptions2[i])) || (!exceptions1[i].equals(exceptions2[i]))) { return false; } } return true; } return false; } protected Vector getArgumentNames() { if (this.argumentNames == null) { this.argumentNames = new Vector(5); } return argumentNames; } public String getArgumentName(int index) { return (String)getArgumentNames().get(index); } public Iterator argumentNames() { return getArgumentNames().iterator(); } public int argumentNamesSize() { return getArgumentNames().size(); } protected abstract Vector getArgumentTypeNames(); protected abstract Vector getArgumentTypes(); public Vector getLines() { return lines; } protected Vector getExceptions() { return this.exceptions; } public String getReturnType() { return returnType; } public int hashCode() { int hash = this.accessLevel.hashCode(); hash ^= this.returnType.hashCode(); hash ^= this.getArgumentTypes().hashCode(); if (this.name != null) { hash ^= this.name.hashCode(); } if (this.name != null) { hash ^= this.name.hashCode(); } hash ^= this.getExceptions().hashCode(); return hash; } public boolean isAbstract() { return this.isAbstract; } public boolean isConstructor() { return isConstructor; } /** * Used for calculating imports. 
@see org.eclipse.persistence.internal.codegen.ClassDefinition#calculateImports() */ protected void putTypeNamesInMap(Map typeNameMap) { putTypeNameInMap(getReturnType(), typeNameMap); for (Iterator i = getExceptions().iterator(); i.hasNext();) { putTypeNameInMap((String)i.next(), typeNameMap); } for (Iterator i = getArgumentTypeNames().iterator(); i.hasNext();) { putTypeNameInMap((String)i.next(), typeNameMap); } } protected void replaceException(String oldExceptionName, String newExceptionName) { int index = getExceptions().indexOf(oldExceptionName); getExceptions().remove(oldExceptionName); getExceptions().insertElementAt(newExceptionName, index); } protected void replaceLine(String oldLine, String newLine) { int index = getLines().indexOf(oldLine); getLines().remove(oldLine); getLines().insertElementAt(newLine, index); } public void setIsAbstract(boolean isAbstract) { this.isAbstract = isAbstract; } public void setIsConstructor(boolean isConstructor) { this.isConstructor = isConstructor; } public void setReturnType(String returnType) { this.returnType = returnType; } /** * Write the code out to the generator's stream. 
*/ public void writeBody(CodeGenerator generator) { if (!isConstructor()) { generator.writeType(getReturnType()); generator.write(" "); } generator.write(getName()); generator.write("("); writeArguments(generator); generator.write(")"); if (!this.exceptions.isEmpty()) { writeThrowsClause(generator); } if (isAbstract()) { generator.write(";"); } else { generator.write(" {"); generator.cr(); for (Enumeration linesEnum = getLines().elements(); linesEnum.hasMoreElements();) { generator.tab(); generator.writeln(linesEnum.nextElement()); } generator.write("}"); } } protected abstract void writeArguments(CodeGenerator generator); protected void writeThrowsClause(CodeGenerator generator) { generator.write(" throws "); for (Iterator exceptionIterator = this.exceptions.iterator(); exceptionIterator.hasNext();) { generator.write(exceptionIterator.next()); if (exceptionIterator.hasNext()) { generator.write(", "); } } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/0000775000000000000000000000000012216174372021507 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/ComplexDatabaseType.java0000664000000000000000000001764412216173126026260 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - Dec 2008 ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.ListIterator; import java.util.List; import org.eclipse.persistence.exceptions.QueryException; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.platform.database.DatabasePlatform; import org.eclipse.persistence.platform.database.oracle.plsql.PLSQLStoredProcedureCall; import org.eclipse.persistence.platform.database.oracle.plsql.PLSQLargument; import org.eclipse.persistence.queries.StoredProcedureCall; import org.eclipse.persistence.sessions.DatabaseRecord; import static org.eclipse.persistence.internal.helper.DatabaseType.DatabaseTypeHelper.databaseTypeHelper; import static org.eclipse.persistence.internal.helper.Helper.NL; /** * PUBLIC: Abstract class for Complex Database types * (e.g. PL/SQL records, PL/SQL collections) * * @author Mike Norman - michael.norman@oracle.com * @since Oracle TopLink 11.x.x */ public abstract class ComplexDatabaseType implements DatabaseType, Cloneable { protected String typeName; protected String compatibleType; /** * Defines the Java class that the complex type maps to. */ protected Class javaType; protected String javaTypeName; public boolean isRecord() { return false; } public boolean isCollection() { return false; } public boolean isStruct() { return false; } public boolean isArray() { return false; } /** * Indicates if a given subclass represents a PL/SQL cursor. 
* * @see org.eclipse.persistence.platform.database.oracle.plsql.PLSQLCursor */ public boolean isCursor() { return false; } public int getConversionCode() { return getSqlCode(); } public boolean isComplexDatabaseType() { return true; } public boolean isJDBCType() { return false; } public boolean hasCompatibleType() { return this.compatibleType != null; } public String getCompatibleType() { return compatibleType; } public void setCompatibleType(String compatibleType) { this.compatibleType = compatibleType; } public String getTypeName() { return typeName; } public void setTypeName(String typeName) { this.typeName = typeName; } /** * Set the Java class that the complex type maps to. * The mapped class for a Record type, and collection class for Collection type. */ public void setJavaType(Class javaType) { this.javaType = javaType; if (javaType != null) { javaTypeName = javaType.getName(); } } /** * Return the Java class that the complex type maps to. */ public Class getJavaType() { return javaType; } public String getJavaTypeName() { if (javaType != null && javaTypeName == null) { javaTypeName = javaType.getName(); } return javaTypeName; } public void setJavaTypeName(String javaTypeName) { this.javaTypeName = javaTypeName; } public ComplexDatabaseType clone() { try { ComplexDatabaseType clone = (ComplexDatabaseType)super.clone(); return clone; } catch (CloneNotSupportedException exception) { throw new InternalError(exception.getMessage()); } } public void buildInDeclare(StringBuilder sb, PLSQLargument inArg) { // Validate. 
if (!hasCompatibleType()) { throw QueryException.compatibleTypeNotSet(this); } if ((getTypeName() == null) || getTypeName().equals("")) { throw QueryException.typeNameNotSet(this); } sb.append(" "); sb.append(databaseTypeHelper.buildTarget(inArg)); sb.append(" "); sb.append(getTypeName()); sb.append(";"); sb.append(NL); sb.append(" "); sb.append(databaseTypeHelper.buildCompatible(inArg)); sb.append(" "); sb.append(getCompatibleType()); sb.append(" := :"); sb.append(inArg.inIndex); sb.append(";"); sb.append(NL); } public void buildOutDeclare(StringBuilder sb, PLSQLargument outArg) {// Validate. if (!hasCompatibleType()) { throw QueryException.compatibleTypeNotSet(this); } if ((getTypeName() == null) || getTypeName().equals("")) { throw QueryException.typeNameNotSet(this); } sb.append(" "); sb.append(databaseTypeHelper.buildTarget(outArg)); sb.append(" "); sb.append(getTypeName()); sb.append(";"); sb.append(NL); } public void buildBeginBlock(StringBuilder sb, PLSQLargument arg, PLSQLStoredProcedureCall call) { String sql2PlName = call.getSQL2PlName(this); if (sql2PlName == null) { // TODO exception throw new NullPointerException("no SQL2Pl conversion routine for " + typeName); } String target = databaseTypeHelper.buildTarget(arg); String compat = databaseTypeHelper.buildCompatible(arg); sb.append(" "); sb.append(target); sb.append(" := "); sb.append(sql2PlName); sb.append("("); sb.append(compat); sb.append(");"); sb.append(NL); } public void buildOutAssignment(StringBuilder sb, PLSQLargument outArg, PLSQLStoredProcedureCall call) { String sql2PlName = call.getPl2SQLName(this); if (sql2PlName == null) { // TODO: Error. 
throw new NullPointerException("no Pl2SQL conversion routine for " + typeName); } String target = databaseTypeHelper.buildTarget(outArg); sb.append(" :"); sb.append(outArg.outIndex); sb.append(" := "); sb.append(sql2PlName); sb.append("("); sb.append(target); sb.append(");"); sb.append(NL); } public void buildOutputRow(PLSQLargument outArg, AbstractRecord outputRow, DatabaseRecord newOutputRow, List outputRowFields, List outputRowValues) { databaseTypeHelper.buildOutputRow(outArg, outputRow, newOutputRow, outputRowFields, outputRowValues); } public int computeInIndex(PLSQLargument inArg, int newIndex, ListIterator i) { return databaseTypeHelper.computeInIndex(inArg, newIndex); } public int computeOutIndex(PLSQLargument outArg, int newIndex, ListIterator i) { return databaseTypeHelper.computeOutIndex(outArg, newIndex); } public void logParameter(StringBuilder sb, Integer direction, PLSQLargument arg, AbstractRecord translationRow, DatabasePlatform platform) { databaseTypeHelper.logParameter(sb, direction, arg, translationRow, platform); } public void translate(PLSQLargument arg, AbstractRecord translationRow, AbstractRecord copyOfTranslationRow, List copyOfTranslationFields, List translationRowFields, List translationRowValues, StoredProcedureCall call) { databaseTypeHelper.translate(arg, translationRow, copyOfTranslationRow, copyOfTranslationFields, translationRowFields, translationRowValues, call); } public String toString() { return getClass().getSimpleName() + "(" + getTypeName() + ")"; } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/JPAConversionManager.java0000664000000000000000000000562712216173126026333 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 
1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** *

* Purpose: Extension to the existing conversion manager to support the * EJB 3.0 spec. *

* Responsibilities: *

    *
  • Allow a null value default to be read into primitives. With the current * conversion manager, setting a null into a primitive causes and exception. * This conversion manager was added to avoid that exception and therefore, add * support for schemas that were built before the object model was mapped * (using a primitive). Therefore, EclipseLink will not change the null column value * in the database through this conversion. The value on the database will only * be changed if the user actually sets a new primitive value. *
  • Allows users to define their own set of default null values to be used * in the conversion. *
* * @author Guy Pelletier * @since TopLink 10.1.4 RI */ public class JPAConversionManager extends ConversionManager { public JPAConversionManager() { super(); } /** * INTERNAL: */ public Object getDefaultNullValue(Class theClass) { Object defaultNullValue = null; if (this.defaultNullValues != null){ defaultNullValue = getDefaultNullValues().get(theClass); } if (defaultNullValue == null && theClass.isPrimitive()) { if(Double.TYPE.equals(theClass)){ return Double.valueOf(0D); } else if(Long.TYPE.equals(theClass)) { return Long.valueOf(0L); } else if(Character.TYPE.equals(theClass)){ return Character.valueOf('\u0000'); } else if(Float.TYPE.equals(theClass)){ return Float.valueOf(0F); } else if(Short.TYPE.equals(theClass)){ return Short.valueOf((short)0); } else if(Byte.TYPE.equals(theClass)){ return Byte.valueOf((byte)0); } else if(Boolean.TYPE.equals(theClass)){ return Boolean.FALSE; } else { return 0; } } else { return defaultNullValue; } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/Helper.java0000664000000000000000000026661012216173126023600 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * dminsky - added countOccurrencesOf(Object, List) API * 08/23/2010-2.2 Michael O'Brien * - 323043: application.xml module ordering may cause weaving not to occur causing an NPE. 
* warn if expected "_persistence_*_vh" method not found * instead of throwing NPE during deploy validation. ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.io.Closeable; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; import java.io.Serializable; import java.io.StringWriter; import java.io.Writer; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URI; import java.net.URISyntaxException; import java.security.AccessController; import java.security.PrivilegedActionException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.StringTokenizer; import java.util.TimeZone; import java.util.Vector; import java.util.concurrent.ConcurrentLinkedQueue; import org.eclipse.persistence.config.SystemProperties; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.exceptions.EclipseLinkException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.internal.core.helper.CoreHelper; import org.eclipse.persistence.internal.databaseaccess.DatabaseAccessor; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; import org.eclipse.persistence.internal.security.PrivilegedGetField; import org.eclipse.persistence.internal.security.PrivilegedGetMethod; import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import 
org.eclipse.persistence.logging.AbstractSessionLog; import org.eclipse.persistence.logging.SessionLog; /** * INTERNAL: *

* Purpose: Define any useful methods that are missing from the base Java. */ public class Helper extends CoreHelper implements Serializable { /** Used to configure JDBC level date optimization. */ public static boolean shouldOptimizeDates = false; /** Used to store null values in hashtables, is helper because need to be serializable. */ public static final Object NULL_VALUE = new Helper(); /** PERF: Used to cache a set of calendars for conversion/printing purposes. */ protected static Queue calendarCache = initCalendarCache(); /** PERF: Cache default timezone for calendar conversion. */ protected static TimeZone defaultTimeZone = TimeZone.getDefault(); // Changed static initialization to lazy initialization for bug 2756643 /** Store CR string, for some reason \n is not platform independent. */ protected static String CR = null; /** formatting strings for indenting */ public static String SPACE = " "; public static String INDENT = " "; /** Store newline string */ public static String NL = "\n"; /** Prime the platform-dependent path separator */ protected static String PATH_SEPARATOR = null; /** Prime the platform-dependent file separator */ protected static String FILE_SEPARATOR = null; /** Prime the platform-dependent current working directory */ protected static String CURRENT_WORKING_DIRECTORY = null; /** Prime the platform-dependent temporary directory */ protected static String TEMP_DIRECTORY = null; /** Backdoor to allow 0 to be used in primary keys. 
* @deprecated * Instead of setting the flag to true use: * session.getProject().setDefaultIdValidation(IdValidation.NULL) **/ public static boolean isZeroValidPrimaryKey = false; // settings to allow ascertaining attribute names from method names public static final String IS_PROPERTY_METHOD_PREFIX = "is"; public static final String GET_PROPERTY_METHOD_PREFIX = "get"; public static final String SET_PROPERTY_METHOD_PREFIX = "set"; public static final String SET_IS_PROPERTY_METHOD_PREFIX = "setIs"; public static final int POSITION_AFTER_IS_PREFIX = IS_PROPERTY_METHOD_PREFIX.length(); public static final int POSITION_AFTER_GET_PREFIX = GET_PROPERTY_METHOD_PREFIX.length(); public static final String DEFAULT_DATABASE_DELIMITER = "\""; public static final String PERSISTENCE_SET = "_persistence_set_"; public static final String PERSISTENCE_GET = "_persistence_get_"; // 323403: These constants are used to search for missing weaved functions - this is a copy is of the jpa project under ClassWeaver public static final String PERSISTENCE_FIELDNAME_PREFIX = "_persistence_"; public static final String PERSISTENCE_FIELDNAME_POSTFIX = "_vh"; private static String defaultStartDatabaseDelimiter = null; private static String defaultEndDatabaseDelimiter = null; /** * Return if JDBC date access should be optimized. */ public static boolean shouldOptimizeDates() { return shouldOptimizeDates; } /** * Return if JDBC date access should be optimized. */ public static void setShouldOptimizeDates(boolean value) { shouldOptimizeDates = value; } /** * PERF: * Return the calendar cache use to avoid calendar creation for processing java.sql/util.Date/Time/Timestamp objects. */ public static Queue getCalendarCache() { return calendarCache; } /** * PERF: * Init the calendar cache use to avoid calendar creation for processing java.sql/util.Date/Time/Timestamp objects. 
*/ public static Queue initCalendarCache() { Queue calendarCache = new ConcurrentLinkedQueue(); for (int index = 0; index < 10; index++) { calendarCache.add(Calendar.getInstance()); } return calendarCache; } /** * PERF: This is used to optimize Calendar conversion/printing. * This should only be used when a calendar is temporarily required, * when finished it must be released back. */ public static Calendar allocateCalendar() { Calendar calendar = getCalendarCache().poll(); if (calendar == null) { calendar = Calendar.getInstance(); } return calendar; } /** * PERF: Return the cached default platform. * Used for ensuring Calendar are in the local timezone. * The JDK method clones the timezone, so cache it locally. */ public static TimeZone getDefaultTimeZone() { return defaultTimeZone; } /** * PERF: This is used to optimize Calendar conversion/printing. * This should only be used when a calendar is temporarily required, * when finished it must be released back. */ public static void releaseCalendar(Calendar calendar) { getCalendarCache().offer(calendar); } public static void addAllToVector(Vector theVector, Vector elementsToAdd) { for (Enumeration stream = elementsToAdd.elements(); stream.hasMoreElements();) { theVector.addElement(stream.nextElement()); } } public static Vector addAllUniqueToVector(Vector objects, List objectsToAdd) { if (objectsToAdd == null) { return objects; } int size = objectsToAdd.size(); for (int index = 0; index < size; index++) { Object element = objectsToAdd.get(index); if (!objects.contains(element)) { objects.add(element); } } return objects; } public static List addAllUniqueToList(List objects, List objectsToAdd) { if (objectsToAdd == null) { return objects; } int size = objectsToAdd.size(); for (int index = 0; index < size; index++) { Object element = objectsToAdd.get(index); if (!objects.contains(element)) { objects.add(element); } } return objects; } /** * Convert the specified vector into an array. 
*/ public static Object[] arrayFromVector(Vector vector) { Object[] result = new Object[vector.size()]; for (int i = 0; i < vector.size(); i++) { result[i] = vector.elementAt(i); } return result; } /** * Convert the HEX string to a byte array. * HEX allows for binary data to be printed. */ public static byte[] buildBytesFromHexString(String hex) { String tmpString = hex; if ((tmpString.length() % 2) != 0) { throw ConversionException.couldNotConvertToByteArray(hex); } byte[] bytes = new byte[tmpString.length() / 2]; int byteIndex; int strIndex; byte digit1; byte digit2; for (byteIndex = bytes.length - 1, strIndex = tmpString.length() - 2; byteIndex >= 0; byteIndex--, strIndex -= 2) { digit1 = (byte)Character.digit(tmpString.charAt(strIndex), 16); digit2 = (byte)Character.digit(tmpString.charAt(strIndex + 1), 16); if ((digit1 == -1) || (digit2 == -1)) { throw ConversionException.couldNotBeConverted(hex, ClassConstants.APBYTE); } bytes[byteIndex] = (byte)((digit1 * 16) + digit2); } return bytes; } /** * Convert the byte array to a HEX string. * HEX allows for binary data to be printed. 
*/ public static String buildHexStringFromBytes(byte[] bytes) { char[] hexArray = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' }; StringBuffer stringBuffer = new StringBuffer(); int tempByte; for (int byteIndex = 0; byteIndex < (bytes).length; byteIndex++) { tempByte = (bytes)[byteIndex]; if (tempByte < 0) { tempByte = tempByte + 256;//compensate for the fact that byte is signed in Java } tempByte = (byte)(tempByte / 16);//get the first digit if (tempByte > 16) { throw ConversionException.couldNotBeConverted(bytes, ClassConstants.STRING); } stringBuffer.append(hexArray[tempByte]); tempByte = (bytes)[byteIndex]; if (tempByte < 0) { tempByte = tempByte + 256; } tempByte = (byte)(tempByte % 16);//get the second digit if (tempByte > 16) { throw ConversionException.couldNotBeConverted(bytes, ClassConstants.STRING); } stringBuffer.append(hexArray[tempByte]); } return stringBuffer.toString(); } /** * Create a new Vector containing all of the map elements. */ public static Vector buildVectorFromMapElements(Map map) { Vector vector = new Vector(map.size()); Iterator iterator = map.values().iterator(); while (iterator.hasNext()) { vector.addElement(iterator.next()); } return vector; } /** * Answer a Calendar from a date. */ public static Calendar calendarFromUtilDate(java.util.Date date) { Calendar calendar = Calendar.getInstance(); calendar.setTime(date); //In jdk1.3, millisecond is missing if (date instanceof Timestamp) { calendar.set(Calendar.MILLISECOND, ((Timestamp)date).getNanos() / 1000000); } return calendar; } /** * INTERNAL: * Return whether a Class implements a specific interface, either directly or indirectly * (through interface or implementation inheritance). 
* @return boolean */ public static boolean classImplementsInterface(Class aClass, Class anInterface) { // quick check if (aClass == anInterface) { return true; } Class[] interfaces = aClass.getInterfaces(); // loop through the "directly declared" interfaces for (int i = 0; i < interfaces.length; i++) { if (interfaces[i] == anInterface) { return true; } } // recurse through the interfaces for (int i = 0; i < interfaces.length; i++) { if (classImplementsInterface(interfaces[i], anInterface)) { return true; } } // finally, recurse up through the superclasses to Object Class superClass = aClass.getSuperclass(); if (superClass == null) { return false; } return classImplementsInterface(superClass, anInterface); } /** * INTERNAL: * Return whether a Class is a subclass of, or the same as, another Class. * @return boolean */ public static boolean classIsSubclass(Class subClass, Class superClass) { Class temp = subClass; if (superClass == null) { return false; } while (temp != null) { if (temp == superClass) { return true; } temp = temp.getSuperclass(); } return false; } /** * INTERNAL: * Compares two version in num.num.num.num.num*** format. * -1, 0, 1 means the version1 is less than, equal, greater than version2. * Example: compareVersions("11.1.0.6.0-Production", "11.1.0.7") == -1 * Example: compareVersions("WebLogic Server 10.3.4", "10.3.3.0") == 1 */ public static int compareVersions(String version1, String version2) { return compareVersions(version(version1), version(version2)); } /** * INTERNAL: * Expects version in ***num.num.num.num.num*** format, converts it to a List of Integers. 
* Example: "11.1.0.6.0_Production" -> {11, 1, 0, 6, 0} * Example: "WebLogic Server 10.3.3.0" -> {10, 3, 3, 0} */ static protected List version(String version) { ArrayList list = new ArrayList(5); // first char - a digit - in the string corresponding to the current list index int iBegin = -1; // used to remove a non-digital prefix boolean isPrefix = true; for(int i=0; i= 0) { String strNum = version.substring(iBegin, version.length()); int num = Integer.parseInt(strNum, 10); list.add(num); } return list; } /** * INTERNAL: * Compares two lists of Integers * -1, 0, 1 means the first list is less than, equal, greater than the second list. * Example: {11, 1, 0, 6, 0} < {11, 1, 0, 7} */ static protected int compareVersions(List list1, Listlist2) { int n = Math.max(list1.size(), list2.size()); int res = 0; for(int i=0; i l2) { res = 1; break; } } return res; } public static Class getClassFromClasseName(String className, ClassLoader classLoader){ Class convertedClass = null; if(className==null){ return null; } try{ if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { convertedClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName(className, true, classLoader)); } catch (PrivilegedActionException exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(className, exception.getException()); } } else { convertedClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(className, true, classLoader); } return convertedClass; } catch (ClassNotFoundException exc){ throw ValidationException.classNotFoundWhileConvertingClassNames(className, exc); } } public static String getComponentTypeNameFromArrayString(String aString) { if (aString == null || aString.length() == 0) { return null; } // complex array component type case if (aString.length() > 3 && (aString.startsWith("[L") & aString.endsWith(";"))) { return aString.substring(2, aString.length() - 1); } else if (aString.startsWith("[")){ Class 
primitiveClass = null; try { primitiveClass = Class.forName(aString); } catch (ClassNotFoundException cnf) { // invalid name specified - do not rethrow exception primitiveClass = null; } if (primitiveClass != null) { return primitiveClass.getComponentType().getName(); } } return null; } public static boolean compareArrays(Object[] array1, Object[] array2) { if (array1.length != array2.length) { return false; } for (int index = 0; index < array1.length; index++) { //Related to Bug#3128838 fix. ! is added to correct the logic. if(array1[index] != null) { if (!array1[index].equals(array2[index])) { return false; } } else { if(array2[index] != null) { return false; } } } return true; } /** * Compare two BigDecimals. * This is required because the .equals method of java.math.BigDecimal ensures that * the scale of the two numbers are equal. Therefore 0.0 != 0.00. * @see java.math.BigDecimal#equals(Object) */ public static boolean compareBigDecimals(java.math.BigDecimal one, java.math.BigDecimal two) { if (one.scale() != two.scale()) { double doubleOne = (one).doubleValue(); double doubleTwo = (two).doubleValue(); if ((doubleOne != Double.POSITIVE_INFINITY) && (doubleOne != Double.NEGATIVE_INFINITY) && (doubleTwo != Double.POSITIVE_INFINITY) && (doubleTwo != Double.NEGATIVE_INFINITY)) { return doubleOne == doubleTwo; } } return one.equals(two); } public static boolean compareByteArrays(byte[] array1, byte[] array2) { if (array1.length != array2.length) { return false; } for (int index = 0; index < array1.length; index++) { if (array1[index] != array2[index]) { return false; } } return true; } public static boolean compareCharArrays(char[] array1, char[] array2) { if (array1.length != array2.length) { return false; } for (int index = 0; index < array1.length; index++) { if (array1[index] != array2[index]) { return false; } } return true; } /** * PUBLIC: * * Compare two vectors of types. 
Return true if the size of the vectors is the * same and each of the types in the first Vector are assignable from the types * in the corresponding objects in the second Vector. */ public static boolean areTypesAssignable(List types1, List types2) { if ((types1 == null) || (types2 == null)) { return false; } if (types1.size() == types2.size()) { for (int i = 0; i < types1.size(); i++) { Class type1 = (Class)types1.get(i); Class type2 = (Class)types2.get(i); // if either are null then we assume assignability. if ((type1 != null) && (type2 != null)) { if (!type1.isAssignableFrom(type2)) { return false; } } } return true; } return false; } /** * PUBLIC: * Compare the elements in 2 hashtables to see if they are equal * * Added Nov 9, 2000 JED Patch 2.5.1.8 */ public static boolean compareHashtables(Hashtable hashtable1, Hashtable hashtable2) { Enumeration enumtr; Object element; Hashtable clonedHashtable; if (hashtable1.size() != hashtable2.size()) { return false; } clonedHashtable = (Hashtable)hashtable2.clone(); enumtr = hashtable1.elements(); while (enumtr.hasMoreElements()) { element = enumtr.nextElement(); if (clonedHashtable.remove(element) == null) { return false; } } return clonedHashtable.isEmpty(); } /** * Compare two potential arrays and return true if they are the same. Will * check for BigDecimals as well. 
*/ public static boolean comparePotentialArrays(Object firstValue, Object secondValue) { Class firstClass = firstValue.getClass(); Class secondClass = secondValue.getClass(); // Arrays must be checked for equality because default does identity if ((firstClass == ClassConstants.APBYTE) && (secondClass == ClassConstants.APBYTE)) { return compareByteArrays((byte[])firstValue, (byte[])secondValue); } else if ((firstClass == ClassConstants.APCHAR) && (secondClass == ClassConstants.APCHAR)) { return compareCharArrays((char[])firstValue, (char[])secondValue); } else if ((firstClass.isArray()) && (secondClass.isArray())) { return compareArrays((Object[])firstValue, (Object[])secondValue); } else if (firstValue instanceof java.math.BigDecimal && secondValue instanceof java.math.BigDecimal) { // BigDecimals equals does not consider the precision correctly return compareBigDecimals((java.math.BigDecimal)firstValue, (java.math.BigDecimal)secondValue); } return false; } /** * Merge the two Maps into a new HashMap. */ public static Map concatenateMaps(Map first, Map second) { Map concatenation = new HashMap(first.size() + second.size() + 4); for (Iterator keys = first.keySet().iterator(); keys.hasNext();) { Object key = keys.next(); Object value = first.get(key); concatenation.put(key, value); } for (Iterator keys = second.keySet().iterator(); keys.hasNext();) { Object key = keys.next(); Object value = second.get(key); concatenation.put(key, value); } return concatenation; } /** * Return a new vector with no duplicated values. 
*/
public static Vector concatenateUniqueVectors(Vector first, Vector second) {
    // Start with a copy of the first vector, then append unseen elements of the second.
    Vector combined = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance();
    for (Enumeration stream = first.elements(); stream.hasMoreElements();) {
        combined.addElement(stream.nextElement());
    }
    for (Enumeration stream = second.elements(); stream.hasMoreElements();) {
        Object candidate = stream.nextElement();
        if (!combined.contains(candidate)) {
            combined.addElement(candidate);
        }
    }
    return combined;
}

/**
 * Return a new List with no duplicated values.
 */
public static List concatenateUniqueLists(List first, List second) {
    List combined = new ArrayList(first.size() + second.size());
    combined.addAll(first);
    for (Object candidate : second) {
        if (!combined.contains(candidate)) {
            combined.add(candidate);
        }
    }
    return combined;
}

/**
 * Return the concatenation of both vectors, keeping duplicates.
 */
public static Vector concatenateVectors(Vector first, Vector second) {
    Vector combined = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance();
    for (Enumeration stream = first.elements(); stream.hasMoreElements();) {
        combined.addElement(stream.nextElement());
    }
    for (Enumeration stream = second.elements(); stream.hasMoreElements();) {
        combined.addElement(stream.nextElement());
    }
    return combined;
}

/** Return a copy of the vector containing a subset starting at startIndex
 * and ending at stopIndex.
* @param vector - original vector * @param startIndex - starting position in vector * @param stopIndex - ending position in vector * @exception EclipseLinkException */ public static Vector copyVector(List originalVector, int startIndex, int stopIndex) throws ValidationException { Vector newVector; if (stopIndex < startIndex) { return NonSynchronizedVector.newInstance(); } newVector = NonSynchronizedVector.newInstance(stopIndex - startIndex); for (int index = startIndex; index < stopIndex; index++) { newVector.add(originalVector.get(index)); } return newVector; } /** * Copy an array of strings to a new array * avoids the use of Arrays.copy() because it is not supported in JDK 1.5 * @param original * @return */ public static String[] copyStringArray(String[] original){ if (original == null){ return null; } String[] copy = new String[original.length]; for (int i=0;inull element found in the specified * Vector starting the search at the starting index specified. * Return an int >= 0 and less than size if a null element was found. * Return -1 if a null element was not found. * This is needed in jdk1.1, where Vector.contains(Object) * for a null element will result in a NullPointerException.... 
*/ public static int indexOfNullElement(Vector v, int index) { int size = v.size(); for (int i = index; i < size; i++) { if (v.elementAt(i) == null) { return i; } } return -1; } /** * ADVANCED * returns true if the class in question is a primitive wrapper */ public static boolean isPrimitiveWrapper(Class classInQuestion) { return classInQuestion.equals(Character.class) || classInQuestion.equals(Boolean.class) || classInQuestion.equals(Byte.class) || classInQuestion.equals(Short.class) || classInQuestion.equals(Integer.class) || classInQuestion.equals(Long.class) || classInQuestion.equals(Float.class) || classInQuestion.equals(Double.class); } /** * Returns true if the string given is an all upper case string */ public static boolean isUpperCaseString(String s) { char[] c = s.toCharArray(); for (int i = 0; i < s.length(); i++) { if (Character.isLowerCase(c[i])) { return false; } } return true; } /** * Returns true if the character given is a vowel. I.e. one of a,e,i,o,u,A,E,I,O,U. */ public static boolean isVowel(char c) { return (c == 'A') || (c == 'a') || (c == 'e') || (c == 'E') || (c == 'i') || (c == 'I') || (c == 'o') || (c == 'O') || (c == 'u') || (c == 'U'); } /** * Return an array of the files in the specified directory. * This allows us to simplify jdk1.1 code a bit. */ public static File[] listFilesIn(File directory) { if (directory.isDirectory()) { return directory.listFiles(); } else { return new File[0]; } } /** * Make a Vector from the passed object. * If it's a Collection, iterate over the collection and add each item to the Vector. * If it's not a collection create a Vector and add the object to it. 
*/
public static Vector makeVectorFromObject(Object theObject) {
    if (theObject instanceof Vector) {
        // Already a Vector: return it as-is (no copy).
        return ((Vector)theObject);
    }
    if (theObject instanceof Collection) {
        // Copy any other Collection into a new Vector.
        Vector returnVector = new Vector(((Collection)theObject).size());
        Iterator iterator = ((Collection)theObject).iterator();
        while (iterator.hasNext()) {
            returnVector.add(iterator.next());
        }
        return returnVector;
    }
    // Not a collection: wrap the single object in a one-element Vector.
    Vector returnVector = new Vector();
    returnVector.addElement(theObject);
    return returnVector;
}

/**
 * Used by our byte code weaving to enable users who are debugging to output
 * the generated class to a file
 *
 * @param className
 * @param classBytes
 * @param outputPath
 */
public static void outputClassFile(String className, byte[] classBytes, String outputPath) {
    StringBuffer directoryName = new StringBuffer();
    // Split the class name on path-style separators to build the directory tree.
    // NOTE(review): the delimiter set "\n\\/" does not include '.', so a
    // dot-qualified name is not split — confirm callers pass '/'-separated names.
    StringTokenizer tokenizer = new StringTokenizer(className, "\n\\/");
    String token = null;
    while (tokenizer.hasMoreTokens()) {
        token = tokenizer.nextToken();
        if (tokenizer.hasMoreTokens()) {
            // Every token but the last is a directory; the last becomes the file name.
            directoryName.append(token + File.separator);
        }
    }
    FileOutputStream fos = null;
    try {
        String usedOutputPath = outputPath;
        if (!outputPath.endsWith(File.separator)) {
            usedOutputPath = outputPath + File.separator;
        }
        File file = new File(usedOutputPath + directoryName);
        file.mkdirs();
        file = new File(file, token + ".class");
        if (!file.exists()) {
            file.createNewFile();
        } else {
            // Refuse to overwrite an existing class file unless explicitly enabled
            // via the WEAVING_SHOULD_OVERWRITE system property.
            if (!System.getProperty(
                    SystemProperties.WEAVING_SHOULD_OVERWRITE, "false")
                    .equalsIgnoreCase("true")) {
                AbstractSessionLog.getLog().log(SessionLog.WARNING,
                        SessionLog.WEAVER, "weaver_not_overwriting", className);
                return;
            }
        }
        fos = new FileOutputStream(file);
        fos.write(classBytes);
    } catch (Exception e) {
        // Best-effort debugging aid: log the failure and continue rather than
        // failing the weaving process.
        AbstractSessionLog.getLog().log(SessionLog.WARNING,
                SessionLog.WEAVER, "weaver_could_not_write", className, e);
        AbstractSessionLog.getLog().logThrowable(SessionLog.FINEST, SessionLog.WEAVER, e);
    } finally {
        Helper.close(fos);
    }
}

/**
 * Return a string containing the platform-appropriate
 * characters for separating entries in a
path (e.g. the classpath) */ public static String pathSeparator() { // Bug 2756643 if (PATH_SEPARATOR == null) { PATH_SEPARATOR = System.getProperty("path.separator"); } return PATH_SEPARATOR; } /** * Return a String containing the printed stacktrace of an exception. */ public static String printStackTraceToString(Throwable aThrowable) { StringWriter swriter = new StringWriter(); PrintWriter writer = new PrintWriter(swriter, true); aThrowable.printStackTrace(writer); writer.close(); return swriter.toString(); } /* Return a string representation of a number of milliseconds in terms of seconds, minutes, or * milliseconds, whichever is most appropriate. */ public static String printTimeFromMilliseconds(long milliseconds) { if ((milliseconds > 1000) && (milliseconds < 60000)) { return (milliseconds / 1000) + "s"; } if (milliseconds > 60000) { return (milliseconds / 60000) + "min " + printTimeFromMilliseconds(milliseconds % 60000); } return milliseconds + "ms"; } /** * Given a Vector, print it, even if there is a null in it */ public static String printVector(Vector vector) { StringWriter stringWriter = new StringWriter(); stringWriter.write("["); Enumeration enumtr = vector.elements(); stringWriter.write(String.valueOf(enumtr.nextElement())); while (enumtr.hasMoreElements()) { stringWriter.write(" "); stringWriter.write(String.valueOf(enumtr.nextElement())); } stringWriter.write("]"); return stringWriter.toString(); } public static Hashtable rehashHashtable(Hashtable table) { Hashtable rehashedTable = new Hashtable(table.size() + 2); Enumeration values = table.elements(); for (Enumeration keys = table.keys(); keys.hasMoreElements();) { Object key = keys.nextElement(); Object value = values.nextElement(); rehashedTable.put(key, value); } return rehashedTable; } public static Map rehashMap(Map table) { HashMap rehashedTable = new HashMap(table.size() + 2); Iterator values = table.values().iterator(); for (Iterator keys = table.keySet().iterator(); keys.hasNext();) { 
Object key = keys.next(); Object value = values.next(); rehashedTable.put(key, value); } return rehashedTable; } /** * Returns a String which has had enough non-alphanumeric characters removed to be equal to * the maximumStringLength. */ public static String removeAllButAlphaNumericToFit(String s1, int maximumStringLength) { int s1Size = s1.length(); if (s1Size <= maximumStringLength) { return s1; } // Remove the necessary number of characters StringBuffer buf = new StringBuffer(); int numberOfCharsToBeRemoved = s1.length() - maximumStringLength; int s1Index = 0; while ((numberOfCharsToBeRemoved > 0) && (s1Index < s1Size)) { char currentChar = s1.charAt(s1Index); if (Character.isLetterOrDigit(currentChar)) { buf.append(currentChar); } else { numberOfCharsToBeRemoved--; } s1Index++; } // Append the rest of the character that were not parsed through. // Is it quicker to build a substring and append that? while (s1Index < s1Size) { buf.append(s1.charAt(s1Index)); s1Index++; } // return buf.toString(); } /** * Returns a String which has had enough of the specified character removed to be equal to * the maximumStringLength. */ public static String removeCharacterToFit(String s1, char aChar, int maximumStringLength) { int s1Size = s1.length(); if (s1Size <= maximumStringLength) { return s1; } // Remove the necessary number of characters StringBuffer buf = new StringBuffer(); int numberOfCharsToBeRemoved = s1.length() - maximumStringLength; int s1Index = 0; while ((numberOfCharsToBeRemoved > 0) && (s1Index < s1Size)) { char currentChar = s1.charAt(s1Index); if (currentChar == aChar) { numberOfCharsToBeRemoved--; } else { buf.append(currentChar); } s1Index++; } // Append the rest of the character that were not parsed through. // Is it quicker to build a substring and append that? 
while (s1Index < s1Size) { buf.append(s1.charAt(s1Index)); s1Index++; } // return buf.toString(); } /** * Returns a String which has had enough of the specified character removed to be equal to * the maximumStringLength. */ public static String removeVowels(String s1) { // Remove the vowels StringBuffer buf = new StringBuffer(); int s1Size = s1.length(); int s1Index = 0; while (s1Index < s1Size) { char currentChar = s1.charAt(s1Index); if (!isVowel(currentChar)) { buf.append(currentChar); } s1Index++; } // return buf.toString(); } /** * Replaces the first subString of the source with the replacement. */ public static String replaceFirstSubString(String source, String subString, String replacement) { int index = source.indexOf(subString); if (index >= 0) { return source.substring(0, index) + replacement + source.substring(index + subString.length()); } return null; } public static Vector reverseVector(Vector theVector) { Vector tempVector = new Vector(theVector.size()); Object currentElement; for (int i = theVector.size() - 1; i > -1; i--) { currentElement = theVector.elementAt(i); tempVector.addElement(currentElement); } return tempVector; } /** * Returns a new string with all space characters removed from the right * * @param originalString - timestamp representation of date * @return - String */ public static String rightTrimString(String originalString) { int len = originalString.length(); while ((len > 0) && (originalString.charAt(len - 1) <= ' ')) { len--; } return originalString.substring(0, len); } /** * Returns a String which is a concatenation of two string which have had enough * vowels removed from them so that the sum of the sized of the two strings is less than * or equal to the specified size. 
*/ public static String shortenStringsByRemovingVowelsToFit(String s1, String s2, int maximumStringLength) { int size = s1.length() + s2.length(); if (size <= maximumStringLength) { return s1 + s2; } // Remove the necessary number of characters int s1Size = s1.length(); int s2Size = s2.length(); StringBuffer buf1 = new StringBuffer(); StringBuffer buf2 = new StringBuffer(); int numberOfCharsToBeRemoved = size - maximumStringLength; int s1Index = 0; int s2Index = 0; int modulo2 = 0; // While we still want to remove characters, and not both string are done. while ((numberOfCharsToBeRemoved > 0) && !((s1Index >= s1Size) && (s2Index >= s2Size))) { if ((modulo2 % 2) == 0) { // Remove from s1 if (s1Index < s1Size) { if (isVowel(s1.charAt(s1Index))) { numberOfCharsToBeRemoved--; } else { buf1.append(s1.charAt(s1Index)); } s1Index++; } } else { // Remove from s2 if (s2Index < s2Size) { if (isVowel(s2.charAt(s2Index))) { numberOfCharsToBeRemoved--; } else { buf2.append(s2.charAt(s2Index)); } s2Index++; } } modulo2++; } // Append the rest of the character that were not parsed through. // Is it quicker to build a substring and append that? while (s1Index < s1Size) { buf1.append(s1.charAt(s1Index)); s1Index++; } while (s2Index < s2Size) { buf2.append(s2.charAt(s2Index)); s2Index++; } // return buf1.toString() + buf2.toString(); } /** * Answer a sql.Date from a timestamp. */ public static java.sql.Date sqlDateFromUtilDate(java.util.Date utilDate) { // PERF: Avoid deprecated get methods, that are now very inefficient. Calendar calendar = allocateCalendar(); calendar.setTime(utilDate); java.sql.Date date = dateFromCalendar(calendar); releaseCalendar(calendar); return date; } /** * Print the sql.Date. */ public static String printDate(java.sql.Date date) { // PERF: Avoid deprecated get methods, that are now very inefficient and used from toString. 
Calendar calendar = allocateCalendar(); calendar.setTime(date); String string = printDate(calendar); releaseCalendar(calendar); return string; } /** * Print the date part of the calendar. */ public static String printDate(Calendar calendar) { return printDate(calendar, true); } /** * Print the date part of the calendar. * Normally the calendar must be printed in the local time, but if the timezone is printed, * it must be printing in its timezone. */ public static String printDate(Calendar calendar, boolean useLocalTime) { int year; int month; int day; if (useLocalTime && (!defaultTimeZone.equals(calendar.getTimeZone()))) { // Must convert the calendar to the local timezone if different, as dates have no timezone (always local). Calendar localCalendar = allocateCalendar(); localCalendar.setTimeInMillis(calendar.getTimeInMillis()); year = localCalendar.get(Calendar.YEAR); month = localCalendar.get(Calendar.MONTH) + 1; day = localCalendar.get(Calendar.DATE); releaseCalendar(localCalendar); } else { year = calendar.get(Calendar.YEAR); month = calendar.get(Calendar.MONTH) + 1; day = calendar.get(Calendar.DATE); } char[] buf = "2000-00-00".toCharArray(); buf[0] = Character.forDigit(year / 1000, 10); buf[1] = Character.forDigit((year / 100) % 10, 10); buf[2] = Character.forDigit((year / 10) % 10, 10); buf[3] = Character.forDigit(year % 10, 10); buf[5] = Character.forDigit(month / 10, 10); buf[6] = Character.forDigit(month % 10, 10); buf[8] = Character.forDigit(day / 10, 10); buf[9] = Character.forDigit(day % 10, 10); return new String(buf); } /** * Print the sql.Time. */ public static String printTime(java.sql.Time time) { // PERF: Avoid deprecated get methods, that are now very inefficient and used from toString. Calendar calendar = allocateCalendar(); calendar.setTime(time); String string = printTime(calendar); releaseCalendar(calendar); return string; } /** * Print the time part of the calendar. 
*/ public static String printTime(Calendar calendar) { return printTime(calendar, true); } /** * Print the time part of the calendar. * Normally the calendar must be printed in the local time, but if the timezone is printed, * it must be printing in its timezone. */ public static String printTime(Calendar calendar, boolean useLocalTime) { int hour; int minute; int second; if (useLocalTime && (!defaultTimeZone.equals(calendar.getTimeZone()))) { // Must convert the calendar to the local timezone if different, as dates have no timezone (always local). Calendar localCalendar = allocateCalendar(); localCalendar.setTimeInMillis(calendar.getTimeInMillis()); hour = localCalendar.get(Calendar.HOUR_OF_DAY); minute = localCalendar.get(Calendar.MINUTE); second = localCalendar.get(Calendar.SECOND); releaseCalendar(localCalendar); } else { hour = calendar.get(Calendar.HOUR_OF_DAY); minute = calendar.get(Calendar.MINUTE); second = calendar.get(Calendar.SECOND); } String hourString; String minuteString; String secondString; if (hour < 10) { hourString = "0" + hour; } else { hourString = Integer.toString(hour); } if (minute < 10) { minuteString = "0" + minute; } else { minuteString = Integer.toString(minute); } if (second < 10) { secondString = "0" + second; } else { secondString = Integer.toString(second); } return (hourString + ":" + minuteString + ":" + secondString); } /** * Print the Calendar. */ public static String printCalendar(Calendar calendar) { return printCalendar(calendar, true); } /** * Print the Calendar. * Normally the calendar must be printed in the local time, but if the timezone is printed, * it must be printing in its timezone. 
*/ public static String printCalendar(Calendar calendar, boolean useLocalTime) { String millisString; // String zeros = "000000000"; if (calendar.get(Calendar.MILLISECOND) == 0) { millisString = "0"; } else { millisString = buildZeroPrefixAndTruncTrailZeros(calendar.get(Calendar.MILLISECOND), 3); } StringBuffer timestampBuf = new StringBuffer(); timestampBuf.append(printDate(calendar, useLocalTime)); timestampBuf.append(" "); timestampBuf.append(printTime(calendar, useLocalTime)); timestampBuf.append("."); timestampBuf.append(millisString); return timestampBuf.toString(); } /** * Print the sql.Timestamp. */ public static String printTimestamp(java.sql.Timestamp timestamp) { // PERF: Avoid deprecated get methods, that are now very inefficient and used from toString. Calendar calendar = allocateCalendar(); calendar.setTime(timestamp); String nanosString; if (timestamp.getNanos() == 0) { nanosString = "0"; } else { nanosString = buildZeroPrefixAndTruncTrailZeros(timestamp.getNanos(), 9); } StringBuffer timestampBuf = new StringBuffer(); timestampBuf.append(printDate(calendar)); timestampBuf.append(" "); timestampBuf.append(printTime(calendar)); timestampBuf.append("."); timestampBuf.append(nanosString); releaseCalendar(calendar); return (timestampBuf.toString()); } /** * Build a numerical string with leading 0s. number is an existing number that * the new string will be built on. totalDigits is the number of the required * digits of the string. */ public static String buildZeroPrefix(int number, int totalDigits) { String numbString = buildZeroPrefixWithoutSign(number, totalDigits); if (number < 0) { numbString = "-" + numbString; } else { numbString = "+" + numbString; } return numbString; } /** * Build a numerical string with leading 0s. number is an existing number that * the new string will be built on. totalDigits is the number of the required * digits of the string. 
*/ public static String buildZeroPrefixWithoutSign(int number, int totalDigits) { String zeros = "000000000"; int absValue = (number < 0) ? (-number) : number; String numbString = Integer.toString(absValue); // Add leading zeros numbString = zeros.substring(0, (totalDigits - numbString.length())) + numbString; return numbString; } /** * Build a numerical string with leading 0s and truncate trailing zeros. number is * an existing number that the new string will be built on. totalDigits is the number * of the required digits of the string. */ public static String buildZeroPrefixAndTruncTrailZeros(int number, int totalDigits) { String zeros = "000000000"; String numbString = Integer.toString(number); // Add leading zeros numbString = zeros.substring(0, (totalDigits - numbString.length())) + numbString; // Truncate trailing zeros char[] numbChar = new char[numbString.length()]; numbString.getChars(0, numbString.length(), numbChar, 0); int truncIndex = totalDigits - 1; while (numbChar[truncIndex] == '0') { truncIndex--; } return new String(numbChar, 0, truncIndex + 1); } /** * Print the sql.Timestamp without the nanos portion. */ public static String printTimestampWithoutNanos(java.sql.Timestamp timestamp) { // PERF: Avoid deprecated get methods, that are now very inefficient and used from toString. Calendar calendar = allocateCalendar(); calendar.setTime(timestamp); String string = printCalendarWithoutNanos(calendar); releaseCalendar(calendar); return string; } /** * Print the Calendar without the nanos portion. */ public static String printCalendarWithoutNanos(Calendar calendar) { StringBuffer timestampBuf = new StringBuffer(); timestampBuf.append(printDate(calendar)); timestampBuf.append(" "); timestampBuf.append(printTime(calendar)); return timestampBuf.toString(); } /** * Answer a sql.Date from a Calendar. 
*/
public static java.sql.Date dateFromCalendar(Calendar calendar) {
    if (!defaultTimeZone.equals(calendar.getTimeZone())) {
        // Must convert the calendar to the local timezone if different, as dates have no timezone (always local).
        Calendar localCalendar = allocateCalendar();
        localCalendar.setTimeInMillis(calendar.getTimeInMillis());
        java.sql.Date date = dateFromYearMonthDate(localCalendar.get(Calendar.YEAR), localCalendar.get(Calendar.MONTH), localCalendar.get(Calendar.DATE));
        releaseCalendar(localCalendar);
        return date;
    } else if ((calendar.get(Calendar.HOUR_OF_DAY) == 0) && (calendar.get(Calendar.MINUTE) == 0) && (calendar.get(Calendar.SECOND) == 0) && (calendar.get(Calendar.MILLISECOND) == 0)) {
        // PERF: If just a date set in the Calendar, then just use its millis.
        return new java.sql.Date(calendar.getTimeInMillis());
    }
    // Time fields are non-zero: rebuild a date from the year/month/day only.
    return dateFromYearMonthDate(calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DATE));
}

/**
 * Return a sql.Date with time component zeroed out.
 * Starting with version 12.1 Oracle jdbc Statement.setDate method no longer zeroes out the time component.
 */
public static java.sql.Date truncateDate(java.sql.Date date) {
    // PERF: Avoid deprecated get methods, that are now very inefficient.
    Calendar calendar = allocateCalendar();
    calendar.setTime(date);
    // Only rebuild the date when any time field is actually set.
    if ((calendar.get(Calendar.HOUR_OF_DAY) != 0) || (calendar.get(Calendar.MINUTE) != 0) || (calendar.get(Calendar.SECOND) != 0) || (calendar.get(Calendar.MILLISECOND) != 0)) {
        int year = calendar.get(Calendar.YEAR);
        int month = calendar.get(Calendar.MONTH);
        int day = calendar.get(Calendar.DATE);
        // clear() before set() so no stale fields (e.g. milliseconds) survive.
        calendar.clear();
        calendar.set(year, month, day, 0, 0, 0);
        long millis = calendar.getTimeInMillis();
        date = new java.sql.Date(millis);
    }
    releaseCalendar(calendar);
    return date;
}

/**
 * Return a sql.Date with time component zeroed out (with possible exception of milliseconds).
* Starting with version 12.1 Oracle jdbc Statement.setDate method no longer zeroes out the whole time component, * yet it still zeroes out milliseconds. */ public static java.sql.Date truncateDateIgnoreMilliseconds(java.sql.Date date) { // PERF: Avoid deprecated get methods, that are now very inefficient. Calendar calendar = allocateCalendar(); calendar.setTime(date); if ((calendar.get(Calendar.HOUR_OF_DAY) != 0) || (calendar.get(Calendar.MINUTE) != 0) || (calendar.get(Calendar.SECOND) != 0)) { int year = calendar.get(Calendar.YEAR); int month = calendar.get(Calendar.MONTH); int day = calendar.get(Calendar.DATE); calendar.clear(); calendar.set(year, month, day, 0, 0, 0); long millis = calendar.getTimeInMillis(); date = new java.sql.Date(millis); } releaseCalendar(calendar); return date; } /** * Can be used to mark code if a workaround is added for a JDBC driver or other bug. */ public static void systemBug(String description) { // Use sender to find what is needy. } /** * Answer a Time from a Date * * This implementation is based on the java.sql.Date class, not java.util.Date. * @param timestampObject - time representation of date * @return - time representation of dateObject */ public static java.sql.Time timeFromDate(java.util.Date date) { // PERF: Avoid deprecated get methods, that are now very inefficient. Calendar calendar = allocateCalendar(); calendar.setTime(date); java.sql.Time time = timeFromCalendar(calendar); releaseCalendar(calendar); return time; } /** * Answer a Time from a long * * @param longObject - milliseconds from the epoch (00:00:00 GMT * Jan 1, 1970). Negative values represent dates prior to the epoch. */ public static java.sql.Time timeFromLong(Long longObject) { return new java.sql.Time(longObject.longValue()); } /** * Answer a Time with the hour, minute, second. * This builds a time avoiding the deprecated, inefficient and concurrency bottleneck date constructors. * The hour, minute, second are the values calendar uses, * i.e. 
year is from 0, month is 0-11, date is 1-31. */ public static java.sql.Time timeFromHourMinuteSecond(int hour, int minute, int second) { // Use a calendar to compute the correct millis for the date. Calendar localCalendar = allocateCalendar(); localCalendar.clear(); localCalendar.set(1970, 0, 1, hour, minute, second); long millis = localCalendar.getTimeInMillis(); java.sql.Time time = new java.sql.Time(millis); releaseCalendar(localCalendar); return time; } /** * Answer a Time from a string representation. * This method will accept times in the following * formats: HH-MM-SS, HH:MM:SS * * @param timeString - string representation of time * @return - time representation of string */ public static java.sql.Time timeFromString(String timeString) throws ConversionException { int hour; int minute; int second; String timePortion = timeString; if (timeString.length() > 12) { // Longer strings are Timestamp format (ie. Sybase & Oracle) timePortion = timeString.substring(11, 19); } if ((timePortion.indexOf('-') == -1) && (timePortion.indexOf('/') == -1) && (timePortion.indexOf('.') == -1) && (timePortion.indexOf(':') == -1)) { throw ConversionException.incorrectTimeFormat(timePortion); } StringTokenizer timeStringTokenizer = new StringTokenizer(timePortion, " /:.-"); try { hour = Integer.parseInt(timeStringTokenizer.nextToken()); minute = Integer.parseInt(timeStringTokenizer.nextToken()); second = Integer.parseInt(timeStringTokenizer.nextToken()); } catch (NumberFormatException exception) { throw ConversionException.incorrectTimeFormat(timeString); } return timeFromHourMinuteSecond(hour, minute, second); } /** * Answer a Time from a Timestamp * Usus the Hours, Minutes, Seconds instead of getTime() ms value. */ public static java.sql.Time timeFromTimestamp(java.sql.Timestamp timestamp) { return timeFromDate(timestamp); } /** * Answer a sql.Time from a Calendar. 
*/ public static java.sql.Time timeFromCalendar(Calendar calendar) { if (!defaultTimeZone.equals(calendar.getTimeZone())) { // Must convert the calendar to the local timezone if different, as dates have no timezone (always local). Calendar localCalendar = allocateCalendar(); localCalendar.setTimeInMillis(calendar.getTimeInMillis()); java.sql.Time date = timeFromHourMinuteSecond(localCalendar.get(Calendar.HOUR_OF_DAY), localCalendar.get(Calendar.MINUTE), localCalendar.get(Calendar.SECOND)); releaseCalendar(localCalendar); return date; } return timeFromHourMinuteSecond(calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), calendar.get(Calendar.SECOND)); } /** * Answer a Timestamp from a Calendar. */ public static java.sql.Timestamp timestampFromCalendar(Calendar calendar) { return timestampFromLong(calendar.getTimeInMillis()); } /** * Answer a Timestamp from a java.util.Date. */ public static java.sql.Timestamp timestampFromDate(java.util.Date date) { return timestampFromLong(date.getTime()); } /** * Answer a Time from a long * * @param longObject - milliseconds from the epoch (00:00:00 GMT * Jan 1, 1970). Negative values represent dates prior to the epoch. */ public static java.sql.Timestamp timestampFromLong(Long millis) { return timestampFromLong(millis.longValue()); } /** * Answer a Time from a long * * @param longObject - milliseconds from the epoch (00:00:00 GMT * Jan 1, 1970). Negative values represent dates prior to the epoch. */ public static java.sql.Timestamp timestampFromLong(long millis) { java.sql.Timestamp timestamp = new java.sql.Timestamp(millis); // P2.0.1.3: Didn't account for negative millis < 1970 // Must account for the jdk millis bug where it does not set the nanos. if ((millis % 1000) > 0) { timestamp.setNanos((int)(millis % 1000) * 1000000); } else if ((millis % 1000) < 0) { timestamp.setNanos((int)(1000000000 - (Math.abs((millis % 1000) * 1000000)))); } return timestamp; } /** * Answer a Timestamp from a string representation. 
* This method will accept strings in the following
     * formats: YYYY/MM/DD HH:MM:SS, YY/MM/DD HH:MM:SS, YYYY-MM-DD HH:MM:SS, YY-MM-DD HH:MM:SS
     *
     * @param timestampString - string representation of timestamp
     * @return - timestamp representation of string
     * @throws ConversionException if no recognized separator is present or a field is not numeric
     */
    @SuppressWarnings("deprecation")
    public static java.sql.Timestamp timestampFromString(String timestampString) throws ConversionException {
        // Require at least one of the supported separators before tokenizing.
        if ((timestampString.indexOf('-') == -1) && (timestampString.indexOf('/') == -1) && (timestampString.indexOf('.') == -1) && (timestampString.indexOf(':') == -1)) {
            throw ConversionException.incorrectTimestampFormat(timestampString);
        }
        StringTokenizer timestampStringTokenizer = new StringTokenizer(timestampString, " /:.-");

        int year;
        int month;
        int day;
        int hour;
        int minute;
        int second;
        int nanos;
        try {
            year = Integer.parseInt(timestampStringTokenizer.nextToken());
            month = Integer.parseInt(timestampStringTokenizer.nextToken());
            day = Integer.parseInt(timestampStringTokenizer.nextToken());
            try {
                hour = Integer.parseInt(timestampStringTokenizer.nextToken());
                minute = Integer.parseInt(timestampStringTokenizer.nextToken());
                second = Integer.parseInt(timestampStringTokenizer.nextToken());
            } catch (java.util.NoSuchElementException endOfStringException) {
                // May be only a date string desired to be used as a timestamp.
                hour = 0;
                minute = 0;
                second = 0;
            }
        } catch (NumberFormatException exception) {
            throw ConversionException.incorrectTimestampFormat(timestampString);
        }

        try {
            String nanoToken = timestampStringTokenizer.nextToken();
            nanos = Integer.parseInt(nanoToken);
            // Right-pad the fractional token to nanosecond precision:
            // e.g. ".123" is 123 milliseconds, i.e. 123000000 nanos.
            for (int times = 0; times < (9 - nanoToken.length()); times++) {
                nanos = nanos * 10;
            }
        } catch (java.util.NoSuchElementException endOfStringException) {
            // No fractional-seconds token present.
            nanos = 0;
        } catch (NumberFormatException exception) {
            throw ConversionException.incorrectTimestampFormat(timestampString);
        }

        // Java dates are based on year after 1900 so I need to delete it.
        year = year - 1900;
        // Java returns the month in terms of 0 - 11 instead of 1 - 12.
        month = month - 1;
        java.sql.Timestamp timestamp;
        // TODO: This was not converted to use Calendar for the conversion because calendars do not take nanos.
        // but it should be, and then just call setNanos.
        timestamp = new java.sql.Timestamp(year, month, day, hour, minute, second, nanos);
        return timestamp;
    }

    /**
     * Answer a Timestamp with the year, month, day, hour, minute, second.
     * The hour, minute, second are the values calendar uses,
     * i.e. year is from 0, month is 0-11, date is 1-31, time is 0-23/59.
     */
    @SuppressWarnings("deprecation")
    public static java.sql.Timestamp timestampFromYearMonthDateHourMinuteSecondNanos(int year, int month, int date, int hour, int minute, int second, int nanos) {
        // This was not converted to use Calendar for the conversion because calendars do not take nanos.
        // but it should be, and then just call setNanos.
        return new java.sql.Timestamp(year - 1900, month, date, hour, minute, second, nanos);
    }

    /**
     * Can be used to mark code as need if something strange is seen.
     */
    public static void toDo(String description) {
        // Use sender to find what is needy.
    }

    /**
     * Convert dotted format class name to slashed format class name
     * (e.g. "java.lang.String" to "java/lang/String"); null-safe.
     * @param dottedClassName class name in dotted format, may be null
     * @return slashed class name, or the input unchanged when it contains no dots or is null
     */
    public static String toSlashedClassName(String dottedClassName){
        if(dottedClassName==null){
            return null;
        }else if(dottedClassName.indexOf('.')>=0){
            return dottedClassName.replace('.', '/');
        }else{
            return dottedClassName;
        }
    }

    /**
     * If the size of the original string is larger than the passed in size,
     * this method will remove the vowels from the original string.
     *
     * The removal starts backward from the end of original string, and stops if the
     * resulting string size is equal to the passed in size.
     *
     * If the resulting string is still larger than the passed in size after
     * removing all vowels, the end of the resulting string will be truncated.
*/ public static String truncate(String originalString, int size) { if (originalString.length() <= size) { //no removal and truncation needed return originalString; } String vowels = "AaEeIiOoUu"; StringBuffer newStringBufferTmp = new StringBuffer(originalString.length()); //need to remove the extra characters int counter = originalString.length() - size; for (int index = (originalString.length() - 1); index >= 0; index--) { //search from the back to the front, if vowel found, do not append it to the resulting (temp) string! //i.e. if vowel not found, append the chararcter to the new string buffer. if (vowels.indexOf(originalString.charAt(index)) == -1) { newStringBufferTmp.append(originalString.charAt(index)); } else { //vowel found! do NOT append it to the temp buffer, and decrease the counter counter--; if (counter == 0) { //if the exceeded characters (counter) of vowel haven been removed, the total //string size should be equal to the limits, so append the reversed remaining string //to the new string, break the loop and return the shrunk string. StringBuffer newStringBuffer = new StringBuffer(size); newStringBuffer.append(originalString.substring(0, index)); //need to reverse the string //bug fix: 3016423. append(BunfferString) is jdk1.4 version api. Use append(String) instead //in order to support jdk1.3. newStringBuffer.append(newStringBufferTmp.reverse().toString()); return newStringBuffer.toString(); } } } //the shrunk string still too long, revrese the order back and truncate it! return newStringBufferTmp.reverse().toString().substring(0, size); } /** * Answer a Date from a long * * This implementation is based on the java.sql.Date class, not java.util.Date. * @param longObject - milliseconds from the epoch (00:00:00 GMT * Jan 1, 1970). Negative values represent dates prior to the epoch. 
*/ public static java.util.Date utilDateFromLong(Long longObject) { return new java.util.Date(longObject.longValue()); } /** * Answer a java.util.Date from a sql.date * * @param sqlDate - sql.date representation of date * @return - java.util.Date representation of the sql.date */ public static java.util.Date utilDateFromSQLDate(java.sql.Date sqlDate) { return new java.util.Date(sqlDate.getTime()); } /** * Answer a java.util.Date from a sql.Time * * @param time - time representation of util date * @return - java.util.Date representation of the time */ public static java.util.Date utilDateFromTime(java.sql.Time time) { return new java.util.Date(time.getTime()); } /** * Answer a java.util.Date from a timestamp * * @param timestampObject - timestamp representation of date * @return - java.util.Date representation of timestampObject */ public static java.util.Date utilDateFromTimestamp(java.sql.Timestamp timestampObject) { // Bug 2719624 - Conditionally remove workaround for java bug which truncated // nanoseconds from timestamp.getTime(). We will now only recalculate the nanoseconds // When timestamp.getTime() results in nanoseconds == 0; long time = timestampObject.getTime(); boolean appendNanos = ((time % 1000) == 0); if (appendNanos) { return new java.util.Date(time + (timestampObject.getNanos() / 1000000)); } else { return new java.util.Date(time); } } /** * Convert the specified array into a vector. */ public static Vector vectorFromArray(Object[] array) { Vector result = new Vector(array.length); for (int i = 0; i < array.length; i++) { result.addElement(array[i]); } return result; } /** * Convert the byte array to a HEX string. * HEX allows for binary data to be printed. */ public static void writeHexString(byte[] bytes, Writer writer) throws IOException { writer.write(buildHexStringFromBytes(bytes)); } /** * Check if the value is 0 (int/long) for primitive ids. 
*/ public static boolean isEquivalentToNull(Object value) { return (!isZeroValidPrimaryKey && (((value.getClass() == ClassConstants.LONG) && (((Long)value).longValue() == 0L)) || ((value.getClass() == ClassConstants.INTEGER) && (((Integer)value).intValue() == 0)))); } /** * Returns true if the passed value is Number that is negative or equals to zero. */ public static boolean isNumberNegativeOrZero(Object value) { return ((value.getClass() == ClassConstants.BIGDECIMAL) && (((BigDecimal)value).signum() <= 0)) || ((value.getClass() == ClassConstants.BIGINTEGER) && (((BigInteger)value).signum() <= 0)) || ((value instanceof Number) && (((Number)value).longValue() <= 0)); } /** * Return an integer representing the number of occurrences (using equals()) of the * specified object in the specified list. * If the list is null or empty (or both the object and the list is null), 0 is returned. */ public static int countOccurrencesOf(Object comparisonObject, List list) { int instances = 0; boolean comparisonObjectIsNull = comparisonObject == null; if (list != null) { for (int i = 0; i < list.size(); i++) { Object listObject = list.get(i); if ((comparisonObjectIsNull & listObject == null) || (!comparisonObjectIsNull && comparisonObject.equals(listObject))) { instances++; } } } return instances; } /** * Convert the URL into a URI allowing for special chars. */ public static URI toURI(java.net.URL url) throws URISyntaxException { try { // Attempt to use url.toURI since it will deal with all urls // without special characters and URISyntaxException allows us // to catch issues with special characters. 
This will handle // URLs that already have special characters replaced such as // URLS derived from searches for persistence.xml on the Java // System class loader return url.toURI(); } catch (URISyntaxException exception) { // Use multi-argument constructor for URI since single-argument // constructor and URL.toURI() do not deal with special // characters in path return new URI(url.getProtocol(), url.getUserInfo(), url.getHost(), url.getPort(), url.getPath(), url.getQuery(), null); } } /** * Return the get method name weaved for a value-holder attribute. */ public static String getWeavedValueHolderGetMethodName(String attributeName) { return PERSISTENCE_GET + attributeName + "_vh"; } /** * Return the set method name weaved for a value-holder attribute. */ public static String getWeavedValueHolderSetMethodName(String attributeName) { return PERSISTENCE_SET + attributeName + "_vh"; } /** * Return the set method name weaved for getting attribute value. * This method is always weaved in field access case. * In property access case the method weaved only if attribute name is the same as property name: * for instance, the method weaved for "manager" attribute that uses "getManager" / "setManager" access methods, * but not for "m_address" attribute that uses "getAddress" / "setAddress" access methods. */ public static String getWeavedGetMethodName(String attributeName) { return PERSISTENCE_GET + attributeName; } /** * Return the set method name weaved for setting attribute value. * This method is always weaved in field access case. * In property access case the method weaved only if attribute name is the same as property name: * for instance, the method weaved for "manager" attribute that uses "getManager" / "setManager" access methods, * but not for "m_address" attribute that uses "getAddress" / "setAddress" access methods. 
*/ public static String getWeavedSetMethodName(String attributeName) { return PERSISTENCE_SET + attributeName; } /** * Close a closeable object, eating the exception */ public static void close(Closeable c) { try { if (c != null) { c.close(); } } catch (IOException exception) { } } /** * INTERNAL: * Method to convert a getXyz or isXyz method name to an xyz attribute name. * NOTE: The method name passed it may not actually be a method name, so * by default return the name passed in. */ public static String getAttributeNameFromMethodName(String methodName) { String restOfName = methodName; // We're looking at method named 'get' or 'set', therefore, // there is no attribute name, set it to "" string for now. if (methodName.equals(GET_PROPERTY_METHOD_PREFIX) || methodName.equals(IS_PROPERTY_METHOD_PREFIX)) { return ""; } else if (methodName.startsWith(GET_PROPERTY_METHOD_PREFIX)) { restOfName = methodName.substring(POSITION_AFTER_GET_PREFIX); } else if (methodName.startsWith(IS_PROPERTY_METHOD_PREFIX)){ restOfName = methodName.substring(POSITION_AFTER_IS_PREFIX); } //added for bug 234222 - property name generation differs from Introspector.decapitalize return java.beans.Introspector.decapitalize(restOfName); } public static String getDefaultStartDatabaseDelimiter(){ if (defaultStartDatabaseDelimiter == null){ defaultStartDatabaseDelimiter = DEFAULT_DATABASE_DELIMITER; } return defaultStartDatabaseDelimiter; } public static String getDefaultEndDatabaseDelimiter(){ if (defaultEndDatabaseDelimiter == null){ defaultEndDatabaseDelimiter = DEFAULT_DATABASE_DELIMITER; } return defaultEndDatabaseDelimiter; } public static void setDefaultStartDatabaseDelimiter(String delimiter){ defaultStartDatabaseDelimiter = delimiter; } public static void setDefaultEndDatabaseDelimiter(String delimiter){ defaultEndDatabaseDelimiter = delimiter; } /** * Convert the SQL like pattern to a regex pattern. 
*/
    public static String convertLikeToRegex(String like) {
        // Bug 3936427 - Replace regular expression reserved characters with escaped version of those characters
        // For instance replace ? with \?
        String pattern = like.replaceAll("\\?", "\\\\?");
        pattern = pattern.replaceAll("\\*", "\\\\*");
        pattern = pattern.replaceAll("\\.", "\\\\.");
        pattern = pattern.replaceAll("\\[", "\\\\[");
        pattern = pattern.replaceAll("\\)", "\\\\)");
        pattern = pattern.replaceAll("\\(", "\\\\(");
        pattern = pattern.replaceAll("\\{", "\\\\{");
        pattern = pattern.replaceAll("\\+", "\\\\+");
        pattern = pattern.replaceAll("\\^", "\\\\^");
        pattern = pattern.replaceAll("\\|", "\\\\|");
        // regular expressions to substitute SQL wildcards with regex wildcards
        // Use look behind operators to replace "%" which is not preceded by "\" with ".*"
        // NOTE(review): the next line is corrupted extraction residue — everything between
        // the opening of the look-behind pattern ("(?...) and "fields)" was lost when the
        // archive was mangled (it also swallowed the end of this method and the start of a
        // following method that iterates DatabaseFields). Cannot reconstruct without the
        // pristine source; left byte-identical. TODO restore from upstream EclipseLink 2.5.1.
        pattern = pattern.replaceAll("(? fields) { for (DatabaseField field : fields) { if (isLob(field)) { return true; } } return false; } }
eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/DBPlatformHelper.java0000664000000000000000000002154112216173126025503 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
*
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.internal.helper;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;
import java.io.BufferedReader;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.security.AccessController;
import java.security.PrivilegedAction;

import org.eclipse.persistence.logging.SessionLog;

/**
 * @author Mitesh Meswani
 * This class is responsible to translate given database name to a DatabasePlatform.
 */
public class DBPlatformHelper {
    // Fallback platform class name used when no vendor mapping matches.
    private static final String DEFAULTPLATFORM = "org.eclipse.persistence.platform.database.DatabasePlatform"; // NOI18N

    private final static String PROPERTY_PATH = "org/eclipse/persistence/internal/helper/"; // NOI18N

    // Classpath resource holding "vendor-name regex = platform class" lines.
    private final static String VENDOR_NAME_TO_PLATFORM_RESOURCE_NAME = PROPERTY_PATH + "VendorNameToPlatformMapping.properties"; //NOI18N

    /**
     * Holds mapping between possible vendor names to internal platforms defined above.
     * vendor names are treated as regular expressions.
     */
    // NOTE(review): elements are String[2] {regex, platform} pairs (see
    // matchVendorNameInProperties). The declared type looks like it lost a generic
    // parameter (presumably List<String[]>) in extraction — TODO confirm against upstream.
    private static List _nameToVendorPlatform = null;

    /** Get Database Platform from vendor name.
     * @param vendorName Input vendor name. Typically this is obtained by querying
     * DatabaseMetaData.
     * @param logger The logger.
     * @return Database platform that corresponds to vendorName.
     * If vendorName does not match any of predefined vendor names,
     * DEFAULTPLATFORM is returned.
*/ public static String getDBPlatform(String vendorName, SessionLog logger) { initializeNameToVendorPlatform(logger); String detectedDbPlatform = null; if(vendorName != null) { detectedDbPlatform = matchVendorNameInProperties(vendorName, _nameToVendorPlatform, logger); } if (logger.shouldLog(SessionLog.FINE) ) { logger.log(SessionLog.FINE, SessionLog.CONNECTION, "dbPlatformHelper_detectedVendorPlatform", detectedDbPlatform ); // NOI18N } if (detectedDbPlatform == null) { if(logger.shouldLog(SessionLog.INFO)) { logger.log(SessionLog.INFO, SessionLog.CONNECTION, "dbPlatformHelper_defaultingPlatform", vendorName, DEFAULTPLATFORM); // NOI18N } detectedDbPlatform = DEFAULTPLATFORM; } return detectedDbPlatform; } /** * Allocate and initialize nameToVendorPlatform if not already done. */ private static List initializeNameToVendorPlatform(SessionLog logger) { synchronized(DBPlatformHelper.class) { if(_nameToVendorPlatform == null) { _nameToVendorPlatform = new ArrayList(); try { loadFromResource(_nameToVendorPlatform, VENDOR_NAME_TO_PLATFORM_RESOURCE_NAME, DBPlatformHelper.class.getClassLoader() ); } catch (IOException e) { logger.log(SessionLog.WARNING, SessionLog.CONNECTION, "dbPlatformHelper_noMappingFound", VENDOR_NAME_TO_PLATFORM_RESOURCE_NAME); } } } return _nameToVendorPlatform; } /** * Match vendorName in properties specified by _nameToVendorPlatform. */ private static String matchVendorNameInProperties(String vendorName, List nameToVendorPlatform, SessionLog logger) { String dbPlatform = null; //Iterate over all properties till we find match. 
for( Iterator iterator = nameToVendorPlatform.iterator(); dbPlatform == null && iterator.hasNext();) { String[] entry = iterator.next(); String regExpr = entry[0]; String value = entry[1]; if(logger.shouldLog(SessionLog.FINEST)) { logger.log(SessionLog.FINEST, "dbPlatformHelper_regExprDbPlatform", regExpr, value); // NOI18N } if( matchPattern(regExpr, vendorName, logger) ) { dbPlatform = value; } } return dbPlatform; } /** Matches target to pattern specified regExp. Returns false if there is * any error compiling regExp. * @param regExp The regular expression. * @param target The target against which we are trying to match regExp. * @param logger * @return false if there is error compiling regExp or target does not * match regExp. true if regExp matches pattern. */ private static boolean matchPattern(String regExp, String target, SessionLog logger) { boolean matches = false; try { matches = Pattern.matches(regExp,target); } catch (PatternSyntaxException e){ if(logger.shouldLog(SessionLog.FINE)) { logger.log(SessionLog.FINE, SessionLog.CONNECTION, "dbPlatformHelper_patternSyntaxException", e); // NOI18N } } return matches; } //-----Property Loading helper methods ----/ private static void loadFromResource(List properties, String resourceName, ClassLoader classLoader) throws IOException { load(properties, resourceName, classLoader); } /** * Loads properties list from the specified resource * into specified Properties object. * @param properties Properties object to load * @param resourceName Name of resource. * If loadFromFile is true, this is fully qualified path name to a file. * param classLoader is ignored. * If loadFromFile is false,this is resource name. * @param classLoader The class loader that should be used to load the resource. If null,primordial * class loader is used. 
*/
    private static void load(List properties, final String resourceName, final ClassLoader classLoader) throws IOException {
        // Read the mapping resource line by line; each valid "key=value" line becomes a String[2] entry.
        BufferedReader bin = new BufferedReader(new InputStreamReader(openResourceInputStream(resourceName, classLoader)));

        try {
            for (String line = bin.readLine(); line != null; line = bin.readLine()) {
                String[] keyValue = validateLineForReturnAsKeyValueArray(line);
                if (keyValue != null) {
                    properties.add(keyValue);
                }
            }
        } finally {
            try {
                bin.close();
            } catch (Exception e) {
                // no action
            }
        }
    }

    /**
     * Open resourceName as input stream inside doPriviledged block
     * (falls back to the system class loader when classLoader is null).
     */
    private static InputStream openResourceInputStream(final String resourceName, final ClassLoader classLoader) {
        return (InputStream) AccessController.doPrivileged(
            new PrivilegedAction() {
                public Object run() {
                    if (classLoader != null) {
                        return classLoader.getResourceAsStream(resourceName);
                    } else {
                        return ClassLoader.getSystemResourceAsStream(resourceName);
                    }
                }
            }
        );
    }

    /**
     * Parse one properties line into a {key, value} pair.
     * Returns null for blank lines, comment lines (leading '#'),
     * and lines without an '=' separator.
     */
    private static String[] validateLineForReturnAsKeyValueArray(String line) {
        if (line == null || line.length() == 0) {
            return null;
        }
        // trim leading AND trailing space
        line = line.trim();
        // check for comment and empty line
        if (line.length() == 0 || line.startsWith("#")) {
            return null;
        }
        // check line contains valid properties '=' separator
        int indexOfEquals = line.indexOf('=');
        if (indexOfEquals == -1) {
            return null;
        }
        String key = line.substring(0, indexOfEquals);
        String value = line.substring(indexOfEquals + 1, line.length());
        return new String[] { key, value };
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/NonSynchronizedVector.java0000664000000000000000000003172212216173126026670 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v.
1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.internal.helper;

import java.util.*;

/**
 * Vector subclass that removes the synchronization.
 * Reimplements the Vector operations directly against the inherited
 * protected elementData/elementCount fields, without synchronized blocks.
 * NOT thread-safe; intended for single-threaded internal use.
 */
public class NonSynchronizedVector extends Vector {
    /** Static factory mirroring the (capacity, increment) constructor. */
    public static NonSynchronizedVector newInstance(int initialCapacity, int capacityIncrement) {
        return new NonSynchronizedVector(initialCapacity, capacityIncrement);
    }

    /** Static factory mirroring the (capacity) constructor. */
    public static NonSynchronizedVector newInstance(int initialCapacity) {
        return new NonSynchronizedVector(initialCapacity);
    }

    /** Static factory mirroring the no-arg constructor. */
    public static NonSynchronizedVector newInstance() {
        return new NonSynchronizedVector();
    }

    /** Static factory mirroring the copy constructor. */
    public static NonSynchronizedVector newInstance(Collection c) {
        return new NonSynchronizedVector(c);
    }

    public NonSynchronizedVector(int initialCapacity, int capacityIncrement) {
        super(initialCapacity, capacityIncrement);
    }

    public NonSynchronizedVector(int initialCapacity) {
        super(initialCapacity);
    }

    public NonSynchronizedVector() {
        super();
    }

    public NonSynchronizedVector(Collection c) {
        super(c);
    }

    // Unsynchronized equivalent of Vector.copyInto.
    public void copyInto(Object anArray[]) {
        System.arraycopy(elementData, 0, anArray, 0, elementCount);
    }

    // Shrink the backing array to the current element count.
    public void trimToSize() {
        modCount++;
        int oldCapacity = elementData.length;
        if (elementCount < oldCapacity) {
            Object oldData[] = elementData;
            elementData = new Object[elementCount];
            System.arraycopy(oldData, 0, elementData, 0, elementCount);
        }
    }

    public void ensureCapacity(int minCapacity) {
        modCount++;
        ensureCapacityHelper(minCapacity);
    }

    // Grow the backing array (by capacityIncrement, or doubling) to hold at least minCapacity.
    private void ensureCapacityHelper(int minCapacity) {
        int oldCapacity = elementData.length;
        if (minCapacity > oldCapacity) {
            Object oldData[] = elementData;
            int newCapacity = (capacityIncrement >
0) ? (oldCapacity + capacityIncrement) : (oldCapacity * 2);
            if (newCapacity < minCapacity) {
                newCapacity = minCapacity;
            }
            elementData = new Object[newCapacity];
            System.arraycopy(oldData, 0, elementData, 0, elementCount);
        }
    }

    // Grow (zero-padding via ensureCapacity) or shrink (nulling dropped slots) to newSize.
    public void setSize(int newSize) {
        modCount++;
        if (newSize > elementCount) {
            ensureCapacityHelper(newSize);
        } else {
            // Null out the truncated tail so the GC can reclaim the elements.
            for (int i = newSize ; i < elementCount ; i++) {
                elementData[i] = null;
            }
        }
        elementCount = newSize;
    }

    public int capacity() {
        return elementData.length;
    }

    public Object clone() {
        // Copy-constructor based clone (contents copied, capacityIncrement not preserved).
        return new NonSynchronizedVector(this);
    }

    public int size() {
        return elementCount;
    }

    public boolean isEmpty() {
        return elementCount == 0;
    }

    // Unsynchronized enumeration over the current elements.
    public Enumeration elements() {
        return new Enumeration() {
            int count = 0;

            public boolean hasMoreElements() {
                return count < elementCount;
            }

            public Object nextElement() {
                if (count < elementCount) {
                    return elementData[count++];
                }
                throw new NoSuchElementException("Vector Enumeration");
            }
        };
    }

    // First index >= index whose element equals elem (null-safe); -1 when absent.
    public int indexOf(Object elem, int index) {
        if (elem == null) {
            for (int i = index ; i < elementCount ; i++)
                if (elementData[i]==null)
                    return i;
        } else {
            for (int i = index ; i < elementCount ; i++)
                if (elem.equals(elementData[i]))
                    return i;
        }
        return -1;
    }

    public int lastIndexOf(Object elem) {
        return lastIndexOf(elem, elementCount-1);
    }

    // Last index <= index whose element equals elem (null-safe); -1 when absent.
    public int lastIndexOf(Object elem, int index) {
        if (index >= elementCount)
            throw new IndexOutOfBoundsException(index + " >= "+ elementCount);
        if (elem == null) {
            for (int i = index; i >= 0; i--)
                if (elementData[i]==null)
                    return i;
        } else {
            for (int i = index; i >= 0; i--)
                if (elem.equals(elementData[i]))
                    return i;
        }
        return -1;
    }

    public Object elementAt(int index) {
        if (index >= elementCount) {
            throw new ArrayIndexOutOfBoundsException(index + " >= " + elementCount);
        }
        return elementData[index];
    }

    public Object firstElement() {
        if (elementCount == 0) {
            throw new NoSuchElementException();
        }
        return elementData[0];
    }

    public Object lastElement() {
        if (elementCount == 0) {
            throw new
NoSuchElementException();
        }
        return elementData[elementCount - 1];
    }

    public void setElementAt(Object obj, int index) {
        if (index >= elementCount) {
            throw new ArrayIndexOutOfBoundsException(index + " >= " + elementCount);
        }
        elementData[index] = obj;
    }

    // Remove and shift the tail left by one; clears the freed slot for the GC.
    public void removeElementAt(int index) {
        modCount++;
        if (index >= elementCount) {
            throw new ArrayIndexOutOfBoundsException(index + " >= " + elementCount);
        } else if (index < 0) {
            throw new ArrayIndexOutOfBoundsException(index);
        }
        int j = elementCount - index - 1;
        if (j > 0) {
            System.arraycopy(elementData, index + 1, elementData, index, j);
        }
        elementCount--;
        elementData[elementCount] = null; /* to let gc do its work */
    }

    // Insert obj at index, shifting the tail right by one.
    public void insertElementAt(Object obj, int index) {
        modCount++;
        if (index > elementCount) {
            throw new ArrayIndexOutOfBoundsException(index + " > " + elementCount);
        }
        ensureCapacityHelper(elementCount + 1);
        System.arraycopy(elementData, index, elementData, index + 1, elementCount - index);
        elementData[index] = obj;
        elementCount++;
    }

    public void addElement(Object obj) {
        modCount++;
        ensureCapacityHelper(elementCount + 1);
        elementData[elementCount++] = obj;
    }

    // Remove the first occurrence of obj; returns whether anything was removed.
    public boolean removeElement(Object obj) {
        modCount++;
        int i = indexOf(obj);
        if (i >= 0) {
            removeElementAt(i);
            return true;
        }
        return false;
    }

    public void removeAllElements() {
        modCount++;
        // Let gc do its work
        for (int i = 0; i < elementCount; i++)
            elementData[i] = null;
        elementCount = 0;
    }

    public Object[] toArray() {
        Object[] result = new Object[elementCount];
        System.arraycopy(elementData, 0, result, 0, elementCount);
        return result;
    }

    // Standard Collection.toArray(T[]) contract: reuse a when large enough, null-terminate the surplus.
    public Object[] toArray(Object a[]) {
        if (a.length < elementCount)
            a = (Object[])java.lang.reflect.Array.newInstance(
                a.getClass().getComponentType(), elementCount);

        System.arraycopy(elementData, 0, a, 0, elementCount);

        if (a.length > elementCount)
            a[elementCount] = null;

        return a;
    }

    public Object get(int index) {
        if (index >= elementCount)
            throw new ArrayIndexOutOfBoundsException(index);

        return elementData[index];
    }
public Object set(int index, Object element) { if (index >= elementCount) throw new ArrayIndexOutOfBoundsException(index); Object oldValue = elementData[index]; elementData[index] = element; return oldValue; } public boolean add(Object o) { modCount++; ensureCapacityHelper(elementCount + 1); elementData[elementCount++] = o; return true; } public Object remove(int index) { modCount++; if (index >= elementCount) throw new ArrayIndexOutOfBoundsException(index); Object oldValue = elementData[index]; int numMoved = elementCount - index - 1; if (numMoved > 0) System.arraycopy(elementData, index+1, elementData, index, numMoved); elementData[--elementCount] = null; // Let gc do its work return oldValue; } public boolean containsAll(Collection c) { Iterator e = c.iterator(); while (e.hasNext()) if(!contains(e.next())) return false; return true; } public boolean addAll(Collection c) { modCount++; Object[] a = c.toArray(); int numNew = a.length; ensureCapacityHelper(elementCount + numNew); System.arraycopy(a, 0, elementData, elementCount, numNew); elementCount += numNew; return numNew != 0; } public boolean removeAll(Collection c) { boolean modified = false; Iterator e = iterator(); while (e.hasNext()) { if(c.contains(e.next())) { e.remove(); modified = true; } } return modified; } public boolean retainAll(Collection c) { boolean modified = false; Iterator e = iterator(); while (e.hasNext()) { if(!c.contains(e.next())) { e.remove(); modified = true; } } return modified; } public boolean addAll(int index, Collection c) { modCount++; if (index < 0 || index > elementCount) throw new ArrayIndexOutOfBoundsException(index); Object[] a = c.toArray(); int numNew = a.length; ensureCapacityHelper(elementCount + numNew); int numMoved = elementCount - index; if (numMoved > 0) System.arraycopy(elementData, index, elementData, index + numNew, numMoved); System.arraycopy(a, 0, elementData, index, numNew); elementCount += numNew; return numNew != 0; } public boolean equals(Object o) { if (o 
== this) return true; if (!(o instanceof List)) return false; List list = (List) o; int size = size(); if (list.size() != size) { return false; } for (int index = 0; index < size; index++) { Object left = get(index); Object right = list.get(index); if ((left != right) && ((left == null) || (right == null) || (!left.equals(right)))) { return false; } } return true; } public int hashCode() { int hashCode = 1; int size = size(); for (int index = 0; index < size; index++) { Object obj = get(index); hashCode = 31*hashCode + (obj==null ? 0 : obj.hashCode()); } return hashCode; } public String toString() { StringBuffer buf = new StringBuffer(); Iterator e = iterator(); buf.append("["); int maxIndex = size() - 1; for (int i = 0; i <= maxIndex; i++) { buf.append(String.valueOf(e.next())); if (i < maxIndex) buf.append(", "); } buf.append("]"); return buf.toString(); } protected void removeRange(int fromIndex, int toIndex) { modCount++; int numMoved = elementCount - toIndex; System.arraycopy(elementData, toIndex, elementData, fromIndex, numMoved); // Let gc do its work int newElementCount = elementCount - (toIndex-fromIndex); while (elementCount != newElementCount) elementData[--elementCount] = null; } private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { s.defaultWriteObject(); } public List subList(int fromIndex, int toIndex) { return new NonSynchronizedSubVector(this, fromIndex, toIndex); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/SimpleDatabaseType.java0000664000000000000000000000204712216173126026071 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** * PUBLIC: Marker interface for Simple Database types * (e.g. JDBC types, PL/SQL BOOLEAN, etc.) * * @author Mike Norman - michael.norman@oracle.com * @since Oracle TopLink 11.x.x */ public interface SimpleDatabaseType extends DatabaseType { } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/DescriptorCompare.java0000664000000000000000000000235612216173126026001 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.Comparator; import org.eclipse.persistence.descriptors.ClassDescriptor; /** * INTERNAL: * Use to sort vectors of strings. * Avoid using this class as sun.misc is not part of many VM's like Netscapes. 
*/ public class DescriptorCompare implements Comparator { public int compare(Object arg1, Object arg2) { return ((ClassDescriptor)arg1).getJavaClassName().compareTo(((ClassDescriptor)arg2).getJavaClassName()); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/WriteLockManager.java0000664000000000000000000006763212216173126025562 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * 02/11/2009-1.1 Michael O'Brien * - 259993: As part 2) During mergeClonesAfterCompletion() * If the the acquire and release threads are different * switch back to the stored acquire thread stored on the mergeManager. 
* tware, David Mulligan - fix performance issue with releasing locks ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.descriptors.FetchGroupManager; import org.eclipse.persistence.exceptions.ConcurrencyException; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.internal.sessions.*; import org.eclipse.persistence.internal.identitymaps.*; import org.eclipse.persistence.internal.localization.TraceLocalization; import org.eclipse.persistence.internal.helper.linkedlist.*; import org.eclipse.persistence.logging.SessionLog; /** * INTERNAL: *

* Purpose: Acquires all required locks for a particular merge process. * Implements a deadlock avoidance algorithm to prevent concurrent merge conflicts. * *

* Responsibilities: *

    *
  • Acquires locks for writing threads. *
  • Provides deadlock avoidance behavior. *
  • Releases locks for writing threads. *
* @author Gordon Yorke * @since 10.0.3 */ public class WriteLockManager { // this will allow us to prevent a readlock thread from looping forever. public static int MAXTRIES = 10000; public static int MAX_WAIT = 600000; //10 mins /* This attribute stores the list of threads that have had a problem acquiring locks */ /* the first element in this list will be the prevailing thread */ protected ExposedNodeLinkedList prevailingQueue; public WriteLockManager() { this.prevailingQueue = new ExposedNodeLinkedList(); } /** * INTERNAL: * This method will return once the object is locked and all non-indirect * related objects are also locked. */ public Map acquireLocksForClone(Object objectForClone, ClassDescriptor descriptor, CacheKey cacheKey, AbstractSession cloningSession) { boolean successful = false; IdentityHashMap lockedObjects = new IdentityHashMap(); IdentityHashMap refreshedObjects = new IdentityHashMap(); try { // if the descriptor has indirection for all mappings then wait as there will be no deadlock risks CacheKey toWaitOn = acquireLockAndRelatedLocks(objectForClone, lockedObjects, refreshedObjects, cacheKey, descriptor, cloningSession); int tries = 0; while (toWaitOn != null) {// loop until we've tried too many times. for (Iterator lockedList = lockedObjects.values().iterator(); lockedList.hasNext();) { ((CacheKey)lockedList.next()).releaseReadLock(); lockedList.remove(); } synchronized (toWaitOn) { try { if (toWaitOn.isAcquired()) {//last minute check to insure it is still locked. toWaitOn.wait();// wait for lock on object to be released } } catch (InterruptedException ex) { // Ignore exception thread should continue. } } Object waitObject = toWaitOn.getObject(); // Object may be null for loss of identity. 
if (waitObject != null) { cloningSession.checkAndRefreshInvalidObject(waitObject, toWaitOn, cloningSession.getDescriptor(waitObject)); refreshedObjects.put(waitObject, waitObject); } toWaitOn = acquireLockAndRelatedLocks(objectForClone, lockedObjects, refreshedObjects, cacheKey, descriptor, cloningSession); if ((toWaitOn != null) && ((++tries) > MAXTRIES)) { // If we've tried too many times abort. throw ConcurrencyException.maxTriesLockOnCloneExceded(objectForClone); } } successful = true;//successfully acquired all locks } finally { if (!successful) {//did not acquire locks but we are exiting for (Iterator lockedList = lockedObjects.values().iterator(); lockedList.hasNext();) { ((CacheKey)lockedList.next()).releaseReadLock(); lockedList.remove(); } } } return lockedObjects; } /** * INTERNAL: * This is a recursive method used to acquire read locks on all objects that * will be cloned. These include all related objects for which there is no * indirection. * The returned object is the first object that the lock could not be acquired for. * The caller must try for exceptions and release locked objects in the case * of an exception. */ public CacheKey acquireLockAndRelatedLocks(Object objectForClone, Map lockedObjects, Map refreshedObjects, CacheKey cacheKey, ClassDescriptor descriptor, AbstractSession cloningSession) { if (!refreshedObjects.containsKey(objectForClone) && cloningSession.isConsideredInvalid(objectForClone, cacheKey, descriptor)) { return cacheKey; } // Attempt to get a read-lock, null is returned if cannot be read-locked. if (cacheKey.acquireReadLockNoWait()) { if (cacheKey.getObject() == null) { // This will be the case for deleted objects, NoIdentityMap, and aggregates. lockedObjects.put(objectForClone, cacheKey); } else { objectForClone = cacheKey.getObject(); if (lockedObjects.containsKey(objectForClone)) { // This is a check for loss of identity, the original check in // checkAndLockObject() will shortcircuit in the usual case. 
cacheKey.releaseReadLock(); return null; } // Store locked cachekey for release later. lockedObjects.put(objectForClone, cacheKey); } return traverseRelatedLocks(objectForClone, lockedObjects, refreshedObjects, descriptor, cloningSession); } else { // Return the cache key that could not be locked. return cacheKey; } } /** * INTERNAL: * This method will transition the previously acquired active * locks to deferred locks in the case a readlock could not be acquired for * a related object. Deferred locks must be employed to prevent deadlock * when waiting for the readlock while still protecting readers from * incomplete data. */ public void transitionToDeferredLocks(MergeManager mergeManager){ try{ if (mergeManager.isTransitionedToDeferredLocks()) return; for (CacheKey cacheKey : mergeManager.getAcquiredLocks()){ cacheKey.transitionToDeferredLock(); } mergeManager.transitionToDeferredLocks(); }catch (RuntimeException ex){ for (CacheKey cacheKey : mergeManager.getAcquiredLocks()){ cacheKey.release(); } ConcurrencyManager.getDeferredLockManager(Thread.currentThread()).setIsThreadComplete(true); ConcurrencyManager.removeDeferredLockManager(Thread.currentThread()); mergeManager.getAcquiredLocks().clear(); throw ex; } } /** * INTERNAL: * Traverse the object and acquire locks on all related objects. */ public CacheKey traverseRelatedLocks(Object objectForClone, Map lockedObjects, Map refreshedObjects, ClassDescriptor descriptor, AbstractSession cloningSession) { // If all mappings have indirection short-circuit. if (descriptor.shouldAcquireCascadedLocks()) { FetchGroupManager fetchGroupManager = descriptor.getFetchGroupManager(); boolean isPartialObject = (fetchGroupManager != null) && fetchGroupManager.isPartialObject(objectForClone); for (Iterator mappings = descriptor.getLockableMappings().iterator(); mappings.hasNext();) { DatabaseMapping mapping = (DatabaseMapping)mappings.next(); // Only cascade fetched mappings. 
if (!isPartialObject || (fetchGroupManager.isAttributeFetched(objectForClone, mapping.getAttributeName()))) { // any mapping in this list must not have indirection. Object objectToLock = mapping.getAttributeValueFromObject(objectForClone); if (mapping.isCollectionMapping()) { // Ignore null, means empty. if (objectToLock != null) { ContainerPolicy cp = mapping.getContainerPolicy(); Object iterator = cp.iteratorFor(objectToLock); while (cp.hasNext(iterator)) { Object object = cp.next(iterator, cloningSession); if (mapping.getReferenceDescriptor().hasWrapperPolicy()) { object = mapping.getReferenceDescriptor().getWrapperPolicy().unwrapObject(object, cloningSession); } CacheKey toWaitOn = checkAndLockObject(object, lockedObjects, refreshedObjects, mapping, cloningSession); if (toWaitOn != null) { return toWaitOn; } } } } else { if (mapping.getReferenceDescriptor().hasWrapperPolicy()) { objectToLock = mapping.getReferenceDescriptor().getWrapperPolicy().unwrapObject(objectToLock, cloningSession); } CacheKey toWaitOn = checkAndLockObject(objectToLock, lockedObjects, refreshedObjects, mapping, cloningSession); if (toWaitOn != null) { return toWaitOn; } } } } } return null; } /** * INTERNAL: * This method will be the entry point for threads attempting to acquire locks for all objects that have * a changeset. This method will hand off the processing of the deadlock algorithm to other member * methods. The mergeManager must be the active mergemanager for the calling thread. * Returns true if all required locks were acquired */ public void acquireRequiredLocks(MergeManager mergeManager, UnitOfWorkChangeSet changeSet) { if (!MergeManager.LOCK_ON_MERGE) {//lockOnMerge is a backdoor and not public return; } boolean locksToAcquire = true; //while that thread has locks to acquire continue to loop. 
try { // initialize the MergeManager during this commit or merge for insert/updates only // this call is not required in acquireLocksForClone() or acquireLockAndRelatedLocks() mergeManager.setLockThread(Thread.currentThread()); AbstractSession session = mergeManager.getSession(); // If the session in the mergemanager is not a unit of work then the // merge is of a changeSet into a distributed session. if (session.isUnitOfWork()) { session = ((UnitOfWorkImpl)session).getParent(); } while (locksToAcquire) { //lets assume all locks will be acquired locksToAcquire = false; //first access the changeSet and begin to acquire locks ClassDescriptor descriptor = null; for (ObjectChangeSet objectChangeSet : changeSet.getAllChangeSets().values()) { // No Need to acquire locks for invalidated objects. if ((mergeManager.shouldMergeChangesIntoDistributedCache() && (objectChangeSet.getSynchronizationType() == ClassDescriptor.INVALIDATE_CHANGED_OBJECTS)) || objectChangeSet.getId() == null) { //skip this process as we will be unable to acquire the correct cachekey anyway //this is a new object with identity after write sequencing, ? huh, all objects must have an id by merge? continue; } descriptor = objectChangeSet.getDescriptor(); // Maybe null for distributed merge, initialize it. if (descriptor == null) { descriptor = session.getDescriptor(objectChangeSet.getClassType(session)); objectChangeSet.setDescriptor(descriptor); } // PERF: Do not merge nor lock into the session cache if descriptor set to unit of work isolated. 
if (descriptor.getCachePolicy().shouldIsolateObjectsInUnitOfWork()) { continue; } AbstractSession targetSession = session.getParentIdentityMapSession(descriptor, true, true); CacheKey activeCacheKey = attemptToAcquireLock(descriptor, objectChangeSet.getId(), targetSession); if (activeCacheKey == null) { // if cacheKey is null then the lock was not available no need to synchronize this block,because if the // check fails then this thread will just return to the queue until it gets woken up. if (this.prevailingQueue.getFirst() == mergeManager) { // wait on this object until it is free, or until wait time expires because // this thread is the prevailing thread activeCacheKey = waitOnObjectLock(descriptor, objectChangeSet.getId(), targetSession, (int)Math.round((Math.random()*500))); } if (activeCacheKey == null) { // failed to acquire lock, release all acquired // locks and place thread on waiting list releaseAllAcquiredLocks(mergeManager); // get cacheKey activeCacheKey = targetSession.getIdentityMapAccessorInstance().getCacheKeyForObjectForLock(objectChangeSet.getId(), descriptor.getJavaClass(), descriptor); if (session.shouldLog(SessionLog.FINER, SessionLog.CACHE)) { Object[] params = new Object[3]; params[0] = descriptor.getJavaClass(); params[1] = objectChangeSet.getId(); params[2] = Thread.currentThread().getName(); session.log(SessionLog.FINER, SessionLog.CACHE, "dead_lock_encountered_on_write_no_cachekey", params, null); } if (mergeManager.getWriteLockQueued() == null) { // thread is entering the wait queue for the // first time // set the QueueNode to be the node from the // linked list for quick removal upon // acquiring all locks synchronized (this.prevailingQueue) { mergeManager.setQueueNode(this.prevailingQueue.addLast(mergeManager)); } } // set the cache key on the merge manager for // the object that could not be acquired mergeManager.setWriteLockQueued(objectChangeSet.getId()); try { if (activeCacheKey != null){ //wait on the lock of the object that we 
couldn't get. synchronized (activeCacheKey) { // verify that the cache key is still locked before we wait on it, as //it may have been released since we tried to acquire it. if (activeCacheKey.isAcquired() && (activeCacheKey.getActiveThread() != Thread.currentThread())) { Thread thread = activeCacheKey.getActiveThread(); if (thread.isAlive()){ long time = System.currentTimeMillis(); activeCacheKey.wait(MAX_WAIT); if (System.currentTimeMillis() - time >= MAX_WAIT){ Object[] params = new Object[]{MAX_WAIT /1000, descriptor.getJavaClassName(), activeCacheKey.getKey(), thread.getName()}; StringBuilder buffer = new StringBuilder(TraceLocalization.buildMessage("max_time_exceeded_for_acquirerequiredlocks_wait", params)); StackTraceElement[] trace = thread.getStackTrace(); for (StackTraceElement element : trace){ buffer.append("\t\tat"); buffer.append(element.toString()); buffer.append("\n"); } session.log(SessionLog.SEVERE, SessionLog.CACHE, buffer.toString()); session.getIdentityMapAccessor().printIdentityMapLocks(); } }else{ session.log(SessionLog.SEVERE, SessionLog.CACHE, "releasing_invalid_lock", new Object[] { thread.getName(),descriptor.getJavaClass(), objectChangeSet.getId()}); //thread that held lock is no longer alive. Something bad has happened like while (activeCacheKey.isAcquired()){ // could have a depth greater than one. activeCacheKey.release(); } } } } } } catch (InterruptedException exception) { throw org.eclipse.persistence.exceptions.ConcurrencyException.waitWasInterrupted(exception.getMessage()); } // failed to acquire, exit this loop to restart all over again. locksToAcquire = true; break; }else{ objectChangeSet.setActiveCacheKey(activeCacheKey); mergeManager.getAcquiredLocks().add(activeCacheKey); } } else { objectChangeSet.setActiveCacheKey(activeCacheKey); mergeManager.getAcquiredLocks().add(activeCacheKey); } } } } catch (RuntimeException exception) { // if there was an exception then release. 
//must not release in a finally block as release only occurs in this method // if there is a problem or all of the locks can not be acquired. releaseAllAcquiredLocks(mergeManager); throw exception; }catch (Error error){ releaseAllAcquiredLocks(mergeManager); mergeManager.getSession().logThrowable(SessionLog.SEVERE, SessionLog.TRANSACTION, error); throw error; }finally { if (mergeManager.getWriteLockQueued() != null) { //the merge manager entered the wait queue and must be cleaned up synchronized(this.prevailingQueue) { this.prevailingQueue.remove(mergeManager.getQueueNode()); } mergeManager.setWriteLockQueued(null); } } } /** * INTERNAL: * This method will be called by a merging thread that is attempting to lock * a new object that was not locked previously. Unlike the other methods * within this class this method will lock only this object. */ public CacheKey appendLock(Object primaryKey, Object objectToLock, ClassDescriptor descriptor, MergeManager mergeManager, AbstractSession session) { CacheKey lockedCacheKey = session.getIdentityMapAccessorInstance().acquireLockNoWait(primaryKey, descriptor.getJavaClass(), false, descriptor); if (lockedCacheKey == null) { session.getIdentityMapAccessorInstance().getWriteLockManager().transitionToDeferredLocks(mergeManager); lockedCacheKey = session.getIdentityMapAccessorInstance().acquireDeferredLock(primaryKey, descriptor.getJavaClass(), descriptor, true); Object cachedObject = lockedCacheKey.getObject(); if (cachedObject == null) { if (lockedCacheKey.getActiveThread() == Thread.currentThread()) { lockedCacheKey.setObject(objectToLock); } else { cachedObject = lockedCacheKey.waitForObject(); } } lockedCacheKey.releaseDeferredLock(); return lockedCacheKey; } else { if (lockedCacheKey.getObject() == null) { lockedCacheKey.setObject(objectToLock); // set the object in the // cachekey // for others to find an prevent cycles } if (mergeManager.isTransitionedToDeferredLocks()){ 
lockedCacheKey.getDeferredLockManager(Thread.currentThread()).getActiveLocks().add(lockedCacheKey); }else{ mergeManager.getAcquiredLocks().add(lockedCacheKey); } return lockedCacheKey; } } /** * INTERNAL: * This method performs the operations of finding the cacheKey and locking it if possible. * Returns True if the lock was acquired, false otherwise */ protected CacheKey attemptToAcquireLock(ClassDescriptor descriptor, Object primaryKey, AbstractSession session) { return session.getIdentityMapAccessorInstance().acquireLockNoWait(primaryKey, descriptor.getJavaClass(), true, descriptor); } /** * INTERNAL: * Simply check that the object is not already locked then pass it on to the locking method */ protected CacheKey checkAndLockObject(Object objectToLock, Map lockedObjects, Map refreshedObjects, DatabaseMapping mapping, AbstractSession cloningSession) { //the cachekey should always reference an object otherwise what would we be cloning. if ((objectToLock != null) && !lockedObjects.containsKey(objectToLock)) { Object primaryKeyToLock = null; ClassDescriptor referenceDescriptor = null; if (mapping.getReferenceDescriptor().hasInheritance() || mapping.getReferenceDescriptor().isDescriptorForInterface()) { referenceDescriptor = cloningSession.getDescriptor(objectToLock); } else { referenceDescriptor = mapping.getReferenceDescriptor(); } // Need to traverse aggregates, but not lock aggregates directly. if (referenceDescriptor.isDescriptorTypeAggregate()) { traverseRelatedLocks(objectToLock, lockedObjects, refreshedObjects, referenceDescriptor, cloningSession); } else { primaryKeyToLock = referenceDescriptor.getObjectBuilder().extractPrimaryKeyFromObject(objectToLock, cloningSession); CacheKey cacheKey = cloningSession.getIdentityMapAccessorInstance().getCacheKeyForObjectForLock(primaryKeyToLock, objectToLock.getClass(), referenceDescriptor); if (cacheKey == null) { // Cache key may be null for no-identity map, missing or deleted object, just create a new one to be locked. 
cacheKey = new CacheKey(primaryKeyToLock); cacheKey.setReadTime(System.currentTimeMillis()); } CacheKey toWaitOn = acquireLockAndRelatedLocks(objectToLock, lockedObjects, refreshedObjects, cacheKey, referenceDescriptor, cloningSession); if (toWaitOn != null) { return toWaitOn; } } } return null; } /** * INTERNAL: * This method will release all acquired locks */ public void releaseAllAcquiredLocks(MergeManager mergeManager) { if (!MergeManager.LOCK_ON_MERGE) {//lockOnMerge is a backdoor and not public return; } List acquiredLocks = mergeManager.getAcquiredLocks(); Iterator locks = acquiredLocks.iterator(); RuntimeException exception = null; while (locks.hasNext()) { try { CacheKey cacheKeyToRemove = (CacheKey) locks.next(); if (cacheKeyToRemove.getObject() == null) { cacheKeyToRemove.removeFromOwningMap(); } if (mergeManager.isTransitionedToDeferredLocks()) { cacheKeyToRemove.releaseDeferredLock(); } else { cacheKeyToRemove.release(); } } catch (RuntimeException e){ if (exception == null){ exception = e; } } } acquiredLocks.clear(); if (exception != null){ throw exception; } } /** * INTERNAL: * This method performs the operations of finding the cacheKey and locking it if possible. * Waits until the lock can be acquired */ protected CacheKey waitOnObjectLock(ClassDescriptor descriptor, Object primaryKey, AbstractSession session, int waitTime) { return session.getIdentityMapAccessorInstance().acquireLockWithWait(primaryKey, descriptor.getJavaClass(), true, descriptor, waitTime); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/JavaPlatform.java0000664000000000000000000001070512216173126024737 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.security.AccessController; import java.security.PrivilegedActionException; import java.sql.SQLException; import org.eclipse.persistence.Version; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass; import org.eclipse.persistence.internal.security.PrivilegedClassForName; /** * INTERNAL: * JavaPlatform abstracts the version of the JDK we are using. It allows any operation * which is dependent on JDK version to be called from a single place and then delegates * the call to its JDKPlatform * @see JDPlatform * @author Tom Ware */ public class JavaPlatform { protected static JDKPlatform platform = null; /** * INTERNAL: * Get the version of JDK being used from the Version class. * @return JDKPlatform a platform appropriate for the version of JDK being used. */ protected static JDKPlatform getPlatform() { if (platform == null) { if (Version.isJDK16()) { try { Class platformClass = null; // use class.forName() to avoid loading the JDK 1.6 class unless it is needed. 
if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { platformClass = (Class)AccessController.doPrivileged(new PrivilegedClassForName("org.eclipse.persistence.internal.helper.JDK16Platform")); } catch (PrivilegedActionException exception) { } } else { platformClass = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName("org.eclipse.persistence.internal.helper.JDK16Platform"); } if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { platform = (JDKPlatform)AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(platformClass)); } catch (PrivilegedActionException exception) { } } else { platform = (JDKPlatform)PrivilegedAccessHelper.newInstanceFromClass(platformClass); } } catch (Exception exception) { } } if (platform == null) { platform = new JDK15Platform(); } } return platform; } /** * INTERNAL: * Conform an expression which uses the operator "like" for an in-memory query * @return Boolean (TRUE, FALSE, null == unknown) */ public static Boolean conformLike(Object left, Object right) { return getPlatform().conformLike(left, right); } /** * INTERNAL: * Conform an expression which uses the operator "regexp" for an in-memory query * @return Boolean (TRUE, FALSE, null == unknown) */ public static Boolean conformRegexp(Object left, Object right) { return getPlatform().conformRegexp(left, right); } /** * INTERNAL: * Indicates whether the passed object implements java.sql.SQLXML introduced in jdk 1.6 */ public static boolean isSQLXML(Object object) { return getPlatform().isSQLXML(object); } /** * INTERNAL: * Casts the passed object to SQLXML and calls getString and free methods */ public static String getStringAndFreeSQLXML(Object sqlXml) throws SQLException { return getPlatform().getStringAndFreeSQLXML(sqlXml); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/ConcurrencyManager.java0000664000000000000000000005205612216173126026143 0ustar 
/******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.io.*; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import org.eclipse.persistence.config.SystemProperties; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.localization.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.logging.*; /** * INTERNAL: *

* Purpose: To maintain concurrency for a particular task. * It is a wrappers of a semaphore that allows recursive waits by a single thread. *

* Responsibilities: *

    *
  • Keep track of the active thread. *
  • Wait all other threads until the first thread is done. *
  • Maintain the depth of the active thread. *
*/ public class ConcurrencyManager implements Serializable { protected int numberOfReaders; protected int depth; protected int numberOfWritersWaiting; protected volatile transient Thread activeThread; public static Map deferredLockManagers = initializeDeferredLockManagers(); protected boolean lockedByMergeManager; protected static boolean shouldTrackStack = System.getProperty(SystemProperties.RECORD_STACK_ON_LOCK) != null; protected Exception stack; /** * Initialize the newly allocated instance of this class. * Set the depth to zero. */ public ConcurrencyManager() { this.depth = 0; this.numberOfReaders = 0; this.numberOfWritersWaiting = 0; } /** * Wait for all threads except the active thread. * If the active thread just increment the depth. * This should be called before entering a critical section. */ public void acquire() throws ConcurrencyException { this.acquire(false); } /** * Wait for all threads except the active thread. * If the active thread just increment the depth. * This should be called before entering a critical section. * called with true from the merge process, if true then the refresh will not refresh the object */ public synchronized void acquire(boolean forMerge) throws ConcurrencyException { while (((this.activeThread != null) || (this.numberOfReaders > 0)) && (this.activeThread != Thread.currentThread())) { // This must be in a while as multiple threads may be released, or another thread may rush the acquire after one is released. try { this.numberOfWritersWaiting++; wait(); this.numberOfWritersWaiting--; } catch (InterruptedException exception) { throw ConcurrencyException.waitWasInterrupted(exception.getMessage()); } } if (this.activeThread == null) { this.activeThread = Thread.currentThread(); if (shouldTrackStack){ this.stack = new Exception(); } } this.lockedByMergeManager = forMerge; this.depth++; } /** * If the lock is not acquired already acquire it and return true. 
     * If it has been acquired already return false
     * Added for CR 2317
     */
    public boolean acquireNoWait() throws ConcurrencyException {
        return acquireNoWait(false);
    }

    /**
     * If the lock is not acquired already acquire it and return true.
     * If it has been acquired already return false
     * Added for CR 2317
     * called with true from the merge process, if true then the refresh will not refresh the object
     */
    public synchronized boolean acquireNoWait(boolean forMerge) throws ConcurrencyException {
        // Unowned (no writer, no readers) or already owned by this thread:
        // acquire() cannot block in either case, it only increments the depth.
        if ((this.activeThread == null && this.numberOfReaders == 0) || (this.activeThread == Thread.currentThread())) {
            //if I own the lock increment depth
            acquire(forMerge);
            return true;
        } else {
            return false;
        }
    }

    /**
     * If the lock is not acquired already acquire it and return true.
     * If it has been acquired already return false
     * Added for CR 2317
     * called with true from the merge process, if true then the refresh will not refresh the object
     *
     * The wait parameter is a timeout in milliseconds (Object.wait semantics).
     * NOTE(review): wait(0) waits with no timeout, so callers should pass a
     * positive value — confirm against callers.
     */
    public synchronized boolean acquireWithWait(boolean forMerge, int wait) throws ConcurrencyException {
        if ((this.activeThread == null && this.numberOfReaders == 0) || (this.activeThread == Thread.currentThread())) {
            //if I own the lock increment depth
            acquire(forMerge);
            return true;
        } else {
            try {
                // Wait at most 'wait' ms for a release() / releaseReadLock() notify.
                wait(wait);
            } catch (InterruptedException e) {
                return false;
            }
            // Re-test after the timed wait; acquire only if now unowned or owned by us.
            if ((this.activeThread == null && this.numberOfReaders == 0) || (this.activeThread == Thread.currentThread())){
                acquire(forMerge);
                return true;
            }
            return false;
        }
    }

    /**
     * If the activeThread is not set, acquire it and return true.
     * If the activeThread is set, it has been acquired already, return false.
     * Added for Bug 5840635
     * Call with true from the merge process, if true then the refresh will not refresh the object.
     */
    public synchronized boolean acquireIfUnownedNoWait(boolean forMerge) throws ConcurrencyException {
        // Only acquire lock if active thread is null. Do not check current thread.
if (this.activeThread == null && this.numberOfReaders == 0) { // if lock is unowned increment depth acquire(forMerge); return true; } else { return false; } } /** * Add deferred lock into a hashtable to avoid deadlock */ public void acquireDeferredLock() throws ConcurrencyException { Thread currentThread = Thread.currentThread(); DeferredLockManager lockManager = getDeferredLockManager(currentThread); if (lockManager == null) { lockManager = new DeferredLockManager(); putDeferredLock(currentThread, lockManager); } lockManager.incrementDepth(); synchronized (this) { while (this.numberOfReaders != 0) { // There are readers of this object, wait until they are done before determining if //there are any other writers. If not we will wait on the readers for acquire. If another //thread is also waiting on the acquire then a deadlock could occur. See bug 3049635 //We could release all active locks before releasing deferred but the object may not be finished building //we could make the readers get a hard lock, but then we would just build a deferred lock even though //the object is not being built. try { this.numberOfWritersWaiting++; wait(); this.numberOfWritersWaiting--; } catch (InterruptedException exception) { throw ConcurrencyException.waitWasInterrupted(exception.getMessage()); } } if ((this.activeThread == currentThread) || (!isAcquired())) { lockManager.addActiveLock(this); acquire(); } else { lockManager.addDeferredLock(this); if (AbstractSessionLog.getLog().shouldLog(SessionLog.FINER) && this instanceof CacheKey) { AbstractSessionLog.getLog().log(SessionLog.FINER, SessionLog.CACHE, "acquiring_deferred_lock", ((CacheKey)this).getObject(), currentThread.getName()); } } } } /** * Check the lock state, if locked, acquire and release a deferred lock. * This optimizes out the normal deferred-lock check if not locked. */ public void checkDeferredLock() throws ConcurrencyException { // If it is not locked, then just return. 
        if (this.activeThread == null) {
            return;
        }
        acquireDeferredLock();
        releaseDeferredLock();
    }

    /**
     * Check the lock state, if locked, acquire and release a read lock.
     * This optimizes out the normal read-lock check if not locked.
     */
    public void checkReadLock() throws ConcurrencyException {
        // If it is not locked, then just return.
        if (this.activeThread == null) {
            return;
        }
        acquireReadLock();
        releaseReadLock();
    }

    /**
     * Wait on any writer.
     * Allow concurrent reads.
     */
    public synchronized void acquireReadLock() throws ConcurrencyException {
        // Cannot check for starving writers as will lead to deadlocks.
        while ((this.activeThread != null) && (this.activeThread != Thread.currentThread())) {
            try {
                wait();
            } catch (InterruptedException exception) {
                throw ConcurrencyException.waitWasInterrupted(exception.getMessage());
            }
        }
        this.numberOfReaders++;
    }

    /**
     * If this is acquired return false otherwise acquire readlock and return true.
     * (acquireReadLock() cannot block here: the lock is either unowned or
     * already owned by the current thread.)
     */
    public synchronized boolean acquireReadLockNoWait() {
        if ((this.activeThread == null) || (this.activeThread == Thread.currentThread())) {
            acquireReadLock();
            return true;
        } else {
            return false;
        }
    }

    /**
     * Return the active thread.
     */
    public Thread getActiveThread() {
        return activeThread;
    }

    /**
     * Return the deferred lock manager from the thread
     */
    public static DeferredLockManager getDeferredLockManager(Thread thread) {
        return getDeferredLockManagers().get(thread);
    }

    /**
     * Return the deferred lock manager hashtable (thread - DeferredLockManager).
     */
    protected static Map getDeferredLockManagers() {
        return deferredLockManagers;
    }

    /**
     * Init the deferred lock managers (thread - DeferredLockManager).
     * A ConcurrentHashMap, as it is shared by all threads.
     */
    protected static Map initializeDeferredLockManagers() {
        return new ConcurrentHashMap();
    }

    /**
     * Return the current depth of the active thread.
     */
    public int getDepth() {
        return depth;
    }

    /**
     * Return the number of readers holding the read lock.
     * Used to allow concurrent reads.
     */
    public int getNumberOfReaders() {
        return numberOfReaders;
    }

    /**
     * Number of writers that want the lock.
     * This is used to ensure that a writer is not starved.
     */
    public int getNumberOfWritersWaiting() {
        return numberOfWritersWaiting;
    }

    /**
     * Return if a thread has acquired this manager.
     */
    public boolean isAcquired() {
        return depth > 0;
    }

    /**
     * INTERNAL:
     * Used by the refresh process to determine if this concurrency manager is locked by
     * the merge process. If it is then the refresh should not refresh the object
     */
    public boolean isLockedByMergeManager() {
        return this.lockedByMergeManager;
    }

    /**
     * Check if the deferred locks of a thread are all released.
     * Recursively follows the chain of threads that this thread's deferred
     * locks are waiting on; recursiveSet records visited threads so a cycle
     * between threads terminates (and is treated as complete).
     */
    public static boolean isBuildObjectOnThreadComplete(Thread thread, Map recursiveSet) {
        if (recursiveSet.containsKey(thread)) {
            return true;
        }
        recursiveSet.put(thread, thread);
        DeferredLockManager lockManager = getDeferredLockManager(thread);
        if (lockManager == null) {
            // No lock manager: the thread has finished and cleaned up.
            return true;
        }
        Vector deferredLocks = lockManager.getDeferredLocks();
        for (Enumeration deferredLocksEnum = deferredLocks.elements();
                 deferredLocksEnum.hasMoreElements();) {
            ConcurrencyManager deferedLock = (ConcurrencyManager)deferredLocksEnum.nextElement();
            Thread activeThread = null;
            if (deferedLock.isAcquired()) {
                activeThread = deferedLock.getActiveThread();
                // the active thread may be set to null at anypoint
                // if added for CR 2330
                if (activeThread != null) {
                    DeferredLockManager currentLockManager = getDeferredLockManager(activeThread);
                    if (currentLockManager == null) {
                        // The other thread is still actively building: not complete.
                        return false;
                    } else if (currentLockManager.isThreadComplete()) {
                        activeThread = deferedLock.getActiveThread();
                        // The lock may suddenly finish and no longer have an active thread.
                        if (activeThread != null) {
                            if (!isBuildObjectOnThreadComplete(activeThread, recursiveSet)) {
                                return false;
                            }
                        }
                    } else {
                        return false;
                    }
                }
            }
        }
        return true;
    }

    /**
     * Return if this manager is within a nested acquire.
     */
    public boolean isNested() {
        return depth > 1;
    }

    // Register the deferred lock manager for the given thread in the global map.
    public void putDeferredLock(Thread thread, DeferredLockManager lockManager) {
        getDeferredLockManagers().put(thread, lockManager);
    }

    /**
     * Decrement the depth for the active thread.
     * Assume the current thread is the active one.
     * Raise an error if the depth become < 0.
     * The notify will release the first thread waiting on the object,
     * if no threads are waiting it will do nothing.
     */
    public synchronized void release() throws ConcurrencyException {
        if (this.depth == 0) {
            throw ConcurrencyException.signalAttemptedBeforeWait();
        } else {
            this.depth--;
        }
        if (this.depth == 0) {
            // Fully released: clear ownership state and wake all waiters.
            this.activeThread = null;
            if (shouldTrackStack){
                this.stack = null;
            }
            this.lockedByMergeManager = false;
            notifyAll();
        }
    }

    /**
     * Release the deferred lock.
     * This uses a deadlock detection and resolution algorithm to avoid cache deadlocks.
     * The deferred lock manager keeps track of the lock for a thread, so that other
     * thread know when a deadlock has occurred and can resolve it.
     */
    public void releaseDeferredLock() throws ConcurrencyException {
        Thread currentThread = Thread.currentThread();
        DeferredLockManager lockManager = getDeferredLockManager(currentThread);
        if (lockManager == null) {
            return;
        }
        int depth = lockManager.getThreadDepth();
        if (depth > 1) {
            // Nested deferred acquire: just unwind one level.
            lockManager.decrementDepth();
            return;
        }
        // If the set is null or empty, means there is no deferred lock for this thread, return.
        if (!lockManager.hasDeferredLock()) {
            lockManager.releaseActiveLocksOnThread();
            removeDeferredLockManager(currentThread);
            return;
        }
        lockManager.setIsThreadComplete(true);
        // Thread have three stages, one where they are doing work (i.e. building objects)
        // two where they are done their own work but may be waiting on other threads to finish their work,
        // and a third when they and all the threads they are waiting on are done.
        // This is essentially a busy wait to determine if all the other threads are done.
        while (true) {
            try{
                // 2612538 - the default size of Map (32) is appropriate
                Map recursiveSet = new IdentityHashMap();
                if (isBuildObjectOnThreadComplete(currentThread, recursiveSet)) {// Thread job done.
                    lockManager.releaseActiveLocksOnThread();
                    removeDeferredLockManager(currentThread);
                    AbstractSessionLog.getLog().log(SessionLog.FINER, SessionLog.CACHE, "deferred_locks_released", currentThread.getName());
                    return;
                } else {// Not done yet, wait and check again.
                    try {
                        Thread.sleep(1);
                    } catch (InterruptedException interrupted) {
                        // Interrupted mid-wait: release everything this thread holds
                        // before propagating, so other threads are not blocked forever.
                        AbstractSessionLog.getLog().logThrowable(SessionLog.SEVERE, SessionLog.CACHE, interrupted);
                        lockManager.releaseActiveLocksOnThread();
                        removeDeferredLockManager(currentThread);
                        throw ConcurrencyException.waitWasInterrupted(interrupted.getMessage());
                    }
                }
            } catch (Error error) {
                // Fatal error: likewise release this thread's locks before rethrowing.
                AbstractSessionLog.getLog().logThrowable(SessionLog.SEVERE, SessionLog.CACHE, error);
                lockManager.releaseActiveLocksOnThread();
                removeDeferredLockManager(currentThread);
                throw error;
            }
        }
    }

    /**
     * Decrement the number of readers.
     * Used to allow concurrent reads.
     */
    public synchronized void releaseReadLock() throws ConcurrencyException {
        if (this.numberOfReaders == 0) {
            throw ConcurrencyException.signalAttemptedBeforeWait();
        } else {
            this.numberOfReaders--;
        }
        if (this.numberOfReaders == 0) {
            // Last reader out: wake any writers blocked in acquire().
            notifyAll();
        }
    }

    /**
     * Remove the deferred lock manager for the thread
     */
    public static DeferredLockManager removeDeferredLockManager(Thread thread) {
        return getDeferredLockManagers().remove(thread);
    }

    /**
     * Set the active thread.
     */
    public void setActiveThread(Thread activeThread) {
        this.activeThread = activeThread;
    }

    /**
     * Set the current depth of the active thread.
     */
    protected void setDepth(int depth) {
        this.depth = depth;
    }

    /**
     * INTERNAL:
     * Used by the merge manager to let the refresh know not to refresh this object as it is being
     * loaded by the merge process.
     */
    public void setIsLockedByMergeManager(boolean state) {
        this.lockedByMergeManager = state;
    }

    /**
     * Track the number of readers.
     */
    protected void setNumberOfReaders(int numberOfReaders) {
        this.numberOfReaders = numberOfReaders;
    }

    /**
     * Number of writers that want the lock.
     * This is used to ensure that a writer is not starved.
     */
    protected void setNumberOfWritersWaiting(int numberOfWritersWaiting) {
        this.numberOfWritersWaiting = numberOfWritersWaiting;
    }

    /**
     * Record this (already held) lock as an active lock of the current thread's
     * deferred lock manager, creating and registering the manager if needed.
     */
    public synchronized void transitionToDeferredLock() {
        Thread currentThread = Thread.currentThread();
        DeferredLockManager lockManager = getDeferredLockManager(currentThread);
        if (lockManager == null) {
            lockManager = new DeferredLockManager();
            putDeferredLock(currentThread, lockManager);
        }
        lockManager.incrementDepth();
        lockManager.addActiveLock(this);
    }

    /**
     * Print the nested depth.
     */
    public String toString() {
        Object[] args = { Integer.valueOf(getDepth()) };
        return Helper.getShortClassName(getClass()) + ToStringLocalization.buildMessage("nest_level", args);
    }

    // Return the stack trace recorded at acquire time (debugging aid; null
    // unless shouldTrackStack is enabled).
    public Exception getStack() {
        return stack;
    }

    // Set the recorded acquisition stack trace (debugging aid).
    public void setStack(Exception stack) {
        this.stack = stack;
    }

    // Return whether acquisition stack traces are being recorded.
    public static boolean shouldTrackStack() {
        return shouldTrackStack;
    }

    /**
     * INTERNAL:
     * This can be set during debugging to record the stacktrace when a lock is acquired.
     * Then once IdentityMapAccessor.printIdentityMapLocks() is called the stack call for each
     * lock will be printed as well. Because locking issues are usually quite time sensitive setting
     * this flag may inadvertently remove the deadlock because of the change in timings.
     *
     * There is also a system level property for this setting.
"eclipselink.cache.record-stack-on-lock" * @param shouldTrackStack */ public static void setShouldTrackStack(boolean shouldTrackStack) { ConcurrencyManager.shouldTrackStack = shouldTrackStack; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/DatabaseType.java0000664000000000000000000002204612216173126024720 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import static java.lang.Integer.MIN_VALUE; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.platform.database.DatabasePlatform; import org.eclipse.persistence.platform.database.oracle.plsql.PLSQLStoredProcedureCall; import org.eclipse.persistence.platform.database.oracle.plsql.PLSQLargument; import org.eclipse.persistence.queries.StoredProcedureCall; import org.eclipse.persistence.sessions.DatabaseRecord; import static org.eclipse.persistence.internal.databaseaccess.DatasourceCall.IN; import static org.eclipse.persistence.internal.databaseaccess.DatasourceCall.OUT; import static org.eclipse.persistence.internal.helper.Helper.NL; import static 
org.eclipse.persistence.internal.helper.Helper.buildHexStringFromBytes; /** * PUBLIC: Interface used to categorize arguments to Stored Procedures as either * 'simple' (use subclass SimpleDatabaseType) or 'complex' (use subclass ComplexDatabaseType) * * @author Mike Norman - michael.norman@oracle.com * @since Oracle TopLink 11.x.x */ @SuppressWarnings("unchecked") public interface DatabaseType { public static final String TARGET_SHORT_PREFIX = "T_"; public static final String TARGET_SUFFIX = "TARGET"; public static final String COMPAT_SHORT_PREFIX = "C_"; public static final String COMPAT_SUFFIX = "COMPAT"; public static final int ARGNAME_SIZE_LIMIT = 30 - TARGET_SUFFIX.length(); public boolean isComplexDatabaseType(); public boolean isJDBCType(); public int getSqlCode(); public int getConversionCode(); public String getTypeName(); public int computeInIndex(PLSQLargument inArg, int newIndex, ListIterator i); public int computeOutIndex(PLSQLargument outArg, int newIndex, ListIterator i); public void buildInDeclare(StringBuilder sb, PLSQLargument inArg); public void buildOutDeclare(StringBuilder sb, PLSQLargument outArg); public void buildBeginBlock(StringBuilder sb, PLSQLargument arg, PLSQLStoredProcedureCall call); public void buildOutAssignment(StringBuilder sb, PLSQLargument outArg, PLSQLStoredProcedureCall call); public void translate(PLSQLargument arg, AbstractRecord translationRow, AbstractRecord copyOfTranslationRow, List copyOfTranslationFields, List translationRowFields, List translationRowValues, StoredProcedureCall call); public void buildOutputRow(PLSQLargument outArg, AbstractRecord outputRow, DatabaseRecord newOutputRow, List outputRowFields, List outputRowValues); public void logParameter(StringBuilder sb, Integer direction, PLSQLargument arg, AbstractRecord translationRow, DatabasePlatform platform); public enum DatabaseTypeHelper { databaseTypeHelper; static String getTruncatedSHA1Hash(String s) { StringBuilder sb = new StringBuilder(28); try { 
byte[] longIdentifierBytes = s.getBytes(); MessageDigest md = MessageDigest.getInstance("SHA-1"); md.update(longIdentifierBytes, 0, longIdentifierBytes.length); byte[] digest = md.digest(); //produces a 160-bit hash //truncate to 112 bits, which is about the same java.util.UUID; //HMAC-SHA1-96 is only 96 bits and that's good enough for IPSEC work //TL;DR - probability of collision quite small byte[] truncDigest = new byte[14]; System.arraycopy(digest, 0, truncDigest, 0, 14); sb.append(buildHexStringFromBytes(truncDigest)); } catch (NoSuchAlgorithmException e) { //ignore: should never happen } return sb.toString(); } protected String getTruncatedSHA1Name(String argName, String prefix) { if (argName.length() >= ARGNAME_SIZE_LIMIT) { StringBuilder sb = new StringBuilder(); //the truncated SHA is great, but a PL/SQL identifier //can't start with a number, so use prefix sb.append(prefix); sb.append(getTruncatedSHA1Hash(argName)); return sb.toString(); } return argName; } public String buildTarget(PLSQLargument arg) { StringBuilder sb = new StringBuilder(); if (arg.name.length() >= ARGNAME_SIZE_LIMIT) { sb.append(getTruncatedSHA1Name(arg.name, TARGET_SHORT_PREFIX)); } else { sb.append(arg.name); sb.append(TARGET_SUFFIX); } return sb.toString(); } public String buildCompatible(PLSQLargument arg) { StringBuilder sb = new StringBuilder(); if (arg.name.length() >= ARGNAME_SIZE_LIMIT) { sb.append(getTruncatedSHA1Name(arg.name, COMPAT_SHORT_PREFIX)); } else { sb.append(arg.name); sb.append(COMPAT_SUFFIX); } return sb.toString(); } public void declareTarget(StringBuilder sb, PLSQLargument arg, DatabaseType databaseType) { sb.append(" "); sb.append(buildTarget(arg)); sb.append(" "); sb.append(databaseType.getTypeName()); } public int computeInIndex(PLSQLargument inArg, int newIndex) { inArg.inIndex = newIndex; return ++newIndex; } public int computeOutIndex(PLSQLargument outArg, int newIndex) { outArg.outIndex = newIndex; return ++newIndex; } public void 
buildOutAssignment(StringBuilder sb, PLSQLargument outArg, PLSQLStoredProcedureCall call) { sb.append(" :"); sb.append(outArg.outIndex); sb.append(" := "); sb.append(buildTarget(outArg)); sb.append(";"); sb.append(NL); } public void translate(PLSQLargument arg, AbstractRecord translationRow, AbstractRecord copyOfTranslationRow, List copyOfTranslationFields, List translationRowFields, List translationRowValues, StoredProcedureCall call) { DatabaseField field = null; for (Iterator i = copyOfTranslationFields.iterator(); i.hasNext(); ) { DatabaseField f = (DatabaseField)i.next(); if (f.getName().equals(arg.name)) { field = f; break; } } if (arg.length != MIN_VALUE) { field.setLength(arg.length); } if (arg.precision != MIN_VALUE) { field.setPrecision(arg.precision); } if (arg.scale != MIN_VALUE) { field.setScale(arg.scale); } translationRowFields.set(arg.inIndex - 1, field); Object value = copyOfTranslationRow.get(field); translationRowValues.set(arg.inIndex - 1, value); } public void buildOutputRow(PLSQLargument outArg, AbstractRecord outputRow, DatabaseRecord newOutputRow, List outputRowFields, List outputRowValues) { DatabaseField field = null; for (Iterator i = outputRowFields.iterator(); i.hasNext(); ) { DatabaseField f = (DatabaseField)i.next(); if (f.getName().equals(outArg.name)) { field = f; break; } } Object value = outputRow.get(field); newOutputRow.add(field, value); } public void logParameter(StringBuilder sb, Integer direction, PLSQLargument arg, AbstractRecord translationRow, DatabasePlatform platform) { if (direction == IN && arg.inIndex != MIN_VALUE) { sb.append(":"); sb.append(arg.inIndex); sb.append(" => "); sb.append(platform.convertToDatabaseType(translationRow.get(arg.name))); } if (direction == OUT && arg.outIndex != MIN_VALUE) { sb.append(arg.name); sb.append(" => :"); sb.append(arg.outIndex); } } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/CustomObjectInputStream.java0000664000000000000000000000353412216173126027150 
0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.lang.Class; import java.lang.ClassNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.ObjectStreamClass; import java.io.ObjectInputStream; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.internal.helper.ConversionManager; /** * INTERNAL: * Defines a custom ObjectInputStream that is used with SerializedObjectConverter * to ensure the correct class loader is used. 
* BUG# 2813583 * * @author Guy Pelletier * @version 1.0 March 25/03 */ public class CustomObjectInputStream extends ObjectInputStream { ConversionManager m_conversionManager; public CustomObjectInputStream(InputStream stream, Session session) throws IOException { super(stream); m_conversionManager = session.getDatasourceLogin().getDatasourcePlatform().getConversionManager(); } public Class resolveClass(ObjectStreamClass classDesc) throws ClassNotFoundException, IOException { return m_conversionManager.convertClassNameToClass(classDesc.getName()); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/ClassConstants.java0000664000000000000000000002260012216173126025310 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.*; import java.math.*; import java.net.URL; import javax.xml.datatype.Duration; import javax.xml.datatype.XMLGregorianCalendar; import javax.xml.namespace.QName; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.eclipse.persistence.internal.core.helper.CoreClassConstants; import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl; import org.eclipse.persistence.internal.sessions.AbstractSession; /** * INTERNAL: */ public class ClassConstants extends CoreClassConstants { // Java classes public static final Class Hashtable_Class = Hashtable.class; public static final Class Enumeration_Class = Enumeration.class; public static final Class JavaSqlTime_Class = java.sql.Time.class; public static final Class JavaSqlDate_Class = java.sql.Date.class; public static final Class JavaSqlTimestamp_Class = java.sql.Timestamp.class; public static final Class List_Class = List.class; public static final Class Map_Entry_Class = Map.Entry.class; public static final Class Object_Class = Object.class; public static final Class SortedSet_Class = SortedSet.class; public static final Class URL_Class = URL.class; public static final Class Vector_class = Vector.class; public static final Class HashSet_class = HashSet.class; public static final Class Void_Class = void.class; public static final Class PropertyChangeEvent_Class = java.beans.PropertyChangeEvent.class; // Eclipselink Classes public static final Class Accessor_Class = org.eclipse.persistence.internal.databaseaccess.Accessor.class; public static final Class ConversionManager_Class = org.eclipse.persistence.internal.helper.ConversionManager.class; public static final Class CursoredStream_Class = org.eclipse.persistence.queries.CursoredStream.class; public static final Class 
DatabaseQuery_Class = org.eclipse.persistence.queries.DatabaseQuery.class; public static final Class DatabaseRow_Class = org.eclipse.persistence.internal.sessions.AbstractRecord.class; public static final Class OldDescriptorEvent_Class = org.eclipse.persistence.descriptors.DescriptorEvent.class; public static final Class DescriptorEvent_Class = org.eclipse.persistence.descriptors.DescriptorEvent.class; public static final Class DirectConnector_Class = org.eclipse.persistence.sessions.DirectConnector.class; public static final Class Expression_Class = org.eclipse.persistence.expressions.Expression.class; public static final Class FunctionExpression_Class = org.eclipse.persistence.internal.expressions.FunctionExpression.class; public static final Class ArgumentListFunctionExpression_Class = org.eclipse.persistence.internal.expressions.ArgumentListFunctionExpression.class; public static final Class IndirectContainer_Class = org.eclipse.persistence.indirection.IndirectContainer.class; public static final Class IndirectList_Class = org.eclipse.persistence.indirection.IndirectList.class; public static final Class IndirectSet_Class = org.eclipse.persistence.indirection.IndirectSet.class; public static final Class IndirectMap_Class = org.eclipse.persistence.indirection.IndirectMap.class; public static final Class LogicalExpression_Class = org.eclipse.persistence.internal.expressions.LogicalExpression.class; public static final Class PublicInterfaceDatabaseSession_Class = DatabaseSessionImpl.class; public static final Class PerformanceProfiler_Class = org.eclipse.persistence.tools.profiler.PerformanceProfiler.class; public static final Class PublicInterfaceSession_Class = AbstractSession.class; public static final Class QueryKey_Class = org.eclipse.persistence.mappings.querykeys.QueryKey.class; public static final Class RelationExpression_Class = org.eclipse.persistence.internal.expressions.RelationExpression.class; public static final Class Record_Class = 
org.eclipse.persistence.sessions.Record.class; public static final Class ServerSession_Class = org.eclipse.persistence.sessions.server.ServerSession.class; public static final Class SessionsSession_Class = org.eclipse.persistence.sessions.Session.class; public static final Class ScrollableCursor_Class = org.eclipse.persistence.queries.ScrollableCursor.class; public static final Class ValueHolderInterface_Class = org.eclipse.persistence.indirection.ValueHolderInterface.class; public static final Class CollectionChangeEvent_Class = org.eclipse.persistence.descriptors.changetracking.CollectionChangeEvent.class; public static final Class MapChangeEvent_Class = org.eclipse.persistence.descriptors.changetracking.MapChangeEvent.class; public static final Class ChangeTracker_Class = org.eclipse.persistence.descriptors.changetracking.ChangeTracker.class; public static final Class WeavedAttributeValueHolderInterface_Class = org.eclipse.persistence.indirection.WeavedAttributeValueHolderInterface.class; public static final Class PersistenceWeavedLazy_Class = org.eclipse.persistence.internal.weaving.PersistenceWeavedLazy.class; // Identity map classes public static final Class CacheIdentityMap_Class = org.eclipse.persistence.internal.identitymaps.CacheIdentityMap.class; public static final Class FullIdentityMap_Class = org.eclipse.persistence.internal.identitymaps.FullIdentityMap.class; public static final Class HardCacheWeakIdentityMap_Class = org.eclipse.persistence.internal.identitymaps.HardCacheWeakIdentityMap.class; public static final Class NoIdentityMap_Class = org.eclipse.persistence.internal.identitymaps.NoIdentityMap.class; public static final Class SoftCacheWeakIdentityMap_Class = org.eclipse.persistence.internal.identitymaps.SoftCacheWeakIdentityMap.class; public static final Class SoftIdentityMap_Class = org.eclipse.persistence.internal.identitymaps.SoftIdentityMap.class; public static final Class WeakIdentityMap_Class = 
org.eclipse.persistence.internal.identitymaps.WeakIdentityMap.class; //fetch group class public static final Class FetchGroupTracker_class = org.eclipse.persistence.queries.FetchGroupTracker.class; // Moved from ConversionManager public static final Class ABYTE = Byte[].class; public static final Class AOBJECT = Object[].class; public static final Class ACHAR = Character[].class; public static final Class APBYTE = byte[].class; public static final Class APCHAR = char[].class; public static final Class BIGDECIMAL = BigDecimal.class; public static final Class BIGINTEGER = BigInteger.class; public static final Class BOOLEAN = Boolean.class; public static final Class BYTE = Byte.class; public static final Class CLASS = Class.class; public static final Class CHAR = Character.class; public static final Class CALENDAR = Calendar.class; public static final Class DOUBLE = Double.class; public static final Class FLOAT = Float.class; public static final Class GREGORIAN_CALENDAR = GregorianCalendar.class; public static final Class INTEGER = Integer.class; public static final Class LONG = Long.class; public static final Class NUMBER = Number.class; public static final Class OBJECT = Object.class; public static final Class PBOOLEAN = boolean.class; public static final Class PBYTE = byte.class; public static final Class PCHAR = char.class; public static final Class PDOUBLE = double.class; public static final Class PFLOAT = float.class; public static final Class PINT = int.class; public static final Class PLONG = long.class; public static final Class PSHORT = short.class; public static final Class SHORT = Short.class; public static final Class SQLDATE = java.sql.Date.class; public static final Class STRING = String.class; public static final Class TIME = java.sql.Time.class; public static final Class TIMESTAMP = java.sql.Timestamp.class; public static final Class UTILDATE = java.util.Date.class; public static final Class QNAME = QName.class; public static final Class 
XML_GREGORIAN_CALENDAR = XMLGregorianCalendar.class; public static final Class DURATION = Duration.class; //LOB support types public static final Class BLOB = java.sql.Blob.class; public static final Class CLOB = java.sql.Clob.class; //Indication to ConversionManager not to convert classes implementing this interface public static final Class NOCONVERSION = NoConversion.class; //XML Classes public static final Class DOCUMENT = Document.class; public static final Class NODE = Node.class; public ClassConstants() { } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/ThreadCursoredList.java0000664000000000000000000004133312216173126026124 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.*; import org.eclipse.persistence.exceptions.ValidationException; /** * Special List/Vector subclass that allows concurrent * population of the contents while the list is in use. * The list will allow iteration while it is still being populated * to allow concurrent processing of the contents. * Other API such as size that require to full contents know will wait until * the list is notified as being complete. * This is use to allow the rows and objects of a read-all query to be processed concurrently. 
* * @author James Sutherland * @since OracleAS 10g TopLink (10.0.3) */ public class ThreadCursoredList extends Vector { /** Store if the list is fully populated. */ protected boolean isComplete; /** Used to throw exception that occur from the concurrent population thread. */ protected RuntimeException exception; /** * Construct an empty list so that its internal data array * has size 10 and its standard capacity increment is zero. */ public ThreadCursoredList() { this(10); } /** * Construct an empty list with the specified initial capacity and * with its capacity increment equal to zero. */ public ThreadCursoredList(int initialCapacity) { this(initialCapacity, 0); } /** * Construct an empty list with the specified initial capacity and * capacity increment. */ public ThreadCursoredList(int initialCapacity, int capacityIncrement) { super(0); this.isComplete = false; } /** * Add and notify any waiters that there are new elements. */ public synchronized void add(int index, Object element) { super.add(index, element); this.notifyAll(); } /** * Add and notify any waiters that there are new elements. */ public synchronized boolean add(Object element) { boolean result = super.add(element); notifyAll(); return result; } /** * Add and notify any waiters that there are new elements. */ public synchronized boolean addAll(int index, Collection collection) { boolean result = super.addAll(index, collection); notifyAll(); return result; } /** * Add and notify any waiters that there are new elements. */ public synchronized boolean addAll(Collection collection) { boolean result = super.addAll(collection); notifyAll(); return result; } /** * Add and notify any waiters that there are new elements. */ public synchronized void addElement(Object object) { super.addElement(object); notifyAll(); } /** * First wait until complete. */ public synchronized void clear() { waitUntilComplete(); super.clear(); } /** * First wait until complete. 
*/ public synchronized Object clone() { waitUntilComplete(); return super.clone(); } /** * Return if any exception that was throw from concurrent population thread. */ public boolean hasException() { return exception != null; } /** * Return any exception that was throw from concurrent population thread. */ public RuntimeException getException() { return exception; } /** * Record that the population thread hit an exception, * that should be thrown to the processing thread on the next access. * This also records the list and complete. */ public synchronized void throwException(RuntimeException exception) { this.exception = exception; setIsComplete(true); } /** * Return if the list is complete. * If an exception was thrown during the concurrent population throw the exception. */ public synchronized boolean isComplete() { if (exception != null) { // Set the exception to null so it is only thrown once. RuntimeException thrownException = this.exception; this.exception = null; throw thrownException; } return isComplete; } /** * Set the list complete and notify any waiters. */ public synchronized void setIsComplete(boolean isComplete) { this.isComplete = isComplete; notifyAll(); } /** * Wait until the list has been fully populated. */ public synchronized void waitUntilComplete() { while (!isComplete()) { try { wait(); } catch (InterruptedException ignore) { } } } /** * Wait until a new element has been added. */ public synchronized void waitUntilAdd() { try { wait(); } catch (InterruptedException ignore) { } } /** * If it does not contain the object must wait until it is complete. */ public synchronized boolean contains(Object element) { boolean result = super.contains(element); if ((result != true) && (!isComplete())) { waitUntilComplete(); result = super.contains(element); } return result; } /** * If it does not contain the object must wait until it is complete. 
*/ public synchronized boolean containsAll(Collection collection) { boolean result = super.containsAll(collection); if ((result != true) && (!isComplete())) { waitUntilComplete(); result = super.containsAll(collection); } return result; } /** * First wait until complete. */ public synchronized void copyInto(Object[] array) { waitUntilComplete(); super.copyInto(array); } /** * If the index is beyond the size wait until complete. */ public synchronized Object elementAt(int index) { Object result = super.elementAt(index); if ((result == null) && (!isComplete())) { waitUntilComplete(); result = super.elementAt(index); } return result; } protected int getSize() { return super.size(); } /** * Allow concurrent streaming of the elements. */ public Enumeration elements() { return new Enumeration() { int count = 0; public boolean hasMoreElements() { synchronized (ThreadCursoredList.this) { boolean result = count < ThreadCursoredList.this.getSize(); while ((!result) && (!isComplete())) { waitUntilAdd(); result = count < ThreadCursoredList.this.getSize(); } return result; } } public Object nextElement() { synchronized (ThreadCursoredList.this) { boolean result = count < ThreadCursoredList.this.getSize(); while ((!result) && (!isComplete())) { waitUntilAdd(); result = count < ThreadCursoredList.this.getSize(); } if (result) { return get(count++); } } throw new NoSuchElementException("Vector Enumeration"); } }; } /** * First wait until complete. */ public synchronized boolean equals(Object object) { waitUntilComplete(); return super.equals(object); } /** * Wait until has an element or is complete. */ public synchronized Object firstElement() { while ((!isComplete()) && (super.size() < 1)) { waitUntilAdd(); } return super.firstElement(); } /** * Wait until has the element or is complete. */ public synchronized Object get(int index) { while ((!isComplete()) && (super.size() < index)) { waitUntilAdd(); } return super.get(index); } /** * First wait until complete. 
*/ public synchronized int hashCode() { waitUntilComplete(); return super.hashCode(); } /** * If does not contain the object wait until complete. */ public int indexOf(Object element) { int result = super.indexOf(element); if ((result == -1) && (!isComplete())) { waitUntilComplete(); result = super.indexOf(element); } return result; } /** * If does not contain the object wait until complete. */ public synchronized int indexOf(Object element, int index) { int result = super.indexOf(element, index); if ((result == -1) && (!isComplete())) { waitUntilComplete(); result = super.indexOf(element, index); } return result; } /** * Add the element a notify any waiters that there are new elements. */ public synchronized void insertElementAt(Object element, int index) { super.insertElementAt(element, index); notify(); } /** * If empty wait until an element has been added or is complete. */ public boolean isEmpty() { boolean result = super.isEmpty(); if (result && (!isComplete())) { waitUntilAdd(); result = super.isEmpty(); } return result; } public Iterator iterator() { return listIterator(0); } /** * First wait until complete. */ public synchronized Object lastElement() { waitUntilComplete(); return super.lastElement(); } /** * First wait until complete. */ public int lastIndexOf(Object element) { waitUntilComplete(); return super.lastIndexOf(element); } /** * First wait until complete. */ public synchronized int lastIndexOf(Object element, int index) { waitUntilComplete(); return super.lastIndexOf(element, index); } public ListIterator listIterator() { return listIterator(0); } /** * Iterate while waiting at end until complete. 
*/ public ListIterator listIterator(final int index) { return new ListIterator() { int count = index; public boolean hasNext() { synchronized (ThreadCursoredList.this) { boolean result = count < ThreadCursoredList.this.getSize(); while ((!result) && (!isComplete())) { waitUntilAdd(); result = count < ThreadCursoredList.this.getSize(); } return result; } } public Object next() { synchronized (ThreadCursoredList.this) { boolean result = count < ThreadCursoredList.this.getSize(); while ((!result) && (!isComplete())) { waitUntilAdd(); result = count < ThreadCursoredList.this.getSize(); } if (result) { return get(count++); } } throw new NoSuchElementException("Vector Iterator"); } public void remove() { throw ValidationException.operationNotSupported("remove"); } public void set(Object object) { throw ValidationException.operationNotSupported("set"); } public void add(Object object) { throw ValidationException.operationNotSupported("add"); } public int previousIndex() { return count - 1; } public int nextIndex() { return count; } public Object previous() { count--; return get(count); } public boolean hasPrevious() { return count > 0; } }; } /** * If index is missing wait until is there. */ public synchronized Object remove(int index) { while ((!isComplete()) && (super.size() < index)) { waitUntilAdd(); } return super.remove(index); } /** * If object is missing wait until complete. */ public boolean remove(Object element) { boolean result = super.remove(element); if ((!result) && (!isComplete())) { waitUntilAdd(); result = super.remove(element); } return result; } /** * First wait until complete. */ public synchronized boolean removeAll(Collection collection) { waitUntilComplete(); return super.removeAll(collection); } /** * First wait until complete. */ public synchronized void removeAllElements() { waitUntilComplete(); super.removeAllElements(); } /** * If missing wait until complete. 
*/ public synchronized boolean removeElement(Object element) { boolean result = super.removeElement(element); if ((!result) && (!isComplete())) { waitUntilAdd(); result = super.removeElement(element); } return result; } /** * If index is missing wait until reasched or complete. */ public synchronized void removeElementAt(int index) { while ((!isComplete()) && (super.size() < index)) { waitUntilAdd(); } super.removeElementAt(index); } /** * First wait until complete. */ public synchronized boolean retainAll(Collection collection) { waitUntilComplete(); return super.retainAll(collection); } /** * If index is missing wait until reached or complete. */ public synchronized Object set(int index, Object element) { while ((!isComplete()) && (super.size() < index)) { waitUntilAdd(); } return super.set(index, element); } /** * If index is missing wait until reached or complete. */ public synchronized void setElementAt(Object element, int index) { while ((!isComplete()) && (super.size() < index)) { waitUntilAdd(); } super.setElementAt(element, index); } /** * First wait until complete. */ public int size() { waitUntilComplete(); return super.size(); } /** * If index is missing wait until reached or complete. */ public List subList(int fromIndex, int toIndex) { while ((!isComplete()) && (super.size() < toIndex)) { waitUntilAdd(); } return super.subList(fromIndex, toIndex); } /** * First wait until complete. */ public synchronized Object[] toArray() { waitUntilComplete(); return super.toArray(); } /** * First wait until complete. */ public synchronized Object[] toArray(Object[] array) { waitUntilComplete(); return super.toArray(array); } /** * First wait until complete. */ public synchronized String toString() { waitUntilComplete(); return super.toString(); } /** * First wait until complete. 
*/ public synchronized void trimToSize() { waitUntilComplete(); super.trimToSize(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/DeferredLockManager.java0000664000000000000000000001015112216173126026170 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.*; /** * INTERNAL: *

* Purpose: Be used for deadlock avoidance through allowing detection and resolution. * *

* Responsibilities: *

    *
  • Keep track of all deferred locks of each thread. *
  • Keep track of all active locks of each thread.. *
  • Maintain the depth of the each thread. *
*/ public class DeferredLockManager { protected Vector deferredLocks; protected Vector activeLocks; protected int threadDepth; protected boolean isThreadComplete; public static boolean SHOULD_USE_DEFERRED_LOCKS = true; /** * DeferredLockManager constructor comment. */ public DeferredLockManager() { super(); this.deferredLocks = new Vector(1); this.activeLocks = new Vector(1); this.threadDepth = 0; this.isThreadComplete = false; } /** * add a concurrency manager as active locks to the DLM */ public void addActiveLock(Object manager) { getActiveLocks().addElement(manager); } /** * add a concurrency manager as deferred locks to the DLM */ public void addDeferredLock(Object manager) { getDeferredLocks().addElement(manager); } /** * decrement the depth of the thread */ public void decrementDepth() { threadDepth--; } /** * Return a set of the active locks from the DLM */ public Vector getActiveLocks() { return activeLocks; } /** * Return a set of the deferred locks */ public Vector getDeferredLocks() { return deferredLocks; } /** * Return the depth of the thread associated with the DLM, being used to release the lock */ public int getThreadDepth() { return threadDepth; } /** * Return if there are any deferred locks. 
*/ public boolean hasDeferredLock() { return !getDeferredLocks().isEmpty(); } /** * increment the depth of the thread */ public void incrementDepth() { threadDepth++; } /** * Return if the thread is complete */ public boolean isThreadComplete() { return isThreadComplete; } /** * Release the active lock on the DLM */ public void releaseActiveLocksOnThread() { Vector activeLocks = getActiveLocks(); if (!activeLocks.isEmpty()) { for (Enumeration activeLocksEnum = activeLocks.elements(); activeLocksEnum.hasMoreElements();) { ConcurrencyManager manager = (ConcurrencyManager)activeLocksEnum.nextElement(); manager.release(); } } setIsThreadComplete(true); } /** * set a set of the active locks to the DLM */ public void setActiveLocks(Vector activeLocks) { this.activeLocks = activeLocks; } /** * set a set of the deferred locks to the DLM */ public void setDeferredLocks(Vector deferredLocks) { this.deferredLocks = deferredLocks; } /** * set if the thread is complete in the given DLM */ public void setIsThreadComplete(boolean isThreadComplete) { this.isThreadComplete = isThreadComplete; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/IdentityWeakHashMap.java0000664000000000000000000007606212216173126026224 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Gordon Yorke ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** * INTERNAL: *

* Purpose: Define a {@link Map} that manages key equality by reference, * not equals(). This is required to track objects throughout the lifecycle * of a {@link org.eclipse.persistence.sessions.UnitOfWork}, regardless if the domain * object redefines its equals() method. Additionally, this implementation does * not permit nulls either as values or as keys. Any Entry that has a null in the key or * in the value will be assumed to have garbage collected. * This class also uses weak references to the contents of the map allowing for garbage * collection to reduce the size of the Map * * This work is an extension of the original work completed on the IdentityWeakHashMap as completed by * Mike Norman. * * @author Gordon Yorke (EclipseLink 1.0M4) * */ // J2SE imports import java.io.*; import java.lang.ref.ReferenceQueue; import java.lang.ref.WeakReference; import java.util.*; import org.eclipse.persistence.internal.localization.ExceptionLocalization; public class IdentityWeakHashMap extends AbstractMap implements Map, Cloneable, Serializable { static final long serialVersionUID = -5176951017503351630L; // the default initial capacity static final int DEFAULT_INITIAL_CAPACITY = 32; // the maximum capacity. static final int MAXIMUM_CAPACITY = 1 << 30; // the loadFactor used when none specified in constructor. static final float DEFAULT_LOAD_FACTOR = 0.75f; protected transient WeakEntry[] entries;// internal array of Entry's protected transient int count = 0; private transient int modCount = 0;// # of times this Map has been modified protected int threshold = 0; protected float loadFactor = 0; /** This is used by the garbage collector. Every weak reference that is garbage collected * will be enqueued on this. Then only this queue needs to be checked to remove empty * references. */ protected ReferenceQueue referenceQueue; /** * Constructs a new IdentityWeakHashMap with the given * initial capacity and the given loadFactor. 
* * @param initialCapacity the initial capacity of this * IdentityWeakHashMap. * @param loadFactor the loadFactor of the IdentityWeakHashMap. * @throws IllegalArgumentException if the initial capacity is less * than zero, or if the loadFactor is nonpositive. */ public IdentityWeakHashMap(int initialCapacity, float loadFactor) { if (initialCapacity < 0) { throw new IllegalArgumentException("Illegal initialCapacity: " + initialCapacity); } if (initialCapacity > MAXIMUM_CAPACITY) { initialCapacity = MAXIMUM_CAPACITY; } if ((loadFactor <= 0) || Float.isNaN(loadFactor)) { throw new IllegalArgumentException("Illegal loadFactor: " + loadFactor); } // Find a power of 2 >= initialCapacity int capacity = 1; while (capacity < initialCapacity) { capacity <<= 1; } this.loadFactor = loadFactor; threshold = (int)(capacity * loadFactor); entries = new WeakEntry[capacity]; referenceQueue = new ReferenceQueue(); } /** * Constructs a new IdentityWeakHashMap with the given * initial capacity and a default loadFactor of 0.75. * * @param initialCapacity the initial capacity of the * IdentityWeakHashMap. * @throws IllegalArgumentException if the initial capacity is less * than zero. */ public IdentityWeakHashMap(int initialCapacity) { this(initialCapacity, DEFAULT_LOAD_FACTOR); } /** * Constructs a new IdentityWeakHashMap with a default initial * capacity of 32 and a loadfactor of 0.75. */ public IdentityWeakHashMap() { loadFactor = DEFAULT_LOAD_FACTOR; threshold = (int)(DEFAULT_INITIAL_CAPACITY * DEFAULT_LOAD_FACTOR); entries = new WeakEntry[DEFAULT_INITIAL_CAPACITY]; referenceQueue = new ReferenceQueue(); } /** * Constructs a new IdentityWeakHashMap with the same mappings * as the given map. The IdentityWeakHashMap is created with a * capacity sufficient to hold the elements of the given map. * * @param m the map whose mappings are to be placed in the * IdentityWeakHashMap. 
*/ public IdentityWeakHashMap(Map m) { this(Math.max((int)(m.size() / DEFAULT_LOAD_FACTOR) + 1, DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR); putAll(m); } /** * @return the size of this IdentityWeakHashMap. */ public int size() { cleanUp(); return count; } /** * @return true if this IdentityWeakHashMap is empty. */ public boolean isEmpty() { return (count == 0); } /** * Returns true if this IdentityWeakHashMap contains * the given object. Equality is tested by the equals() method. * * @param obj the object to find. * @return true if this IdentityWeakHashMap contains * obj. * @throws NullPointerException if obj is null. */ public boolean containsValue(Object obj) { if (obj == null) { throw new IllegalArgumentException(ExceptionLocalization.buildMessage("null_not_supported_identityweakhashmap")); } //cleanup before searching as to reduce number of possible empty Entries cleanUp(); WeakEntry[] copyOfEntries = entries; for (int i = copyOfEntries.length; i-- > 0;) { for (WeakEntry e = copyOfEntries[i]; e != null; e = e.next) { if (obj.equals(e.value.get())) { return true; } } } return false; } /** * Returns true if this IdentityWeakHashMap contains a * mapping for the given key. Equality is tested by reference. * * @param key object to be used as a key into this * IdentityWeakHashMap. * @return true if this IdentityWeakHashMap contains a * mapping for key. */ public boolean containsKey(Object key) { if (key == null) { throw new IllegalArgumentException(ExceptionLocalization.buildMessage("null_not_supported_identityweakhashmap")); } cleanUp(); WeakEntry[] copyOfEntries = entries; int hash = System.identityHashCode(key); int index = (hash & 0x7FFFFFFF) % copyOfEntries.length; for (WeakEntry e = copyOfEntries[index]; e != null; e = e.next) { if (e.key.get() == key) { return true; } } return false; } /** * Returns the value to which the given key is mapped in this * IdentityWeakHashMap. Returns null if this * IdentityWeakHashMap contains no mapping for this key. 
* * @return the value to which this IdentityWeakHashMap maps the * given key. * @param key key whose associated value is to be returned. */ public V get(Object key) { if (key == null) return null; cleanUp(); WeakEntry[] copyOfEntries = entries; int hash = System.identityHashCode(key); int index = (hash & 0x7FFFFFFF) % copyOfEntries.length; for (WeakEntry e = copyOfEntries[index]; e != null; e = e.next) { if (e.key.get() == key) { return (V)e.value.get(); } } return null; } /** * INTERNAL: * Re-builds the internal array of Entry's with a larger capacity. * This method is called automatically when the number of objects in this * IdentityWeakHashMap exceeds its current threshold. */ private void rehash() { int oldCapacity = entries.length; WeakEntry[] oldEntries = entries; int newCapacity = (oldCapacity * 2) + 1; WeakEntry[] newEntries = new WeakEntry[newCapacity]; modCount++; threshold = (int)(newCapacity * loadFactor); entries = newEntries; for (int i = oldCapacity; i-- > 0;) { for (WeakEntry old = oldEntries[i]; old != null;) { WeakEntry e = old; old = old.next; int index = (e.hash & 0x7FFFFFFF) % newCapacity; e.next = newEntries[index]; newEntries[index] = e; } } } /** * Associate the given object with the given key in this * IdentityWeakHashMap, replacing any existing mapping. * * @param key key to map to given object. * @param obj object to be associated with key. * @return the previous object for key or null if this * IdentityWeakHashMap did not have one. * @throws NullPointerException if obj is null. 
*/ public V put(K key, V obj) { if (obj == null || key == null) { throw new IllegalArgumentException(ExceptionLocalization.buildMessage("null_not_supported_identityweakhashmap")); } cleanUp(); WeakEntry[] copyOfEntries = entries; int hash = System.identityHashCode(key); int index = (hash & 0x7FFFFFFF) % copyOfEntries.length; for (WeakEntry e = copyOfEntries[index]; e != null; e = e.next) { if (e.key.get() == key) { EntryReference old = e.value; if (key == obj){ e.value = e.key; }else{ e.value = new HardEntryReference(obj); } return old.get(); } } modCount++; if (count >= threshold) { rehash(); copyOfEntries = entries; index = (hash & 0x7FFFFFFF) % copyOfEntries.length; } WeakEntry e = new WeakEntry(hash, key, obj, copyOfEntries[index], referenceQueue); copyOfEntries[index] = e; count++; return null; } /** * Removes the mapping (key and its corresponding value) from this * IdentityWeakHashMap, if present. * * @param key key whose mapping is to be removed from the map. * @return the previous object for key or null if this * IdentityWeakHashMap did not have one. 
*/ public V remove(Object key) { if (key == null) return null; cleanUp(); WeakEntry[] copyOfEntries = entries; int hash = System.identityHashCode(key); int index = (hash & 0x7FFFFFFF) % copyOfEntries.length; for (WeakEntry e = copyOfEntries[index], prev = null; e != null; prev = e, e = e.next) { if (e.key.get() == key) { if (prev != null) { prev.next = e.next; } else { copyOfEntries[index] = e.next; } count--; return (V)e.value.get(); } } return null; } protected boolean removeEntry(WeakEntry o, boolean userModification) { WeakEntry[] copyOfEntries = entries; int index = (o.hash & 0x7FFFFFFF) % copyOfEntries.length; for (WeakEntry e = copyOfEntries[index], prev = null; e != null; prev = e, e = e.next) { if (e == o) { // if this method was called as a result of a user action, // increment the modification count // this method is also called by our cleanup code and // that code should not cause a concurrent modification // exception if (userModification){ modCount++; } if (prev != null) { prev.next = e.next; } else { copyOfEntries[index] = e.next; } count--; e.value = null; e.next = null; return true; } } return false; } /** * Copies all of the mappings from the given map to this * IdentityWeakHashMap, replacing any existing mappings. * * @param m mappings to be stored in this IdentityWeakHashMap. * @throws NullPointerException if m is null. */ public void putAll(Map m) { if (m == null) { throw new NullPointerException(); } Iterator> i = m.entrySet().iterator(); while (i.hasNext()) { Map.Entry me = i.next(); put(me.getKey(), me.getValue()); } } /** * Removes all of the mappings from this IdentityWeakHashMap. 
*/ public void clear() { if (count > 0) { modCount++; WeakEntry[] copyOfEntries = entries; for (int i = copyOfEntries.length; --i >= 0;) { copyOfEntries[i] = null; } count = 0; } } protected void cleanUp(){ WeakEntryReference reference = (WeakEntryReference)referenceQueue.poll(); while (reference != null){ // remove the entry but do not increment the modcount // since this is not a user action removeEntry(reference.owner, false); reference = (WeakEntryReference)referenceQueue.poll(); } } /** * Returns a shallow copy of this IdentityWeakHashMap (the * elements are not cloned). * * @return a shallow copy of this IdentityWeakHashMap. */ public Object clone() { try { WeakEntry[] copyOfEntries = entries; IdentityWeakHashMap clone = (IdentityWeakHashMap)super.clone(); clone.referenceQueue = new ReferenceQueue(); clone.entries = new WeakEntry[copyOfEntries.length]; for (int i = copyOfEntries.length; i-- > 0;) { clone.entries[i] = (copyOfEntries[i] != null) ? (WeakEntry)copyOfEntries[i].clone(clone.referenceQueue) : null; } clone.keySet = null; clone.entrySet = null; clone.values = null; clone.modCount = 0; return clone; } catch (CloneNotSupportedException e) { // this shouldn't happen, since we are Cloneable throw new InternalError(); } } // Views - the following is standard 'boiler-plate' Map stuff private transient Set keySet = null; private transient Set entrySet = null; private transient Collection values = null; /** * Returns a set view of the keys contained in this * IdentityWeakHashMap. The set is backed by the map, so * changes to the map are reflected in the set, and vice versa. The set * supports element removal, which removes the corresponding mapping from * this map, via the Iterator.remove, Set.remove, * removeAll, retainAll, and clear operations. * It does not support the add or addAll operations. * * @return a set view of the keys contained in this * IdentityWeakHashMap. 
*/ public Set keySet() { if (keySet == null) { keySet = new AbstractSet() { public Iterator iterator() { return getHashIterator(COMPONENT_TYPES.KEYS); } public int size() { return count; } public boolean contains(Object o) { return containsKey(o); } public boolean remove(Object o) { int oldSize = count; IdentityWeakHashMap.this.remove(o); return count != oldSize; } public void clear() { IdentityWeakHashMap.this.clear(); } }; } return keySet; } /** * Returns a collection view of the values contained in this * IdentityWeakHashMap. The collection is backed by the map, so * changes to the map are reflected in the collection, and vice versa. The * collection supports element removal, which removes the corresponding * mapping from this map, via the Iterator.remove, * Collection.remove, removeAll, retainAll, and * clear operations. It does not support the add or * addAll operations. * * @return a collection view of the values contained in this * IdentityWeakHashMap. */ public Collection values() { if (values == null) { values = new AbstractCollection() { public Iterator iterator() { return getHashIterator(COMPONENT_TYPES.VALUES); } public int size() { return count; } public boolean contains(Object o) { return containsValue(o); } public void clear() { IdentityWeakHashMap.this.clear(); } }; } return values; } /** * Returns a collection view of the mappings contained in this * IdentityWeakHashMap. Each element in the returned collection * is a Map.Entry. The collection is backed by the map, so changes * to the map are reflected in the collection, and vice versa. The * collection supports element removal, which removes the corresponding * mapping from the map, via the Iterator.remove, * Collection.remove, removeAll, retainAll, and * clear operations. It does not support the add or * addAll operations. * * @return a collection view of the mappings contained in this * IdentityWeakHashMap. 
*/ public Set entrySet() { if (entrySet == null) { entrySet = new AbstractSet() { public Iterator iterator() { return getHashIterator(COMPONENT_TYPES.ENTRIES); } public boolean contains(Object o) { if (!(o instanceof Map.Entry)) { return false; } Map.Entry entry = (Map.Entry)o; Object key = entry.getKey(); WeakEntry[] copyOfEntries = entries; int hash = System.identityHashCode(key); int index = (hash & 0x7FFFFFFF) % copyOfEntries.length; for (WeakEntry e = copyOfEntries[index]; e != null; e = e.next) { if ((e.hash == hash) && e.equals(entry)) { return true; } } return false; } public boolean remove(Object o) { if (!(o instanceof WeakEntry)) { return false; } WeakEntry entry = (WeakEntry)o; // remove the entry but and increment the modcount // because this is a user action return removeEntry(entry, true); } public int size() { return count; } public void clear() { IdentityWeakHashMap.this.clear(); } }; } return entrySet; } private Iterator getHashIterator(COMPONENT_TYPES type) { if (count == 0) { return emptyHashIterator; } else { return new HashIterator(type); } } /** * IdentityWeakHashMap entry. 
*/ static class WeakEntry implements Map.Entry { boolean removed = false; int hash; EntryReference key; EntryReference value; WeakEntry next; WeakEntry(int hash, K key, V value, WeakEntry next, ReferenceQueue refQueue) { this.hash = hash; this.key = new WeakEntryReference(key, refQueue, this); if (key == value){ this.value = (EntryReference)this.key; }else{ this.value = new HardEntryReference(value); } this.next = next; } protected Object clone(ReferenceQueue refQueue) { WeakEntry current = this; WeakEntry root = new WeakEntry(current.hash, current.key.get(), current.value.get(), null, refQueue); WeakEntry currentClone = root; while (current.next != null) { currentClone.next = new WeakEntry(current.next.hash, current.next.key.get(), current.next.value.get(), null, refQueue); current = current.next; currentClone = currentClone.next; } return root; } // Map.Entry Ops public K getKey() { return key.get(); } public V getValue() { return value.get(); } public V setValue(V value) { EntryReference oldValue = this.value; if (value == this.key.get()){ this.value = (EntryReference)this.key; }else{ this.value = new HardEntryReference(value); } return oldValue.get(); } public boolean equals(Object o) { if (!(o instanceof Map.Entry)) { return false; } Map.Entry e = (Map.Entry)o; Object v = value.get(); return (key == e.getKey()) && ((v == null) ? (e.getValue() == null) : v.equals(e.getValue())); } public int hashCode() { Object v = value.get(); return hash ^ ((v == null) ? 
0 : v.hashCode()); } public String toString() { return key.get() + "=" + value.get(); } public boolean shouldBeIgnored(){ return key.get() == null || value.get() == null; } } static interface EntryReference { public T get(); } static class WeakEntryReference extends WeakReference implements EntryReference{ protected WeakEntry owner; protected boolean trashed = false; protected ReferenceQueue referenceQueue; public WeakEntryReference(T referent, ReferenceQueue q, WeakEntry owner) { super(referent, q); this.owner = owner; this.referenceQueue = q; } } //This limited class is here to allow the value to be switched from a weak reference to a hard // referernce. This Map only makes the key weak but inorder to allow for garbage collection //of the key when the key and the value are the same object the same weak reference will be used static class HardEntryReference implements EntryReference{ protected T referent; public HardEntryReference(T referent){ this.referent = referent; } public T get(){ return referent; } } // Types of Iterators private enum COMPONENT_TYPES {KEYS, VALUES, ENTRIES}; private static EmptyHashIterator emptyHashIterator = new EmptyHashIterator(); private static class EmptyHashIterator implements Iterator { EmptyHashIterator() { } public boolean hasNext() { return false; } public Object next() { throw new NoSuchElementException(); } public void remove() { throw new IllegalStateException(); } } private class HashIterator implements Iterator { WeakEntry[] entries = IdentityWeakHashMap.this.entries; int index = entries.length; WeakEntry entry = null; WeakEntry lastReturned = null; COMPONENT_TYPES type; Object currentEntryRef; /** * The modCount value that the iterator believes that the backing * List should have. If this expectation is violated, the iterator * has detected concurrent modification. 
*/
        private int expectedModCount = modCount;

        HashIterator(COMPONENT_TYPES type) {
            this.type = type;
        }

        public boolean hasNext() {
            WeakEntry e = entry;
            int i = index;
            WeakEntry[] copyOfEntries = IdentityWeakHashMap.this.entries;
            // Scan backwards through the buckets until a live entry (one whose weak
            // key has not been collected) is found, or the table is exhausted.
            while ((e == null || currentEntryRef == null) && (i > 0)) {
                e = copyOfEntries[--i];
                if (e != null) {
                    // Take a hard reference to the key so it cannot be collected
                    // between hasNext() and next().
                    currentEntryRef = e.key.get();
                } else {
                    currentEntryRef = null;
                }
            }
            entry = e;
            index = i;
            return e != null && currentEntryRef != null;
        }

        public Object next() {
            if (modCount != expectedModCount) {
                throw new ConcurrentModificationException();
            }
            WeakEntry et = entry;
            int i = index;
            WeakEntry[] copyOfEntries = IdentityWeakHashMap.this.entries;
            // Same live-entry scan as hasNext(), repeated in case next() is called
            // without a preceding hasNext().
            while ((et == null || currentEntryRef == null) && (i > 0)) {
                et = copyOfEntries[--i];
                if (et != null) {
                    currentEntryRef = et.key.get();
                } else {
                    currentEntryRef = null;
                }
            }
            entry = et;
            index = i;
            if (et != null) {
                WeakEntry e = lastReturned = entry;
                // Advance within the bucket chain and pin the next key, if any.
                entry = e.next;
                if (entry != null) {
                    currentEntryRef = entry.key.get();
                } else {
                    currentEntryRef = null;
                }
                // Return the component this iterator was created for.
                return (type == COMPONENT_TYPES.KEYS) ? e.key.get() : ((type == COMPONENT_TYPES.VALUES) ? e.value.get() : e);
            }
            throw new NoSuchElementException();
        }

        public void remove() {
            if (lastReturned == null) {
                throw new IllegalStateException();
            }
            if (modCount != expectedModCount) {
                throw new ConcurrentModificationException();
            }
            WeakEntry[] copyOfEntries = IdentityWeakHashMap.this.entries;
            int index = (lastReturned.hash & 0x7FFFFFFF) % copyOfEntries.length;
            // Unlink lastReturned from its bucket chain; a removal performed here
            // bumps both counters so the iterator stays valid.
            for (WeakEntry e = copyOfEntries[index], prev = null; e != null; prev = e, e = e.next) {
                if (e == lastReturned) {
                    modCount++;
                    expectedModCount++;
                    if (prev == null) {
                        copyOfEntries[index] = e.next;
                    } else {
                        prev.next = e.next;
                    }
                    count--;
                    lastReturned = null;
                    return;
                }
            }
            // Entry vanished from the chain: someone else modified the map.
            throw new ConcurrentModificationException();
        }
    }

    /**
     * Serialize the state of this IdentityWeakHashMap to a stream.
* * @serialData The capacity of the IdentityWeakHashMap * (the length of the bucket array) is emitted (int), followed by the * size of the IdentityWeakHashMap, followed by the * key-value mappings (in no particular order). */ private void writeObject(ObjectOutputStream s) throws IOException { // Write out the threshold, loadfactor (and any hidden 'magic' stuff). s.defaultWriteObject(); // Write out number of buckets s.writeInt(entries.length); // Write out count s.writeInt(count); // Write out contents for (int i = entries.length - 1; i >= 0; i--) { WeakEntry entry = entries[i]; while (entry != null) { s.writeObject(entry.key.get()); s.writeObject(entry.value.get()); entry = entry.next; } } } /** * Deserialize the IdentityWeakHashMap from a stream. */ private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException { // Read in the threshold, loadfactor (and any hidden 'magic' stuff). s.defaultReadObject(); // Read in number of buckets and allocate the bucket array; int numBuckets = s.readInt(); entries = new WeakEntry[numBuckets]; // Read in size (count) int size = s.readInt(); // Read the mappings and add to the IdentityWeakHashMap for (int i = 0; i < size; i++) { Object key = s.readObject(); Object value = s.readObject(); //only re-add if not null as could have been garbage collected at any time //before the writeObject if (key != null && value != null){ put((K)key, (V)value); } } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/NonSynchronizedSubVector.java0000664000000000000000000001512612216173126027342 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.internal.helper;

import java.util.*;

/**
 * SubList that implements Vector.
 * A live view of [fromIndex, toIndex) of a backing Vector; all operations are
 * delegated to the backing vector shifted by the view's offset. Not synchronized.
 */
public class NonSynchronizedSubVector extends NonSynchronizedVector {
    /** Backing vector; the inherited Vector storage is never used (constructed with super(0)). */
    private Vector l;
    private int offset;
    private int size;

    public NonSynchronizedSubVector(Vector list, int fromIndex, int toIndex) {
        super(0);
        if (fromIndex < 0)
            throw new IndexOutOfBoundsException("fromIndex = " + fromIndex);
        if (toIndex > list.size())
            throw new IndexOutOfBoundsException("toIndex = " + toIndex);
        if (fromIndex > toIndex)
            throw new IllegalArgumentException("fromIndex(" + fromIndex + ") > toIndex(" + toIndex + ")");
        l = list;
        offset = fromIndex;
        size = toIndex - fromIndex;
    }

    public Object set(int index, Object element) {
        return l.set(index + offset, element);
    }

    public void setElementAt(Object obj, int index) {
        set(index, obj);
    }

    public Object elementAt(int index) {
        return get(index);
    }

    public Object firstElement() {
        return get(0);
    }

    public Object lastElement() {
        return get(size() - 1);
    }

    /** Forward search from {@code index}; null matches only null elements. */
    public int indexOf(Object elem, int index) {
        int size = size();
        if (elem == null) {
            for (int i = index; i < size; i++)
                if (get(i) == null)
                    return i;
        } else {
            for (int i = index; i < size; i++)
                if (elem.equals(get(i)))
                    return i;
        }
        return -1;
    }

    /** Backward search from {@code index} down to 0. */
    public int lastIndexOf(Object elem, int index) {
        int size = size();
        if (index >= size)
            throw new IndexOutOfBoundsException(index + " >= " + size);
        if (elem == null) {
            for (int i = index; i >= 0; i--)
                if (get(i) == null)
                    return i;
        } else {
            for (int i = index; i >= 0; i--)
                if (elem.equals(get(i)))
                    return i;
        }
        return -1;
    }

    public Object get(int index) {
        return l.get(index + offset);
    }

    public int size() {
        return size;
    }

    public void add(int index, Object element) {
        if (index < 0 || index > size)
            throw new IndexOutOfBoundsException();
        l.add(index + offset, element);
        size++;
        modCount++;
    }

    public Object remove(int index) {
        Object result = l.remove(index + offset);
        size--;
        modCount++;
        return result;
    }

    /*protected void removeRange(int fromIndex, int toIndex) {
        l.removeRange(fromIndex+offset, toIndex+offset);
        size -= (toIndex-fromIndex);
        modCount++;
    }*/

    public boolean addAll(Collection c) {
        return addAll(size, c);
    }

    public boolean addAll(int index, Collection c) {
        if (index < 0 || index > size)
            throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + size);
        int cSize = c.size();
        if (cSize == 0)
            return false;
        l.addAll(offset + index, c);
        size += cSize;
        modCount++;
        return true;
    }

    public Enumeration elements() {
        return new Enumeration() {
            int count = 0;

            public boolean hasMoreElements() {
                return count < size();
            }

            public Object nextElement() {
                // BUG FIX: was 'count < elementCount'. This sub-vector never populates
                // the inherited Vector storage (constructed with super(0)), so
                // elementCount is always 0 and nextElement() threw NoSuchElementException
                // even when hasMoreElements() returned true. Use size(), consistent
                // with hasMoreElements().
                if (count < size()) {
                    return get(count++);
                }
                throw new NoSuchElementException("Vector Enumeration");
            }
        };
    }

    public Object[] toArray() {
        Object[] result = new Object[size];
        System.arraycopy(l.toArray(), offset, result, 0, size);
        return result;
    }

    public Object[] toArray(Object a[]) {
        if (a.length < size)
            a = (Object[])java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), size);
        System.arraycopy(l.toArray(), offset, a, 0, size);
        // Per the Collection.toArray(T[]) contract, null-terminate oversized arrays.
        if (a.length > size)
            a[size] = null;
        return a;
    }

    public Iterator iterator() {
        return listIterator();
    }

    public ListIterator listIterator(final int index) {
        if (index < 0 || index > size)
            throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + size);
        return new ListIterator() {
            // Delegate to the backing vector's iterator, translating indexes by offset.
            private ListIterator i = l.listIterator(index + offset);

            public boolean hasNext() {
                return nextIndex() < size;
            }

            public Object next() {
                if (hasNext())
                    return i.next();
                else
                    throw new NoSuchElementException();
            }

            public boolean hasPrevious() {
                return previousIndex() >= 0;
            }

            public Object previous() {
                if (hasPrevious())
                    return i.previous();
else throw new NoSuchElementException(); } public int nextIndex() { return i.nextIndex() - offset; } public int previousIndex() { return i.previousIndex() - offset; } public void remove() { i.remove(); size--; modCount++; } public void set(Object o) { i.set(o); } public void add(Object o) { i.add(o); size++; modCount++; } }; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/JDK16Platform.java0000664000000000000000000000273712216173126024643 0ustar /******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * ailitchev - 2010/08/19 * Bug 322960 - TWO TESTS IN CUSTOMFEATURESJUNITTESTSUITE FAILED WITH 11.2.0.2 DRIVER ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.sql.SQLException; import java.sql.SQLXML; /** * INTERNAL: * Implements operations specific to JDK 1.6 */ public class JDK16Platform extends JDK15Platform { /** * Indicates whether the passed object implements java.sql.SQLXML introduced in jdk 1.6 */ public boolean isSQLXML(Object object) { return (object instanceof SQLXML); } /** * Casts the passed object to SQLXML and calls getString and free methods */ public String getStringAndFreeSQLXML(Object sqlXml) throws SQLException { String str = ((SQLXML)sqlXml).getString(); ((SQLXML)sqlXml).free(); return str; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/NonSynchronizedProperties.java0000664000000000000000000000762412216173126027566 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * James Sutherland - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.*; import java.util.Map.Entry; /** * Properties subclass that removes the synchronization. 
*/
public class NonSynchronizedProperties extends Properties {
    /** Backing store; replaces the synchronized Hashtable storage inherited from Properties. */
    protected Map<Object, Object> values;

    public NonSynchronizedProperties(int size) {
        super();
        this.values = new HashMap<Object, Object>(size);
    }

    @Override
    public void clear() {
        this.values.clear();
    }

    @Override
    public Object clone() {
        NonSynchronizedProperties properties = new NonSynchronizedProperties(size());
        properties.putAll(this);
        return properties;
    }

    @Override
    public boolean contains(Object value) {
        return this.values.containsValue(value);
    }

    @Override
    public boolean containsKey(Object key) {
        return this.values.containsKey(key);
    }

    @Override
    public boolean containsValue(Object value) {
        return this.values.containsValue(value);
    }

    @Override
    public Enumeration<Object> elements() {
        // Enumeration is only available through Hashtable; build a throwaway copy.
        return new Hashtable<Object, Object>(this.values).elements();
    }

    @Override
    public Set<Map.Entry<Object, Object>> entrySet() {
        return this.values.entrySet();
    }

    @Override
    public boolean equals(Object o) {
        return this.values.equals(o);
    }

    @Override
    public Object get(Object key) {
        return this.values.get(key);
    }

    @Override
    public int hashCode() {
        return this.values.hashCode();
    }

    @Override
    public boolean isEmpty() {
        return this.values.isEmpty();
    }

    @Override
    public Enumeration<Object> keys() {
        // Enumeration is only available through Hashtable; build a throwaway copy.
        return new Hashtable<Object, Object>(this.values).keys();
    }

    @Override
    public Set<Object> keySet() {
        return this.values.keySet();
    }

    @Override
    public Object put(Object key, Object value) {
        // Properties cannot store null.
if (value == null) { return this.values.remove(key); } return this.values.put(key, value); } @Override public void putAll(Map t) { this.values.putAll(t); } @Override public Object remove(Object key) { return this.values.remove(key); } @Override public int size() { return this.values.size(); } @Override public String toString() { return this.values.toString(); } @Override public Collection values() { return this.values.values(); } @Override public Object setProperty(String key, String value) { return this.values.put(key, value); } @Override public String getProperty(String key, String defaultValue) { String val = getProperty(key); return (val == null) ? defaultValue : val; } @Override public String getProperty(String key) { Object oval = get(key); String sval = (oval instanceof String) ? (String)oval : null; return ((sval == null) && (defaults != null)) ? defaults.getProperty(key) : sval; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/FunctionField.java0000664000000000000000000000266112216173126025104 0ustar /******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation ******************************************************************************/ package org.eclipse.persistence.internal.helper; import org.eclipse.persistence.expressions.Expression; /** * INTERNAL: *

* <p><b>Purpose</b>:
 * Allow fields to have functions applied to them or be computed.

*/ public class FunctionField extends DatabaseField { protected Expression expression; public FunctionField() { super(); } /** * A unique field name should still be given to the function. */ public FunctionField(String fieldName) { super(fieldName); } public Expression getExpression() { return expression; } public void setExpression(Expression expression) { this.expression = expression; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/ConcurrentFixedCache.java0000664000000000000000000000661212216173126026401 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.io.Serializable; import java.util.*; import java.util.concurrent.*; /** * Provide a concurrent fixed size caching mechanism. * This is used for caching EJBQL parsed queries, Update calls, * and other places a fixed size cache is needed. * The default fixed size is 100. */ public class ConcurrentFixedCache implements Serializable { protected int maxSize; protected Map cache; /** * Create a new concurrent cache, with a fixed size of 100. */ public ConcurrentFixedCache() { this(100); } /** * Create a new concurrent cache, with the max size. */ public ConcurrentFixedCache(int maxSize) { // PERF: Use a concurrent map to allow concurrent gets. 
this.cache = new ConcurrentHashMap(maxSize); this.maxSize = maxSize; } /** * Return the fixed size of the parse cache. */ public int getMaxSize() { return maxSize; } /** * Set the fixed size of the parse cache. * When the size is exceeded, subsequent EJBQL will not be cached. * The default size is 100; */ public void setMaxSize(int maxSize) { this.maxSize = maxSize; } /** * Return the pre-parsed query that represents the EJBQL string. * If the EJBQL has not been cached, null is returned. */ public Object get(Object key) { return this.cache.get(key); } /** * Add the value to the cache. * Remove the */ public void put(Object key, Object value) { if (this.maxSize == 0) { return; } this.cache.put(key, value); // Currently just removes the first one encountered, not LRU, // this is not ideal, but the most concurrent and quickest way to ensure fixed size. if (this.cache.size() > this.maxSize) { Iterator iterator = this.cache.keySet().iterator(); try { while ((this.cache.size() > this.maxSize) && iterator.hasNext()) { Object next = iterator.next(); // Do not remove what was just put in. if (next != key) { this.cache.remove(next); } } } catch (Exception alreadyGone) { // Ignore. } } } /** * Remove from cache. */ public void remove(Object key) { this.cache.remove(key); } /** * Return the cache. */ public Map getCache() { return cache; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/DatabaseField.java0000664000000000000000000005622412216173126025027 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink * tware - added handling of database delimiters * 03/24/2011-2.3 Guy Pelletier * - 337323: Multi-tenant with shared schema support (part 1) * 05/30/2012-2.4 Guy Pelletier * - 354678: Temp classloader is still being used during metadata processing * 02/11/2013-2.5 Guy Pelletier * - 365931: @JoinColumn(name="FK_DEPT",insertable = false, updatable = true) causes INSERT statement to include this data value that it is associated with ******************************************************************************/ package org.eclipse.persistence.internal.helper; //javase imports import java.io.Serializable; import java.security.AccessController; import java.security.PrivilegedActionException; import static java.lang.Integer.MIN_VALUE; //EclipseLink imports import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform; import org.eclipse.persistence.internal.databaseaccess.DatasourcePlatform; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedClassForName; /** * INTERNAL: *

* <p><b>Purpose</b>:
 * Define a fully qualified field name.
 * <p><b>Responsibilities</b>:
 * <ul>
 * <li>Know its name and its table.</li>
 * </ul>
 *
* @see DatabaseTable */ public class DatabaseField implements Cloneable, Serializable, CoreField { /** Variables used for generating DDL **/ protected int scale; protected int length; protected int precision; protected boolean isUnique; protected boolean isNullable; protected boolean isUpdatable; protected boolean isInsertable; protected boolean isCreatable; protected boolean isPrimaryKey; protected String columnDefinition; /** Column name of the field. */ protected String name; /** PERF: Cache fully qualified table.field-name. */ protected String qualifiedName; /** Fields table (encapsulates name + creator). */ protected DatabaseTable table; /** * Respective Java type desired for the field's value, used to optimize performance and for binding. * PERF: Allow direct variable access from getObject. */ public transient Class type; public String typeName; // shadow variable - string name of above Class type variable /** * Respective JDBC type of the field's value. * This overrides the class type, which the JDBC type is normally computed from. * PERF: Allow direct variable access from getObject. */ public int sqlType; /** * Store normal index of field in result set to optimize performance. * PERF: Allow direct variable access from getIndicatingNoEntry. */ public int index; protected boolean useDelimiters = false; /** * If this is set, it will be used in determining equality (unless delimiters are used) and the hashcode. * @see getNameForComparisons */ protected String nameForComparisons; /** * setting to true will cause getNameForComparisons to lazy initialize nameForComparisons using * the value from getName().toUpperCase(). */ protected boolean useUpperCaseForComparisons = false; /** * used to represent the value when it has not being defined */ public static final int NULL_SQL_TYPE = MIN_VALUE; /** * Returns true if this field was translated. 
*/ protected boolean isTranslated = false; /** * Indicates whether the field should be kept in the record after the object is created. * Used by ObjectLevelReadQuery ResultSetAccessOptimization. */ public boolean keepInRow; public DatabaseField() { this("", new DatabaseTable()); } public DatabaseField(String qualifiedName) { this(qualifiedName, null, null); } public DatabaseField(String qualifiedName, String startDelimiter, String endDelimiter) { this.index = -1; this.sqlType = NULL_SQL_TYPE; int index = qualifiedName.lastIndexOf('.'); if (index == -1) { setName(qualifiedName, startDelimiter, endDelimiter); this.table = new DatabaseTable(); } else { setName(qualifiedName.substring(index + 1, qualifiedName.length()), startDelimiter, endDelimiter); this.table = new DatabaseTable(qualifiedName.substring(0, index), startDelimiter, endDelimiter); } initDDLFields(); } public DatabaseField(String fieldName, String tableName) { this(fieldName, new DatabaseTable(tableName)); } public DatabaseField(String fieldName, DatabaseTable databaseTable) { this(fieldName, databaseTable, null, null); } public DatabaseField(String fieldName, DatabaseTable databaseTable, String startDelimiter, String endDelimiter) { this.index = -1; this.sqlType = NULL_SQL_TYPE; setName(fieldName, startDelimiter, endDelimiter); this.table = databaseTable; initDDLFields(); } /** * Inits the DDL generation fields with our defaults. Note: we used to * initialize the length to the JPA default of 255 but since this default * value should only apply for string fields we set it to 0 to indicate * that it was not specified and rely on the default (255) to come from * individual platforms. */ public void initDDLFields() { scale = 0; length = 0; precision = 0; isUnique = false; isNullable = true; isUpdatable = true; isInsertable = true; isCreatable = true; isPrimaryKey = false; columnDefinition = ""; } /** * The table is not cloned because it is treated as an automatic value. 
*/ public DatabaseField clone() { try { return (DatabaseField)super.clone(); } catch (CloneNotSupportedException exception) { throw new InternalError(exception.getMessage()); } } /* * INTERNAL: * Convert all the class-name-based settings in this mapping to actual * class-based settings. This method is implemented by subclasses as * necessary. * @param classLoader */ public void convertClassNamesToClasses(ClassLoader classLoader) { if (type == null && typeName != null) { try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try { type = (Class) AccessController.doPrivileged(new PrivilegedClassForName(typeName, true, classLoader)); } catch (PrivilegedActionException e) { throw ValidationException.classNotFoundWhileConvertingClassNames(typeName, e.getException()); } } else { type = org.eclipse.persistence.internal.security.PrivilegedAccessHelper.getClassForName(typeName, true, classLoader); } } catch (Exception exception) { throw ValidationException.classNotFoundWhileConvertingClassNames(typeName, exception); } } } /** * Determine whether the receiver is equal to a DatabaseField. * Return true if the receiver and field have the same name and table. * Also return true if the table of the receiver or field are unspecified, * ie. have no name. */ public boolean equals(Object object) { if (!(object instanceof DatabaseField)) { return false; } return equals((DatabaseField)object); } /** * Determine whether the receiver is equal to a DatabaseField. * Return true if the receiver and field have the same name and table. * Also return true if the table of the receiver or field are unspecified, * ie. have no name. */ public boolean equals(DatabaseField field) { if (this == field) { return true; } if (field != null) { // PERF: Optimize common cases first. // PERF: Use direct variable access. 
if (getQualifiedName().equals(field.getQualifiedName())) { return true; } //preserve old behavior if static shouldIgnoreCaseOnFieldComparisons is set if (DatabasePlatform.shouldIgnoreCaseOnFieldComparisons()) { if (this.name.equalsIgnoreCase(field.name)) { //getTableName will cause NPE if there isn't a table. use hasTableName instead if ((!hasTableName()) || (!field.hasTableName())) { return true; } return (this.table.equals(field.table)); } } else { String ourNameToCompare; String fieldNameToCompare; if (field.shouldUseDelimiters() || shouldUseDelimiters()) { ourNameToCompare = this.name; fieldNameToCompare = field.name; } else { ourNameToCompare = getNameForComparisons(); fieldNameToCompare = field.getNameForComparisons(); } if (this.name.equals(field.name) || ourNameToCompare.equals(fieldNameToCompare)) { //getTableName will cause NPE if there isn't a table. use hasTableName instead if ((!hasTableName()) || (!field.hasTableName())) { return true; } return (this.table.equals(field.table)); } } } return false; } /** * Get the SQL fragment that is used when generating the DDL for the column. */ public String getColumnDefinition() { return this.columnDefinition; } /** * Return the expected index that this field will occur in the result set * row. This is used to optimize performance of database row field lookups. */ public int getIndex() { return index; } /** * Used to specify the column length when generating DDL. */ public int getLength() { return this.length; } /** * Return the unqualified name of the field. */ @Override public String getName() { return name; } /** * Returns this fields name with database delimiters if useDelimiters is true. * This method should be called any time the field name is requested for writing SQL. 
*/ public String getNameDelimited(DatasourcePlatform platform) { if (this.useDelimiters){ return platform.getStartDelimiter() + this.name + platform.getEndDelimiter(); } return this.name; } /** * Returns the precision for a decimal column when generating DDL. */ public int getPrecision() { return this.precision; } public String getQualifiedName(){ if (this.qualifiedName == null) { if (hasTableName()) { this.qualifiedName = this.table.getQualifiedName() + "." + getName(); } else { this.qualifiedName = getName(); } } return this.qualifiedName; } /** * Return the qualified name of the field. * PERF: Cache the qualified name. */ public String getQualifiedNameDelimited(DatasourcePlatform platform) { if (hasTableName()) { return this.table.getQualifiedNameDelimited(platform) + "." + getNameDelimited(platform); } else { return getNameDelimited(platform); } } /** * Returns the scale for a decimal column when generating DDL. */ public int getScale() { return this.scale; } public DatabaseTable getTable() { return table; } public String getTableName() { return getTable().getName(); } public void setTableName(String tableName) { setTable(new DatabaseTable(tableName)); } @Override public Class getType() { if ((this.type == null) && (this.typeName != null)) { convertClassNamesToClasses(getClass().getClassLoader()); } return this.type; } public String getTypeName() { return typeName; } public void setTypeName(String typeName) { this.typeName = typeName; } /** * Return the JDBC type that corresponds to the field. * The JDBC type is normally determined from the class type, * but this allows it to be overridden for types that do not match directly to a Java type, * such as MONEY or ARRAY, STRUCT, XMLTYPE, etc. * This can be used for binding or stored procedure usage. */ public int getSqlType() { return sqlType; } /** * Return the hashcode of the name, because it is fairly unique. 
*/
    public int hashCode() {
        // Hash on the comparison name only, consistent with equals().
        return getNameForComparisons().hashCode();
    }

    /** Return whether this field has a non-empty table name. */
    public boolean hasTableName() {
        if (this.table == null) {
            return false;
        }
        if (this.table.getName() == null) {
            return false;
        }
        return !(this.table.getName().equals(""));
    }

    /**
     * PUBLIC:
     * Return if this is an ObjectRelationalDatabaseField.
     */
    public boolean isObjectRelationalDatabaseField(){
        return false;
    }

    /**
     * Used to specify whether the column should be included in SQL INSERT
     * statements.
     */
    public boolean isInsertable() {
        return this.isInsertable;
    }

    /**
     * Used for generating DDL. Returns true if the database column is
     * nullable.
     */
    public boolean isNullable() {
        return this.isNullable;
    }

    /**
     * Used to specify whether the column should be included in the primary
     * key on the database table.
     */
    public boolean isPrimaryKey() {
        return this.isPrimaryKey;
    }

    /**
     * Return true if this database field is a translation.
     */
    public boolean isTranslated() {
        return this.isTranslated;
    }

    /**
     * Used for generating DDL. Returns true if the field is a unique key.
     */
    public boolean isUnique() {
        return this.isUnique;
    }

    /**
     * Returns true if this database field should be read only,
     * i.e. excluded from both INSERT and UPDATE statements.
     */
    public boolean isReadOnly() {
        return (! isUpdatable && ! isInsertable);
    }

    /** Return whether the field should be kept in the row after the object is built. */
    public boolean keepInRow() {
        return keepInRow;
    }

    /**
     * Returns whether the column should be included in SQL UPDATE
     * statements.
     */
    public boolean isUpdatable() {
        return this.isUpdatable;
    }

    /**
     * Reset the field's name and table from the qualified name.
     */
    public void resetQualifiedName(String qualifiedName) {
        setIndex(-1);
        int index = qualifiedName.lastIndexOf('.');
        if (index == -1) {
            setName(qualifiedName);
            getTable().setName("");
            getTable().setTableQualifier("");
        } else {
            // Everything after the last '.' is the field name; the rest is the
            // (possibly qualified) table name.
            setName(qualifiedName.substring(index + 1, qualifiedName.length()));
            getTable().setPossiblyQualifiedName(qualifiedName.substring(0, index));
        }
    }

    /**
     * Set the SQL fragment that is used when generating the DDL for the column.
*/ public void setColumnDefinition(String columnDefinition) { this.columnDefinition = columnDefinition; } /** * Set the expected index that this field will occur in the result set row. * This is used to optimize performance of database row field lookups. */ public void setIndex(int index) { this.index = index; } /** * Used to specify whether the column should be included in SQL UPDATE * statements. */ public void setInsertable(boolean isInsertable) { this.isInsertable = isInsertable; } public void setKeepInRow(boolean keepInRow) { this.keepInRow = keepInRow; } /** * Set the isTranslated flag. */ public void setIsTranslated(boolean isTranslated) { this.isTranslated = isTranslated; } /** * Used to specify the column length when generating DDL. */ public void setLength(int length) { this.length = length; } /** * Set the unqualified name of the field. */ @Override public void setName(String name) { setName(name, null, null); } /** * Set the unqualified name of the field. * * If the name contains database delimiters, they will be stripped and a flag will be set to have them * added when the DatabaseField is written to SQL */ public void setName(String name, DatasourcePlatform platform){ setName(name, platform.getStartDelimiter(), platform.getEndDelimiter()); } /** * Set the unqualified name of the field. * * If the name contains database delimiters, they will be stripped and a flag will be set to have them * added when the DatabaseField is written to SQL */ public void setName(String name, String startDelimiter, String endDelimiter) { if ((startDelimiter != null) && (endDelimiter != null) && !startDelimiter.equals("")&& !endDelimiter.equals("") && name.startsWith(startDelimiter) && name.endsWith(endDelimiter)){ this.name = name.substring(startDelimiter.length(), name.length() - endDelimiter.length()); this.useDelimiters = true; } else { this.name = name; } this.nameForComparisons = null; this.qualifiedName = null; } /** * Used for generating DDL. 
Set to true if the database column is * nullable. */ public void setNullable(boolean isNullable) { this.isNullable = isNullable; } /** * Used to specify the precision for a decimal column when generating DDL. */ public void setPrecision(int precision) { this.precision = precision; } /** * Used to specify whether the column should be included in primary key * on the database table. */ public void setPrimaryKey(boolean isPrimaryKey) { this.isPrimaryKey = isPrimaryKey; } /** * Used to specify the scale for a decimal column when generating DDL. */ public void setScale(int scale) { this.scale = scale; } /** * Set the JDBC type that corresponds to the field. * The JDBC type is normally determined from the class type, * but this allows it to be overridden for types that do not match directly * to a Java type, such as MONEY or ARRAY, STRUCT, XMLTYPE, etc. * This can be used for binding or stored procedure usage. */ public void setSqlType(int sqlType) { this.sqlType = sqlType; } /** * Set the table for the field. */ public void setTable(DatabaseTable table) { this.table = table; this.qualifiedName = null; } /** * Set the Java class type that corresponds to the field. * The JDBC type is determined from the class type, * this is used to optimize performance, and for binding. */ @Override public void setType(Class type) { this.type = type; if (this.type != null && typeName == null) { typeName = this.type.getName(); } } /** * Used for generating DDL. Set to true if the field is a unique key. */ public void setUnique(boolean isUnique) { this.isUnique = isUnique; } /** * Used to specify whether the column should be included in SQL INSERT * statements. 
*/ public void setUpdatable(boolean isUpdatable) { this.isUpdatable = isUpdatable; } public String toString() { return this.getQualifiedName(); } public void setUseDelimiters(boolean useDelimiters) { this.useDelimiters = useDelimiters; } public boolean shouldUseDelimiters() { return this.useDelimiters; } /** * INTERNAL: * Sets the useUpperCaseForComparisons flag which is used to force using the uppercase version of the field's * name to determine field equality and its hashcode, but will still use the original name when writing/printing * operations. If this isn't a change, it is ignored, otherwise it sets the nameForComparisons to null. */ public void useUpperCaseForComparisons(boolean useUpperCaseForComparisons){ if (this.useUpperCaseForComparisons != useUpperCaseForComparisons){ this.useUpperCaseForComparisons = useUpperCaseForComparisons; this.setNameForComparisons(null); } } public boolean getUseUpperCaseForComparisons(){ return this.useUpperCaseForComparisons; } /** * INTERNAL: * sets the string to be used for equality checking and determining the hashcode of this field. * This will overwrite the useUpperCaseForEquality setting with the passed in string. */ public void setNameForComparisons(String name){ this.nameForComparisons = name; } public boolean isCreatable() { return isCreatable; } public void setCreatable(boolean isCreatable) { this.isCreatable = isCreatable; } /** * INTERNAL: * gets the string used for comparisons and in determining the hashcode. 
*/ public String getNameForComparisons(){ if (this.nameForComparisons == null) { if ((!this.useUpperCaseForComparisons) || (this.name == null)) { this.nameForComparisons = this.name; } else { this.nameForComparisons = this.name.toUpperCase(); } } return this.nameForComparisons; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/SerializationHelper.java0000664000000000000000000001106212216173126026323 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import org.eclipse.persistence.exceptions.ValidationException; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; /** *

Provide common functionalities for serialization of object. *

* *

This class throws exceptions for invalid null inputs. * Each method documents its behavior in more detail.

* * @author Steven Vo * @since OracleAS 10.0.3 */ public class SerializationHelper { /** *

Deep clone a Serializable object using serialization. * @param the serializable object * @return the deep cloned object * @throws IOException, ClassNotFoundException */ public static Object clone(Serializable object) throws IOException, ClassNotFoundException { return deserialize(serialize(object)); } /** * Serialize the object to an OutputStream * * @param obj the object to serialize to bytes * @param outputStream the stream to write to, can not be null * @throws IOException */ public static void serialize(Serializable obj, OutputStream outputStream) throws IOException { if (outputStream == null) { throw ValidationException.invalidNullMethodArguments(); } ObjectOutputStream outStream = null; try { // stream closed in the finally outStream = new ObjectOutputStream(outputStream); outStream.writeObject(obj); } finally { try { if (outStream != null) { outStream.close(); } } catch (IOException ex) { // ignore; } } } /** * Serialize the object to a byte array * * @param obj the object to serialize to bytes * @return a byte[] of the obj * @throws IOException */ public static byte[] serialize(Serializable obj) throws IOException { ByteArrayOutputStream outStream = new ByteArrayOutputStream(512); serialize(obj, outStream); return outStream.toByteArray(); } /** * Deserialze an object from an InputStream * * @param inputStream the serialized object input stream, must not be null * @return the deserialized object * @throws IOException, ClassNotFoundException */ public static Object deserialize(InputStream inputStream) throws IOException, ClassNotFoundException { if (inputStream == null) { throw new IllegalArgumentException("The inputStream argument cannot be null"); } ObjectInputStream inStream = null; try { // stream closed in the finally inStream = new ObjectInputStream(inputStream); return inStream.readObject(); } finally { try { if (inStream != null) { inStream.close(); } } catch (IOException ex) { // ignore } } } /** * Deserialize an object from a byte array * * @param 
objectBytes the serialized object, can not be null * @return the deserialized object * @throws IOException, ClassNotFoundException */ public static Object deserialize(byte[] objectBytes) throws IOException, ClassNotFoundException { if (objectBytes == null) { throw ValidationException.invalidNullMethodArguments(); } ByteArrayInputStream inStream = new ByteArrayInputStream(objectBytes); return deserialize(inStream); } } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/VendorNameToPlatformMapping.propertieseclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/VendorNameToPlatformMapping.propertie0000664000000000000000000000566612216173126031035 0ustar #******************************************************************************* # Copyright (c) 1998, 2013 Oracle. All rights reserved. # This program and the accompanying materials are made available under the # terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 # which accompanies this distribution. # The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html # and the Eclipse Distribution License is available at # http://www.eclipse.org/org/documents/edl-v10.php. # Contributors: # Oracle - initial API and implementation from Oracle TopLink #******************************************************************************/ # Key-Value file containing mappings between DB product name, major version and # database platform class name. # # The key of the property is in the form of a java regular expression. # At runtime, DB product name obtained from DatabaseMetaData.getDatabaseProductName() # is concatenated with DatabaseMetaData.getDatabaseMajorVersion() and is matched # against the regular expression to determine which DatabasePlaform to set. 
# # This file is parsed sequentially, top to bottom; More specific regular expression # to platform class entries should be placed before less specific entries. Each # platform entry must be on its own line, an entry cannot span multiple lines. (?i)oracle.*12=org.eclipse.persistence.platform.database.oracle.Oracle11Platform (?i)oracle.*11=org.eclipse.persistence.platform.database.oracle.Oracle11Platform (?i)oracle.*10=org.eclipse.persistence.platform.database.oracle.Oracle10Platform (?i)oracle.*9=org.eclipse.persistence.platform.database.oracle.Oracle9Platform (?i)oracle.*=org.eclipse.persistence.platform.database.oracle.OraclePlatform SQL\ Anywhere.*=org.eclipse.persistence.platform.database.SQLAnywherePlatform (?i)(sybase.*)|(adaptive\ server\ enterprise.*)|(SQL\ Server.*)=org.eclipse.persistence.platform.database.SybasePlatform (?i)microsoft.*=org.eclipse.persistence.platform.database.SQLServerPlatform #Use JavaDBPlatform as the platform for Derby (?i).*derby.*=org.eclipse.persistence.platform.database.JavaDBPlatform (?i).*db2.*=org.eclipse.persistence.platform.database.DB2Platform (?i)pointbase.*=org.eclipse.persistence.platform.database.PointBasePlatform (?i)mysql.*=org.eclipse.persistence.platform.database.MySQLPlatform (?i)informix.*=org.eclipse.persistence.platform.database.InformixPlatform (?i)postgresql.*=org.eclipse.persistence.platform.database.PostgreSQLPlatform (?i)h2.*=org.eclipse.persistence.platform.database.H2Platform (?i)hsql.*=org.eclipse.persistence.platform.database.HSQLPlatform (?i)firebird.*=org.eclipse.persistence.platform.database.FirebirdPlatform (?i).*symfoware.*=org.eclipse.persistence.platform.database.SymfowarePlatform (?i)access.*=org.eclipse.persistence.platform.database.AccessPlatform SAP\ DB.*=org.eclipse.persistence.platform.database.MaxDBPlatform HDB.*=org.eclipse.persistence.platform.database.HANAPlatform 
eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/JDKPlatform.java0000664000000000000000000000350512216173126024466 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.sql.SQLException; /** * INTERNAL: * Interface which abstracts the version of the JDK we are on. * This should only implement methods that are dependent on JDK version * The implementers should implement the minimum amount of functionality required to * allow support of multiple versions of the JDK. * @see JDK15Platform * @see JavaPlatform * @author Tom Ware */ public interface JDKPlatform { /** * Conforming queries with LIKE will act differently in different JDKs. */ Boolean conformLike(Object left, Object right); /** * Conforming queries with REGEXP will act differently in different JDKs. 
*/ Boolean conformRegexp(Object left, Object right); /** * Indicates whether the passed object implements java.sql.SQLXML introduced in jdk 1.6 */ boolean isSQLXML(Object object); /** * Casts the passed object to SQLXML and calls getString and free methods */ String getStringAndFreeSQLXML(Object sqlXml) throws SQLException; } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/IdentityHashSet.java0000664000000000000000000003576412216173126025436 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** * INTERNAL: *

* Purpose: Define a {@link Set} that tests equality by reference, * not equals(). This is required to track objects throughout the lifecycle * of a {@link org.eclipse.persistence.sessions.UnitOfWork}, regardless if the domain * object redefines its equals() method. Additionally, this implementation does * not allow null elements. *

* This class does not inherit from {@link AbstractSet} because the * method {@link AbstractSet#removeAll removeAll(Collection c)} does not work * correctly with reference equality testing (NB the Javadocs for * {@link AbstractCollection} indicates that removeAll is an optional method). * * @author Mike Norman (since TopLink 10.1.3) * */ // J2SE imports import java.io.*; import java.util.*; public class IdentityHashSet extends AbstractCollection implements Set, Cloneable, Serializable { static final long serialVersionUID = 1619330892277906704L; // the default initial capacity static final int DEFAULT_INITIAL_CAPACITY = 32; // the maximum capacity. static final int MAXIMUM_CAPACITY = 1 << 30; // the loadFactor used when none specified in constructor. static final float DEFAULT_LOAD_FACTOR = 0.75f; protected transient Entry[] entries;// internal array of Entry's protected transient int count = 0; protected int threshold = 0; protected float loadFactor = 0; /** * Constructs a new IdentityHashSet with the given initial * capacity and the given loadFactor. * * @param initialCapacity the initial capacity of the * IdentityHashSet. * @param loadFactor the loadFactor of the IdentityHashSet. * @throws IllegalArgumentException if the initial capacity is less * than zero, or if the loadFactor is nonpositive. 
*/ public IdentityHashSet(int initialCapacity, float loadFactor) { if (initialCapacity < 0) { throw new IllegalArgumentException("Illegal initialCapacity: " + initialCapacity); } if (initialCapacity > MAXIMUM_CAPACITY) { initialCapacity = MAXIMUM_CAPACITY; } if ((loadFactor <= 0) || Float.isNaN(loadFactor)) { throw new IllegalArgumentException("Illegal loadFactor: " + loadFactor); } // Find a power of 2 >= initialCapacity int capacity = 1; while (capacity < initialCapacity) { capacity <<= 1; } this.loadFactor = loadFactor; threshold = (int)(capacity * loadFactor); entries = new Entry[capacity]; } /** * Constructs a new IdentityHashSet with the given * initial capacity and a default loadFactor of 0.75. * * @param initialCapacity the initial capacity of the IdentityHashSet. * @throws IllegalArgumentException if the initial capacity is less * than zero. */ public IdentityHashSet(int initialCapacity) { this(initialCapacity, DEFAULT_LOAD_FACTOR); } /** * Constructs a new IdentityHashSet with a default initial * capacity of 32 and a loadfactor of 0.75. */ public IdentityHashSet() { loadFactor = DEFAULT_LOAD_FACTOR; threshold = (int)(DEFAULT_INITIAL_CAPACITY * DEFAULT_LOAD_FACTOR); entries = new Entry[DEFAULT_INITIAL_CAPACITY]; } /** * Constructs a new IdentityHashSet with the same contents * as the given Collection. The new IdentityHashSet * is created with an initial capacity sufficient to hold the elements of * the given Collection. * * @param c the Collection whose contents are to be placed in the * new IdentityHashSet. */ public IdentityHashSet(Collection c) { this(Math.max((int)(c.size() / DEFAULT_LOAD_FACTOR) + 1, DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR); addAll(c); } /** * @return the size of this IdentityHashSet. */ public int size() { return count; } /** * @return true if this IdentityHashSet is empty. */ public boolean isEmpty() { return (count == 0); } /** * Returns true if this IdentityHashSet contains * the given object. 
* * @param obj the object to find. * @return true if this IdentityHashSet contains * obj by reference. */ public boolean contains(Object obj) { if (obj == null) { return false; } Entry[] copyOfEntries = entries; int hash = System.identityHashCode(obj); int index = (hash & 0x7FFFFFFF) % copyOfEntries.length; for (Entry e = copyOfEntries[index]; e != null; e = e.next) { if ((e.hash == hash) && (obj == e.value)) { return true; } } return false; } /** * INTERNAL: * Re-builds the internal array of Entry's with a larger capacity. * This method is called automatically when the number of objects in this * IdentityHashSet exceeds its current threshold. */ private void rehash() { int oldCapacity = entries.length; Entry[] oldEntries = entries; int newCapacity = (oldCapacity * 2) + 1; Entry[] newEntries = new Entry[newCapacity]; threshold = (int)(newCapacity * loadFactor); entries = newEntries; for (int i = oldCapacity; i-- > 0;) { for (Entry old = oldEntries[i]; old != null;) { Entry e = old; old = old.next; int index = (e.hash & 0x7FFFFFFF) % newCapacity; e.next = newEntries[index]; newEntries[index] = e; } } } /** * Adds the given object to this IdentityHashSet. * * @param obj object to add. * @return true if this IdentityHashSet did not * already contain obj. * @throws NullPointerException if obj is null. */ public boolean add(Object obj) { if (obj == null) { throw new NullPointerException(); } // Makes sure the object is not already in the IdentityHashSet. Entry[] copyOfEntries = entries; int hash = System.identityHashCode(obj); int index = (hash & 0x7FFFFFFF) % copyOfEntries.length; for (Entry e = copyOfEntries[index]; e != null; e = e.next) { if ((e.hash == hash) && (obj == e.value)) { return false; } } if (count >= threshold) { // Rehash the table if the threshold is exceeded rehash(); copyOfEntries = entries; index = (hash & 0x7FFFFFFF) % copyOfEntries.length; } // Creates the new entry. 
Entry e = new Entry(hash, obj, copyOfEntries[index]); copyOfEntries[index] = e; count++; return true; } /** * Removes the given object from this IdentityHashSet, if * present. * * @param obj the object to be removed from this IdentityHashSet. * @return true if this IdentityHashSet contained * obj. */ public boolean remove(Object obj) { if (obj == null) { return false; } Entry[] copyOfEntries = entries; int hash = System.identityHashCode(obj); int index = (hash & 0x7FFFFFFF) % copyOfEntries.length; for (Entry e = copyOfEntries[index], prev = null; e != null; prev = e, e = e.next) { if ((e.hash == hash) && (obj == e.value)) { if (prev != null) { prev.next = e.next; } else { copyOfEntries[index] = e.next; } count--; return true; } } return false; } /** * This implementation throws an UnsupportedOperationException * because removeAll does not work correctly with reference * equality testing.

*/ public boolean removeAll(Collection c) { throw new UnsupportedOperationException("IdentityHashSet removeAll"); } /** * This implementation throws an UnsupportedOperationException. * The Javadocs for {@link AbstractCollection} indicates that retainAll * is an optional method.

*/ public boolean retainAll(Collection c) { throw new UnsupportedOperationException("IdentityHashSet retainAll"); } /** * Removes all of the objects from this IdentityHashSet. */ public void clear() { if (count > 0) { Entry[] copyOfEntries = entries; for (int i = copyOfEntries.length; --i >= 0;) { copyOfEntries[i] = null; } count = 0; } } /** * Returns a shallow copy of this IdentityHashSet (the * elements are not cloned). * * @return a shallow copy of this IdentityHashSet. */ public Object clone() { try { Entry[] copyOfEntries = entries; IdentityHashSet clone = (IdentityHashSet)super.clone(); clone.entries = new Entry[copyOfEntries.length]; for (int i = copyOfEntries.length; i-- > 0;) { clone.entries[i] = (copyOfEntries[i] != null) ? (Entry)copyOfEntries[i].clone() : null; } return clone; } catch (CloneNotSupportedException e) { // this shouldn't happen, since we are Cloneable throw new InternalError(); } } /** * Get an iterator for this IdentityHashSet */ public Iterator iterator() { return new IdentityHashSetIterator(); } /** * IdentityHashSet entry. 
*/ static class Entry { int hash; Object value; Entry next; Entry(int hash, Object value, Entry next) { this.hash = hash; this.value = value; this.next = next; } protected Object clone() { Entry currentNode = this; Entry rootNode = new Entry(hash, value, null); Entry currentNodeClone = rootNode; while (currentNode.next != null) { currentNodeClone.next = new Entry(currentNode.next.hash, currentNode.next.value, null); currentNode = currentNode.next; currentNodeClone = currentNodeClone.next; } return rootNode; } } class IdentityHashSetIterator implements Iterator { Entry[] copyOfEntries = IdentityHashSet.this.entries; int index = copyOfEntries.length; Entry entry = null; Entry lastReturned = null; IdentityHashSetIterator() { } public boolean hasNext() { Entry e = entry; int i = index; Entry[] tmp = copyOfEntries; while ((e == null) && (i > 0)) { e = tmp[--i]; } entry = e; index = i; return e != null; } public Object next() { Entry et = entry; int i = index; Entry[] tmp = copyOfEntries; while ((et == null) && (i > 0)) { et = tmp[--i]; } entry = et; index = i; if (et != null) { Entry e = lastReturned = entry; entry = e.next; return e.value; } throw new NoSuchElementException(); } public void remove() { if (lastReturned == null) { throw new IllegalStateException(); } Entry[] copyOfEntries = IdentityHashSet.this.entries; int index = (lastReturned.hash & 0x7FFFFFFF) % copyOfEntries.length; for (Entry e = copyOfEntries[index], prev = null; e != null; prev = e, e = e.next) { if (e == lastReturned) { if (prev == null) { copyOfEntries[index] = e.next; } else { prev.next = e.next; } count--; lastReturned = null; return; } } throw new ConcurrentModificationException(); } } /** * Serialize the state of this IdentityHashSet to a stream. * * @serialData The capacity of the IdentityHashSet * (the length of the bucket array) is emitted (int), followed by the * size of the IdentityHashSet, followed by the * contents (in no particular order). 
*/ private void writeObject(ObjectOutputStream s) throws IOException, ClassNotFoundException { // Write out the threshold, loadfactor (and any hidden 'magic' stuff). s.defaultWriteObject(); // Write out number of buckets s.writeInt(entries.length); // Write out count s.writeInt(count); // Write out contents for (Iterator i = iterator(); i.hasNext();) { s.writeObject(i.next()); } } /** * Deserialize the IdentityHashSet from a stream. */ private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException { // Read in the threshold, loadfactor (and any hidden 'magic' stuff). s.defaultReadObject(); // Read in number of buckets and allocate the bucket array; int numBuckets = s.readInt(); entries = new Entry[numBuckets]; // Read in size (count) int size = s.readInt(); // Read the objects and add to the IdentityHashSet for (int i = 0; i < size; i++) { add(s.readObject()); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/LOBValueWriter.java0000664000000000000000000002244012216173126025156 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.sql.*; import java.util.*; import org.eclipse.persistence.internal.expressions.SQLSelectStatement; import org.eclipse.persistence.internal.expressions.ForUpdateClause; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.platform.database.OraclePlatform; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.expressions.Expression; import org.eclipse.persistence.internal.databaseaccess.DatabaseAccessor; import org.eclipse.persistence.internal.databaseaccess.DatabaseCall; import org.eclipse.persistence.internal.databaseaccess.Accessor; import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform; /** * INTERNAL: *

Purpose:LOBValueWriter is used to write a large size of object into an Oracle * CLOB/BLOB column through Oracle LOB Locator. It's a work-around object for the well-known 4k write * limits on an Oracle thin driver. * *

Responsibilities:

    *
  • Build the Oracle empty lob method call string for the insert call. *
  • Build the minimal SELECT call to retrieve the locator. *
  • Write the lob value through the locator. *
  • Resolve the multiple table INSERT/SELECT orders. *
  • Resolve the nested unit of work commit issue. *
* * @author: King Wang * @since TopLink/Java 5.0. July 2002. */ public class LOBValueWriter { //DatabaseCalls still to be processed private Collection calls = null; private Accessor accessor; private boolean isNativeConnectionRequired; /** * This is the default constructor for the class. * * Bug 2804663 - Each DatabaseAccessor will now hold on to its own instance * of this class, hence a singleton pattern is not applicable. */ public LOBValueWriter(Accessor accessor) { this.accessor = accessor; DatabasePlatform platform = ((DatabaseAccessor)accessor).getPlatform(); this.isNativeConnectionRequired = platform.isOracle() && ((OraclePlatform)platform).isNativeConnectionRequiredForLobLocator(); } protected void buildAndExecuteCall(DatabaseCall dbCall, AbstractSession session) { DatabaseQuery query = dbCall.getQuery(); if (!query.isWriteObjectQuery()) { //if not writequery, should not go through the locator writing.. return; } WriteObjectQuery writeQuery = (WriteObjectQuery)query; writeQuery.setAccessor(accessor); //build a select statement form the query SQLSelectStatement selectStatement = buildSelectStatementForLocator(writeQuery, dbCall, session); //then build a call from the statement DatabaseCall call = buildCallFromSelectStatementForLocator(selectStatement, writeQuery, dbCall, session); accessor.executeCall(call, call.getQuery().getTranslationRow(), session); } /** * Fetch the locator(s) from the result set and write LOB value to the table */ public void fetchLocatorAndWriteValue(DatabaseCall dbCall, Object resultSet) throws SQLException { Enumeration enumFields = dbCall.getContexts().getFields().elements(); Enumeration enumValues = dbCall.getContexts().getValues().elements(); AbstractSession executionSession = dbCall.getQuery().getSession().getExecutionSession(dbCall.getQuery()); while (enumFields.hasMoreElements()) { DatabaseField field = (DatabaseField)enumFields.nextElement(); Object value = enumValues.nextElement(); //write the value through the locator 
executionSession.getPlatform().writeLOB(field, value, (ResultSet)resultSet, executionSession); } } /** * Build the select statement for selecting the locator */ private SQLSelectStatement buildSelectStatementForLocator(WriteObjectQuery writeQuery, DatabaseCall call, AbstractSession session) { SQLSelectStatement selectStatement = new SQLSelectStatement(); Vector tables = writeQuery.getDescriptor().getTables(); selectStatement.setTables(tables); //rather than get ALL fields from the descriptor, only use the LOB-related fields to build the minimal SELECT statement. selectStatement.setFields(call.getContexts().getFields()); //the where clause setting here is sufficient if the object does not map to multiple tables. selectStatement.setWhereClause(writeQuery.getDescriptor().getObjectBuilder().buildPrimaryKeyExpressionFromObject(writeQuery.getObject(), session)); //need pessimistic locking for the locator select selectStatement.setLockingClause(ForUpdateClause.newInstance(ObjectBuildingQuery.LOCK)); if (tables.size() > 1) { //the primary key expression from the primary table Expression expression = selectStatement.getWhereClause(); //additional join from the non-primary tables Expression additionalJoin = writeQuery.getDescriptor().getQueryManager().getAdditionalJoinExpression(); if (additionalJoin != null) { expression = expression.and(additionalJoin); } //where clause now contains extra joins across all tables selectStatement.setWhereClause(expression); } //normalize the statement at the end, such as assign alias to all tables, and build sorting statement selectStatement.normalize(session, writeQuery.getDescriptor()); return selectStatement; } /** * Build the sql call from the select statement for selecting the locator */ private DatabaseCall buildCallFromSelectStatementForLocator(SQLSelectStatement selectStatement, WriteObjectQuery writeQuery, DatabaseCall dbCall, AbstractSession session) { DatabaseCall call = selectStatement.buildCall(session); // Locator LOB must not 
be wrapped (WLS wraps LOBs). call.setIsNativeConnectionRequired(this.isNativeConnectionRequired); //the LOB context must be passed into the new call object call.setContexts(dbCall.getContexts()); //need to explicitly define one row return, otherwise, EL assumes multiple rows return and confuses the accessor call.returnOneRow(); //the query object has to be set in order to access to the platform and login objects call.setQuery(writeQuery); // prepare it call.prepare(session); //finally do the translation call.translate(writeQuery.getTranslationRow(), writeQuery.getModifyRow(), session); return call; } // Building of SELECT statements is no longer done in DatabaseAccessor.basicExecuteCall // for updates because DatabaseCall.isUpdateCall() can't recognize update in case // StoredProcedureCall is used. Therefore in all cases: insert(single or multiple tables) // and update the original (insert and update) calls are saved // and both building and executing of SELECT statements postponed until // buildAndExecuteSelectCalls method is called. /** * Add original (insert or update) call to the collection */ public void addCall(Call call) { if (calls == null) { //use lazy initialization calls = new ArrayList(2); } calls.add(call); } // Bug 3110860: RETURNINGPOLICY-OBTAINED PK CAUSES LOB TO BE INSERTED INCORRECTLY // The deferred locator SELECT calls should be generated and executed after ReturningPolicy // merges PK obtained from the db into the object held by the query. // That's why original (insert or update) calls are saved, // and both building and executing of SELECT statements postponed until // this method is called. /** * Build and execute the deferred select calls. 
*/ public void buildAndExecuteSelectCalls(AbstractSession session) { if ((calls == null) || calls.isEmpty()) { //no deferred select calls (it means no locator is required) return; } //all INSERTs have been executed, time to execute the SELECTs try { for (Iterator callIt = calls.iterator(); callIt.hasNext();) { DatabaseCall dbCall = (DatabaseCall)callIt.next(); buildAndExecuteCall(dbCall, session); } } finally { //after executing all select calls, need to empty the collection. //this is necessary in the nested unit of work cases. calls.clear(); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/DatabaseTable.java0000664000000000000000000003531312216173126025027 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink * tware - added handling of database delimiters * 11/19/2012-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support (foreign key metadata support) * 11/22/2012-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support (index metadata support) * 12/07/2012-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support (foreign key metadata support) ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.io.*; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.eclipse.persistence.internal.databaseaccess.*; import org.eclipse.persistence.internal.expressions.ExpressionSQLPrinter; import org.eclipse.persistence.tools.schemaframework.ForeignKeyConstraint; import org.eclipse.persistence.tools.schemaframework.IndexDefinition; /** * INTERNAL: *
* <p><b>Purpose</b>:
* Define a fully qualified table name.
* <p><b>Responsibilities</b>:
* <ul>
* <li> Allow specification of a qualifier to the table, i.e. creator or database.
* </ul>
*
*@see DatabaseField */ public class DatabaseTable implements Cloneable, Serializable { protected String name; protected String tableQualifier; protected String qualifiedName; /** JPA 2.1 Foreign key specification data */ protected Map foreignKeyConstraints; /** * Contains the user specified unique constraints. JPA 2.0 introduced * the name element, therefore, if specified we will use that name * to create the constraint. Constraints with no name will be added to the * map under the null key and generated with a default name. * Therefore, when a name is given the list size should only ever be * 1. We will validate. The null key could have multiples however they will * have their names defaulted (as we did before). */ protected Map>> uniqueConstraints; /** * Store the set of indexes defined through meta-data for the table. */ protected List indexes; protected boolean useDelimiters = false; protected String creationSuffix; /** * Initialize the newly allocated instance of this class. * By default there is no qualifier.
*/ public DatabaseTable() { name = ""; tableQualifier = ""; } public DatabaseTable(String possiblyQualifiedName) { this(possiblyQualifiedName, null, null); } public DatabaseTable(String possiblyQualifiedName, String startDelimiter, String endDelimiter) { setPossiblyQualifiedName(possiblyQualifiedName, startDelimiter, endDelimiter); } public DatabaseTable(String tableName, String qualifier) { this(tableName, qualifier, false, null, null); } public DatabaseTable(String tableName, String qualifier, boolean useDelimiters, String startDelimiter, String endDelimiter) { setName(tableName, startDelimiter, endDelimiter); this.tableQualifier = qualifier; this.useDelimiters = useDelimiters; } public void addForeignKeyConstraint(ForeignKeyConstraint foreignKeyConstraint) { if (foreignKeyConstraints == null) { foreignKeyConstraints = new HashMap(); } foreignKeyConstraints.put(foreignKeyConstraint.getName(), foreignKeyConstraint); } /** * Add an index definition to this table. */ public void addIndex(IndexDefinition index) { getIndexes().add(index); } /** * Add the unique constraint for the columns names. Used for DDL generation. * For now we just add all the unique constraints as we would have before * when we didn't have a name. */ public void addUniqueConstraints(String name, List columnNames) { if (getUniqueConstraints().containsKey(name)) { getUniqueConstraints().get(name).add(columnNames); } else { List> value = new ArrayList>(); value.add(columnNames); getUniqueConstraints().put(name, value); } } /** * Return a shallow copy of the receiver. */ public DatabaseTable clone() { try { return (DatabaseTable)super.clone(); } catch (CloneNotSupportedException exception) { throw new InternalError(exception.getMessage()); } } /** * Two tables are equal if their names and tables are equal, * or their names are equal and one does not have a qualifier assigned. * This allows an unqualified table to equal the same fully qualified one. 
*/ public boolean equals(Object object) { if (object instanceof DatabaseTable) { return equals((DatabaseTable)object); } return false; } /** * Two tables are equal if their names and tables are equal, * or their names are equal and one does not have a qualifier assigned. * This allows an unqualified table to equal the same fully qualified one. */ public boolean equals(DatabaseTable table) { if (this == table) { return true; } if (DatabasePlatform.shouldIgnoreCaseOnFieldComparisons()) { if (getName().equalsIgnoreCase(table.getName())) { if ((getTableQualifier().length() == 0) || (table.getTableQualifier().length() == 0) || (getTableQualifier().equalsIgnoreCase(table.getTableQualifier()))) { return true; } } } else { if (getName().equals(table.getName())) { if ((getTableQualifier().length() == 0) || (table.getTableQualifier().length() == 0) || (getTableQualifier().equals(table.getTableQualifier()))) { return true; } } } return false; } /** * returns the suffix applied to the CREATE table statement on this field for DDL generation. */ public String getCreationSuffix() { return creationSuffix; } public ForeignKeyConstraint getForeignKeyConstraint(String name) { return foreignKeyConstraints.get(name); } public Map getForeignKeyConstraints() { return foreignKeyConstraints; } /** * Return a list of index definitions. * Used for DDL generation. */ public List getIndexes() { if (this.indexes == null) { this.indexes = new ArrayList(); } return this.indexes; } /** * Get method for table name. */ public String getName() { return name; } /** * Get method for table name. */ public String getNameDelimited(DatasourcePlatform platform) { if (useDelimiters){ return platform.getStartDelimiter() + name + platform.getEndDelimiter(); } return name; } public String getQualifiedName() { if (qualifiedName == null) { if (tableQualifier.equals("")) { qualifiedName = getName(); } else { qualifiedName = getTableQualifier() + "." 
+ getName(); } } return qualifiedName; } public String getQualifiedNameDelimited(DatasourcePlatform platform) { if (tableQualifier.equals("")) { if (useDelimiters){ return platform.getStartDelimiter() + getName() + platform.getEndDelimiter(); } else { return getName(); } } else { if (useDelimiters){ return platform.getStartDelimiter() + getTableQualifier() + platform.getEndDelimiter() + "." + platform.getStartDelimiter() + getName() + platform.getEndDelimiter(); } else { return getTableQualifier() + "." + getName(); } } } /** * Print the table's SQL from clause. */ public void printSQL(ExpressionSQLPrinter printer) throws IOException { printer.getWriter().write(getQualifiedNameDelimited(printer.getPlatform())); } public String getTableQualifierDelimited(DatasourcePlatform platform) { if (useDelimiters && tableQualifier != null && !tableQualifier.equals("")){ return platform.getStartDelimiter() + tableQualifier + platform.getEndDelimiter(); } return tableQualifier; } public String getTableQualifier() { return tableQualifier; } public boolean hasUniqueConstraints() { return (this.uniqueConstraints != null) && (!this.uniqueConstraints.isEmpty()); } public boolean hasForeignKeyConstraints() { return foreignKeyConstraints != null; } /** * Return the hashcode of the name, because it is fairly unique. */ public int hashCode() { return getName().hashCode(); } public boolean hasIndexes() { return (this.indexes != null) && (!this.indexes.isEmpty()); } /** * Return a list of the unique constraints for this table. * Used for DDL generation. */ public Map>> getUniqueConstraints() { if (this.uniqueConstraints == null) { this.uniqueConstraints = new HashMap>>(); } return this.uniqueConstraints; } /** * Determine whether the receiver has any identification information. * Return true if the name or qualifier of the receiver are nonempty. 
*/ public boolean hasName() { if ((getName().length() == 0) && (getTableQualifier().length() == 0)) { return false; } return true; } /** * INTERNAL: * Is this decorated / has an AS OF (some past time) clause. * Example: * SELECT ... FROM EMPLOYEE AS OF TIMESTAMP (exp) t0 ... */ public boolean isDecorated() { return false; } protected void resetQualifiedName() { this.qualifiedName = null; } public void setCreationSuffix(String creationSuffix) { this.creationSuffix = creationSuffix; } /** * Set the table name. * Used when aliasing table names. * @param name */ public void setName(String name) { setName(name, null, null); } /** * Set the table name. * Used when aliasing table names. * * If the name contains database delimiters, they will be stripped and a flag will be set to have them * added when the DatabaseTable is written to SQL * * @param name */ public void setName(String name, String startDelimiter, String endDelimiter) { if (name != null && (startDelimiter != null) && (endDelimiter != null) && !startDelimiter.equals("")&& !endDelimiter.equals("") && name.startsWith(startDelimiter) && name.endsWith(endDelimiter)){ this.name = name.substring(startDelimiter.length(), name.length() - endDelimiter.length()); useDelimiters = true; } else { this.name = name ; } resetQualifiedName(); } /** * Used to map the project xml. Any time a string name is read from the * project xml, we must check if it is fully qualified and split the * actual name from the qualifier. 
* * @param possiblyQualifiedName */ public void setPossiblyQualifiedName(String possiblyQualifiedName) { setPossiblyQualifiedName(possiblyQualifiedName, null, null); } public void setPossiblyQualifiedName(String possiblyQualifiedName, String startDelimiter, String endDelimiter) { resetQualifiedName(); int index = possiblyQualifiedName.lastIndexOf('.'); if (index == -1) { setName(possiblyQualifiedName, startDelimiter, endDelimiter); this.tableQualifier = ""; } else { setName(possiblyQualifiedName.substring(index + 1, possiblyQualifiedName.length()), startDelimiter, endDelimiter); setTableQualifier(possiblyQualifiedName.substring(0, index), startDelimiter, endDelimiter); if((startDelimiter != null) && possiblyQualifiedName.startsWith(startDelimiter) && (endDelimiter != null) && possiblyQualifiedName.endsWith(endDelimiter)) { // It's 'Qualifier.Name' - it should be treated as a single string. // Would that be 'Qualifier'.'Name' both setName and setTableQualifier methods would have set useDelimeters to true. 
if(!this.useDelimiters) { setName(possiblyQualifiedName); this.tableQualifier = ""; } } } } public void setTableQualifier(String qualifier) { setTableQualifier(qualifier, null, null); } public void setTableQualifier(String qualifier, String startDelimiter, String endDelimiter) { if ((startDelimiter != null) && (endDelimiter != null) && !startDelimiter.equals("")&& !endDelimiter.equals("") && qualifier.startsWith(startDelimiter) && qualifier.endsWith(endDelimiter)){ this.tableQualifier = qualifier.substring(startDelimiter.length(), qualifier.length() - endDelimiter.length()); useDelimiters = true; } else { this.tableQualifier = qualifier; } resetQualifiedName(); } public String toString() { return "DatabaseTable(" + getQualifiedName() + ")"; } public void setUseDelimiters(boolean useDelimiters) { this.useDelimiters = useDelimiters; } public boolean shouldUseDelimiters() { return useDelimiters; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/IndexedObject.java0000664000000000000000000000460612216173126025063 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * 06/10/2009 Andrei Ilitchev * - JPA 2.0 - OrderedList support. ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** *
* <p><b>Purpose</b>: A helper class for sorting index/object pairs.
* <p>
* <b>
Responsibilities: * Allows to sort a list of index/object pairs either directly * or with Collections.sort(List) - using IndexedObject.compareTo; * or with Collections.sort(List, Comparator) - using custom-defined Comparator. * * @see Collections * @see Comparator * @see OrderedListContainerPolicy */ public class IndexedObject implements Comparable { private Integer index; private Object object; public IndexedObject(Integer index, Object object) { this.index = index; this.object = object; } public Integer getIndex() { return index; } public void setIndex(Integer index) { this.index = index; } public Object getObject() { return object; } public void setObject(Object object) { this.object = object; } /* * Compares indexes, null is less than any non-null. */ public int compareTo(IndexedObject anotherIndexedObject) { Integer anotherIndex = anotherIndexedObject.getIndex(); if(index == null) { if(anotherIndex == null) { return 0; } else { return -1; } } else { if(anotherIndex == null) { return 1; } else { return index.compareTo(anotherIndex); } } } public String toString() { return "(" + index + ", " + object + ")"; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/JPAClassLoaderHolder.java0000664000000000000000000000470212216173126026236 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * mobrien - initial API and implementation ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** * INTERNAL: * This class is a composite object containing the classLoader and * a flag that is true if the classLoader returned is temporary.
* JIRA EJBTHREE-572 requires that we use the real classLoader in place of the getNewTempClassLoader(). * The override code should stay in place until the UCL3 loader does not throw a NPE on loadClass() * The wrapped classLoader may be a temporary classLoader. */ public class JPAClassLoaderHolder { private ClassLoader classLoader; private boolean isTempClassLoader; /** * INTERNAL: * Create an instance of JPAClassLoaderHolder that wraps aClassLoader that is an original call to get*ClassLoader(). * @param aClassLoader */ public JPAClassLoaderHolder(ClassLoader aClassLoader) { this(aClassLoader, true); } /** * INTERNAL: * Create an instance of JPAClassLoaderHolder that wraps aClassLoader and the inUse boolean. * @param aClassLoader * @param isThisTempClassLoader */ public JPAClassLoaderHolder(ClassLoader aClassLoader, boolean isThisTempClassLoader) { classLoader = aClassLoader; isTempClassLoader = isThisTempClassLoader; } /** * INTERNAL: * @return the classLoader */ public ClassLoader getClassLoader() { return classLoader; } /** * INTERNAL: * @return boolean is true if this classLoader is temporary
*/ public boolean isTempClassLoader() { return isTempClassLoader; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/linkedlist/0000775000000000000000000000000012216174372023651 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/linkedlist/LinkedNode.java0000664000000000000000000000262712216173126026533 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper.linkedlist; /** * INTERNAL: * A custom implementation of a linked list node for use in the ExposedNodeLinkedList. * @author Gordon Yorke * @since 10.0.3 * @see ExposedNodeLinkedList */ public class LinkedNode { Object contents; LinkedNode next; LinkedNode previous; LinkedNode(Object object, LinkedNode next, LinkedNode previous) { this.contents = object; this.next = next; this.previous = previous; } public Object getContents() { return contents; } public void setContents(Object contents) { this.contents = contents; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/linkedlist/ExposedNodeLinkedList.java0000664000000000000000000002315412216173126030715 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper.linkedlist; import java.util.*; import org.eclipse.persistence.exceptions.ValidationException; /** * INTERNAL: * A custom implementation of a linked list. This list exposes the linked nodes * directly to the developer. It allows nodes to be referenced in code for quick * list manipulation (ie reshuffle, remove, or queuing) * It is specifically used in the EclipseLink cache write lock mechanism in order * to allow quick removal of objects from the list while still providing the getFirst() * addLast() functionality of a queue. The alternative java classes LinkedList, LinkedHashMap * do not provide both functional requirements. * @author Gordon Yorke * @since 10.0.3 * @see org.eclipse.persistence.internal.helper.linkedlist.LinkedNode */ public class ExposedNodeLinkedList implements List { private transient LinkedNode header; private transient int size; /** * Constructs an empty list. */ public ExposedNodeLinkedList() { this.size = 0; this.header = new LinkedNode(null, null, null); header.next = header; header.previous = header; } // Bunch of List methods not currently implemented. 
public Object[] toArray(Object[] array) { throw ValidationException.operationNotSupported("toArray"); } public Object[] toArray() { throw ValidationException.operationNotSupported("toArray"); } public Object set(int index, Object value) { throw ValidationException.operationNotSupported("set"); } public ListIterator listIterator(int index) { throw ValidationException.operationNotSupported("listIterator"); } public ListIterator listIterator() { throw ValidationException.operationNotSupported("listIterator"); } public Iterator iterator() { throw ValidationException.operationNotSupported("iterator"); } public List subList(int start, int end) { throw ValidationException.operationNotSupported("subList"); } public boolean retainAll(Collection collection) { throw ValidationException.operationNotSupported("retainAll"); } public boolean removeAll(Collection collection) { throw ValidationException.operationNotSupported("removeAll"); } public boolean containsAll(Collection collection) { throw ValidationException.operationNotSupported("containsAll"); } public boolean addAll(Collection collection) { throw ValidationException.operationNotSupported("addAll"); } public boolean addAll(int index, Collection collection) { throw ValidationException.operationNotSupported("addAll"); } public boolean remove(Object object) { throw ValidationException.operationNotSupported("remove"); } public boolean add(Object object) { addLast(object); return true; } public int lastIndexOf(Object object) { throw ValidationException.operationNotSupported("lastIndexOf"); } public void add(int index, Object object) { throw ValidationException.operationNotSupported("add"); } public Object remove(int index) { throw ValidationException.operationNotSupported("remove"); } public Object get(int index) { throw ValidationException.operationNotSupported("get"); } public boolean isEmpty() { return size() == 0; } /** * Returns the first contents in this list. * * @return the first contents in this list. 
Null if this list is empty. */ public Object getFirst() { if (size == 0) { return null; } return header.next.contents; } /** * Returns the last contents in this list. * * @return the last contents in this list. Null if this list is empty. */ public Object getLast() { if (size == 0) { return null; } return header.previous.contents; } /** * Removes and returns the first contents from this list. * * @return the first contents from this list. * @throws NoSuchElementException if this list is empty. */ public Object removeFirst() { if (size != 0) { Object first = header.next.contents; remove(header.next); return first; } return null; } /** * Removes and returns the last contents from this list. * * @return the last contents from this list. * @throws NoSuchElementException if this list is empty. */ public Object removeLast() { if (size != 0) { Object last = header.previous.contents; remove(header.previous); return last; } return null; } /** * Inserts the given contents at the beginning of this list. * * @param o the contents to be inserted at the beginning of this list. */ public LinkedNode addFirst(Object o) { return addAfter(o, header); } /** * Appends the given contents to the end of this list. (Identical in * function to the add method; included only for consistency.) * * @param o the contents to be inserted at the end of this list. */ public LinkedNode addLast(Object o) { return addAfter(o, header.previous); } /** * Returns true if this list contains the specified contents. * More formally, returns true if and only if this list contains * at least one contents e such that (o==null ? e==null * : o.equals(e)). * * @param o contents whose presence in this list is to be tested. * @return true if this list contains the specified contents. */ public boolean contains(Object o) { return indexOf(o) != -1; } /** * Returns the number of contents in this list. * * @return the number of contents in this list. 
*/ public int size() { return size; } /** * Removes all of the contents from this list. */ public void clear() { header.next = header; header.previous = header; size = 0; } /** * Returns the index in this list of the first occurrence of the * specified contents, or -1 if the List does not contain this * contents. More formally, returns the lowest index i such that * (o==null ? get(i)==null : o.equals(get(i))), or -1 if * there is no such index. * * @param o contents to search for. * @return the index in this list of the first occurrence of the * specified contents, or -1 if the list does not contain this * contents. */ public int indexOf(Object o) { int index = 0; if (o == null) { for (LinkedNode n = header.next; n != header; n = n.next) { if (n.contents == null) { return index; } index++; } } else { for (LinkedNode n = header.next; n != header; n = n.next) { if (o.equals(n.contents)) { return index; } index++; } } return -1; } private LinkedNode addAfter(Object o, LinkedNode n) { LinkedNode newNode = new LinkedNode(o, n.next, n); newNode.previous.next = newNode; newNode.next.previous = newNode; size++; return newNode; } /** * Allows a node to be efficiently removed. */ public void remove(LinkedNode n) { if (n == header) { throw new NoSuchElementException(); } else if ((n.previous == null) || (n.next == null)) { // Handles case of node having already been removed. return; } n.previous.next = n.next; n.next.previous = n.previous; // Also clear the nodes references to know that it has been removed. n.previous = null; n.next = null; n.contents = null; size--; } /** * Allows a node to be efficiently moved first. */ public void moveFirst(LinkedNode node) { if (node == header) { throw new NoSuchElementException(); } else if ((node.previous == null) || (node.next == null)) { // Handles case of node having already been removed. 
size++; } else { node.previous.next = node.next; node.next.previous = node.previous; } node.next = header.next; node.previous = header; header.next = node; node.next.previous = node; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/JDK15Platform.java0000664000000000000000000001101412216173126024626 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.sql.SQLException; import java.util.regex.Pattern; import java.util.concurrent.ConcurrentHashMap; /** * INTERNAL: * Implements operations specific to JDK 1.5 */ public class JDK15Platform implements JDKPlatform { /** * PERF: The like expression compiled Pattern objects are cached * to avoid re-compilation on every usage. */ protected static ConcurrentHashMap patternCache = new ConcurrentHashMap(); /** * PERF: The regular expression compiled Pattern objects are cached * to avoid re-compilation on every usage. */ protected static ConcurrentHashMap regexpPatternCache = new ConcurrentHashMap(); /** * INTERNAL: * An implementation of in memory queries with Like which uses the * regular expression framework. 
*/
    public Boolean conformLike(Object left, Object right) {
        // Two nulls conform; one null does not.
        if ((left == null) && (right == null)) {
            return Boolean.TRUE;
        } else if ((left == null) || (right == null)) {
            return Boolean.FALSE;
        }
        left = String.valueOf(left);
        right = String.valueOf(right);
        // PERF: check the pattern cache first, keyed on the original LIKE string
        // so no translation is needed on a cache hit.
        Pattern pattern = (Pattern)patternCache.get(right);
        if (pattern == null) {
            // Bug 3936427 - escape regular-expression reserved characters
            // (e.g. ? becomes \?) before compiling the LIKE pattern.
            String converted = Helper.convertLikeToRegex((String)right);
            pattern = Pattern.compile(converted);
            // Bound the cache at ~100 entries; the evicted entry is arbitrary
            // (whichever the key-set iterator yields first).
            if (patternCache.size() > 100) {
                patternCache.remove(patternCache.keySet().iterator().next());
            }
            patternCache.put(right, pattern);
        }
        return pattern.matcher((String)left).matches() ? Boolean.TRUE : Boolean.FALSE;
    }

    /**
     * INTERNAL:
     * An implementation of in memory queries with Regexp which uses the
     * regular expression framework.
     */
    public Boolean conformRegexp(Object left, Object right) {
        if ((left == null) && (right == null)) {
            return Boolean.TRUE;
        } else if ((left == null) || (right == null)) {
            return Boolean.FALSE;
        }
        left = String.valueOf(left);
        right = String.valueOf(right);
        // PERF: First check the pattern cache for the pattern.
        // Note that the original string is the key, to avoid having to translate it first.
        Pattern pattern = (Pattern)regexpPatternCache.get(right);
        if (pattern == null) {
            pattern = Pattern.compile((String)right);
            // Ensure cache does not grow beyond 100.
if (regexpPatternCache.size() > 100) { regexpPatternCache.remove(regexpPatternCache.keySet().iterator().next()); } regexpPatternCache.put(right, pattern); } boolean match = pattern.matcher((String)left).matches(); if (match) { return Boolean.TRUE; } else { return Boolean.FALSE; } } /** * Indicates whether the passed object implements java.sql.SQLXML introduced in jdk 1.6 */ public boolean isSQLXML(Object object) { return false; } /** * Casts the passed object to SQLXML and calls getString and free methods */ public String getStringAndFreeSQLXML(Object sqlXml) throws SQLException { return null; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/BasicTypeHelperImpl.java0000664000000000000000000004131712216173126026221 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Date; import java.util.HashSet; import java.util.HashMap; import java.util.Map; import java.util.Set; import org.eclipse.persistence.descriptors.ClassDescriptor; /** * INTERNAL * This class is a helper class providing type information. * Its implementation uses Java reflection to calculate the type information. */ public class BasicTypeHelperImpl { /** Set of numeric types and its wrapper classes. 
*/ private static Set numericTypes = new HashSet(); /** Set of integral types and its wrapper classes. */ private static Set integralTypes = new HashSet(); /** Set of floating point types and its wrapper classes. */ private static Set floatingPointTypes = new HashSet(); /** Set of date classes. */ private static Set dateClasses = new HashSet(); /** Maps primtives types to their wrapper classes. */ private static Map primitiveToWrapper = new HashMap(); /** Maps wrapper classes to their primitive types. */ private static Map wrapperToPrimitive = new HashMap(); static { // Initialize set of integral types plus their wrapper classes integralTypes.add(byte.class); integralTypes.add(Byte.class); integralTypes.add(short.class); integralTypes.add(Short.class); integralTypes.add(char.class); integralTypes.add(Character.class); integralTypes.add(int.class); integralTypes.add(Integer.class); integralTypes.add(long.class); integralTypes.add(Long.class); // Initialize set of floating point types plus their wrapper classes floatingPointTypes.add(float.class); floatingPointTypes.add(Float.class); floatingPointTypes.add(double.class); floatingPointTypes.add(Double.class); // Initialize set of floating point types plus their wrapper classes dateClasses.add(java.util.Date.class); dateClasses.add(java.util.Calendar.class); dateClasses.add(java.sql.Date.class); dateClasses.add(java.sql.Time.class); dateClasses.add(java.sql.Timestamp.class); numericTypes.addAll(integralTypes); numericTypes.addAll(floatingPointTypes); numericTypes.add(java.math.BigDecimal.class); numericTypes.add(java.math.BigInteger.class); // Initialize mapping primitives to their wrapper classes primitiveToWrapper.put(boolean.class, Boolean.class); primitiveToWrapper.put(byte.class, Byte.class); primitiveToWrapper.put(short.class, Short.class); primitiveToWrapper.put(char.class, Character.class); primitiveToWrapper.put(int.class, Integer.class); primitiveToWrapper.put(long.class, Long.class); 
primitiveToWrapper.put(float.class, Float.class); primitiveToWrapper.put(double.class, Double.class); // Initialize mapping wrapper classes to their primitives wrapperToPrimitive.put(Boolean.class, boolean.class); wrapperToPrimitive.put(Byte.class, byte.class); wrapperToPrimitive.put(Short.class, short.class); wrapperToPrimitive.put(Character.class, char.class); wrapperToPrimitive.put(Integer.class, int.class); wrapperToPrimitive.put(Long.class, long.class); wrapperToPrimitive.put(Float.class, float.class); wrapperToPrimitive.put(Double.class, double.class); } /** A singleton for this class */ private static final BasicTypeHelperImpl singleton = new BasicTypeHelperImpl(); /** Gets instance of this class */ public static BasicTypeHelperImpl getInstance() { return singleton; } /** Returns the name of the specified type. */ public String getTypeName(Object type) { Class clazz = getJavaClass(type); return (clazz == null) ? null : clazz.getName(); } /** Returns the class object of the specified type. 
*/ public Class getJavaClass(Object type) { Class clazz = null; if (type instanceof Class) { clazz = (Class)type; } else if (type instanceof ClassDescriptor) { clazz = ((ClassDescriptor)type).getJavaClass(); } return clazz; } /** Returns the Object type representation.*/ public Object getObjectType() { return Object.class; } /** Returns the boolean type representation.*/ public Object getBooleanType() { return boolean.class; } /** Returns the Boolean class representation.*/ public Object getBooleanClassType() { return Boolean.class; } /** Returns the char type representation.*/ public Object getCharType() { return char.class; } /** Returns the Date type representation.*/ public Object getSQLDateType() { return java.sql.Date.class; } /** Returns the Time type representation.*/ public Object getTimeType() { return java.sql.Time.class; } /** Returns the timestamp type representation.*/ public Object getTimestampType() { return java.sql.Timestamp.class; } /** Returns the Character class representation.*/ public Object getCharacterClassType() { return Character.class; } /** Returns the byte type representation.*/ public Object getByteType() { return byte.class; } /** Returns the Byte class representation.*/ public Object getByteClassType() { return Byte.class; } /** Returns the short type representation.*/ public Object getShortType() { return short.class; } /** Returns the Short class representation.*/ public Object getShortClassType() { return Short.class; } /** Returns the int type representation.*/ public Object getIntType() { return int.class; } /** Returns the Inter class representation.*/ public Object getIntegerClassType() { return Integer.class; } /** Returns the long type representation.*/ public Object getLongType() { return long.class; } /** Returns the type representation of class Long.*/ public Object getLongClassType() { return Long.class; } /** Returns the type representation of class Map.Entry.*/ public Object getMapEntryType(){ return Map.Entry.class; 
} /** Returns the float type representation.*/ public Object getFloatType() { return float.class; } /** Returns the type representation of class Float.*/ public Object getFloatClassType() { return Float.class; } /** Returns the double type representation.*/ public Object getDoubleType() { return double.class; } /** Returns the type representation of class Double.*/ public Object getDoubleClassType() { return Double.class; } /** Returns the String type representation.*/ public Object getStringType() { return String.class; } /** Returns the BigInteger type representation.*/ public Object getBigIntegerType() { return BigInteger.class; } /** Returns the BigDecimal type representation.*/ public Object getBigDecimalType() { return BigDecimal.class; } /** Returns the java.util.Date type representation.*/ public Object getDateType() { return Date.class; } /** */ public boolean isEnumType(Object type) { Class clazz = getJavaClass(type); return (clazz != null) && (clazz.isEnum()); } /** * Returns true if the class is any numeric type. */ public boolean isNumericType(Object type) { return numericTypes.contains(type); } /** * Returns true if the specified type represents an * integral type or a wrapper class of an integral type. */ public boolean isIntegralType(Object type) { return integralTypes.contains(type); } /** * Returns true if the specified type represents an * floating point type or a wrapper class of an floating point type. */ public boolean isFloatingPointType(Object type) { return floatingPointTypes.contains(type); } /** Returns true if the specified type is a wrapper class. 
*/ public boolean isWrapperClass(Object type) { return wrapperToPrimitive.containsKey(type); } /** * Returns true if type is the boolean primitive type or the Boolean wrapper class */ public boolean isBooleanType(Object type) { return (type == getBooleanType()) || (type == getBooleanClassType()); } /** * Returns true if type is the char primitive type or the Character wrapper class */ public boolean isCharacterType(Object type) { return (type == getCharType()) || (type == getCharacterClassType()); } /** * Returns true if type is the byte primitive type or the Byte wrapper class */ public boolean isByteType(Object type) { return (type == getByteType()) || (type == getByteClassType()); } /** * Returns true if type is the short primitive type or the Short wrapper class */ public boolean isShortType(Object type) { return (type == getShortType()) || (type == getShortClassType()); } /** * Returns true if type is the int primitive type or the Integer wrapper class */ public boolean isIntType(Object type) { return (type == getIntType()) || (type == getIntegerClassType()); } /** * Returns true if type is the int primitive type or the Integer wrapper class */ public boolean isIntegerType(Object type) { return isIntType(type); } /** * Returns true if type is the long primitive type or the Long wrapper class */ public boolean isLongType(Object type) { return (type == getLongType()) || (type == getLongClassType()); } /** * Returns true if type is the float primitive type or the Float wrapper class */ public boolean isFloatType(Object type) { return (type == getFloatType()) || (type == getFloatClassType()); } /** * Returns true if type is the double primitive type or the Double wrapper class */ public boolean isDoubleType(Object type) { return (type == getDoubleType()) || (type == getDoubleClassType()); } /** Returns true if the specified type represents java.lang.String. 
*/ public boolean isStringType(Object type) { return type == getStringType(); } /** */ public boolean isDateClass(Object type) { return dateClasses.contains(type); } /** */ public boolean isBigIntegerType(Object type) { return type == getBigIntegerType(); } /** */ public boolean isBigDecimalType(Object type) { return type == getBigDecimalType(); } /** Returns true if the specified type denotes an orderable type */ public boolean isOrderableType(Object type) { return true; } /** * convenience method for java's isAssignableFrom that allows auto-boxing, taking java class or a descriptor as arguments. * It will return true if both sides are in the same category (Numberic, Date or Boolean) otherwise it will use java's * isAssignableFrom on the argument classes. Returns true if either arguments is null. */ public boolean isAssignableFrom(Object left, Object right) { if ((left == null) || (right == null)) { return true; } // check for identical types if (left == right) { return true; } if ((left == ClassConstants.OBJECT) || (right == ClassConstants.OBJECT)) { return true; } // numeric types are compatible else if (isNumericType(left) && isNumericType(right)) { return true; } // date types are compatible else if (isDateClass(left) && isDateClass(right)) { return true; } // handle boolean and Boolean else if (isBooleanType(left) && isBooleanType(right)) { return true; } // check for inheritance and implements return getJavaClass(left).isAssignableFrom(getJavaClass(right)); } /** * convenience method for java's isAssignableFrom that allows auto-boxing but follows more closely Java's * Class.isAssignableFrom method results, and returns true if either arguments is null. 
*/ public boolean isStrictlyAssignableFrom(Object left, Object right) { if ((left == null) || (right == null)) { return true; } // check for identical types if (left == right) { return true; } if (left == ClassConstants.OBJECT) { return true; } Class leftClass = getJavaClass(left); Class rightClass = getJavaClass(right); if ( leftClass.isPrimitive() ){ leftClass = this.getWrapperClass(leftClass); } if ( rightClass.isPrimitive() ){ rightClass = this.getWrapperClass(rightClass); } // check for inheritance and implements return leftClass.isAssignableFrom(rightClass); } /** Implements binary numeric promotion as defined in JLS extended by * wrapper classes, BigDecimal and BigInteger. */ public Object extendedBinaryNumericPromotion(Object left, Object right) { if ((left == null) || (right == null) || !isNumericType(left) || !isNumericType(right)) { return null; } // handle BigDecimal if (isBigDecimalType(left) || isBigDecimalType(right)) { return getBigDecimalType(); } // handle BigInteger if (isBigIntegerType(left)) { return isFloatingPointType(right) ? right : getBigIntegerType(); } if (isBigIntegerType(right)) { return isFloatingPointType(left) ? left : getBigIntegerType(); } // check wrapper classes boolean wrapper = false; if (isWrapperClass(left)) { wrapper = true; left = getPrimitiveType(left); } if (isWrapperClass(right)) { wrapper = true; right = getPrimitiveType(right); } Object promoted = binaryNumericPromotion(left, right); if (wrapper && promoted != null) { promoted = getWrapperClass(promoted); } return promoted; } // Helper methods /** Returns the primitive for the specified wrapper class. */ protected Class getPrimitiveType(Object wrapper) { return wrapperToPrimitive.get(wrapper); } /** Returns the wrapper class for the specified primitive. */ protected Class getWrapperClass(Object primitive) { return primitiveToWrapper.get(primitive); } /** Implements binary numeric promotion as defined in JLS. 
*/ protected Object binaryNumericPromotion(Object left, Object right) { if ((left == null) || (right == null)) { return null; } Object type = null; if (left == getDoubleType() || right == getDoubleType()) { type = getDoubleType(); } else if (left == getFloatType() || right == getFloatType()) { type = getFloatType(); } else if (left == getLongType() || right == getLongType()) { type = getLongType(); } else if (isIntegralType(left) && isIntegralType(right)) { type = getIntType(); } return type; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/QueryCounter.java0000664000000000000000000000236612216173126025022 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** * INTERNAL: * This counter is used by the sessions to assign individual id's to all queries. * It is not synchronized because ++ with volatile is atomic, and queries executing at the same time do not require to refresh twice. 
* It is part of the fix for Bug#2698903 which arose from the fix for BUG#2612628 */ public class QueryCounter { private static volatile long count = 0; public static long getCount() { return ++count; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/InvalidObject.java0000664000000000000000000000343212216173126025065 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** * Purpose:Indicates an object that should not be returned from * query execution. *

* When conforming if checkEarly return finds a matching object by exact primary * key, but that object is deleted, want to return null from query execution. *

* However if null is returned from checkEarly return that will indicate that * no object was found and to go to the database. Hence returning null is not * enough, something else needed to be returned, indicating not only that * checkEarlyReturn had failed but query execution should not proceed. *

* Can be used in other instances where returning null is ambiguous. *

* Implements singleton pattern * @author Stephen McRitchie */ public class InvalidObject { public static final InvalidObject instance = new InvalidObject(); private InvalidObject() { } /** * @return singleton invalid object. */ public static InvalidObject instance() { return instance; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/NoConversion.java0000664000000000000000000000153112216173126024770 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; /** * INTERNAL: */ public interface NoConversion { } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/ConversionManager.java0000664000000000000000000017060112216173126025773 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.math.*; import java.net.URL; import java.util.*; import java.io.*; import java.security.AccessController; import java.security.PrivilegedActionException; import java.sql.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.core.helper.CoreConversionManager; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedGetClassLoaderForClass; import org.eclipse.persistence.internal.security.PrivilegedGetContextClassLoader; /** *

* Purpose: Contains the conversion routines for some common classes in the system. * Primarily used to convert objects from a given database type to a different type in Java. * Uses a singleton instance, this is also used from the platform. *

* Responsibilities: *

    *
  • Execute the appropriate conversion routine. *
*/ public class ConversionManager extends CoreConversionManager implements Serializable, Cloneable { protected Map defaultNullValues; /** * This flag is here if the Conversion Manager should use the class loader on the * thread when loading classes. */ protected boolean shouldUseClassLoaderFromCurrentThread = false; protected static ConversionManager defaultManager; /** Allows the setting of a global default if no instance-level loader is set. */ private static ClassLoader defaultLoader; protected ClassLoader loader; /** Store the list of Classes that can be converted to from the key. */ protected Hashtable dataTypesConvertedFromAClass; /** Store the list of Classes that can be converted from to the key. */ protected Hashtable dataTypesConvertedToAClass; public ConversionManager() { this.dataTypesConvertedFromAClass = new Hashtable(); this.dataTypesConvertedToAClass = new Hashtable(); } /** * INTERNAL: */ public Object clone() { try { return super.clone(); } catch (CloneNotSupportedException exception) { return null; } } /** * Convert the object to the appropriate type by invoking the appropriate * ConversionManager method * @param object - the object that must be converted * @param javaClass - the class that the object must be converted to * @exception - ConversionException, all exceptions will be thrown as this type. * @return - the newly converted object */ @Override public Object convertObject(Object sourceObject, Class javaClass) throws ConversionException { if (sourceObject == null) { // Check for default null conversion. // i.e. allow for null to be defaulted to "", or 0 etc. 
if (javaClass != null ) { return getDefaultNullValue(javaClass); } else { return null; } } if ((sourceObject.getClass() == javaClass) || (javaClass == null) || (javaClass == ClassConstants.OBJECT) || (javaClass == ClassConstants.BLOB) || (javaClass == ClassConstants.CLOB)) { return sourceObject; } try { if (javaClass == ClassConstants.STRING) { return convertObjectToString(sourceObject); } else if (javaClass == ClassConstants.UTILDATE) { return convertObjectToUtilDate(sourceObject); } else if (javaClass == ClassConstants.SQLDATE) { return convertObjectToDate(sourceObject); } else if (javaClass == ClassConstants.TIME) { return convertObjectToTime(sourceObject); } else if (javaClass == ClassConstants.TIMESTAMP) { return convertObjectToTimestamp(sourceObject); } else if ((javaClass == ClassConstants.CALENDAR) || (javaClass == ClassConstants.GREGORIAN_CALENDAR)) { return convertObjectToCalendar(sourceObject); } else if ((javaClass == ClassConstants.CHAR) || (javaClass == ClassConstants.PCHAR && !(sourceObject instanceof Character))) { return convertObjectToChar(sourceObject); } else if ((javaClass == ClassConstants.INTEGER) || (javaClass == ClassConstants.PINT && !(sourceObject instanceof Integer))) { return convertObjectToInteger(sourceObject); } else if ((javaClass == ClassConstants.DOUBLE) || (javaClass == ClassConstants.PDOUBLE && !(sourceObject instanceof Double))) { return convertObjectToDouble(sourceObject); } else if ((javaClass == ClassConstants.FLOAT) || (javaClass == ClassConstants.PFLOAT && !(sourceObject instanceof Float))) { return convertObjectToFloat(sourceObject); } else if ((javaClass == ClassConstants.LONG) || (javaClass == ClassConstants.PLONG && !(sourceObject instanceof Long))) { return convertObjectToLong(sourceObject); } else if ((javaClass == ClassConstants.SHORT) || (javaClass == ClassConstants.PSHORT && !(sourceObject instanceof Short))) { return convertObjectToShort(sourceObject); } else if ((javaClass == ClassConstants.BYTE) || (javaClass 
== ClassConstants.PBYTE && !(sourceObject instanceof Byte))) { return convertObjectToByte(sourceObject); } else if (javaClass == ClassConstants.BIGINTEGER) { return convertObjectToBigInteger(sourceObject); } else if (javaClass == ClassConstants.BIGDECIMAL) { return convertObjectToBigDecimal(sourceObject); } else if (javaClass == ClassConstants.NUMBER) { return convertObjectToNumber(sourceObject); } else if ((javaClass == ClassConstants.BOOLEAN) || (javaClass == ClassConstants.PBOOLEAN && !(sourceObject instanceof Boolean))) { return convertObjectToBoolean(sourceObject); } else if (javaClass == ClassConstants.APBYTE) { return convertObjectToByteArray(sourceObject); } else if (javaClass == ClassConstants.ABYTE) { return convertObjectToByteObjectArray(sourceObject); } else if (javaClass == ClassConstants.APCHAR) { return convertObjectToCharArray(sourceObject); } else if (javaClass == ClassConstants.ACHAR) { return convertObjectToCharacterArray(sourceObject); } else if ((sourceObject.getClass() == ClassConstants.STRING) && (javaClass == ClassConstants.CLASS)) { return convertObjectToClass(sourceObject); } else if(javaClass == ClassConstants.URL_Class) { return convertObjectToUrl(sourceObject); } } catch (ConversionException ce) { throw ce; } catch (Exception e) { throw ConversionException.couldNotBeConverted(sourceObject, javaClass, e); } // Check if object is instance of the real class for the primitive class. 
if ((((javaClass == ClassConstants.PBOOLEAN) && (sourceObject instanceof Boolean) ) || ((javaClass == ClassConstants.PLONG) && (sourceObject instanceof Long) ) || ((javaClass == ClassConstants.PINT) && (sourceObject instanceof Integer) ) || ((javaClass == ClassConstants.PFLOAT) && (sourceObject instanceof Float)) || ((javaClass == ClassConstants.PDOUBLE) && (sourceObject instanceof Double) ) || ((javaClass == ClassConstants.PBYTE) && (sourceObject instanceof Byte)) || ((javaClass == ClassConstants.PCHAR) && (sourceObject instanceof Character)) || ((javaClass == ClassConstants.PSHORT) && (sourceObject instanceof Short)))) { return sourceObject; } // Delay this check as poor performance. if (javaClass.isInstance(sourceObject)) { return sourceObject; } if (ClassConstants.NOCONVERSION.isAssignableFrom(javaClass)) { return sourceObject; } throw ConversionException.couldNotBeConverted(sourceObject, javaClass); } /** * Build a valid instance of BigDecimal from the given sourceObject * @param sourceObject Valid instance of String, BigInteger, any Number */ protected BigDecimal convertObjectToBigDecimal(Object sourceObject) throws ConversionException { BigDecimal bigDecimal = null; try { if (sourceObject instanceof String) { bigDecimal = new BigDecimal((String)sourceObject); } else if (sourceObject instanceof BigInteger) { bigDecimal = new BigDecimal((BigInteger)sourceObject); } else if (sourceObject instanceof Number) { // Doubles do not maintain scale, because of this it is // impossible to distinguish between 1 and 1.0. In order to // maintain backwards compatibility both 1 and 1.0 will be // treated as BigDecimal(1). 
String numberString = String.valueOf(sourceObject); if(numberString.endsWith(".0") || numberString.contains(".0E+")) { bigDecimal = new BigDecimal(((Number)sourceObject).doubleValue()); } else { bigDecimal = new BigDecimal(numberString); } } else { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.BIGDECIMAL); } } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.BIGDECIMAL, exception); } return bigDecimal; } /** * Build a valid instance of BigInteger from the provided sourceObject. * @param sourceObject Valid instance of String, BigDecimal, or any Number */ protected BigInteger convertObjectToBigInteger(Object sourceObject) throws ConversionException { BigInteger bigInteger = null; try { if (sourceObject instanceof BigInteger) { bigInteger = (BigInteger)sourceObject; } else if (sourceObject instanceof String) { bigInteger = new BigInteger((String)sourceObject); } else if (sourceObject instanceof BigDecimal) { bigInteger = ((BigDecimal)sourceObject).toBigInteger(); } else if (sourceObject instanceof Number) { bigInteger = new BigInteger(String.valueOf(((Number)sourceObject).longValue())); } else if (sourceObject instanceof Byte[]) { Byte[] objectBytes = (Byte[])sourceObject; byte[] bytes = new byte[objectBytes.length]; for (int index = 0; index < objectBytes.length; index++) { bytes[index] = objectBytes[index].byteValue(); } bigInteger = new BigInteger(bytes); } else if (sourceObject instanceof byte[]) { bigInteger = new BigInteger((byte[]) sourceObject); } else { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.BIGINTEGER); } } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.BIGINTEGER, exception); } return bigInteger; } /** * Build a valid instance of Boolean from the source object. 
* 't', 'T', "true", "TRUE", 1,'1' -> Boolean(true) * 'f', 'F', "false", "FALSE", 0 ,'0' -> Boolean(false) */ protected Boolean convertObjectToBoolean(Object sourceObject) { if (sourceObject instanceof Character) { switch (Character.toLowerCase(((Character)sourceObject).charValue())) { case '1': case 't': return Boolean.TRUE; case '0': case 'f': return Boolean.FALSE; } } if (sourceObject instanceof String) { String stringValue = ((String)sourceObject).toLowerCase(); if (stringValue.equals("t") || stringValue.equals("true") || stringValue.equals("1")) { return Boolean.TRUE; } else if (stringValue.equals("f") || stringValue.equals("false") || stringValue.equals("0")) { return Boolean.FALSE; } } if (sourceObject instanceof Number) { int intValue = ((Number)sourceObject).intValue(); if (intValue != 0) { return Boolean.TRUE; } else if (intValue == 0) { return Boolean.FALSE; } } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.BOOLEAN); } /** * Build a valid instance of Byte from the provided sourceObject * @param sourceObject Valid instance of String or any Number * @caught exception The Byte(String) constructor throws a * NumberFormatException if the String does not contain a * parsable byte. * */ protected Byte convertObjectToByte(Object sourceObject) throws ConversionException { try { if (sourceObject instanceof String) { return Byte.valueOf((String)sourceObject); } if (sourceObject instanceof Number) { return Byte.valueOf(((Number)sourceObject).byteValue()); } } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.BYTE, exception); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.BYTE); } /** * Build a valid instance of a byte array from the given object. * This method does hex conversion of the string values. Some * databases have problems with storing blobs unless the blob * is stored as a hex string. 
*/ protected byte[] convertObjectToByteArray(Object sourceObject) throws ConversionException { //Bug#3128838 Used when converted to Byte[] if (sourceObject instanceof byte[]) { return (byte[])sourceObject; //Related to Bug#3128838. Add support to convert to Byte[] } else if (sourceObject instanceof Byte[]) { Byte[] objectBytes = (Byte[])sourceObject; byte[] bytes = new byte[objectBytes.length]; for (int index = 0; index < objectBytes.length; index++) { bytes[index] = objectBytes[index].byteValue(); } return bytes; } else if (sourceObject instanceof String) { return Helper.buildBytesFromHexString((String)sourceObject); } else if (sourceObject instanceof Blob) { Blob blob = (Blob)sourceObject; try { return blob.getBytes(1L, (int)blob.length()); } catch (SQLException exception) { throw DatabaseException.sqlException(exception); } } else if (sourceObject instanceof InputStream) { InputStream inputStream = (InputStream)sourceObject; ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); try { int tempInt = inputStream.read(); while (tempInt != -1) { outputStream.write(tempInt); tempInt = inputStream.read(); } return outputStream.toByteArray(); } catch (IOException ioException) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.APBYTE, ioException); } } else if (sourceObject instanceof BigInteger) { return ((BigInteger)sourceObject).toByteArray(); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.APBYTE); } /** * Build a valid instance of a Byte array from the given object. * This method does hex conversion of the string values. Some * databases have problems with storing blobs unless the blob * is stored as a hex string. 
*/ protected Byte[] convertObjectToByteObjectArray(Object sourceObject) throws ConversionException { byte[] bytes = convertObjectToByteArray(sourceObject); Byte[] objectBytes = new Byte[bytes.length]; for (int index = 0; index < bytes.length; index++) { objectBytes[index] = Byte.valueOf(bytes[index]); } return objectBytes; } /** * Build a valid instance of java.util.Calendar from the given source object. * @param sourceObject Valid instance of java.util.Date, String, java.sql.Timestamp, or Long */ protected Calendar convertObjectToCalendar(Object sourceObject) throws ConversionException { if (sourceObject instanceof Calendar) { return (Calendar)sourceObject; } else if (sourceObject instanceof java.util.Date) { // PERF: Avoid double conversion for date subclasses. return Helper.calendarFromUtilDate((java.util.Date)sourceObject); } return Helper.calendarFromUtilDate(convertObjectToUtilDate(sourceObject)); } /** * Build a valid instance of Character from the provided sourceObject. * @param sourceObject Valid instance of String or any Number */ protected Character convertObjectToChar(Object sourceObject) throws ConversionException { if (sourceObject instanceof String) { if (((String)sourceObject).length() < 1) { // ELBug336192 - Return default null value of char instead of returning null. return (Character)getDefaultNullValue(ClassConstants.PCHAR); } return Character.valueOf(((String)sourceObject).charAt(0)); } if (sourceObject instanceof Number) { return Character.valueOf((char)((Number)sourceObject).byteValue()); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.CHAR); } /** * Build a valid instance of a Character array from the given object. 
*/ protected Character[] convertObjectToCharacterArray(Object sourceObject) throws ConversionException { String stringValue = convertObjectToString(sourceObject); Character[] chars = new Character[stringValue.length()]; for (int index = 0; index < stringValue.length(); index++) { chars[index] = Character.valueOf(stringValue.charAt(index)); } return chars; } /** * Build a valid instance of a char array from the given object. */ protected char[] convertObjectToCharArray(Object sourceObject) throws ConversionException { if (sourceObject instanceof Character[]) { Character[] objectChars = (Character[])sourceObject; char[] chars = new char[objectChars.length]; for (int index = 0; index < objectChars.length; index++) { chars[index] = objectChars[index].charValue(); } return chars; } String stringValue = convertObjectToString(sourceObject); char[] chars = new char[stringValue.length()]; for (int index = 0; index < stringValue.length(); index++) { chars[index] = stringValue.charAt(index); } return chars; } /** * Build a valid Class from the string that is passed in * @param sourceObject Valid instance of String */ protected Class convertObjectToClass(Object sourceObject) throws ConversionException { Class theClass = null; if (!(sourceObject instanceof String)) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.CLASS); } try { // bug # 2799318 theClass = getPrimitiveClass((String)sourceObject); if (theClass == null) { theClass = Class.forName((String)sourceObject, true, getLoader()); } } catch (Exception exception) { throw ConversionException.couldNotBeConvertedToClass(sourceObject, ClassConstants.CLASS, exception); } return theClass; } /** * Convert the object to an instance of java.sql.Date. 
* @param sourceObject Object of type java.sql.Timestamp, java.util.Date, String or Long */ protected java.sql.Date convertObjectToDate(Object sourceObject) throws ConversionException { java.sql.Date date = null; if (sourceObject instanceof java.sql.Date) { date = (java.sql.Date)sourceObject;//Helper date is not caught on class check. } else if (sourceObject instanceof java.sql.Timestamp) { date = Helper.dateFromTimestamp((java.sql.Timestamp)sourceObject); } else if (sourceObject.getClass() == ClassConstants.UTILDATE) { date = Helper.sqlDateFromUtilDate((java.util.Date)sourceObject); } else if (sourceObject instanceof Calendar) { return Helper.dateFromCalendar((Calendar)sourceObject); } else if (sourceObject instanceof String) { date = Helper.dateFromString((String)sourceObject); } else if (sourceObject instanceof Long) { date = Helper.dateFromLong((Long)sourceObject); } else { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.SQLDATE); } return date; } /** * Convert the object to an instance of Double. * @param sourceObject Object of type String or Number. * @caught exception The Double(String) constructor throws a * NumberFormatException if the String does not contain a * parsable double. */ protected Double convertObjectToDouble(Object sourceObject) throws ConversionException { try { if (sourceObject instanceof String) { return Double.valueOf((String)sourceObject); } if (sourceObject instanceof Number) { return Double.valueOf(((Number)sourceObject).doubleValue()); } } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.DOUBLE, exception); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.DOUBLE); } /** * Build a valid Float instance from a String or another Number instance. * @caught exception The Float(String) constructor throws a * NumberFormatException if the String does not contain a * parsable Float. 
*/ protected Float convertObjectToFloat(Object sourceObject) throws ConversionException { try { if (sourceObject instanceof String) { return Float.valueOf((String)sourceObject); } if (sourceObject instanceof Number) { return Float.valueOf(((Number)sourceObject).floatValue()); } } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.FLOAT, exception); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.FLOAT); } /** * Build a valid Integer instance from a String or another Number instance. * @caught exception The Integer(String) constructor throws a * NumberFormatException if the String does not contain a * parsable integer. */ protected Integer convertObjectToInteger(Object sourceObject) throws ConversionException { try { if (sourceObject instanceof String) { return Integer.valueOf((String)sourceObject); } if (sourceObject instanceof Number) { return Integer.valueOf(((Number)sourceObject).intValue()); } if (sourceObject instanceof Boolean) { if (((Boolean)sourceObject).booleanValue()) { return Integer.valueOf(1); } else { return Integer.valueOf(0); } } } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.INTEGER, exception); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.INTEGER); } /** * Build a valid Long instance from a String or another Number instance. * @caught exception The Long(String) constructor throws a * NumberFormatException if the String does not contain a * parsable long. 
* */ protected Long convertObjectToLong(Object sourceObject) throws ConversionException { try { if (sourceObject instanceof String) { return Long.valueOf((String)sourceObject); } if (sourceObject instanceof Number) { return Long.valueOf(((Number)sourceObject).longValue()); } if (sourceObject instanceof java.util.Date) { return Long.valueOf(((java.util.Date)sourceObject).getTime()); } if (sourceObject instanceof java.util.Calendar) { return Long.valueOf(((java.util.Calendar)sourceObject).getTimeInMillis()); } if (sourceObject instanceof Boolean) { if (((Boolean)sourceObject).booleanValue()) { return Long.valueOf(1); } else { return Long.valueOf(0); } } } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.LONG, exception); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.LONG); } /** * INTERNAL: * Build a valid BigDecimal instance from a String or another * Number instance. BigDecimal is the most general type so is * must be returned when an object is converted to a number. * @caught exception The BigDecimal(String) constructor throws a * NumberFormatException if the String does not contain a * parsable BigDecimal. */ protected BigDecimal convertObjectToNumber(Object sourceObject) throws ConversionException { try { if (sourceObject instanceof String) { return new BigDecimal((String)sourceObject); } if (sourceObject instanceof Number) { return new BigDecimal(((Number)sourceObject).doubleValue()); } if (sourceObject instanceof Boolean) { if (((Boolean)sourceObject).booleanValue()) { return BigDecimal.valueOf(1); } else { return BigDecimal.valueOf(0); } } } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.NUMBER, exception); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.NUMBER); } /** * INTERNAL: * Build a valid Short instance from a String or another Number instance. 
* @caught exception The Short(String) constructor throws a * NumberFormatException if the String does not contain a * parsable short. */ protected Short convertObjectToShort(Object sourceObject) throws ConversionException { try { if (sourceObject instanceof String) { return Short.valueOf((String)sourceObject); } if (sourceObject instanceof Number) { return Short.valueOf(((Number)sourceObject).shortValue()); } if (sourceObject instanceof Boolean) { if (((Boolean)sourceObject).booleanValue()) { return Short.valueOf((short)1); } else { return Short.valueOf((short)0); } } } catch (Exception exception) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.SHORT, exception); } throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.SHORT); } /** * INTERNAL: * Converts objects to their string representations. java.util.Date * is converted to a timestamp first and then to a string. An array * of bytes is converted to a hex string. */ protected String convertObjectToString(Object sourceObject) throws ConversionException { if (sourceObject.getClass() == ClassConstants.UTILDATE) { return Helper.printTimestamp(Helper.timestampFromDate((java.util.Date)sourceObject)); } else if (sourceObject instanceof Calendar) { return Helper.printCalendar((Calendar)sourceObject); } else if (sourceObject instanceof java.sql.Timestamp) { return Helper.printTimestamp((java.sql.Timestamp)sourceObject); } else if (sourceObject instanceof java.sql.Date) { return Helper.printDate((java.sql.Date)sourceObject); } else if (sourceObject instanceof java.sql.Time) { return Helper.printTime((java.sql.Time)sourceObject); } else if (sourceObject instanceof byte[]) { return Helper.buildHexStringFromBytes((byte[])sourceObject); //Bug#3854296 Added support to convert Byte[], char[] and Character[] to String correctly } else if (sourceObject instanceof Byte[]) { return Helper.buildHexStringFromBytes(convertObjectToByteArray(sourceObject)); } else if (sourceObject 
instanceof char[]) { return new String((char[])sourceObject); } else if (sourceObject instanceof Character[]) { return new String(convertObjectToCharArray(sourceObject)); } else if (sourceObject instanceof Class) { return ((Class)sourceObject).getName(); } else if (sourceObject instanceof Character) { return sourceObject.toString(); } else if (sourceObject instanceof Clob) { Clob clob = (Clob)sourceObject; try { return clob.getSubString(1L, (int)clob.length()); } catch (SQLException exception) { throw DatabaseException.sqlException(exception); } } return sourceObject.toString(); } /** * INTERNAL: * Build a valid instance of java.sql.Time from the given source object. * @param sourceObject Valid instance of java.sql.Time, String, java.util.Date, java.sql.Timestamp, or Long */ protected java.sql.Time convertObjectToTime(Object sourceObject) throws ConversionException { java.sql.Time time = null; if (sourceObject instanceof java.sql.Time) { return (java.sql.Time)sourceObject;//Helper timestamp is not caught on class check. } if (sourceObject instanceof String) { time = Helper.timeFromString((String)sourceObject); } else if (sourceObject.getClass() == ClassConstants.UTILDATE) { time = Helper.timeFromDate((java.util.Date)sourceObject); } else if (sourceObject instanceof java.sql.Timestamp) { time = Helper.timeFromTimestamp((java.sql.Timestamp)sourceObject); } else if (sourceObject instanceof Calendar) { return Helper.timeFromCalendar((Calendar)sourceObject); } else if (sourceObject instanceof Long) { time = Helper.timeFromLong((Long)sourceObject); } else { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.TIME); } return time; } /** * INTERNAL: * Build a valid instance of java.sql.Timestamp from the given source object. 
* @param sourceObject Valid obejct of class java.sql.Timestamp, String, java.util.Date, or Long */ protected java.sql.Timestamp convertObjectToTimestamp(Object sourceObject) throws ConversionException { java.sql.Timestamp timestamp = null; if (sourceObject instanceof java.sql.Timestamp) { return (java.sql.Timestamp)sourceObject;// Helper timestamp is not caught on class check. } if (sourceObject instanceof String) { timestamp = Helper.timestampFromString((String)sourceObject); } else if (sourceObject instanceof java.util.Date) {// This handles all date and subclasses, sql.Date, sql.Time conversions. timestamp = Helper.timestampFromDate((java.util.Date)sourceObject); } else if (sourceObject instanceof Calendar) { return Helper.timestampFromCalendar((Calendar)sourceObject); } else if (sourceObject instanceof Long) { timestamp = Helper.timestampFromLong((Long)sourceObject); } else { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.TIMESTAMP); } return timestamp; } /** * INTERNAL: * Build a valid instance of java.net.URL from the given source object. * @param sourceObject Valid instance of java.net.URL, or String */ protected URL convertObjectToUrl(Object sourceObject) throws ConversionException { if(sourceObject.getClass() == ClassConstants.URL_Class) { return (URL) sourceObject; } else if (sourceObject.getClass() == ClassConstants.STRING) { try { return new URL((String) sourceObject); } catch(Exception e) { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.URL_Class, e); } } else { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.URL_Class); } } /** * INTERNAL: * Build a valid instance of java.util.Date from the given source object. 
* @param sourceObject Valid instance of java.util.Date, String, java.sql.Timestamp, or Long */ protected java.util.Date convertObjectToUtilDate(Object sourceObject) throws ConversionException { java.util.Date date = null; if (sourceObject.getClass() == java.util.Date.class) { date = (java.util.Date)sourceObject;//used when converting util.Date to Calendar } else if (sourceObject instanceof java.sql.Date) { date = Helper.utilDateFromSQLDate((java.sql.Date)sourceObject); } else if (sourceObject instanceof java.sql.Time) { date = Helper.utilDateFromTime((java.sql.Time)sourceObject); } else if (sourceObject instanceof String) { date = Helper.utilDateFromTimestamp(Helper.timestampFromString((String)sourceObject)); } else if (sourceObject instanceof java.sql.Timestamp) { date = Helper.utilDateFromTimestamp((java.sql.Timestamp)sourceObject); } else if (sourceObject instanceof Calendar) { return ((Calendar)sourceObject).getTime(); } else if (sourceObject instanceof Long) { date = Helper.utilDateFromLong((Long)sourceObject); } else if (sourceObject instanceof java.util.Date) { date = new java.util.Date(((java.util.Date) sourceObject).getTime()); } else { throw ConversionException.couldNotBeConverted(sourceObject, ClassConstants.UTILDATE); } return date; } /** * PUBLIC: * Resolve the given String className into a class using this * ConversionManager's classloader. */ public Class convertClassNameToClass(String className) throws ConversionException { return convertObjectToClass(className); } /** * A singleton conversion manager is used to handle generic conversions. * This should not be used for conversion under the session context, these must go through the platform. * This allows for the singleton to be customized through setting the default to a user defined subclass. 
*/
public static ConversionManager getDefaultManager() {
    // Lazily create the singleton; the first call enables use of the
    // thread-context class loader. Not synchronized -- concurrent first
    // calls may each construct a manager, last write wins.
    if (defaultManager == null) {
        setDefaultManager(new ConversionManager());
        defaultManager.setShouldUseClassLoaderFromCurrentThread(true);
    }
    return defaultManager;
}

/**
 * INTERNAL:
 * Allow for the null values for classes to be defaulted in one place.
 * Any nulls read from the database to be converted to the class will be given the specified null value.
 */
public Object getDefaultNullValue(Class theClass) {
    // Avoid allocating the map just to answer "no default registered".
    if (this.defaultNullValues == null) return null;
    return getDefaultNullValues().get(theClass);
}

/**
 * INTERNAL:
 * Allow for the null values for classes to be defaulted in one place.
 * Any nulls read from the database to be converted to the class will be given the specified null value.
 */
public Map getDefaultNullValues() {
    return defaultNullValues;
}

/**
 * INTERNAL:
 * Return the class loader used for conversions: the current thread's
 * context loader when enabled, otherwise the instance loader, falling
 * back to the default loader or this class's own loader.
 */
public ClassLoader getLoader() {
    if (shouldUseClassLoaderFromCurrentThread()) {
        if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
            try {
                return (ClassLoader)AccessController.doPrivileged(new PrivilegedGetContextClassLoader(Thread.currentThread()));
            } catch (PrivilegedActionException exception) {
                // should not be thrown
            }
        } else {
            return PrivilegedAccessHelper.getContextClassLoader(Thread.currentThread());
        }
    }
    if (loader == null) {
        if (defaultLoader == null) {
            //CR 2621
            // The local variable shadows the field until setLoader stores it.
            ClassLoader loader = null;
            if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
                try{
                    loader = (ClassLoader)AccessController.doPrivileged(new PrivilegedGetClassLoaderForClass(ClassConstants.ConversionManager_Class));
                } catch (PrivilegedActionException exc){
                    // will not be thrown
                }
            } else {
                loader = PrivilegedAccessHelper.getClassLoaderForClass(ClassConstants.ConversionManager_Class);
            }
            setLoader(loader);
        } else {
            setLoader(getDefaultLoader());
        }
    }
    return loader;
}

/**
 * INTERNAL
 */
public boolean hasDefaultNullValues(){
    return this.defaultNullValues != null;
}

/**
 * INTERNAL:
 * Load the class using the default managers class loader.
* This is a thread based class loader by default. * This should be used to load all classes as Class.forName can only * see classes on the same classpath as the eclipselink.jar. */ public static Class loadClass(String className) { return (Class)getDefaultManager().convertObject(className, ClassConstants.CLASS); } /** * INTERNAL: * This is used to determine the wrapper class for a primitive. */ public static Class getObjectClass(Class javaClass) { // Null means unknown always for classifications. if (javaClass == null) { return null; } if (javaClass.isPrimitive()) { if (javaClass == ClassConstants.PCHAR) { return ClassConstants.CHAR; } if (javaClass == ClassConstants.PINT) { return ClassConstants.INTEGER; } if (javaClass == ClassConstants.PDOUBLE) { return ClassConstants.DOUBLE; } if (javaClass == ClassConstants.PFLOAT) { return ClassConstants.FLOAT; } if (javaClass == ClassConstants.PLONG) { return ClassConstants.LONG; } if (javaClass == ClassConstants.PSHORT) { return ClassConstants.SHORT; } if (javaClass == ClassConstants.PBYTE) { return ClassConstants.BYTE; } if (javaClass == ClassConstants.PBOOLEAN) { return ClassConstants.BOOLEAN; } } else if (javaClass == ClassConstants.APBYTE) { return ClassConstants.APBYTE; } else if (javaClass == ClassConstants.APCHAR) { return ClassConstants.APCHAR; } else { return javaClass; } return javaClass; } /** * INTERNAL: * Returns a class based on the passed in string. 
*/
public static Class getPrimitiveClass(String classType) {
    // classType.equals(...) (not "int".equals(classType)) preserves the
    // historical NullPointerException on a null name.
    if (classType.equals("int")) {
        return Integer.TYPE;
    }
    if (classType.equals("boolean")) {
        return Boolean.TYPE;
    }
    if (classType.equals("char")) {
        return Character.TYPE;
    }
    if (classType.equals("short")) {
        return Short.TYPE;
    }
    if (classType.equals("byte")) {
        return Byte.TYPE;
    }
    if (classType.equals("float")) {
        return Float.TYPE;
    }
    if (classType.equals("double")) {
        return Double.TYPE;
    }
    if (classType.equals("long")) {
        return Long.TYPE;
    }
    // Not a primitive type name.
    return null;
}

/**
 * A singleton conversion manager is used to handle generic conversions.
 * This should not be used for conversion under the session context, these must go through the platform.
 * This allows for the singleton to be customized through setting the default to a user defined subclass.
 */
public static void setDefaultManager(ConversionManager theManager) {
    defaultManager = theManager;
}

/**
 * INTERNAL:
 * Allow for the null values for classes to be defaulted in one place.
 * Any nulls read from the database to be converted to the class will be given the specified null value.
 * Primitive null values should be set to the wrapper class.
 */
public void setDefaultNullValue(Class theClass, Object theValue) {
    // Lazily allocate the map on first registration.
    if (this.defaultNullValues == null) {
        this.defaultNullValues = new HashMap(5);
    }
    getDefaultNullValues().put(theClass, theValue);
}

/**
 * INTERNAL:
 * Allow for the null values for classes to be defaulted in one place.
 * Any nulls read from the database to be converted to the class will be given the specified null value.
*/
public void setDefaultNullValues(Map defaultNullValues) {
    this.defaultNullValues = defaultNullValues;
}

/**
 * INTERNAL:
 * Set this manager's class loader; also disables use of the current
 * thread's context class loader.
 * @param classLoader java.lang.ClassLoader
 */
public void setLoader(ClassLoader classLoader) {
    shouldUseClassLoaderFromCurrentThread = false;
    loader = classLoader;
}

/**
 * INTERNAL:
 * Set the default class loader to use if no instance-level loader is set
 * @param classLoader java.lang.ClassLoader
 */
public static void setDefaultLoader(ClassLoader classLoader) {
    defaultLoader = classLoader;
}

/**
 * INTERNAL:
 * Get the default class loader to use if no instance-level loader is set
 * @return java.lang.ClassLoader
 */
public static ClassLoader getDefaultLoader() {
    return defaultLoader;
}

/**
 * ADVANCED:
 * This flag should be set if the current thread classLoader should be used.
 * This is the case in certain Application Servers where the class loader must be
 * retrieved from the current Thread. If ClassNotFoundExceptions are being thrown then set
 * this flag. In certain cases it will resolve the problem
 */
public void setShouldUseClassLoaderFromCurrentThread(boolean useCurrentThread) {
    this.shouldUseClassLoaderFromCurrentThread = useCurrentThread;
}

/**
 * ADVANCED:
 * This flag should be set if the current thread classLoader should be used.
 * This is the case in certain Application Servers where the class loader must be
 * retrieved from the current Thread. If ClassNotFoundExceptions are being thrown then set
 * this flag. In certain cases it will resolve the problem
 */
public boolean shouldUseClassLoaderFromCurrentThread() {
    return this.shouldUseClassLoaderFromCurrentThread;
}

/**
 * PUBLIC:
 * Return the list of Classes that can be converted to from the passed in javaClass.
* @param javaClass - the class that is converted from
* @return - a vector of classes
*/
public Vector getDataTypesConvertedFrom(Class javaClass) {
    // The registry is built lazily on first access.
    if (dataTypesConvertedFromAClass.isEmpty()) {
        buildDataTypesConvertedFromAClass();
    }
    return (Vector)dataTypesConvertedFromAClass.get(javaClass);
}

/**
 * PUBLIC:
 * Return the list of Classes that can be converted from to the passed in javaClass.
 * @param javaClass - the class that is converted to
 * @return - a vector of classes
 */
public Vector getDataTypesConvertedTo(Class javaClass) {
    // The registry is built lazily on first access.
    if (dataTypesConvertedToAClass.isEmpty()) {
        buildDataTypesConvertedToAClass();
    }
    return (Vector)dataTypesConvertedToAClass.get(javaClass);
}

// Common numeric types shared by several of the from/to tables below.
protected Vector buildNumberVec() {
    Vector vec = new Vector();
    vec.addElement(BigInteger.class);
    vec.addElement(BigDecimal.class);
    vec.addElement(Byte.class);
    vec.addElement(Double.class);
    vec.addElement(Float.class);
    vec.addElement(Integer.class);
    vec.addElement(Long.class);
    vec.addElement(Short.class);
    vec.addElement(Number.class);
    return vec;
}

// Common date/time types shared by several of the from/to tables below.
protected Vector buildDateTimeVec() {
    Vector vec = new Vector();
    vec.addElement(java.util.Date.class);
    vec.addElement(Timestamp.class);
    vec.addElement(Calendar.class);
    return vec;
}

// Populate the "convertible from" registry: for each source class, the
// classes it can be converted into.
protected void buildDataTypesConvertedFromAClass() {
    dataTypesConvertedFromAClass.put(BigDecimal.class, buildFromBigDecimalVec());
    dataTypesConvertedFromAClass.put(BigInteger.class, buildFromBigIntegerVec());
    dataTypesConvertedFromAClass.put(Blob.class, buildFromBlobVec());
    dataTypesConvertedFromAClass.put(Boolean.class, buildFromBooleanVec());
    dataTypesConvertedFromAClass.put(byte[].class, buildFromByteArrayVec());
    dataTypesConvertedFromAClass.put(Byte.class, buildFromByteVec());
    dataTypesConvertedFromAClass.put(Calendar.class, buildFromCalendarVec());
    dataTypesConvertedFromAClass.put(Character.class, buildFromCharacterVec());
    dataTypesConvertedFromAClass.put(Clob.class, buildFromClobVec());
    dataTypesConvertedFromAClass.put(java.sql.Date.class, buildFromDateVec());
    dataTypesConvertedFromAClass.put(Double.class, buildFromDoubleVec());
    dataTypesConvertedFromAClass.put(Float.class, buildFromFloatVec());
    dataTypesConvertedFromAClass.put(Integer.class, buildFromIntegerVec());
    dataTypesConvertedFromAClass.put(Long.class, buildFromLongVec());
    dataTypesConvertedFromAClass.put(Number.class, buildFromNumberVec());
    dataTypesConvertedFromAClass.put(Short.class, buildFromShortVec());
    dataTypesConvertedFromAClass.put(String.class, buildFromStringVec());
    dataTypesConvertedFromAClass.put(Timestamp.class, buildFromTimestampVec());
    dataTypesConvertedFromAClass.put(Time.class, buildFromTimeVec());
    dataTypesConvertedFromAClass.put(java.util.Date.class, buildFromUtilDateVec());
    dataTypesConvertedFromAClass.put(Byte[].class, buildFromByteObjectArraryVec());
    dataTypesConvertedFromAClass.put(char[].class, buildFromCharArrayVec());
    dataTypesConvertedFromAClass.put(Character[].class, buildFromCharacterArrayVec());
}

protected Vector buildFromBooleanVec() {
    Vector vec = new Vector();
    vec.addElement(String.class);
    vec.addElement(Boolean.class);
    vec.addElement(Integer.class);
    vec.addElement(Long.class);
    vec.addElement(Short.class);
    vec.addElement(Number.class);
    vec.addElement(Character[].class);
    vec.addElement(char[].class);
    vec.addElement(boolean.class);
    vec.addElement(int.class);
    vec.addElement(long.class);
    vec.addElement(short.class);
    return vec;
}

protected Vector buildFromNumberVec() {
    Vector vec = buildNumberVec();
    vec.addElement(String.class);
    vec.addElement(Character.class);
    vec.addElement(Boolean.class);
    vec.addElement(Character[].class);
    vec.addElement(char[].class);
    vec.addElement(char.class);
    vec.addElement(int.class);
    vec.addElement(double.class);
    vec.addElement(float.class);
    vec.addElement(long.class);
    vec.addElement(short.class);
    vec.addElement(byte.class);
    vec.addElement(boolean.class);
    return vec;
}

protected Vector buildFromBigDecimalVec() {
    return buildFromNumberVec();
}

protected Vector buildFromBigIntegerVec() {
    return buildFromNumberVec();
}

protected Vector buildFromIntegerVec() {
    return buildFromNumberVec();
}

protected Vector buildFromFloatVec() {
    return buildFromNumberVec();
}

protected Vector buildFromDoubleVec() {
    return buildFromNumberVec();
}

protected Vector buildFromShortVec() {
    return buildFromNumberVec();
}

protected Vector buildFromByteVec() {
    return buildFromNumberVec();
}

protected Vector buildFromLongVec() {
    Vector vec = buildFromNumberVec();
    vec.addAll(buildDateTimeVec());
    vec.addElement(java.sql.Date.class);
    vec.addElement(Time.class);
    return vec;
}

protected Vector buildFromStringVec() {
    Vector vec = buildFromLongVec();
    vec.addElement(Byte[].class);
    vec.addElement(byte[].class);
    vec.addElement(Clob.class);
    return vec;
}

protected Vector buildFromCharacterVec() {
    Vector vec = new Vector();
    vec.addElement(String.class);
    vec.addElement(Boolean.class);
    vec.addElement(Character[].class);
    vec.addElement(Character.class);
    vec.addElement(char[].class);
    vec.addElement(char.class);
    vec.addElement(boolean.class);
    return vec;
}

protected Vector buildFromByteArrayVec() {
    Vector vec = new Vector();
    vec.addElement(String.class);
    vec.addElement(byte[].class);
    vec.addElement(Byte[].class);
    vec.addElement(Character[].class);
    vec.addElement(char[].class);
    return vec;
}

protected Vector buildFromClobVec() {
    Vector vec = new Vector();
    vec.addElement(String.class);
    vec.addElement(Character[].class);
    vec.addElement(char[].class);
    return vec;
}

protected Vector buildFromBlobVec() {
    Vector vec = new Vector();
    vec.addElement(String.class);
    vec.addElement(Byte[].class);
    vec.addElement(byte[].class);
    vec.addElement(Character[].class);
    vec.addElement(char[].class);
    return vec;
}

protected Vector buildFromUtilDateVec() {
    Vector vec = buildDateTimeVec();
    vec.addElement(String.class);
    vec.addElement(Long.class);
    vec.addElement(java.sql.Date.class);
    vec.addElement(Time.class);
    vec.addElement(long.class);
    vec.addElement(Character[].class);
    vec.addElement(char[].class);
    return vec;
}

protected Vector buildFromTimestampVec() {
    return buildFromUtilDateVec();
}

protected Vector buildFromCalendarVec() {
    return buildFromUtilDateVec();
}

protected Vector buildFromDateVec() {
    Vector vec = buildDateTimeVec();
    vec.addElement(String.class);
    vec.addElement(Long.class);
    vec.addElement(java.sql.Date.class);
    vec.addElement(long.class);
    vec.addElement(Character[].class);
    vec.addElement(char[].class);
    return vec;
}

protected Vector buildFromTimeVec() {
    Vector vec = buildDateTimeVec();
    vec.addElement(String.class);
    vec.addElement(Long.class);
    vec.addElement(Time.class);
    vec.addElement(long.class);
    vec.addElement(Character[].class);
    vec.addElement(char[].class);
    return vec;
}

protected Vector buildFromByteObjectArraryVec() {
    Vector vec = new Vector();
    vec.addElement(Blob.class);
    vec.addElement(byte[].class);
    return vec;
}

protected Vector buildFromCharArrayVec() {
    Vector vec = new Vector();
    vec.addElement(Clob.class);
    return vec;
}

protected Vector buildFromCharacterArrayVec() {
    Vector vec = new Vector();
    vec.addElement(Clob.class);
    return vec;
}

// Populate the "convertible to" registry: for each target class, the
// classes that can be converted into it.
protected void buildDataTypesConvertedToAClass() {
    dataTypesConvertedToAClass.put(BigDecimal.class, buildToBigDecimalVec());
    dataTypesConvertedToAClass.put(BigInteger.class, buildToBigIntegerVec());
    dataTypesConvertedToAClass.put(Boolean.class, buildToBooleanVec());
    dataTypesConvertedToAClass.put(Byte.class, buildToByteVec());
    dataTypesConvertedToAClass.put(byte[].class, buildToByteArrayVec());
    dataTypesConvertedToAClass.put(Byte[].class, buildToByteObjectArrayVec());
    dataTypesConvertedToAClass.put(Calendar.class, buildToCalendarVec());
    dataTypesConvertedToAClass.put(Character.class, buildToCharacterVec());
    dataTypesConvertedToAClass.put(Character[].class, buildToCharacterArrayVec());
    dataTypesConvertedToAClass.put(char[].class, buildToCharArrayVec());
    dataTypesConvertedToAClass.put(java.sql.Date.class, buildToDateVec());
    dataTypesConvertedToAClass.put(Double.class, buildToDoubleVec());
    dataTypesConvertedToAClass.put(Float.class, buildToFloatVec());
    dataTypesConvertedToAClass.put(Integer.class, buildToIntegerVec());
    dataTypesConvertedToAClass.put(Long.class, buildToLongVec());
    dataTypesConvertedToAClass.put(Number.class, buildToNumberVec());
    dataTypesConvertedToAClass.put(Short.class, buildToShortVec());
    dataTypesConvertedToAClass.put(String.class, buildToStringVec());
    dataTypesConvertedToAClass.put(Timestamp.class, buildToTimestampVec());
    dataTypesConvertedToAClass.put(Time.class, buildToTimeVec());
    dataTypesConvertedToAClass.put(java.util.Date.class, buildToUtilDateVec());
    dataTypesConvertedToAClass.put(Clob.class, buildToClobVec());
    dataTypesConvertedToAClass.put(Blob.class, buildToBlobVec());
}

protected Vector buildAllTypesToAClassVec() {
    Vector vec = new Vector();
    vec.addElement(String.class);
    vec.addElement(Integer.class);
    vec.addElement(java.util.Date.class);
    vec.addElement(java.sql.Date.class);
    vec.addElement(Time.class);
    vec.addElement(Timestamp.class);
    vec.addElement(Calendar.class);
    vec.addElement(Character.class);
    vec.addElement(Double.class);
    vec.addElement(Float.class);
    vec.addElement(Long.class);
    vec.addElement(Short.class);
    vec.addElement(Byte.class);
    vec.addElement(BigInteger.class);
    vec.addElement(BigDecimal.class);
    vec.addElement(Number.class);
    vec.addElement(Boolean.class);
    vec.addElement(Character[].class);
    vec.addElement(Blob.class);
    vec.addElement(Clob.class);
    return vec;
}

protected Vector buildToBigDecimalVec() {
    Vector vec = buildNumberVec();
    vec.addElement(String.class);
    return vec;
}

protected Vector buildToBigIntegerVec() {
    return buildToBigDecimalVec();
}

protected Vector buildToBooleanVec() {
    Vector vec = buildToBigDecimalVec();
    vec.addElement(Character.class);
    vec.addElement(Boolean.class);
    return vec;
}

protected Vector buildToByteVec() {
    return buildToBigDecimalVec();
}

protected Vector buildToDoubleVec() {
    return buildToBigDecimalVec();
}

protected Vector buildToFloatVec() {
    return buildToBigDecimalVec();
}

protected Vector buildToIntegerVec() {
    Vector vec = buildToBigDecimalVec();
    vec.addElement(Boolean.class);
    return vec;
}

protected Vector buildToLongVec() {
    Vector vec = buildToIntegerVec();
    vec.addElement(Calendar.class);
    vec.addElement(java.util.Date.class);
    return vec;
}

protected Vector buildToNumberVec() {
    return buildToIntegerVec();
}

protected Vector buildToShortVec() {
    return buildToIntegerVec();
}

protected Vector buildToByteArrayVec() {
    Vector vec = new Vector();
    vec.addElement(String.class);
    vec.addElement(Blob.class);
    vec.addElement(byte[].class);
    vec.addElement(Byte[].class);
    return vec;
}

protected Vector buildToByteObjectArrayVec() {
    Vector vec = buildToByteArrayVec();
    vec.addElement(Byte[].class);
    return vec;
}

protected Vector buildToCharacterVec() {
    Vector vec = buildToBigDecimalVec();
    vec.addElement(Character.class);
    return vec;
}

protected Vector buildToCharacterArrayVec() {
    return buildAllTypesToAClassVec();
}

protected Vector buildToCharArrayVec() {
    return buildAllTypesToAClassVec();
}

protected Vector buildToStringVec() {
    return buildAllTypesToAClassVec();
}

protected Vector buildToCalendarVec() {
    Vector vec = buildDateTimeVec();
    vec.addElement(String.class);
    vec.addElement(Long.class);
    vec.addElement(java.sql.Date.class);
    vec.addElement(Time.class);
    return vec;
}

protected Vector buildToTimestampVec() {
    return buildToCalendarVec();
}

protected Vector buildToUtilDateVec() {
    return buildToCalendarVec();
}

protected Vector buildToDateVec() {
    Vector vec = buildDateTimeVec();
    vec.addElement(String.class);
    vec.addElement(Long.class);
    vec.addElement(java.sql.Date.class);
    return vec;
}

protected Vector buildToTimeVec() {
    Vector vec = buildDateTimeVec();
    vec.addElement(String.class);
    vec.addElement(Long.class);
    vec.addElement(Time.class);
    return vec;
}

protected Vector buildToBlobVec() {
    Vector vec = new Vector();
    vec.addElement(Byte[].class);
    vec.addElement(byte[].class);
    return vec;
}

protected Vector buildToClobVec() {
    Vector vec = new Vector();
    vec.addElement(String.class);
vec.addElement(char[].class); vec.addElement(Character[].class); return vec; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/MappingCompare.java0000664000000000000000000000402212216173126025246 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.Comparator; import org.eclipse.persistence.mappings.DatabaseMapping; /** * INTERNAL: * Use to Sort The mappings in ClassDescriptor, Mappings are either DirectToField, which must be at the top * or other * Avoid using this class as sun.misc is not part of many VM's like Netscapes. * */ public class MappingCompare implements Comparator { public int compare(Object arg1, Object arg2) { int arg1Value = ((DatabaseMapping)arg1).getWeight().intValue(); int arg2Value = ((DatabaseMapping)arg2).getWeight().intValue(); if (arg1Value == arg2Value) { int result = ((DatabaseMapping)arg1).getClass().getName().compareTo(((DatabaseMapping)arg2).getClass().getName()); // For same classes, compare attribute names. if (result == 0) { // Can be null for TransformationMapping. 
if (((DatabaseMapping)arg1).getAttributeName() != null && ((DatabaseMapping)arg2).getAttributeName() != null) { result = ((DatabaseMapping)arg1).getAttributeName().compareTo(((DatabaseMapping)arg2).getAttributeName()); } } return result; } return (arg1Value - arg2Value); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/helper/TimeZoneHolder.java0000664000000000000000000000160612216173126025241 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.helper; import java.util.TimeZone; public interface TimeZoneHolder { public TimeZone getTimeZone(); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/weaving/0000775000000000000000000000000012216174372021670 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/weaving/PersistenceWeavedLazy.java0000664000000000000000000000174012216173130027004 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.weaving; /** * INTERNAL: * Marker interface used to identify classes modified by the TopLink weaver for LAZY (ValueHolder indirection) mappings. */ public interface PersistenceWeavedLazy { } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/weaving/PersistenceWeaved.java0000664000000000000000000000165512216173130026151 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.weaving; /** * INTERNAL: * Marker interface used to identify classes modified by the EclipseLink weaver. */ public interface PersistenceWeaved { } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/weaving/PersistenceWeavedRest.java0000664000000000000000000000227312216173130027004 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * tware - initial ******************************************************************************/ package org.eclipse.persistence.internal.weaving; import java.util.List; import org.eclipse.persistence.internal.jpa.rs.metadata.model.Link; /** * Used by JPA-RS to build links for relationships. * * @author tware */ public interface PersistenceWeavedRest { public List _persistence_getRelationships(); public void _persistence_setRelationships(List relationships); Link _persistence_getHref(); void _persistence_setHref(Link href); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/weaving/PersistenceWeavedChangeTracking.java0000664000000000000000000000171512216173130030737 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.weaving; /** * INTERNAL: * Marker interface used to identify classes modified by the TopLink weaver for change tracking. 
*/ public interface PersistenceWeavedChangeTracking { } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/weaving/RelationshipInfo.java0000664000000000000000000000353712216173130026007 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * tware - initial ******************************************************************************/ package org.eclipse.persistence.internal.weaving; /** * Stores information about a relationships mapping that is used by JPA-RS to build links for relationships. 
* * @author tware */ public class RelationshipInfo { private Object primaryKey; private String owningEntityAlias; private String attributeName; private Object owningEntity; public void setPersistencePrimaryKey(Object primaryKey) { this.primaryKey = primaryKey; } public void setAttributeName(String attributeName) { this.attributeName = attributeName; } public Object getPersistencePrimaryKey() { return primaryKey; } public String getAttributeName() { return attributeName; } public Object getOwningEntity() { return owningEntity; } public void setOwningEntity(Object owningEntity) { this.owningEntity = owningEntity; } public String getOwningEntityAlias() { return owningEntityAlias; } public void setOwningEntityAlias(String owningEntityAlias) { this.owningEntityAlias = owningEntityAlias; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/weaving/PersistenceWeavedFetchGroups.java0000664000000000000000000000170712216173130030321 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.weaving; /** * INTERNAL: * Marker interface used to identify classes modified by the TopLink weaver for fetch groups. 
*/ public interface PersistenceWeavedFetchGroups { } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/0000775000000000000000000000000012216174372022371 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SequencingLogInOut.java0000664000000000000000000000246512216173126026761 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; /** *

* Purpose: Simple interface inherited by several sequencing interfaces and classes *

*/ interface SequencingLogInOut { /** * INTERNAL: * Called when the object is connected (logged in). */ public void onConnect(); /** * INTERNAL: * Called when the object is disconnected (logged out). */ public void onDisconnect(); /** * INTERNAL: * Indicates whether the object is connected or not. */ public boolean isConnected(); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/PreallocationHandler.java0000664000000000000000000000633412216173126027330 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import java.util.*; import java.util.concurrent.*; /** * Handles the storage and allocation of sequence values. * This is held by the session (ServerSession) through the SequencingManager. * @see SequencingManager */ class PreallocationHandler implements SequencingLogInOut { protected Map preallocatedSequences; public PreallocationHandler() { super(); } /** * Returns the Queue of sequences from the global sequences for the seqName. * If there is not one, a new empty Queue is registered. * This queue is thread-safe, and threads can concurrent poll the queue to remove the first element. 
*/ public Queue getPreallocated(String sequenceName) { Queue sequences = preallocatedSequences.get(sequenceName); if (sequences == null) { synchronized (preallocatedSequences) { sequences = preallocatedSequences.get(sequenceName); if (sequences == null) { sequences = new ConcurrentLinkedQueue(); preallocatedSequences.put(sequenceName, sequences); } } } return sequences; } // SequencingLogInOut public void onConnect() { initializePreallocated(); } public void onDisconnect() { preallocatedSequences = null; } public boolean isConnected() { return preallocatedSequences != null; } /** * Removes all preallocated objects. * A dangerous method to use in multithreaded environment method, * but so handy for testing. */ public void initializePreallocated() { preallocatedSequences = new ConcurrentHashMap(20); } /** * Removes all preallocated objects for the specified seqName. * A dangerous method to use in multithreaded environment method, * but so handy for testing. */ public void initializePreallocated(String seqName) { preallocatedSequences.remove(seqName); } /** * Add the preallocated sequences to the global sequence pool for the sequence name. * Although this method is thread-safe, a lock should typically be obtained from the sequence manager before calling this method, * to ensure sequential numbers. */ public void setPreallocated(String seqName, Vector sequences) { getPreallocated(seqName).addAll(sequences); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SequencingConnectionHandler.java0000664000000000000000000000246612216173126030657 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import org.eclipse.persistence.internal.databaseaccess.Accessor; /** *

* Purpose: Define interface for getting separate sequencing connection(s) *

* Description: *

* Responsibilities: *

    *
  • Used by SequencingManager only, to obtain separate sequencing connection(s) *
* @see SequencingManager */ interface SequencingConnectionHandler extends SequencingLogInOut { public Accessor acquireAccessor(); public void releaseAccessor(Accessor accessor); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SequencingCallback.java0000664000000000000000000000302312216173126026744 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import org.eclipse.persistence.internal.databaseaccess.Accessor; /** *

* Purpose: Define interface for sequencing callback. *

* Description: Objects implementing this interface * produced by SequencingCallbackFactory. *

* Responsibilities: *

    *
  • Provides sequencing callback to be called after transaction. *
* @see org.eclipse.persistence.sequencing.SequencingControl */ public interface SequencingCallback { /** * INTERNAL: * Called after transaction has committed by the object that owns it: * Accessor in non-jta case, SynchronizationListener in jta case. * Should not be called after rollback. */ void afterCommit(Accessor accessor); } ././@LongLink0000000000000000000000000000014700000000000011567 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/ServerSessionConnectionHandler.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/ServerSessionConnectionHandler.ja0000664000000000000000000000340412216173126031032 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import org.eclipse.persistence.internal.databaseaccess.Accessor; import org.eclipse.persistence.sessions.server.ConnectionPool; class ServerSessionConnectionHandler implements SequencingConnectionHandler { ServerSessionConnectionHandler(ConnectionPool pool) { this.pool = pool; } ConnectionPool pool; public void onConnect() { if (!isConnected()) { pool.startUp(); } } public boolean isConnected() { return pool.isConnected(); } public Accessor acquireAccessor() { return pool.acquireConnection(); } public ConnectionPool getPool() { return pool; } public void releaseAccessor(Accessor accessor) { pool.releaseConnection(accessor); } public void onDisconnect() { if (isConnected()) { pool.shutDown(); } } protected void finalize() throws Throwable { onDisconnect(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SessionBrokerSequencing.java0000664000000000000000000000563512216173126030053 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import java.util.Iterator; import org.eclipse.persistence.internal.sequencing.Sequencing; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.sessions.broker.SessionBroker; class SessionBrokerSequencing implements Sequencing { protected SessionBroker broker; protected int whenShouldAcquireValueForAll; public static boolean atLeastOneSessionHasSequencing(SessionBroker br) { boolean hasSequencing = false; Iterator sessionEnum = br.getSessionsByName().values().iterator(); while (sessionEnum.hasNext() && !hasSequencing) { AbstractSession session = (AbstractSession)sessionEnum.next(); hasSequencing = session.getSequencing() != null; } return hasSequencing; } public SessionBrokerSequencing(SessionBroker broker) { this.broker = broker; initialize(); } protected void initialize() { whenShouldAcquireValueForAll = UNDEFINED; boolean first = true; Iterator sessionEnum = broker.getSessionsByName().values().iterator(); while ((first || (whenShouldAcquireValueForAll != UNDEFINED)) && sessionEnum.hasNext()) { AbstractSession session = (AbstractSession)sessionEnum.next(); Sequencing sequencing = session.getSequencing(); if (sequencing != null) { if (first) { whenShouldAcquireValueForAll = sequencing.whenShouldAcquireValueForAll(); first = false; } else { if (whenShouldAcquireValueForAll != sequencing.whenShouldAcquireValueForAll()) { whenShouldAcquireValueForAll = UNDEFINED; } } } } } // internal protected Sequencing get(Class cls) { return broker.getSessionForClass(cls).getSequencing(); } public int whenShouldAcquireValueForAll() { return whenShouldAcquireValueForAll; } public Object getNextValue(Class cls) { return get(cls).getNextValue(cls); } } 
eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/Sequencing.java0000664000000000000000000000441412216173126025334 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; /** *

* Purpose: Define interface to use sequencing. *

* Description: This interface accessed through Session.getSequencing() method. * Used by EclipseLink internals to obtain sequencing values. *

* Responsibilities: *

    *
  • Provides sequencing objects and supporting APIs. *
*/ public interface Sequencing { // Possible return values for whenShouldAcquireValueForAll() method: // all classes should acquire sequencing value before insert; public static final int BEFORE_INSERT = -1; // some classes should acquire sequencing value before insert, some after; public static final int UNDEFINED = 0; // all classes should acquire sequencing value after insert; public static final int AFTER_INSERT = 1; /** * INTERNAL: * Indicates when sequencing value should be acquired for all classes. * There are just three possible return values: * BEFORE_INSERT, UNDEFINED, AFTER_INSERT. * Used as a shortcut to avoid individual checks for each class: * shouldAcquireValueAfterInsert(Class cls). * Currently UNDEFINED only happens in a case of a SessionBroker: * session1 - BEFORE_INSERT, session2 - AFTER_INSERT */ int whenShouldAcquireValueForAll(); /** * INTERNAL: * Return the newly-generated sequencing value. * @param cls Class for which the sequencing value is generated. */ Object getNextValue(Class cls); } ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/DatabaseSessionConnectionHandler.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/DatabaseSessionConnectionHandler.0000664000000000000000000000455312216173126030763 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import org.eclipse.persistence.sessions.Login; import org.eclipse.persistence.internal.databaseaccess.Accessor; import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl; import org.eclipse.persistence.exceptions.ConcurrencyException; class DatabaseSessionConnectionHandler implements SequencingConnectionHandler { DatabaseSessionConnectionHandler(DatabaseSessionImpl ownerSession, Login login) { this.ownerSession = ownerSession; this.login = login; accessor = login.buildAccessor(); } DatabaseSessionImpl ownerSession; Login login; Accessor accessor; boolean isBusy; public void onConnect() { if (!isConnected()) { accessor.connect(login, ownerSession); } } public boolean isConnected() { return accessor.isConnected(); } public synchronized Accessor acquireAccessor() { if (isBusy) { try { wait();// Notify is called when connection is released. } catch (InterruptedException exception) { throw ConcurrencyException.waitFailureOnSequencingForDatabaseSession(exception); } } isBusy = true; return accessor; } public synchronized void releaseAccessor(Accessor accessor) { isBusy = false; notify(); } public void onDisconnect() { if (isConnected()) { accessor.disconnect(ownerSession); } } protected void finalize() throws Throwable { onDisconnect(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SequencingCallbackFactory.java0000664000000000000000000000247212216173126030303 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; /** *

* Purpose: Define interface for sequencing callback factory. *

* Description: Instantiated internally by SequencingManager. *

* Responsibilities: *

    *
  • Provides sequencing callback to be called after transaction has committed. *
* @see org.eclipse.persistence.sequencing.SequencingControl */ public interface SequencingCallbackFactory { /** * INTERNAL: * Create SequencingCallback. */ SequencingCallback createSequencingCallback(); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SequencingServer.java0000664000000000000000000000375312216173126026530 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import org.eclipse.persistence.internal.sequencing.Sequencing; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.sessions.server.ConnectionPool; /** *

* Purpose: Define interface for sequencing server. *

* Description: This interface accessed through * ServerSession.getSequencingServer() method. * Used for creation of ClientSessionSequencing object * and for access to sequencing connection pool. * Note that if session is disconnected ServerSession.getSequencingServer() always returns null. * Setup of SequencingConnectionPool is done only through SequencingControl interface. * Even if getSequencingControl().setShouldUseSeparateConnection(true) is specified, * SequencingConnectionPool is NOT created unless the session has at least one Sequence object * that requires transaction. *

* Responsibilities: *

    *
  • Connects sequencing on ClientSession with sequencing on ServerSession. *
* @see ClientSessionSequencing */ public interface SequencingServer extends Sequencing { Object getNextValue(AbstractSession writeSession, Class cls); ConnectionPool getConnectionPool(); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SequencingManager.java0000664000000000000000000014141412216173126026631 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import org.eclipse.persistence.sequencing.*; import org.eclipse.persistence.sessions.Login; import org.eclipse.persistence.sessions.server.*; import org.eclipse.persistence.internal.databaseaccess.*; import org.eclipse.persistence.internal.helper.ConcurrencyManager; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.DatabaseException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.logging.SessionLog; /** * SequencingManager is private to EclipseLink. * It provides most of sequencing functionality. * It's accessed by DatabaseSession through getSequencingHome() method. 
* * Here's the lifecycle of SequencingManager. * InitialState: SequencingManager doesn't exist. * Action: SequencingManager created -> Not connected State. * State: Not connected. * isConnected() returns false; * getSequencingControl() could be used; * getSequencing() == getSequencingServer() == getSequencingCallbackFactory() == null; * Action: onConnect is called -> Connected State. * State: Connected. * isConnected() returns true; * getSequencingControl() could be used; * getSequencing() could be used; * in case ownwerSession is a ServerSession getSequencingServer() could be used; * Action: onDisconnect is called -> Not connected State. * * Here's a sketch of SequencingManager architecture. * The main 4 objects comprising SessionManager are: * valueGenarationPolicy; * preallocationHandler; * connectionHandler; * state; * * That's how they evolve during lifetime of SequencingManager object: * Not connected State: * preallocationHandler doesn't have any preallocated sequencing values. * connectionHandler == null; * state == null; * * Connected State: * preallocationHandler may contain preallocated sequencing values. * valueGenarationPolicy != null; * state != null; * * The most important method of the class is onConnect(): * that's where, using values of the attributes'(accessible through SequencingControl): * shouldUseSeparateConnection; * login; * minPoolSize; * maxPoolSize; * as well as boolean flags returned by valueGenerationPolicy methods: * shouldAcquireValueAfterInsert(); * shouldUsePreallocation(); * shouldUseSeparateConnection(); * shouldUseTransaction(); * one of implementors of inner interface State is created. * * Once in Connected State, neither changes to attributes, nor to returns of valueGenerationPolicy's * four should... methods can change the state object. * To change the state object, onDisconnect(), than onConnect() should be called. 
* There is no need to do it directly: each of the following methods * available through SequencingControl does that: * setValueGenerationPolicy; * setShouldUseNativeSequencing; * setShouldUseTableSequencing; * resetSequencing; */ class SequencingManager implements SequencingHome, SequencingServer, SequencingControl { private DatabaseSessionImpl ownerSession; private SequencingConnectionHandler connectionHandler; private PreallocationHandler preallocationHandler; private int whenShouldAcquireValueForAll; private Vector connectedSequences; boolean atLeastOneSequenceShouldUseTransaction; boolean atLeastOneSequenceShouldUsePreallocation; // state ids private static final int NOPREALLOCATION = 0; private static final int PREALLOCATION_NOTRANSACTION = 1; private static final int PREALLOCATION_TRANSACTION_NOACCESSOR = 2; private static final int PREALLOCATION_TRANSACTION_ACCESSOR = 3; private static final int NUMBER_OF_STATES = 4; private State[] states; private Map locks; private SequencingCallbackFactory callbackFactory; private SequencingServer server; private Sequencing seq; private boolean shouldUseSeparateConnection; private Login login; private int minPoolSize = -1; private int maxPoolSize = -1; private int initialPoolSize = -1; private ConnectionPool connectionPool; public SequencingManager(DatabaseSessionImpl ownerSession) { this.ownerSession = ownerSession; } protected DatabaseSessionImpl getOwnerSession() { return ownerSession; } protected void createConnectionHandler() { boolean isServerSession = getOwnerSession().isServerSession(); if (getLogin() == null) { Login login; if (isServerSession) { login = ((ServerSession)getOwnerSession()).getReadConnectionPool().getLogin(); } else { login = getOwnerSession().getDatasourceLogin(); } setLogin(login); } if (getLogin() != null) { if (getLogin().shouldUseExternalTransactionController()) { throw ValidationException.invalidSequencingLogin(); } } if (isServerSession) { ConnectionPool pool = null; if (this.connectionPool 
== null) { if (getLogin().shouldUseExternalConnectionPooling()) { pool = new ExternalConnectionPool("sequencing", getLogin(), (ServerSession)getOwnerSession()); } else { if (getMinPoolSize() == -1) { setMinPoolSize(2); } if (getMaxPoolSize() == -1) { setMinPoolSize(2); } if (getInitialPoolSize() == -1) { setInitialPoolSize(1); } pool = new ConnectionPool("sequencing", getLogin(), getInitialPoolSize(), getMinPoolSize(), getMaxPoolSize(), (ServerSession)getOwnerSession()); } } else { pool = this.connectionPool; } setConnectionHandler(new ServerSessionConnectionHandler(pool)); } else { setConnectionHandler(new DatabaseSessionConnectionHandler(getOwnerSession(), getLogin())); } } public SequencingControl getSequencingControl() { return this; } protected void setSequencing(Sequencing sequencing) { this.seq = sequencing; } public Sequencing getSequencing() { return seq; } protected void setSequencingServer(SequencingServer server) { this.server = server; } public SequencingServer getSequencingServer() { return server; } protected void setSequencingCallbackFactory(SequencingCallbackFactory callbackFactory) { this.callbackFactory = callbackFactory; } public boolean isSequencingCallbackRequired() { return this.callbackFactory != null; } public boolean shouldUseSeparateConnection() { return shouldUseSeparateConnection; } public void setShouldUseSeparateConnection(boolean shouldUseSeparateConnection) { this.shouldUseSeparateConnection = shouldUseSeparateConnection; } public boolean isConnectedUsingSeparateConnection() { return isConnected() && (getConnectionHandler() != null); } public Login getLogin() { return login; } public void setLogin(Login login) { this.login = login; } public int getMinPoolSize() { return minPoolSize; } public void setMinPoolSize(int size) { this.minPoolSize = size; } public int getMaxPoolSize() { return maxPoolSize; } public void setMaxPoolSize(int size) { this.maxPoolSize = size; } public int getInitialPoolSize() { return this.initialPoolSize; } 
    public void setInitialPoolSize(int size) {
        this.initialPoolSize = size;
    }

    // Connected iff onConnect has completed: the states array exists only while connected.
    public boolean isConnected() {
        return states != null;
    }

    // SequencingSetup
    protected SequencingConnectionHandler getConnectionHandler() {
        return connectionHandler;
    }

    protected void setConnectionHandler(SequencingConnectionHandler handler) {
        this.connectionHandler = handler;
    }

    // Returns the pool actually used for sequencing: the handler's pool when a
    // separate server-session handler exists, otherwise the externally supplied pool (may be null).
    public ConnectionPool getConnectionPool() {
        if ((getConnectionHandler() != null) && (getConnectionHandler() instanceof ServerSessionConnectionHandler)) {
            return ((ServerSessionConnectionHandler)getConnectionHandler()).getPool();
        }
        return this.connectionPool;
    }

    // Convenience overload: allocate against the owner session.
    public Object getNextValue(Class cls) {
        return getNextValue(getOwnerSession(), cls);
    }

    // Discard all preallocated sequence values (no-op if nothing was ever preallocated).
    public void initializePreallocated() {
        if (getPreallocationHandler() != null) {
            getPreallocationHandler().initializePreallocated();
        }
    }

    // Discard preallocated values for a single sequence name only.
    public void initializePreallocated(String seqName) {
        if (getPreallocationHandler() != null) {
            getPreallocationHandler().initializePreallocated(seqName);
        }
    }

    protected void setLocks(Map locks) {
        this.locks = locks;
    }

    protected Map getLocks() {
        return locks;
    }

    /**
     * Acquire a lock for the sequence name.
     * A lock should be, and only be, acquired when allocating new sequences from the database.
     */
    protected ConcurrencyManager acquireLock(String sequenceName) {
        // Lock-free fast path, then double-checked creation under the map's monitor
        // so at most one ConcurrencyManager exists per sequence name.
        ConcurrencyManager manager = getLocks().get(sequenceName);
        if (manager == null) {
            synchronized (getLocks()) {
                manager = getLocks().get(sequenceName);
                if (manager == null) {
                    manager = new ConcurrencyManager();
                    getLocks().put(sequenceName, manager);
                }
            }
        }
        // Caller is responsible for releasing the returned manager.
        manager.acquire();
        return manager;
    }

    // Resolve the sequence for a class via its descriptor's sequence-number name.
    protected Sequence getSequence(Class cls) {
        //** should check here that sequencing is used?
        String seqName = getOwnerSession().getDescriptor(cls).getSequenceNumberName();
        return getSequence(seqName);
    }

    // FINEST logging after a global preallocation; guarded so args are built only when logged.
    protected void logDebugPreallocation(String seqName, Object firstSequenceValue, Vector sequences) {
        if (getOwnerSession().shouldLog(SessionLog.FINEST, SessionLog.SEQUENCING)) {
            // the first value has been already removed from sequences vector
            Object[] args = { seqName, Integer.valueOf(sequences.size() + 1), firstSequenceValue, sequences.lastElement() };
            getOwnerSession().log(SessionLog.FINEST, SessionLog.SEQUENCING, "sequencing_preallocation", args);
        }
    }

    // FINEST logging of a transaction-local preallocation on the write session.
    protected void logDebugLocalPreallocation(AbstractSession writeSession, String seqName, Vector sequences, Accessor accessor) {
        if (writeSession.shouldLog(SessionLog.FINEST, SessionLog.SEQUENCING)) {
            Object[] args = { seqName, Integer.valueOf(sequences.size()), sequences.firstElement(), sequences.lastElement() };
            writeSession.log(SessionLog.FINEST, SessionLog.SEQUENCING, "sequencing_localPreallocation", args, accessor);
        }
    }

    // Base for the four allocation strategies; one concrete State is created per
    // (usesPreallocation, usesTransaction) combination actually needed.
    static abstract class State {
        abstract Object getNextValue(Sequence sequence, AbstractSession writeSession);

        // Overridden only by the state that needs after-commit callbacks.
        SequencingCallbackFactory getSequencingCallbackFactory() {
            return null;
        }

        // Log-friendly name: the inner-class simple name after the '$'.
        public String toString() {
            String name = getClass().getName();
            return name.substring(name.lastIndexOf('$') + 1);
        }
    }

    /**
     * Uses preallocation, uses transaction, no separate connection.
     * This is used for a DatabaseSession, or a ServerSession not using native sequencing,
     * and not using a sequence connection pool.
     * This is used by default for table sequencing, unless a sequence connection pool is specified,
     * however it should only be used if there is no non-JTA login available.
     * This will use the writeConnection, but use individual transactions per sequence allocation,
     * unless the unit of work is in an early transaction, or the connection is JTA (this may deadlock).
*/ class Preallocation_Transaction_NoAccessor_State extends State implements SequencingCallbackFactory { public class SequencingCallbackImpl implements SequencingCallback { Map localSequences = new HashMap(); /** * INTERNAL: * Called after transaction has committed (commit in non-jta case; after completion - jta case). * Should not be called after rollback. */ public void afterCommit(Accessor accessor) { afterCommitInternal(localSequences, accessor); } public Map getPreallocatedSequenceValues() { return localSequences; } } SequencingCallbackFactory getSequencingCallbackFactory() { return this; } /** * INTERNAL: * Creates SequencingCallback. */ public SequencingCallback createSequencingCallback() { return new SequencingCallbackImpl(); } /** * Release any locally allocated sequence back to the global sequence pool. */ void afterCommitInternal(Map localSequences, Accessor accessor) { Iterator it = localSequences.entrySet().iterator(); while(it.hasNext()) { Map.Entry entry = (Map.Entry)it.next(); String seqName = (String)entry.getKey(); Vector localSequenceForName = (Vector)entry.getValue(); if (!localSequenceForName.isEmpty()) { getPreallocationHandler().setPreallocated(seqName, localSequenceForName); // clear all localSequencesForName localSequenceForName.clear(); } } if(accessor != null) { getOwnerSession().log(SessionLog.FINEST, SessionLog.SEQUENCING, "sequencing_afterTransactionCommitted", null, accessor); } else { getOwnerSession().log(SessionLog.FINEST, SessionLog.SEQUENCING, "sequencing_afterTransactionCommitted", null); } } SequencingCallbackImpl getCallbackImpl(AbstractSession writeSession, Accessor accessor) { SequencingCallbackImpl seqCallbackImpl; if(writeSession.hasExternalTransactionController()) { // note that controller obtained from writeSession (not from ownerSession) - // the difference is important in case of ownerSession being a member of SessionBroker: // in that case only writeSession (which is either ClientSession or DatabaseSession) always has 
// the correct controller. seqCallbackImpl = (SequencingCallbackImpl)writeSession.getExternalTransactionController().getActiveSequencingCallback(getOwnerSession(), getSequencingCallbackFactory()); } else { seqCallbackImpl = (SequencingCallbackImpl)accessor.getSequencingCallback(getSequencingCallbackFactory()); } return seqCallbackImpl; } /** * Return the next sequence value. * First check the global pool, if empty then allocate new sequences locally. */ public Object getNextValue(Sequence sequence, AbstractSession writeSession) { String seqName = sequence.getName(); if(sequence.getPreallocationSize() > 1) { Queue sequencesForName = getPreallocationHandler().getPreallocated(seqName); // First grab the first sequence value without locking, a lock is only required if empty. Object sequenceValue = sequencesForName.poll(); if (sequenceValue != null) { return sequenceValue; } // KeepLocked indicates whether the sequence lock should be kept for the whole duration of this method. // Of course the lock should be released in any case when the method returns or throws an exception. // This is only used if a sequence transaction was begun by the unit of work, // and will be committed before the unit of work commit. boolean keepLocked = false; ConcurrencyManager lock = null; if (!getOwnerSession().getDatasourceLogin().shouldUseExternalTransactionController() && !writeSession.isInTransaction()) { // To prevent several threads from simultaneously allocating a separate bunch of // sequencing numbers each. With keepLocked==true the first thread locks out others // until it copies the obtained sequence numbers to the global storage. // Note that this optimization possible only in non-jts case when there is no transaction. 
lock = acquireLock(seqName); try { sequenceValue = sequencesForName.poll(); if (sequenceValue != null) { return sequenceValue; } writeSession.beginTransaction();//write accessor is set in begin keepLocked = true; } finally { if (!keepLocked) { lock.release(); } } } Accessor accessor; Vector localSequencesForName; if (!keepLocked) { writeSession.beginTransaction();//write accessor is set in begin } try { accessor = writeSession.getAccessor(); SequencingCallbackImpl seqCallbackImpl = getCallbackImpl(writeSession, accessor); Map localSequences = seqCallbackImpl.getPreallocatedSequenceValues(); localSequencesForName = (Vector)localSequences.get(seqName); if ((localSequencesForName == null) || localSequencesForName.isEmpty()) { localSequencesForName = sequence.getGeneratedVector(null, writeSession); localSequences.put(seqName, localSequencesForName); logDebugLocalPreallocation(writeSession, seqName, localSequencesForName, accessor); } } catch (RuntimeException ex) { if (keepLocked) { lock.release(); } try { // make sure to rollback the transaction we've begun writeSession.rollbackTransaction(); } catch (Exception rollbackException) { // ignore rollback exception } // don't eat the original exception throw ex; } try { try { // commitTransaction may copy preallocated sequence numbers // from localSequences to preallocationHandler: that happens // if it isn't a nested transaction, and sequencingCallback.afterCommit // method has been called. // In this case: // 1. localSequences corresponding to the accessor // has been removed from accessorToPreallocated; // 2. All its members are empty (therefore localSequenceForName is empty). 
writeSession.commitTransaction(); } catch (DatabaseException ex) { try { // make sure to rollback the transaction we've begun writeSession.rollbackTransaction(); } catch (Exception rollbackException) { // ignore rollback exception } // don't eat the original exception throw ex; } if (!localSequencesForName.isEmpty()) { // localSeqencesForName is not empty, that means // sequencingCallback has not been called. sequenceValue = localSequencesForName.remove(0); return sequenceValue; } else { // localSeqencesForName is empty, that means // sequencingCallback has been called. sequenceValue = sequencesForName.poll(); if (sequenceValue != null) { return sequenceValue; } return getNextValue(sequence, writeSession); } } finally { if(keepLocked) { lock.release(); } } } else { writeSession.beginTransaction(); try { // preallocation size is 1 - just return the first (and only) element of the allocated vector. Object sequenceValue = sequence.getGeneratedVector(null, writeSession).firstElement(); writeSession.commitTransaction(); return sequenceValue; } catch (RuntimeException ex) { try { // make sure to rollback the transaction we've begun writeSession.rollbackTransaction(); } catch (Exception rollbackException) { // ignore rollback exception } // don't eat the original exception throw ex; } } } } /** * Uses preallocation, uses transaction, and acquires an accessor. * This is used in a ServerSession with a sequence connection pool. * This is typically the default behavior. */ class Preallocation_Transaction_Accessor_State extends State { public Object getNextValue(Sequence sequence, AbstractSession writeSession) { String seqName = sequence.getName(); if(sequence.getPreallocationSize() > 1) { Queue sequencesForName = getPreallocationHandler().getPreallocated(seqName); // First try to get the next sequence value without locking. 
Object sequenceValue = sequencesForName.poll(); if (sequenceValue != null) { return sequenceValue; } // Sequences are empty, so must lock and allocate next batch of sequences. ConcurrencyManager lock = acquireLock(seqName); try { sequenceValue = sequencesForName.poll(); if (sequenceValue != null) { return sequenceValue; } // note that accessor.getLogin().shouldUseExternalTransactionController() // should be set to false Accessor accessor = getConnectionHandler().acquireAccessor(); try { accessor.beginTransaction(writeSession); try { Vector sequences = sequence.getGeneratedVector(accessor, writeSession); accessor.commitTransaction(writeSession); // Remove the first value before adding to the global cache to ensure this thread gets one. sequenceValue = sequences.remove(0); // copy remaining values to global cache. getPreallocationHandler().setPreallocated(seqName, sequences); logDebugPreallocation(seqName, sequenceValue, sequences); } catch (RuntimeException ex) { try { // make sure to rollback the transaction we've begun accessor.rollbackTransaction(writeSession); } catch (Exception rollbackException) { // ignore rollback exception } // don't eat the original exception throw ex; } } finally { getConnectionHandler().releaseAccessor(accessor); } } finally { lock.release(); } return sequenceValue; } else { // note that accessor.getLogin().shouldUseExternalTransactionController() // should be set to false Accessor accessor = getConnectionHandler().acquireAccessor(); try { accessor.beginTransaction(writeSession); try { // preallocation size is 1 - just return the first (and only) element of the allocated vector. 
Object sequenceValue = sequence.getGeneratedVector(accessor, writeSession).firstElement(); accessor.commitTransaction(writeSession); return sequenceValue; } catch (RuntimeException ex) { try { // make sure to rollback the transaction we've begun accessor.rollbackTransaction(writeSession); } catch (Exception rollbackException) { // ignore rollback exception } // don't eat the original exception throw ex; } } finally { getConnectionHandler().releaseAccessor(accessor); } } } } /** * Using preallocation, NoTransaction, NoAccessor. * This is used by native sequence objects. * No transaction is required as sequence objects are non-transactional. */ class Preallocation_NoTransaction_State extends State { public Object getNextValue(Sequence sequence, AbstractSession writeSession) { String seqName = sequence.getName(); if(sequence.getPreallocationSize() > 1) { Queue sequencesForName = getPreallocationHandler().getPreallocated(seqName); // First try to get the next sequence value without locking. Object sequenceValue = sequencesForName.poll(); if (sequenceValue != null) { return sequenceValue; } // Sequences are empty, so must lock and allocate next batch of sequences. ConcurrencyManager lock = acquireLock(seqName); try { sequenceValue = sequencesForName.poll(); if (sequenceValue != null) { return sequenceValue; } Vector sequences = sequence.getGeneratedVector(null, writeSession); // Remove the first value before adding to the global cache to ensure this thread gets one. sequenceValue = sequences.remove(0); // copy remaining values to global cache. getPreallocationHandler().setPreallocated(seqName, sequences); logDebugPreallocation(seqName, sequenceValue, sequences); } finally { lock.release(); } return sequenceValue; } else { // preallocation size is 1 - just return the first (and only) element of the allocated vector. return sequence.getGeneratedVector(null, writeSession).firstElement(); } } } /** * Using NoPreallocation, no transaction, no Accessor. 
* This is only used for identity sequencing when preallocation is not possible. * The writeSession is always in a transaction, so a transaction is never required. * Table or sequence object with preallocation size 1 still goes through the preallocation state. */ class NoPreallocation_State extends State { public Object getNextValue(Sequence sequence, AbstractSession writeSession) { return sequence.getGeneratedValue(null, writeSession); } } public void resetSequencing() { if (isConnected()) { onDisconnect(); onConnect(); } } /** * Initialize the sequences on login. */ public void onConnect() { if (isConnected()) { return; } if (!getOwnerSession().getProject().usesSequencing()) { return; } onConnectInternal(null); } /** * If sequencing is connected initialize the sequences used by descriptors, otherwise connect. */ public void onAddDescriptors(Collection descriptors) { if (!isConnected()) { onConnect(); return; } if (descriptors == null || descriptors.isEmpty()) { return; } onConnectInternal(descriptors); } /** * Initialize the sequences on login. */ protected void onConnectInternal(Collection descriptors) { // This method is called in two distinct cases. // // Connect case. // If descriptors == null then the sequencing has not been connected yet // and this method by onConnect method. // Nothing is allocated yet (connectedSequences, etc) and // therefore nAlreadyConnectedSequences = 0 // // AddDescriptors case. // If descriptors is not null then sequencing is already connected and this method // is called by onAddDescriptors method. // connectedSequences (and the rest of stuff allocated by onConnect) already exists. // Typically in this case nAlreadyConnectedSequences > 0 // (unless none sequences were connected by onConnect. int nAlreadyConnectedSequences = 0; if (connectedSequences != null) { nAlreadyConnectedSequences = connectedSequences.size(); } // These flags saved here to rollback the state of sequencing in case of failure. 
int whenShouldAcquireValueForAllOriginal = whenShouldAcquireValueForAll; boolean atLeastOneSequenceShouldUseTransactionOriginal = atLeastOneSequenceShouldUseTransaction; boolean atLeastOneSequenceShouldUsePreallocationOriginal = atLeastOneSequenceShouldUsePreallocation; onConnectSequences(descriptors); if (nAlreadyConnectedSequences == connectedSequences.size()) { // no sequences connected by onConnectSequences method - nothing to do return; } boolean onExceptionDisconnectPreallocationHandler = false; boolean onExceptionDisconnectConnectionHandler = false; boolean hasConnectionHandler = getConnectionHandler() != null; boolean hasPreallocationHandler = getPreallocationHandler() != null; try { // In AddDescriptors case the handler may have been already created if (!hasConnectionHandler) { if (!shouldUseSeparateConnection()) { setConnectionHandler(null); } else if (atLeastOneSequenceShouldUseTransaction) { if (getConnectionHandler() == null) { createConnectionHandler(); } if (getConnectionHandler() != null) { getConnectionHandler().onConnect(); onExceptionDisconnectConnectionHandler = true; } } } // In AddDescriptors case the handler may have been already created if (!hasPreallocationHandler) { if (atLeastOneSequenceShouldUsePreallocation) { if (getPreallocationHandler() == null) { createPreallocationHandler(); } getPreallocationHandler().onConnect(); onExceptionDisconnectPreallocationHandler = true; } } initializeStates(nAlreadyConnectedSequences); } catch (RuntimeException ex) { try { onDisconnectSequences(nAlreadyConnectedSequences); } catch (Exception ex2) { // Ignore } finally { whenShouldAcquireValueForAll = whenShouldAcquireValueForAllOriginal; atLeastOneSequenceShouldUseTransaction = atLeastOneSequenceShouldUseTransactionOriginal; atLeastOneSequenceShouldUsePreallocation = atLeastOneSequenceShouldUsePreallocationOriginal; } if (!hasConnectionHandler && getConnectionHandler() != null) { if (onExceptionDisconnectConnectionHandler) { 
getConnectionHandler().onDisconnect(); } setConnectionHandler(null); } if (!hasPreallocationHandler && getPreallocationHandler() != null) { if (onExceptionDisconnectPreallocationHandler) { getPreallocationHandler().onDisconnect(); } clearPreallocationHandler(); } throw ex; } // In AddDescriptors case locks may have been already created if (atLeastOneSequenceShouldUsePreallocation && getLocks() == null) { setLocks(new ConcurrentHashMap(20)); } // In AddDescriptors case the factory may have been already created and listeners initialized. boolean hasSequencingCallbackFactory = isSequencingCallbackRequired(); if (!hasSequencingCallbackFactory) { createSequencingCallbackFactory(); if(getOwnerSession().hasExternalTransactionController()) { getOwnerSession().getExternalTransactionController().initializeSequencingListeners(); } } // In AddDescriptors case sequencing is already set. if (descriptors == null) { if (getOwnerSession().isServerSession()) { setSequencingServer(this); } setSequencing(this); } logDebugSequencingConnected(nAlreadyConnectedSequences); } public void onDisconnect() { if (!isConnected()) { return; } setSequencing(null); setSequencingServer(null); setSequencingCallbackFactory(null); if(getOwnerSession().hasExternalTransactionController() && !getOwnerSession().hasBroker()) { getOwnerSession().getExternalTransactionController().clearSequencingListeners(); } setLocks(null); clearStates(); if (getConnectionHandler() != null) { getConnectionHandler().onDisconnect(); setConnectionHandler(null); } if (getPreallocationHandler() != null) { getPreallocationHandler().onDisconnect(); clearPreallocationHandler(); } onDisconnectSequences(0); getOwnerSession().log(SessionLog.FINEST, SessionLog.SEQUENCING, "sequencing_disconnected"); } protected PreallocationHandler getPreallocationHandler() { return preallocationHandler; } protected void createPreallocationHandler() { preallocationHandler = new PreallocationHandler(); } protected void clearPreallocationHandler() { 
preallocationHandler = null; } /* * If passed collection is null then connect all sequences used by owner session's descriptors. * Otherwise connect sequences used by passed descriptors. */ protected void onConnectSequences(Collection descriptors) { boolean isConnected = isConnected(); int nAlreadyConnectedSequences = 0; if (connectedSequences == null) { connectedSequences = new Vector(); } else { nAlreadyConnectedSequences = connectedSequences.size(); } boolean shouldUseTransaction = false; boolean shouldUsePreallocation = false; boolean shouldAcquireValueAfterInsert = false; if (descriptors == null) { descriptors = getOwnerSession().getDescriptors().values(); } Iterator itDescriptors = descriptors.iterator(); while (itDescriptors.hasNext()) { ClassDescriptor descriptor = (ClassDescriptor)itDescriptors.next(); // Find root sequence, because inheritance needs to be resolved here. // TODO: The way we initialize sequencing needs to be in line with descriptor init. ClassDescriptor parentDescriptor = descriptor; while (!parentDescriptor.usesSequenceNumbers() && parentDescriptor.isChildDescriptor()) { ClassDescriptor newDescriptor = getOwnerSession().getDescriptor(parentDescriptor.getInheritancePolicy().getParentClass()); // Avoid issue with error cases of self parent, or null parent. if ((newDescriptor == null) || (newDescriptor == parentDescriptor)) { break; } parentDescriptor = newDescriptor; } if (!parentDescriptor.usesSequenceNumbers()) { continue; } String seqName = parentDescriptor.getSequenceNumberName(); Sequence sequence = getSequence(seqName); if (sequence == null) { sequence = new DefaultSequence(seqName); getOwnerSession().getDatasourcePlatform().addSequence(sequence, isConnected); } // PERF: Initialize the sequence, this avoid having to look it up every time. 
descriptor.setSequence(sequence); if (connectedSequences.contains(sequence)) { continue; } try { if (sequence instanceof DefaultSequence && !connectedSequences.contains(getDefaultSequence())) { getDefaultSequence().onConnect(getOwnerSession().getDatasourcePlatform()); connectedSequences.add(nAlreadyConnectedSequences, getDefaultSequence()); shouldUseTransaction |= getDefaultSequence().shouldUseTransaction(); shouldUsePreallocation |= getDefaultSequence().shouldUsePreallocation(); shouldAcquireValueAfterInsert |= getDefaultSequence().shouldAcquireValueAfterInsert(); } sequence.onConnect(getOwnerSession().getDatasourcePlatform()); connectedSequences.addElement(sequence); shouldUseTransaction |= sequence.shouldUseTransaction(); shouldUsePreallocation |= sequence.shouldUsePreallocation(); shouldAcquireValueAfterInsert |= sequence.shouldAcquireValueAfterInsert(); } catch (RuntimeException ex) { // defaultSequence has to disconnect the last for (int i = connectedSequences.size() - 1; i >= nAlreadyConnectedSequences; i--) { try { Sequence sequenceToDisconnect = (Sequence)connectedSequences.elementAt(i); sequenceToDisconnect.onDisconnect(getOwnerSession().getDatasourcePlatform()); } catch (RuntimeException ex2) { //ignore } } if (nAlreadyConnectedSequences == 0) { connectedSequences = null; } throw ex; } } if (nAlreadyConnectedSequences == 0) { if (shouldAcquireValueAfterInsert && !shouldUsePreallocation) { whenShouldAcquireValueForAll = AFTER_INSERT; } else if (!shouldAcquireValueAfterInsert && shouldUsePreallocation) { whenShouldAcquireValueForAll = BEFORE_INSERT; } } else { if (whenShouldAcquireValueForAll == AFTER_INSERT) { if (!shouldAcquireValueAfterInsert || shouldUsePreallocation) { whenShouldAcquireValueForAll = UNDEFINED; } } else if (whenShouldAcquireValueForAll == BEFORE_INSERT) { if (shouldAcquireValueAfterInsert || !shouldUsePreallocation) { whenShouldAcquireValueForAll = UNDEFINED; } } } atLeastOneSequenceShouldUseTransaction |= shouldUseTransaction; 
atLeastOneSequenceShouldUsePreallocation |= shouldUsePreallocation; } /* * Keeps the first nAlreadyConnectedSequences sequences connected, * disconnects the rest. */ protected void onDisconnectSequences(int nAlreadyConnectedSequences) { RuntimeException exception = null; // defaultSequence has to disconnect the last for (int i = connectedSequences.size() - 1; i >= nAlreadyConnectedSequences; i--) { try { Sequence sequenceToDisconnect = (Sequence)connectedSequences.elementAt(i); sequenceToDisconnect.onDisconnect(getOwnerSession().getDatasourcePlatform()); } catch (RuntimeException ex) { if (exception == null) { exception = ex; } } } if (nAlreadyConnectedSequences == 0) { connectedSequences = null; whenShouldAcquireValueForAll = UNDEFINED; atLeastOneSequenceShouldUseTransaction = false; atLeastOneSequenceShouldUsePreallocation = false; } if (exception != null) { throw exception; } } protected void initializeStates(int nAlreadyConnectedSequences) { if (states == null) { states = new State[NUMBER_OF_STATES]; } int nSize = connectedSequences.size(); for (int i = nAlreadyConnectedSequences; i < nSize; i++) { Sequence sequence = (Sequence)connectedSequences.get(i); State state = getState(sequence.shouldUsePreallocation(), sequence.shouldUseTransaction()); if (state == null) { createState(sequence.shouldUsePreallocation(), sequence.shouldUseTransaction()); } } } protected void clearStates() { states = null; } protected int getStateId(boolean shouldUsePreallocation, boolean shouldUseTransaction) { if (!shouldUsePreallocation) { // Non-Oracle native sequencing uses this state return NOPREALLOCATION; } else if (!shouldUseTransaction) { // Oracle native sequencing uses this state return PREALLOCATION_NOTRANSACTION; } else if (getConnectionHandler() == null) { // TableSequence and UnaryTableSequence in case there is no separate connection(s) available use this state return PREALLOCATION_TRANSACTION_NOACCESSOR; } else/*if(getConnectionHandler()!=null)*/ { // TableSequence and 
UnaryTableSequence in case there is separate connection(s) available use this state return PREALLOCATION_TRANSACTION_ACCESSOR; } } protected State getState(boolean shouldUsePreallocation, boolean shouldUseTransaction) { return states[getStateId(shouldUsePreallocation, shouldUseTransaction)]; } protected void createState(boolean shouldUsePreallocation, boolean shouldUseTransaction) { if (!shouldUsePreallocation) { // Non-Oracle native sequencing uses this state states[NOPREALLOCATION] = new NoPreallocation_State(); } else if (!shouldUseTransaction) { // Oracle native sequencing uses this state states[PREALLOCATION_NOTRANSACTION] = new Preallocation_NoTransaction_State(); } else if (getConnectionHandler() == null) { // TableSequence and UnaryTableSequence in case there is no separate connection(s) available use this state states[PREALLOCATION_TRANSACTION_NOACCESSOR] = new Preallocation_Transaction_NoAccessor_State(); } else/*if(getConnectionHandler()!=null)*/ { // TableSequence and UnaryTableSequence in case there is separate connection(s) available use this state states[PREALLOCATION_TRANSACTION_ACCESSOR] = new Preallocation_Transaction_Accessor_State(); } } protected void createSequencingCallbackFactory() { if (states[PREALLOCATION_TRANSACTION_NOACCESSOR] != null) { setSequencingCallbackFactory(states[PREALLOCATION_TRANSACTION_NOACCESSOR].getSequencingCallbackFactory()); } else { setSequencingCallbackFactory(null); } } public Object getNextValue(AbstractSession writeSession, Class cls) { Sequence sequence = getSequence(cls); State state = getState(sequence.shouldUsePreallocation(), sequence.shouldUseTransaction()); return state.getNextValue(sequence, writeSession); } protected void logDebugSequencingConnected(int nAlreadyConnectedSequences) { Vector[] sequenceVectors = new Vector[NUMBER_OF_STATES]; int size = connectedSequences.size(); for (int i = nAlreadyConnectedSequences; i < size; i++) { Sequence sequence = (Sequence)connectedSequences.get(i); int stateId = 
getStateId(sequence.shouldUsePreallocation(), sequence.shouldUseTransaction()); Vector v = sequenceVectors[stateId]; if (v == null) { v = new Vector(); sequenceVectors[stateId] = v; } v.addElement(sequence); } for (int i = 0; i < NUMBER_OF_STATES; i++) { Vector v = sequenceVectors[i]; if (v != null) { getOwnerSession().log(SessionLog.FINEST, SessionLog.SEQUENCING, "sequencing_connected", states[i]); for (int j = 0; j < v.size(); j++) { Sequence sequence = (Sequence)v.elementAt(j); Object[] args = { sequence.getName(), Integer.toString(sequence.getPreallocationSize()), Integer.toString(sequence.getInitialValue())}; getOwnerSession().log(SessionLog.FINEST, SessionLog.SEQUENCING, "sequence_without_state", args); } } } } public int getPreallocationSize() { return getDefaultSequence().getPreallocationSize(); } public int getInitialValue() { return getDefaultSequence().getInitialValue(); } public int whenShouldAcquireValueForAll() { return whenShouldAcquireValueForAll; } protected Sequence getDefaultSequence() { return getOwnerSession().getDatasourcePlatform().getDefaultSequence(); } protected Sequence getSequence(String seqName) { return getOwnerSession().getDatasourcePlatform().getSequence(seqName); } public void setConnectionPool(ConnectionPool connectionPool) { this.connectionPool = connectionPool; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/RemoteConnectionSequencing.java0000664000000000000000000000532012216173126030525 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import java.util.Hashtable; import org.eclipse.persistence.internal.sequencing.Sequencing; import org.eclipse.persistence.internal.sessions.remote.RemoteConnection; import org.eclipse.persistence.internal.sessions.remote.RemoteFunctionCall; import org.eclipse.persistence.internal.sessions.remote.SequencingFunctionCall; /* * RemoteConnectionSequencing class provides Sequencing through RemoteConnection. * It caches some info and communicates with Sequencing object * on its master session by sending SequencingFunctionCall objects * through the RemoteConnection. * Note that individual SequencingFunctionCalls * are implemented as static inner classes in SequenceFunctionCall class: * like SequencingFunctionCall.DoesExist. 
*/ class RemoteConnectionSequencing implements Sequencing { protected RemoteConnection remoteConnection; protected Hashtable classToShouldAcquireValueAfterInsert; protected int whenShouldAcquireValueForAll; public static boolean masterSequencingExists(RemoteConnection con) { return ((Boolean)con.getSequenceNumberNamed(new SequencingFunctionCall.DoesExist())).booleanValue(); } public RemoteConnectionSequencing(RemoteConnection remoteConnection) { this.remoteConnection = remoteConnection; whenShouldAcquireValueForAll = ((Integer)processFunctionCall(new SequencingFunctionCall.WhenShouldAcquireValueForAll())).intValue(); if (whenShouldAcquireValueForAll == UNDEFINED) { classToShouldAcquireValueAfterInsert = new Hashtable(20); } } public int whenShouldAcquireValueForAll() { return whenShouldAcquireValueForAll; } public Object getNextValue(Class cls) { return processFunctionCall(new SequencingFunctionCall.GetNextValue(cls)); } protected Object processFunctionCall(RemoteFunctionCall call) { return remoteConnection.getSequenceNumberNamed(call); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/ClientSessionSequencing.java0000664000000000000000000000535612216173126030045 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import org.eclipse.persistence.internal.sequencing.Sequencing; import org.eclipse.persistence.sessions.server.ClientSession; /** * ClientSessionSequencing is private to EclipseLink. * It provides sequencing for ClientSession. * It contains a reference to SequencingServer object owned by * ClientSession's parent ServerSession. * * @see SequencingServer * @see org.eclipse.persistence.sessions.server.ClientSession * */ class ClientSessionSequencing implements Sequencing { // ownerClientSession protected ClientSession clientSession; // SequencingServer owned by clientSession's parent ServerSession protected SequencingServer sequencingServer; /** * INTERNAL: * Takes a potential owner - ClientSession as an argument. * This static method is called before an instance of this class is created. * The goal is to verify whether the instance of ClientSessionSequencing should be created. */ public static boolean sequencingServerExists(ClientSession cs) { return cs.getParent().getSequencingServer() != null; } /** * INTERNAL: * Takes an owner - ClientSession as an argument. */ public ClientSessionSequencing(ClientSession clientSession) { this.clientSession = clientSession; sequencingServer = clientSession.getParent().getSequencingServer(); } /** * INTERNAL: * Simply calls the same method on SequencingServer */ public int whenShouldAcquireValueForAll() { return sequencingServer.whenShouldAcquireValueForAll(); } /** * INTERNAL: * This method is the reason for this class to exist: * SequencingServer.getNextValue takes two arguments * the first argument being a session which owns write connection * (either DatabaseSession or ClientSession). 
*/ public Object getNextValue(Class cls) { return sequencingServer.getNextValue(clientSession, cls); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SequencingHome.java0000664000000000000000000000336112216173126026145 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import java.util.Collection; import org.eclipse.persistence.sequencing.SequencingControl; /** *

* Purpose: Define interface for getting all sequencing interfaces. *

* Description: This interface accessed through * DatabaseSession.getSequencingHome() method. *

* Responsibilities: *

    *
  • Provides a hub for all sequencing interfaces used by DatabaseSession. *
* @see org.eclipse.persistence.internal.sessions.DatabaseSessionImpl * @see Sequencing * @see org.eclipse.persistence.sequencing.SequencingControl * @see SequencingServer * @see SequencingCallback */ public interface SequencingHome extends SequencingLogInOut { public Sequencing getSequencing(); public SequencingControl getSequencingControl(); public SequencingServer getSequencingServer(); public boolean isSequencingCallbackRequired(); public void onAddDescriptors(Collection descriptors); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/sequencing/SequencingFactory.java0000664000000000000000000000624412216173126026667 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.sequencing; import org.eclipse.persistence.sessions.server.ClientSession; import org.eclipse.persistence.sessions.remote.DistributedSession; import org.eclipse.persistence.internal.sessions.remote.RemoteConnection; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl; import org.eclipse.persistence.sessions.broker.SessionBroker; /** * SequencingFactory is private to EclipseLink. * It instantiates sequencing classes for sessions. * It allows sequencing classes to be encapsulated in * this package. 
* * @see ClientSessionSequencing * @see RemoteConnectionSequencing * @see SessionBrokerSequencing * @see SequencingManager * */ public class SequencingFactory { /** * INTERNAL: * Takes a potential owner - a Session, returns Sequencing object. * Note that before creating a Sequencing object there is a check performed * to determine whether the object could be created. */ public static Sequencing createSequencing(AbstractSession session) { Sequencing sequencing = null; if (session.isClientSession()) { ClientSession cs = (ClientSession)session; if (ClientSessionSequencing.sequencingServerExists(cs)) { sequencing = new ClientSessionSequencing(cs); } } else if (session.isRemoteSession()) { RemoteConnection con = ((DistributedSession)session).getRemoteConnection(); if (RemoteConnectionSequencing.masterSequencingExists(con)) { sequencing = new RemoteConnectionSequencing(con); } } else if (session.isBroker()) { SessionBroker br = (SessionBroker)session; if (SessionBrokerSequencing.atLeastOneSessionHasSequencing(br)) { sequencing = new SessionBrokerSequencing(br); } } return sequencing; } /** * INTERNAL: * Takes a potential owner - a DatabaseSession, returns SequencingHome object. * Only DatabaseSession and ServerSession should be passed (not SessionBroker). */ public static SequencingHome createSequencingHome(DatabaseSessionImpl ownerSession) { SequencingHome home = null; if (!ownerSession.isBroker()) { home = new SequencingManager(ownerSession); } return home; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/0000775000000000000000000000000012216174372022537 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/NoIndirectionPolicy.java0000664000000000000000000003631512216173126027332 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.descriptors.changetracking.ChangeTracker; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.indirection.ValueHolderInterface; import org.eclipse.persistence.internal.queries.InterfaceContainerPolicy; import org.eclipse.persistence.internal.sessions.remote.*; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; /** *

Purpose

: * NoIndirectionPolicy implements the behavior necessary for a * a ForeignReferenceMapping (or TransformationMapping) to * directly use domain objects, as opposed to ValueHolders. * * @see ForeignReferenceMapping * @author Mike Norman * @since TOPLink/Java 2.5 */ public class NoIndirectionPolicy extends IndirectionPolicy { /** * INTERNAL: * Construct a new indirection policy. */ public NoIndirectionPolicy() { super(); } /** * INTERNAL: This method can be used when an Indirection Object is required * to be built from a provided ValueHolderInterface object. This may be used * for custom value holder types. Certain policies like the * TransparentIndirectionPolicy may wrap the valueholder in another object. */ public Object buildIndirectObject(ValueHolderInterface valueHolder){ return valueHolder.getValue(); } /** * INTERNAL: * Return a clone of the attribute. * @param buildDirectlyFromRow indicates that we are building the clone directly * from a row as opposed to building the original from the row, putting it in * the shared cache, and then cloning the original. */ public Object cloneAttribute(Object attributeValue, Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession, boolean buildDirectlyFromRow) { // Since valueFromRow was called with the UnitOfWork, attributeValue // is already a registered result. 
if (buildDirectlyFromRow) { return attributeValue; } if (!cloningSession.isUnitOfWork()){ return mapping.buildContainerClone(attributeValue, cloningSession); } boolean isExisting = !cloningSession.isUnitOfWork() || (((UnitOfWorkImpl) cloningSession).isObjectRegistered(clone) && (!(((UnitOfWorkImpl)cloningSession).isOriginalNewObject(original)))); return this.getMapping().buildCloneForPartObject(attributeValue, original, cacheKey, clone, cloningSession, refreshCascade, isExisting, isExisting);// only assume from shared cache if it is existing } /** * INTERNAL: * Return whether the collection type is appropriate for the indirection policy. * In this case, the type MUST be a Vector (or, in the case of jdk1.2, * Collection or Map). */ protected boolean collectionTypeIsValid(Class collectionType) { return getCollectionMapping().getContainerPolicy().isValidContainerType(collectionType); } /** * INTERNAL: * Return the reference row for the reference object. * This allows the new row to be built without instantiating * the reference object. * Return null if the object has already been instantiated. */ public AbstractRecord extractReferenceRow(Object referenceObject) { return null; } /** * INTERNAL: * An object has been serialized from the server to the client. * Replace the transient attributes of the remote value holders * with client-side objects. */ public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { this.mapping.fixRealObjectReferences(object, objectDescriptors, processedObjects, query, session); } /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. */ @Override public Object getOriginalIndirectionObject(Object unitOfWorkIndirectionObject, AbstractSession session) { // This code appears broken, but actually is unreachable because // only called when indirection is true. 
return unitOfWorkIndirectionObject; } /** * INTERNAL: Return the original valueHolder object. Access to the * underlying valueholder may be required when serializing the valueholder * or converting the valueHolder to another type. */ @Override public Object getOriginalValueHolder(Object unitOfWorkIndirectionObject, AbstractSession session){ return unitOfWorkIndirectionObject; } /** * INTERNAL: * Return the "real" attribute value, as opposed to any wrapper. * This will trigger the wrapper to instantiate the value. */ public Object getRealAttributeValueFromObject(Object object, Object attribute) { return attribute; } /** * INTERNAL: * Extract and return the appropriate value from the * specified remote value holder. */ public Object getValueFromRemoteValueHolder(RemoteValueHolder remoteValueHolder) { throw DescriptorException.invalidIndirectionPolicyOperation(this, "getValueFromRemoteValueHolder"); } /** * INTERNAL * Replace the client value holder with the server value holder, * after copying some of the settings from the client value holder. */ public void mergeRemoteValueHolder(Object clientSideDomainObject, Object serverSideDomainObject, org.eclipse.persistence.internal.sessions.MergeManager mergeManager) { throw DescriptorException.invalidIndirectionPolicyOperation(this, "mergeRemoteValueHolder"); } /** * INTERNAL: * Return the null value of the appropriate attribute. That is, the * field from the database is NULL, return what should be * placed in the object's attribute as a result. */ public Object nullValueFromRow() { return null; } /** * INTERNAL: * Return whether the specified object is instantiated. */ public boolean objectIsInstantiated(Object object) { return true; } /** * INTERNAL: * Return whether the specified object can be instantiated without database access. */ public boolean objectIsEasilyInstantiated(Object object) { return true; } /** * INTERNAL: * Return whether the type is appropriate for the indirection policy. 
* In this case, the attribute type CANNOT be ValueHolderInterface. */ protected boolean typeIsValid(Class attributeType) { return attributeType != ClassConstants.ValueHolderInterface_Class; } /** * INTERNAL: * Return whether the indirection policy actually uses indirection. * Here, we must reply false. */ public boolean usesIndirection() { return false; } /** * INTERNAL: * Verify that attributeType is correct for the * indirection policy. If it is incorrect, add an exception to the * integrity checker. * In this case, the attribute type CANNOT be ValueHolderInterface. */ public void validateDeclaredAttributeType(Class attributeType, IntegrityChecker checker) throws DescriptorException { super.validateDeclaredAttributeType(attributeType, checker); if (!this.typeIsValid(attributeType)) { checker.handleError(DescriptorException.attributeAndMappingWithoutIndirectionMismatch(this.mapping)); } } /** * INTERNAL: * Verify that attributeType is an appropriate collection type for the * indirection policy. If it is incorrect, add an exception to the integrity checker. * In this case, the type MUST be a Vector (or, in the case of jdk1.2, * Collection or Map). */ public void validateDeclaredAttributeTypeForCollection(Class attributeType, IntegrityChecker checker) throws DescriptorException { super.validateDeclaredAttributeTypeForCollection(attributeType, checker); if (!this.collectionTypeIsValid(attributeType)) { InterfaceContainerPolicy policy = (InterfaceContainerPolicy)getCollectionMapping().getContainerPolicy(); checker.handleError(DescriptorException.attributeTypeNotValid(this.getCollectionMapping(), policy.getInterfaceType())); } } /** * INTERNAL: * Verify that getter returnType is correct for the * indirection policy. If it is incorrect, add an exception * to the integrity checker. * In this case, the return type CANNOT be ValueHolderInterface. 
*/ public void validateGetMethodReturnType(Class returnType, IntegrityChecker checker) throws DescriptorException { super.validateGetMethodReturnType(returnType, checker); if (!this.typeIsValid(returnType)) { checker.handleError(DescriptorException.returnAndMappingWithoutIndirectionMismatch(this.mapping)); } } /** * INTERNAL: * Verify that getter returnType is an appropriate collection type for the * indirection policy. If it is incorrect, add an exception to the integrity checker. * In this case, the type MUST be a Vector (or, in the case of jdk1.2, * Collection or Map). */ public void validateGetMethodReturnTypeForCollection(Class returnType, IntegrityChecker checker) throws DescriptorException { super.validateGetMethodReturnTypeForCollection(returnType, checker); if (!this.collectionTypeIsValid(returnType)) { checker.handleError(DescriptorException.getMethodReturnTypeNotValid(getCollectionMapping())); } } /** * INTERNAL: * Verify that setter parameterType is correct for the * indirection policy. If it is incorrect, add an exception * to the integrity checker. * In this case, the parameter type CANNOT be ValueHolderInterface. */ public void validateSetMethodParameterType(Class parameterType, IntegrityChecker checker) throws DescriptorException { super.validateSetMethodParameterType(parameterType, checker); if (!this.typeIsValid(parameterType)) { checker.handleError(DescriptorException.parameterAndMappingWithoutIndirectionMismatch(this.mapping)); } } /** * INTERNAL: * Verify that setter parameterType is an appropriate collection type for the * indirection policy. If it is incorrect, add an exception to the integrity checker. * In this case, the type MUST be a Vector (or, in the case of jdk1.2, * Collection or Map). 
*/ public void validateSetMethodParameterTypeForCollection(Class parameterType, IntegrityChecker checker) throws DescriptorException { super.validateSetMethodParameterTypeForCollection(parameterType, checker); if (!this.collectionTypeIsValid(parameterType)) { checker.handleError(DescriptorException.setMethodParameterTypeNotValid(getCollectionMapping())); } } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the batchQuery. * In this case, extract the result from the query. */ public Object valueFromBatchQuery(ReadQuery batchQuery, AbstractRecord row, ObjectLevelReadQuery originalQuery, CacheKey parentCacheKey) { return getForeignReferenceMapping().extractResultFromBatchQuery(batchQuery, parentCacheKey, row, originalQuery.getSession(), originalQuery); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by invoking the mapping's AttributeTransformer */ public Object valueFromMethod(Object object, AbstractRecord row, AbstractSession session) { return getTransformationMapping().invokeAttributeTransformer(row, object, session); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the query. * In this case, wrap the query in a ValueHolder for later invocation. */ public Object valueFromQuery(ReadQuery query, AbstractRecord row, Object sourceObject, AbstractSession session) { return valueFromQuery(query, row, session); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the query. * In this case, simply execute the query and return its results. */ public Object valueFromQuery(ReadQuery query, AbstractRecord row, AbstractSession session) { return session.executeQuery(query, row); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the row. * In this case, simply return the object. 
*/ public Object valueFromRow(Object object) { return object; } /** * Set the value of the appropriate attribute of target to attributeValue. * In this case, place the value inside the target's ValueHolder. * if trackChanges is true, set the value in the object as if the user was setting it. Allow change tracking to pick up the change. */ @Override public void setRealAttributeValueInObject(Object target, Object attributeValue, boolean trackChanges) { Object oldValue = null; if (trackChanges && (target instanceof ChangeTracker)) { oldValue = getRealAttributeValueFromObject(target, mapping.getAttributeValueFromObject(target)); } setRealAttributeValueInObject(target, attributeValue); if (trackChanges && (target instanceof ChangeTracker)) { PropertyChangeListener listener = ((ChangeTracker) target)._persistence_getPropertyChangeListener(); if (listener != null && attributeValue != oldValue) { listener.propertyChange(new PropertyChangeEvent(target, mapping.getAttributeName(), oldValue, attributeValue)); } } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/UnitOfWorkQueryValueHolder.java0000664000000000000000000002631112216173126030631 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.util.Collection; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.*; /** * UnitOfWorkQueryValueHolder wraps a database-stored object and * implements behavior to access it. The object is read from * the database by invoking a user-specified query. * This value holder is used only in the unit of work. * * @author Sati */ public class UnitOfWorkQueryValueHolder extends UnitOfWorkValueHolder { protected UnitOfWorkQueryValueHolder() { super(); } protected UnitOfWorkQueryValueHolder(ValueHolderInterface attributeValue, Object clone, DatabaseMapping mapping, UnitOfWorkImpl unitOfWork) { super(attributeValue, clone, mapping, unitOfWork); } public UnitOfWorkQueryValueHolder(ValueHolderInterface attributeValue, Object clone, ForeignReferenceMapping mapping, AbstractRecord row, UnitOfWorkImpl unitOfWork) { this(attributeValue, clone, mapping, unitOfWork); this.row = row; } /** * Backup the clone attribute value. */ protected Object buildBackupCloneFor(Object cloneAttributeValue) { return this.mapping.buildBackupCloneForPartObject(cloneAttributeValue, null, null, getUnitOfWork()); } /** * Clone the original attribute value. 
*/ public Object buildCloneFor(Object originalAttributeValue) { Integer refreshCascade = null; if (wrappedValueHolder instanceof QueryBasedValueHolder){ refreshCascade = ((QueryBasedValueHolder)getWrappedValueHolder()).getRefreshCascadePolicy(); } return this.mapping.buildCloneForPartObject(originalAttributeValue, null, null, this.relationshipSourceObject, getUnitOfWork(), refreshCascade, true, true); } /** * Ensure that the backup value holder is populated. */ public void setValue(Object theValue) { // Must force instantiation to be able to compare with the old value. if (!this.isInstantiated) { instantiate(); } Object oldValue = getValue(); super.setValue(theValue); updateForeignReferenceSet(theValue, oldValue); } /** * INTERNAL: * Here we now must check for bi-directional relationship. * If the mapping has a relationship partner then we must maintain the original relationship. * We only worry about ObjectReferenceMappings as the collections mappings will be handled by transparentIndirection */ public void updateForeignReferenceRemove(Object value) { DatabaseMapping sourceMapping = this.getMapping(); if (sourceMapping == null) { //mapping is a transient attribute. If it does not exist then we have been serialized return; } if (sourceMapping.isPrivateOwned()) { // don't null out backpointer on private owned relationship because it will cause an // extra update. return; } // ForeignReferenceMapping partner = (ForeignReferenceMapping)getMapping().getRelationshipPartner(); ForeignReferenceMapping partner = this.getRelationshipPartnerFor(value); if (partner != null) { if (value != null) { Object unwrappedValue = partner.getDescriptor().getObjectBuilder().unwrapObject(value, getSession()); Object oldParent = partner.getRealAttributeValueFromObject(unwrappedValue, getSession()); Object sourceObject = getRelationshipSourceObject(); if (oldParent == null) { // value has already been set return; } // PERF: If the collection is not instantiated, then do not instantiated it. 
if (partner.isCollectionMapping()) { if ((!(oldParent instanceof IndirectContainer)) || ((IndirectContainer)oldParent).isInstantiated()) { if (!partner.getContainerPolicy().contains(sourceObject, oldParent, getSession())) { // value has already been set return; } } } if (partner.isObjectReferenceMapping()) { // Check if it's already been set to null partner.setRealAttributeValueInObject(unwrappedValue, null); } else if (partner.isCollectionMapping()) { // If it is not in the collection then it has already been removed. partner.getContainerPolicy().removeFrom(sourceObject, oldParent, getSession()); } } } } /** * INTERNAL: * Here we now must check for bi-directional relationship. * If the mapping has a relationship partner then we must maintain the original relationship. * We only worry about ObjectReferenceMappings as the collections mappings will be handled by transparentIndirection */ public void updateForeignReferenceSet(Object value, Object oldValue) { if ((value != null) && (value instanceof Collection)) { //I'm passing a collection into the valueholder not an object return; } if (getMapping() == null) { //mapping is a transient attribute. If it does not exist then we have been serialized return; } // ForeignReferenceMapping partner = (ForeignReferenceMapping)getMapping().getRelationshipPartner(); ForeignReferenceMapping partner = this.getRelationshipPartnerFor(value); if (partner != null) { if (value != null) { Object unwrappedValue = partner.getDescriptor().getObjectBuilder().unwrapObject(value, getSession()); Object oldParent = partner.getRealAttributeValueFromObject(unwrappedValue, getSession()); Object sourceObject = getRelationshipSourceObject(); Object wrappedSource = getMapping().getDescriptor().getObjectBuilder().wrapObject(sourceObject, getSession()); if (oldParent == sourceObject) { // value has already been set return; } // PERF: If the collection is not instantiated, then do not instantiated it. 
if (partner.isCollectionMapping()) { if ((!(oldParent instanceof IndirectContainer)) || ((IndirectContainer)oldParent).isInstantiated()) { if (partner.getContainerPolicy().contains(sourceObject, oldParent, getSession())) { // value has already been set return; } } } // Set the Object that was referencing this value to reference null, or remove value from its collection if (oldParent != null) { if (getMapping().isObjectReferenceMapping()) { if (!partner.isCollectionMapping()) { // If the back pointer is a collection it's OK that I'm adding myself into the collection ((ObjectReferenceMapping)getMapping()).setRealAttributeValueInObject(oldParent, null); } } else if (getMapping().isCollectionMapping() && (!partner.isManyToManyMapping())) { getMapping().getContainerPolicy().removeFrom(unwrappedValue, getMapping().getRealAttributeValueFromObject(oldParent, getSession()), getSession()); } } if (oldValue != null) { // CR 3487 Object unwrappedOldValue = partner.getDescriptor().getObjectBuilder().unwrapObject(oldValue, getSession()); // if this object was referencing a different object reset the back pointer on that object if (partner.isObjectReferenceMapping()) { partner.setRealAttributeValueInObject(unwrappedOldValue, null); } else if (partner.isCollectionMapping()) { partner.getContainerPolicy().removeFrom(sourceObject, partner.getRealAttributeValueFromObject(unwrappedOldValue, getSession()), getSession()); } } // Now set the back reference of the value being passed in to point to this object if (partner.isObjectReferenceMapping()) { partner.setRealAttributeValueInObject(unwrappedValue, wrappedSource); } else if (partner.isCollectionMapping()) { partner.getContainerPolicy().addInto(wrappedSource, oldParent, getSession()); } } else { updateForeignReferenceRemove(oldValue); } } } /** * Helper method to retrieve the relationship partner mapping. This will take inheritance * into account and return the mapping associated with correct subclass if necessary. 
This * is needed for EJB 2.0 inheritance */ private ForeignReferenceMapping getRelationshipPartnerFor(Object partnerObject) { ForeignReferenceMapping partner = (ForeignReferenceMapping)getMapping().getRelationshipPartner(); if ((partner == null) || (partnerObject == null)) { // no partner, nothing to do return partner; } // if the target object is not an instance of the class type associated with the partner // mapping, try and look up the same partner mapping but as part of the partnerObject's // descriptor. Only check if inheritance is involved... if (partner.getDescriptor().hasInheritance()) { ClassDescriptor partnerObjectDescriptor = this.getSession().getDescriptor(partnerObject); if (!(partner.getDescriptor().getJavaClass().isAssignableFrom(partnerObjectDescriptor.getJavaClass()))) { return (ForeignReferenceMapping)partnerObjectDescriptor.getObjectBuilder().getMappingForAttributeName(partner.getAttributeName()); } } return partner; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/ProxyIndirectionPolicy.java0000664000000000000000000005072512216173126030100 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
*
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.internal.indirection;

import java.lang.reflect.Proxy;
import java.security.AccessController;
import org.eclipse.persistence.internal.descriptors.DescriptorIterator;
import org.eclipse.persistence.internal.identitymaps.CacheKey;
import java.util.*;
import org.eclipse.persistence.internal.security.PrivilegedAccessHelper;
import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass;
import org.eclipse.persistence.internal.sessions.AbstractRecord;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.internal.sessions.MergeManager;
import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.exceptions.DescriptorException;
import org.eclipse.persistence.exceptions.IntegrityChecker;
import org.eclipse.persistence.indirection.ValueHolder;
import org.eclipse.persistence.indirection.ValueHolderInterface;
import org.eclipse.persistence.queries.ReadQuery;
import org.eclipse.persistence.queries.ObjectLevelReadQuery;
import org.eclipse.persistence.sessions.remote.DistributedSession;
import org.eclipse.persistence.internal.sessions.remote.*;

/**
 * <h2>ProxyIndirectionPolicy</h2>
 *
 * Define the behavior for Proxy Indirection.
 * <p>
 * Proxy Indirection uses the <code>Proxy</code> and <code>InvocationHandler</code> features
 * of JDK 1.3 to provide "transparent indirection" for 1:1 relationships. In order to use Proxy
 * Indirection:
 * <ul>
 * <li>The target class must implement at least one public interface</li>
 * <li>The attribute on the source class must be typed as that public interface</li>
 * </ul>
 * In this policy, proxy objects are returned during object creation. When a message other than
 * <code>toString</code> is called on the proxy the real object data is retrieved from the database.
 *
 * @see org.eclipse.persistence.internal.indirection.ProxyIndirectionHandler
 * @author Rick Barkhouse
 * @since TopLink 3.0
 */
public class ProxyIndirectionPolicy extends BasicIndirectionPolicy {

    // The public interfaces the generated dynamic proxy will implement.
    private Class[] targetInterfaces;

    public ProxyIndirectionPolicy(Class[] targetInterfaces) {
        this.targetInterfaces = targetInterfaces;
    }

    public ProxyIndirectionPolicy() {
        this.targetInterfaces = new Class[] { };
    }

    /**
     * INTERNAL:
     * Nothing required.
     */
    public void initialize() {
        // Nothing required
    }

    /**
     * Reset the wrapper used to store the value.
     */
    public void reset(Object target) {
        // Nothing required.
    }

    /**
     * INTERNAL:
     * Return if targetInterfaces is not empty.
     */
    public boolean hasTargetInterfaces() {
        return (targetInterfaces != null) && (targetInterfaces.length != 0);
    }

    /**
     * INTERNAL:
     * Return the value to be stored in the object's attribute.
     * This will be a proxy object.
     */
    public Object valueFromRow(Object object) {
        ValueHolderInterface valueHolder = new ValueHolder(object);
        return ProxyIndirectionHandler.newProxyInstance(object.getClass(), targetInterfaces, valueHolder);
    }

    /**
     * INTERNAL:
     * Return the value to be stored in the object's attribute.
     * This will be a proxy object.
*/
    public Object valueFromQuery(ReadQuery query, AbstractRecord row, AbstractSession session) {
        ClassDescriptor descriptor = null;
        try {
            // Need an instance of the implementing class
            //CR#3838
            descriptor = session.getDescriptor(query.getReferenceClass());
            if (descriptor.isDescriptorForInterface()) {
                // Interface descriptor: use the first concrete child descriptor instead.
                descriptor = (ClassDescriptor)descriptor.getInterfacePolicy().getChildDescriptors().firstElement();
            }
        } catch (Exception e) {
            // NOTE(review): any failure during descriptor lookup is silently swallowed and
            // null returned instead of a proxy — confirm this best-effort behavior is intended.
            return null;
        }
        ValueHolderInterface valueHolder = new QueryBasedValueHolder(query, row, session);
        return ProxyIndirectionHandler.newProxyInstance(descriptor.getJavaClass(), targetInterfaces, valueHolder);
    }

    /**
     * INTERNAL:
     * Return the value to be stored in the object's attribute.
     * This value is determined by invoking the appropriate method on the object and passing it the
     * row and session.
     */
    public Object valueFromMethod(Object object, AbstractRecord row, AbstractSession session) {
        ValueHolderInterface valueHolder = new TransformerBasedValueHolder(this.getTransformationMapping().getAttributeTransformer(), object, row, session);
        return ProxyIndirectionHandler.newProxyInstance(object.getClass(), targetInterfaces, valueHolder);
    }

    /**
     * INTERNAL:
     * Return the value to be stored in the object's attribute.
     * This value is determined by the batch query.
     *
     * NOTE: Currently not supported anyway.
*/
    public Object valueFromBatchQuery(ReadQuery batchQuery, AbstractRecord row, ObjectLevelReadQuery originalQuery, CacheKey parentCacheKey) {
        Object object;
        try {
            // Need an instance of the implementing class
            ClassDescriptor d = originalQuery.getDescriptor();
            if (d.isDescriptorForInterface()) {
                // Interface descriptor: fall back to the first concrete child descriptor.
                d = (ClassDescriptor)originalQuery.getDescriptor().getInterfacePolicy().getChildDescriptors().firstElement();
            }
            if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
                object = AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(d.getJavaClass()));
            }else{
                object = PrivilegedAccessHelper.newInstanceFromClass(d.getJavaClass());
            }
        } catch (Exception e) {
            //org.eclipse.persistence.internal.helper.Helper.toDo("*** Should probably throw some sort of TopLink exception here. ***");
            e.printStackTrace();
            return null;
        }
        ValueHolderInterface valueHolder = new BatchValueHolder(batchQuery, row, this.getForeignReferenceMapping(), originalQuery, parentCacheKey);
        return ProxyIndirectionHandler.newProxyInstance(object.getClass(), targetInterfaces, valueHolder);
    }

    /**
     * INTERNAL:
     * Return whether the specified object is instantiated.
     */
    public boolean objectIsInstantiated(Object object) {
        if (object instanceof Proxy) {
            ProxyIndirectionHandler handler = (ProxyIndirectionHandler)Proxy.getInvocationHandler(object);
            ValueHolderInterface valueHolder = handler.getValueHolder();
            return valueHolder.isInstantiated();
        } else {
            // Non-proxy values are by definition already instantiated.
            return true;
        }
    }

    /**
     * INTERNAL:
     * Return whether the specified object can be instantiated without database access.
     */
    public boolean objectIsEasilyInstantiated(Object object) {
        if (object instanceof Proxy) {
            ProxyIndirectionHandler handler = (ProxyIndirectionHandler)Proxy.getInvocationHandler(object);
            ValueHolderInterface valueHolder = handler.getValueHolder();
            if (valueHolder instanceof DatabaseValueHolder) {
                return ((DatabaseValueHolder)valueHolder).isEasilyInstantiated();
            }
        }
        return true;
    }

    /**
     * INTERNAL:
     * Return the null value of the appropriate attribute.
That is, the field from the database is NULL,
     * return what should be placed in the object's attribute as a result.
     */
    public Object nullValueFromRow() {
        return null;
    }

    /**
     * INTERNAL:
     * Replace the client value holder with the server value holder after copying some of the settings from
     * the client value holder.
     */
    public void mergeRemoteValueHolder(Object clientSideDomainObject, Object serverSideDomainObject, MergeManager mergeManager) {
        getMapping().setAttributeValueInObject(clientSideDomainObject, serverSideDomainObject);
    }

    /**
     * INTERNAL:
     * Return the "real" attribute value, as opposed to any wrapper. This will trigger the wrapper to
     * instantiate the value.
     */
    public Object getRealAttributeValueFromObject(Object obj, Object object) {
        if (object instanceof Proxy) {
            ProxyIndirectionHandler handler = (ProxyIndirectionHandler)Proxy.getInvocationHandler(object);
            ValueHolderInterface valueHolder = handler.getValueHolder();
            // Triggers indirection (may hit the database) via the underlying value holder.
            return valueHolder.getValue();
        } else {
            return object;
        }
    }

    /**
     * INTERNAL:
     * Given a proxy object, trigger the indirection and return the actual object represented by the proxy.
     */
    public static Object getValueFromProxy(Object value) {
        if (Proxy.isProxyClass(value.getClass())) {
            return ((ProxyIndirectionHandler)Proxy.getInvocationHandler(value)).getValueHolder().getValue();
        }
        return value;
    }

    /**
     * INTERNAL:
     * Set the "real" value of the attribute to attributeValue.
     */
    public void setRealAttributeValueInObject(Object target, Object attributeValue) {
        this.getMapping().setAttributeValueInObject(target, attributeValue);
    }

    /**
     * INTERNAL:
     * Return the original indirection object for a unit of work indirection object.
*/
    public Object getOriginalIndirectionObject(Object unitOfWorkIndirectionObject, AbstractSession session) {
        if (unitOfWorkIndirectionObject instanceof UnitOfWorkValueHolder) {
            ValueHolderInterface valueHolder = ((UnitOfWorkValueHolder)unitOfWorkIndirectionObject).getWrappedValueHolder();
            if ((valueHolder == null) && session.isRemoteUnitOfWork()) {
                // Remote unit of work: the wrapped holder lives on the parent session
                // controller; look it up by its remote id.
                RemoteSessionController controller = ((RemoteUnitOfWork)session).getParentSessionController();
                valueHolder = controller.getRemoteValueHolders().get(((UnitOfWorkValueHolder)unitOfWorkIndirectionObject).getWrappedValueHolderRemoteID());
            }
            return valueHolder;
        } else {
            return unitOfWorkIndirectionObject;
        }
    }

    /**
     * INTERNAL:
     * An object has been serialized from the server to the client. Replace the transient attributes of the
     * remote value holders with client-side objects.
     */
    public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) {
        //org.eclipse.persistence.internal.helper.Helper.toDo("*** Something tells me this isn't going to work. *** [X]");
    }

    /**
     * INTERNAL:
     * Return the reference row for the reference object. This allows the new row to be built without
     * instantiating the reference object. Return null if the object has already been instantiated.
     */
    public AbstractRecord extractReferenceRow(Object referenceObject) {
        if ((referenceObject == null) || !Proxy.isProxyClass(referenceObject.getClass())) {
            return null;
        }
        ProxyIndirectionHandler handler = (ProxyIndirectionHandler)Proxy.getInvocationHandler(referenceObject);
        ValueHolderInterface valueHolder = handler.getValueHolder();
        if (valueHolder.isInstantiated()) {
            return null;
        } else {
            return ((DatabaseValueHolder)valueHolder).getRow();
        }
    }

    /**
     * INTERNAL:
     * Return a clone of the attribute.
* @param buildDirectlyFromRow indicates that we are building the clone
     * directly from a row as opposed to building the original from the
     * row, putting it in the shared cache, and then cloning the original.
     */
    public Object cloneAttribute(Object attributeValue, Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession, boolean buildDirectlyFromRow) {
        if (!(attributeValue instanceof Proxy)) {
            // Not a proxy: clone the real part object directly.
            boolean isExisting = !cloningSession.isUnitOfWork() || (((UnitOfWorkImpl)cloningSession).isObjectRegistered(clone) && (!((UnitOfWorkImpl)cloningSession).isOriginalNewObject(original)));
            return this.getMapping().buildCloneForPartObject(attributeValue, original, null, clone, cloningSession, refreshCascade, isExisting, !buildDirectlyFromRow);
        }
        ValueHolderInterface newValueHolder;
        ProxyIndirectionHandler handler = (ProxyIndirectionHandler)Proxy.getInvocationHandler(attributeValue);
        ValueHolderInterface oldValueHolder = handler.getValueHolder();
        if (!buildDirectlyFromRow && cloningSession.isUnitOfWork() && ((UnitOfWorkImpl)cloningSession).isOriginalNewObject(original)) {
            // CR#3156435 Throw a meaningful exception if a serialized/dead value holder is detected.
            // This can occur if an existing serialized object is attempted to be registered as new.
            if ((oldValueHolder instanceof DatabaseValueHolder)
                    && (!((DatabaseValueHolder) oldValueHolder).isInstantiated())
                    && (((DatabaseValueHolder) oldValueHolder).getSession() == null)
                    && (!((DatabaseValueHolder) oldValueHolder).isSerializedRemoteUnitOfWorkValueHolder())) {
                throw DescriptorException.attemptToRegisterDeadIndirection(original, getMapping());
            }
            newValueHolder = new ValueHolder();
            newValueHolder.setValue(this.getMapping().buildCloneForPartObject(oldValueHolder.getValue(), original, null, clone, cloningSession, refreshCascade, false, false));
        } else {
            AbstractRecord row = null;
            if (oldValueHolder instanceof DatabaseValueHolder) {
                row = ((DatabaseValueHolder)oldValueHolder).getRow();
            }
            newValueHolder = this.getMapping().createCloneValueHolder(oldValueHolder, original, clone, row, cloningSession, buildDirectlyFromRow);
        }
        return ProxyIndirectionHandler.newProxyInstance(attributeValue.getClass(), targetInterfaces, newValueHolder);
    }

    /**
     * INTERNAL:
     * Return a backup clone of the attribute.
     */
    public Object backupCloneAttribute(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) {
        if (!(attributeValue instanceof Proxy)) {
            return this.getMapping().buildBackupCloneForPartObject(attributeValue, clone, backup, unitOfWork);
        }
        ProxyIndirectionHandler handler = (ProxyIndirectionHandler)Proxy.getInvocationHandler(attributeValue);
        ValueHolderInterface unitOfWorkValueHolder = handler.getValueHolder();
        ValueHolderInterface backupValueHolder = null;
        if ((!(unitOfWorkValueHolder instanceof UnitOfWorkValueHolder)) || unitOfWorkValueHolder.isInstantiated()) {
            backupValueHolder = (ValueHolderInterface) super.backupCloneAttribute(unitOfWorkValueHolder, clone, backup, unitOfWork);
        } else {
            // CR#2852176 Use a BackupValueHolder to handle replacing of the original.
            backupValueHolder = new BackupValueHolder(unitOfWorkValueHolder);
            ((UnitOfWorkValueHolder)unitOfWorkValueHolder).setBackupValueHolder((BackupValueHolder) backupValueHolder);
        }
        return ProxyIndirectionHandler.newProxyInstance(attributeValue.getClass(), targetInterfaces, backupValueHolder);
    }

    /**
     * INTERNAL:
     * Iterate over the specified attribute value.
*/
    public void iterateOnAttributeValue(DescriptorIterator iterator, Object attributeValue) {
        if (attributeValue instanceof Proxy) {
            // Iterate through the value holder so indirection is not triggered unnecessarily.
            ProxyIndirectionHandler handler = (ProxyIndirectionHandler)Proxy.getInvocationHandler(attributeValue);
            ValueHolderInterface valueHolder = handler.getValueHolder();
            iterator.iterateValueHolderForMapping(valueHolder, this.getMapping());
        } else {
            if (attributeValue != null) {
                this.getMapping().iterateOnRealAttributeValue(iterator, attributeValue);
            }
        }
    }

    /**
     * INTERNAL:
     * Verify that the value of the attribute within an instantiated object is of the appropriate type for
     * the indirection policy. In this case, the attribute must be non-null and implement some public interface.
     */
    public Object validateAttributeOfInstantiatedObject(Object attributeValue) {
        // NOTE(review): a dynamic Proxy instance always implements at least one interface,
        // so getInterfaces().length == 0 combined with instanceof Proxy looks unreachable —
        // confirm whether this validation can ever fire.
        if ((attributeValue != null) && (attributeValue.getClass().getInterfaces().length == 0) && attributeValue instanceof Proxy) {
            //org.eclipse.persistence.internal.helper.Helper.toDo("*** Need a new DescriptorException here. ***");
            // throw DescriptorException.valueHolderInstantiationMismatch(attributeValue, this.getMapping());
            System.err.println("** ProxyIndirection attribute validation failed.");
        }
        return attributeValue;
    }

    /**
     * INTERNAL:
     * Verify that attribute type is correct for the indirection policy. If it is incorrect, add an exception to the
     * integrity checker. In this case, the attribute type must be contained in targetInterfaces.
     */
    public void validateDeclaredAttributeType(Class attributeType, IntegrityChecker checker) throws DescriptorException {
        if (!isValidType(attributeType)) {
            checker.handleError(DescriptorException.invalidAttributeTypeForProxyIndirection(attributeType, targetInterfaces, getMapping()));
        }
    }

    /**
     * INTERNAL:
     * Verify that the return type of the attribute's get method is correct for the indirection policy. If it is
     * incorrect, add an exception to the integrity checker. In this case, the return type must be a
     * public interface.
*/
    public void validateGetMethodReturnType(Class returnType, IntegrityChecker checker) throws DescriptorException {
        if (!isValidType(returnType)) {
            checker.handleError(DescriptorException.invalidGetMethodReturnTypeForProxyIndirection(returnType, targetInterfaces, getMapping()));
        }
    }

    /**
     * INTERNAL:
     * Verify that the parameter type of the attribute's set method is correct for the indirection policy. If it is
     * incorrect, add an exception to the integrity checker. In this case, the parameter type must be a
     * public interface.
     */
    public void validateSetMethodParameterType(Class parameterType, IntegrityChecker checker) throws DescriptorException {
        if (!isValidType(parameterType)) {
            checker.handleError(DescriptorException.invalidSetMethodParameterTypeForProxyIndirection(parameterType, targetInterfaces, getMapping()));
        }
    }

    /**
     * INTERNAL:
     * The method validateAttributeOfInstantiatedObject(Object attributeValue) fixes the value of the attributeValue
     * in cases where it is null and indirection requires that it contain some specific data structure. Return whether this will happen.
     * This method is used to help determine if indirection has been triggered.
     * @param attributeValue the attribute value to check
     * @return always true for this policy — the proxy value needs no fix-up
     * @see #validateAttributeOfInstantiatedObject(Object)
     */
    @Override
    public boolean isAttributeValueFullyBuilt(Object attributeValue){
        return true;
    }

    /**
     * INTERNAL:
     * Verify that a class type is valid to use for the proxy. The class must be one of the
     * interfaces in targetInterfaces.
*/ public boolean isValidType(Class attributeType) { if (!attributeType.isInterface()) { return false; } for (int i = 0; i < targetInterfaces.length; i++) { if (attributeType == targetInterfaces[i]) { return true; } } return false; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/WrappingValueHolder.java0000664000000000000000000000234112216173126027320 0ustar /******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Gordon Yorke - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import org.eclipse.persistence.indirection.ValueHolderInterface; /** * WrappingValueHolder is an interface type that implementors use when they will be * wrapping another ValueHolder that has the original value. * * @see UnitOfWorkValueHolder, ProtectedValueHolder * @author Gordon Yorke */ public interface WrappingValueHolder { /** * Returns the valueholder that is wrapped by this ValueHolder */ public ValueHolderInterface getWrappedValueHolder(); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/CacheBasedValueHolder.java0000664000000000000000000001260712216173126027501 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Gordon Yorke - initial API and implementation ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.util.Collection; import java.util.Iterator; import java.util.Vector; import org.eclipse.persistence.exceptions.DatabaseException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.ForeignReferenceMapping; import org.eclipse.persistence.internal.sessions.AbstractRecord; /** *

* Purpose: In certain cases the contents of a relationship may be * retrievable from a cache. This ValueHolder instance provides the mechanism to * store a cached relationship and to load that relationship from a cache. This * functionality requires that the persistent identities of the targets can be * collected as database type foreign key queries are unavailable. * * @author gyorke * @since EclipseLink 1.1 */ public class CacheBasedValueHolder extends DatabaseValueHolder { protected transient ForeignReferenceMapping mapping; protected Object[] references; /** Setting to force the instantiation of the Collection on modification */ protected boolean shouldAllowInstantiationDeferral = true; public CacheBasedValueHolder(Object[] pks, AbstractRecord foreignKeys, AbstractSession session, ForeignReferenceMapping mapping){ super(); this.references = pks; this.mapping = mapping; this.session = session; this.row = foreignKeys; } public Object[] getCachedPKs(){ return this.references; } /** * Process against the UOW and attempt to load a local copy before going to the shared cache * If null is returned then the calling UOW will instantiate as normal. 
*/
    @Override
    public Object getValue(UnitOfWorkImpl uow) {
        if (this.references != null && this.references.length != 0){
            if (mapping.isCollectionMapping()){
                // Only use the cache when EVERY referenced PK is found in the identity map;
                // a partial hit falls through and returns null so a normal query runs.
                Collection result = uow.getIdentityMapAccessorInstance().getAllFromIdentityMapWithEntityPK(this.references, this.mapping.getReferenceDescriptor()).values();
                if (result.size() == references.length){
                    ContainerPolicy cp = mapping.getContainerPolicy();
                    Object container = cp.containerInstance(result.size());
                    for (Object object : result){
                        cp.addInto(object, container, uow);
                    }
                    return container;
                }
            }else{
                // Single-valued relationship: look up the one cached target by PK.
                return uow.getIdentityMapAccessorInstance().getFromIdentityMap(this.references[0], this.mapping.getReferenceClass());
            }
        }
        // Null signals the calling UnitOfWork to instantiate normally.
        return null;
    }

    // Instantiate using the session captured at construction time.
    protected Object instantiate() throws DatabaseException {
        return instantiate(this.session);
    }

    // Build the relationship value by reading all targets for the cached PK list.
    protected Object instantiate(AbstractSession localSession) throws DatabaseException {
        if (session == null){
            throw ValidationException.instantiatingValueholderWithNullSession();
        }
        return mapping.valueFromPKList(references, row, localSession);
    }

    /**
     * Triggers UnitOfWork valueholders directly without triggering the wrapped
     * valueholder (this).
     *

* When in transaction and/or for pessimistic locking the UnitOfWorkValueHolder * needs to be triggered directly without triggering the wrapped valueholder. * However only the wrapped valueholder knows how to trigger the indirection, * i.e. it may be a batchValueHolder, and it stores all the info like the row * and the query. * Note: This method is not thread-safe. It must be used in a synchronized manner */ public Object instantiateForUnitOfWorkValueHolder(UnitOfWorkValueHolder unitOfWorkValueHolder) { return instantiate(unitOfWorkValueHolder.getUnitOfWork()); } @Override public boolean isPessimisticLockingValueHolder() { return false; } /** * Set if instantiation deferral on modification should be available. */ public void setShouldAllowInstantiationDeferral(boolean shouldAllowInstantiationDeferral){ this.shouldAllowInstantiationDeferral = shouldAllowInstantiationDeferral; } /** * INTERNAL: * Return if add/remove should trigger instantiation or avoid. * Current instantiation is avoided is using change tracking. */ public boolean shouldAllowInstantiationDeferral() { return this.shouldAllowInstantiationDeferral; } } ././@LongLink0000000000000000000000000000015300000000000011564 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/EISOneToManyQueryBasedValueHolder.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/EISOneToManyQueryBasedValueHolde0000664000000000000000000000600212216173126030624 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.util.Collection; import java.util.Iterator; import java.util.Vector; import org.eclipse.persistence.eis.mappings.EISOneToManyMapping; import org.eclipse.persistence.exceptions.DatabaseException; import org.eclipse.persistence.internal.queries.ContainerPolicy; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.queries.ReadAllQuery; import org.eclipse.persistence.queries.ReadQuery; /** * Value holder used to defer an EIS 1-m mapping query. * For composite source foreign keys EIS 1-m's a query must be performed * for each primary key, so a different type of value holder is required. 
*/ public class EISOneToManyQueryBasedValueHolder extends QueryBasedValueHolder { private EISOneToManyMapping mapping; public EISOneToManyQueryBasedValueHolder(EISOneToManyMapping mapping, ReadQuery query, AbstractRecord sourceRow, AbstractSession session) { super(query, sourceRow, session); this.mapping = mapping; } protected Object instantiate(AbstractSession session) throws DatabaseException { Vector rows = this.mapping.getForeignKeyRows(this.getRow(), session); int size = rows.size(); ContainerPolicy cp = ((ReadAllQuery)this.getQuery()).getContainerPolicy(); Object returnValue = cp.containerInstance(size); for (int i = 0; i < size; i++) { AbstractRecord nextRow = (AbstractRecord)rows.get(i); Object results = session.executeQuery(getQuery(), nextRow); if (results instanceof Collection) { Iterator iter = ((Collection)results).iterator(); while (iter.hasNext()) { cp.addInto(iter.next(), returnValue, session); } } else if (results instanceof java.util.Map) { Iterator iter = ((java.util.Map)results).values().iterator(); while (iter.hasNext()) { cp.addInto(iter.next(), returnValue, session); } } else { cp.addInto(results, returnValue, session); } } return returnValue; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/TransformerBasedValueHolder.java0000664000000000000000000001465412216173126031004 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
*
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.internal.indirection;

import java.lang.reflect.InvocationTargetException;
import org.eclipse.persistence.exceptions.DescriptorException;
import org.eclipse.persistence.internal.sessions.AbstractRecord;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.mappings.transformers.*;

/**
 * This class is to replace the MethodBasedValueHolder for TransformationMappings.
 * Holds on to an AttributeTransformer and uses it to generate the attribute value when triggered.
 * That Transformer may be a MethodBasedAttributeTransformer or a user defined implementation.
 *
 * @version $Header: TransformerBasedValueHolder.java 30-aug-2006.11:32:36 gyorke Exp $
 * @author mmacivor
 * @since release specific (what release of product did this appear in)
 */
public class TransformerBasedValueHolder extends DatabaseValueHolder {

    /**
     * Stores the transformer to be executed. The transformer can take
     * one (the row) or two parameters (the row and session).
     */
    protected transient AttributeTransformer transformer;

    /**
     * Stores the object which owns this attribute.
     */
    protected transient Object object;

    /**
     * Initialize the transformer-based value holder.
     * @param theTransformer The transformer that builds the attribute value when executed.
     * @param theObject the Object which owns this attribute.
     * @param theRow The row representation of the object.
     * @param theSession The session to the database that stores the object.
     */
    public TransformerBasedValueHolder(AttributeTransformer theTransformer, Object theObject, AbstractRecord theRow, AbstractSession theSession) {
        super();
        row = theRow;
        session = theSession;
        // Make sure not to put a ClientSession or IsolatedClientSession in
        // the shared cache (indirectly).
        // Skip this if unitOfWork, for we use session.isUnitOfWork() to implement
        // isTransactionalValueholder(), saving us from needing a boolean instance variable.
        // If unitOfWork this safety measure is deferred until merge time with
        // releaseWrappedValuehHolder.
        // Note that if isolated session & query will return itself, which is safe
        // for if isolated this valueholder is not in the shared cache.
        if (!session.isUnitOfWork()) {
            this.session = session.getRootSession(null);
        }
        transformer = theTransformer;
        object = theObject;
    }

    /**
     * Return the transformer.
     */
    protected AttributeTransformer getTransformer() {
        return transformer;
    }

    /**
     * Return the receiver.
     */
    protected Object getObject() {
        return object;
    }

    /**
     * Instantiate the object by executing the method on the transformer.
     */
    protected Object instantiate() throws DescriptorException {
        return instantiate(getObject(), getSession());
    }

    protected Object instantiate(Object object, AbstractSession session) throws DescriptorException {
        try {
            return transformer.buildAttributeValue(getRow(), object, session);
        } catch (DescriptorException ex) {
            // Unwrap reflection failures into the specific descriptor exceptions callers expect.
            Throwable nestedException = ex.getInternalException();
            if (nestedException instanceof IllegalAccessException) {
                throw DescriptorException.illegalAccessWhileInstantiatingMethodBasedProxy(nestedException);
            } else if (nestedException instanceof IllegalArgumentException) {
                throw DescriptorException.illegalArgumentWhileInstantiatingMethodBasedProxy(nestedException);
            } else if (nestedException instanceof InvocationTargetException) {
                throw DescriptorException.targetInvocationWhileInstantiatingMethodBasedProxy(nestedException);
            } else {
                throw ex;
            }
        }
    }

    /**
     * Triggers UnitOfWork valueholders directly without triggering the wrapped
     * valueholder (this).
     *

* When in transaction and/or for pessimistic locking the UnitOfWorkValueHolder * needs to be triggered directly without triggering the wrapped valueholder. * However only the wrapped valueholder knows how to trigger the indirection, * i.e. it may be a batchValueHolder, and it stores all the info like the row * and the query. * Note: This method is not thread-safe. It must be used in a synchronized manner */ public Object instantiateForUnitOfWorkValueHolder(UnitOfWorkValueHolder unitOfWorkValueHolder) { return instantiate(getObject(), unitOfWorkValueHolder.getUnitOfWork()); } /** * INTERNAL: * Answers if this valueholder is a pessimistic locking one. Such valueholders * are special in that they can be triggered multiple times by different * UnitsOfWork. Each time a lock query will be issued. Hence even if * instantiated it may have to be instantiated again, and once instantiated * all fields can not be reset. * Note: This method is not thread-safe. It must be used in a synchronized manner */ public boolean isPessimisticLockingValueHolder() { // there is no way to tell, as a transformation mapping may have // a reference class or query to check, but by design there is no // way we can access at it. return false; } /** * Set the transformer. */ protected void setTransformer(AttributeTransformer theTransformer) { transformer = theTransformer; } /** * Set the receiver. */ protected void setObject(Object theObject) { object = theObject; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/DatabaseValueHolder.java0000664000000000000000000002547012216173126027245 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.io.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.localization.*; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; /** * DatabaseValueHolder wraps a database-stored object and implements * behavior to access it. The object is read only once from database * after which is cached for faster access. * * @see ValueHolderInterface * @author Dorin Sandu */ public abstract class DatabaseValueHolder implements WeavedAttributeValueHolderInterface, Cloneable, Serializable { /** Stores the object after it is read from the database. */ protected Object value; /** Indicates whether the object has been read from the database or not. */ protected boolean isInstantiated; /** Stores the session for the database that contains the object. */ protected transient AbstractSession session; /** Stores the row representation of the object. */ // Cannot be transient as may be required to extract the pk from a serialized object. 
protected AbstractRecord row; /** * The variable below is used as part of the implementation of WeavedAttributeValueHolderInterface * It is used to track whether a valueholder that has been weaved into a class is coordinated * with the underlying property * Set internally in EclipseLink when the state of coordination between a weaved valueholder and the underlying property is known */ protected boolean isCoordinatedWithProperty = false; public Object clone() { try { return super.clone(); } catch (CloneNotSupportedException exception) { throw new InternalError(); } } /** * Return the row. */ public AbstractRecord getRow() { return row; } /** * Return the session. */ public AbstractSession getSession() { return session; } public ValueHolderInterface getWrappedValueHolder() { return null; } /** * Return the object. */ public Object getValue() { if (!this.isInstantiated) { synchronized (this) { if (!this.isInstantiated) { // The value must be set directly because the setValue can also cause instantiation under UOW. privilegedSetValue(instantiate()); this.isInstantiated = true; postInstantiate(); resetFields(); } } } return value; } /** * Process against the UOW and attempt to load a local copy before going to the shared cache * If null is returned then the calling UOW will instantiate as normal. */ public Object getValue(UnitOfWorkImpl uow) { //This method simply returns null as this will cause the UOWVH to trigger //the relationship normally. return null; } /** * Instantiate the object. */ protected abstract Object instantiate() throws DatabaseException; /** * Triggers UnitOfWork valueholders directly without triggering the wrapped * valueholder (this). *

* When in transaction and/or for pessimistic locking the UnitOfWorkValueHolder * needs to be triggered directly without triggering the wrapped valueholder. * However only the wrapped valueholder knows how to trigger the indirection, * i.e. it may be a batchValueHolder, and it stores all the info like the row * and the query. * Note: Implementations of this method are not necessarily thread-safe. They must * be used in a synchronized manner */ public abstract Object instantiateForUnitOfWorkValueHolder(UnitOfWorkValueHolder unitOfWorkValueHolder); /** * This method is used as part of the implementation of WeavedAttributeValueHolderInterface * It is used to check whether a valueholder that has been weaved into a class is coordinated * with the underlying property */ public boolean isCoordinatedWithProperty(){ return isCoordinatedWithProperty; } /** * This method is used as part of the implementation of WeavedAttributeValueHolderInterface. * * A DatabaseValueHolder is set up by TopLink and will never be a newly weaved valueholder. * As a result, this method is stubbed out. */ public boolean isNewlyWeavedValueHolder(){ return false; } /** * INTERNAL: * Answers if this valueholder is easy to instantiate. * @return true if getValue() won't trigger a database read. */ public boolean isEasilyInstantiated() { return this.isInstantiated; } /** * Return a boolean indicating whether the object * has been read from the database or not. */ public boolean isInstantiated() { return isInstantiated; } /** * Answers if this valueholder is a pessimistic locking one. Such valueholders * are special in that they can be triggered multiple times by different * UnitsOfWork. Each time a lock query will be issued. Hence even if * instantiated it may have to be instantiated again, and once instantiated * all fields can not be reset. * Note: Implementations of this method are not necessarily thread-safe. 
They must * be used in a synchronizaed manner */ public abstract boolean isPessimisticLockingValueHolder(); /** * Answers if this valueholder is referenced only by a UnitOfWork valueholder. * I.e. it was built in valueFromRow which was called by buildCloneFromRow. *

* Sometimes in transaction a UnitOfWork clone, and all valueholders, are built * directly from the row; however a UnitOfWorkValueHolder does not know how to * instantiate itself so wraps this which does. *

* On a successful merge must be released to the session cache with * releaseWrappedValueHolder. */ protected boolean isTransactionalValueHolder() { return ((session != null) && session.isUnitOfWork()); } /** * Used to determine if this is a remote uow value holder that was serialized to the server. * It has no reference to its wrapper value holder, so must find its original object to be able to instantiate. */ public boolean isSerializedRemoteUnitOfWorkValueHolder() { return false; } /** * INTERNAL: * Run any extra code required after the valueholder instantiates * @see QueryBasedValueHolder */ public void postInstantiate(){ //noop } /** * Set the object. This is used only by the privileged methods. One must be very careful in using this method. */ public void privilegedSetValue(Object value) { this.value = value; isCoordinatedWithProperty = false; } /** * Releases a wrapped valueholder privately owned by a particular unit of work. *

* When unit of work clones are built directly from rows no object in the shared * cache points to this valueholder, so it can store the unit of work as its * session. However once that UnitOfWork commits and the valueholder is merged * into the shared cache, the session needs to be reset to the root session, ie. * the server session. */ public void releaseWrappedValueHolder(AbstractSession targetSession) { AbstractSession session = this.session; if ((session != null) && session.isUnitOfWork()) { this.session = targetSession; } } /** * Reset all the fields that are not needed after instantiation. */ protected void resetFields() { this.row = null; this.session = null; } /** * This method is used as part of the implementation of WeavedAttributeValueHolderInterface * It is used internally by EclipseLink to set whether a valueholder that has been weaved into a class is coordinated * with the underlying property */ public void setIsCoordinatedWithProperty(boolean coordinated){ this.isCoordinatedWithProperty = coordinated; } /** * This method is used as part of the implementation of WeavedAttributeValueHolderInterface * * A DatabaseValueHolder is set up by EclipseLink and will never be a newly weaved valueholder * As a result, this method is stubbed out. */ public void setIsNewlyWeavedValueHolder(boolean isNew){ } /** * Set the instantiated flag to true. */ public void setInstantiated() { isInstantiated = true; } /** * Set the row. */ public void setRow(AbstractRecord row) { this.row = row; } /** * Set the session. */ public void setSession(AbstractSession session) { this.session = session; } /** * Set the instantiated flag to false. */ public void setUninstantiated() { isInstantiated = false; } /** * Set the object. */ public void setValue(Object value) { this.value = value; setInstantiated(); } /** * INTERNAL: * Return if add/remove should trigger instantiation or avoid. * Current instantiation is avoided is using change tracking. 
*/ public boolean shouldAllowInstantiationDeferral() { return true; } public String toString() { if (isInstantiated()) { return "{" + getValue() + "}"; } else { return "{" + Helper.getShortClassName(getClass()) + ": " + ToStringLocalization.buildMessage("not_instantiated", (Object[])null) + "}"; } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/ProxyIndirectionHandler.java0000664000000000000000000001453212216173126030212 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.io.Serializable; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.lang.reflect.InvocationTargetException; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.indirection.ValueHolderInterface; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedGetClassLoaderForClass; import org.eclipse.persistence.internal.security.PrivilegedMethodInvoker; import org.eclipse.persistence.exceptions.QueryException; /** *

ProxyIndirectionHandler

* * Transparently handles EclipseLink indirection for 1:1 relationships through use of the Java Proxy framework * in JDK 1.3. This class intercepts messages sent to the proxy object, and instantiates its internal * ValueHolder when necessary. * * @see org.eclipse.persistence.internal.indirection.ProxyIndirectionPolicy * @author Rick Barkhouse * @since TopLink 3.0 */ public class ProxyIndirectionHandler implements InvocationHandler, Serializable { private ValueHolderInterface valueHolder; // ===================================================================== /** * INTERNAL: * * Just in here to allow for Serialization. */ public ProxyIndirectionHandler() { } // ===================================================================== /** * INTERNAL: * * Store the value holder. */ private ProxyIndirectionHandler(ValueHolderInterface valueHolder) { this.valueHolder = valueHolder; } // ===================================================================== /** * INTERNAL: * * Handle the method calls on the proxy object. 
*/ public Object invoke(Object proxy, Method m, Object[] args) throws Throwable { Object result = null; try { if ((!ValueHolderInterface.shouldToStringInstantiate) && m.getName().equals("toString")) { if (valueHolder.isInstantiated()) { if (valueHolder.getValue() == null) { result = "null"; } else { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ String toString = (String)AccessController.doPrivileged(new PrivilegedMethodInvoker(m, valueHolder.getValue(), args)); result = "{ " + toString + " }"; }else{ String toString = (String)PrivilegedAccessHelper.invokeMethod(m, valueHolder.getValue(), args); result = "{ " + toString + " }"; } } } else { result = "{ IndirectProxy: not instantiated }"; } } else if (m.getName().equals("equals") && (valueHolder.getValue() == null) && (args[0] == null)) { result = Boolean.TRUE; } else { Object value = valueHolder.getValue(); // CR2718 if (value == null) { throw ValidationException.nullUnderlyingValueHolderValue(m.getName()); } else { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ result = AccessController.doPrivileged(new PrivilegedMethodInvoker(m, value, args)); }else{ result = PrivilegedAccessHelper.invokeMethod(m, value, args); } } } } catch (InvocationTargetException e) { throw e.getTargetException(); } catch (ValidationException e) { // need to re-throw the validation exception throw e; } catch (Exception e) { throw QueryException.unexpectedInvocation(e.getMessage()); } return result; } // ===================================================================== /** * INTERNAL: * * Utility method to create a new proxy object. 
*/ public static Object newProxyInstance(Class anInterface, Class[] interfaces, ValueHolderInterface valueHolder) { ClassLoader classLoader = null; if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try{ classLoader = (ClassLoader)AccessController.doPrivileged(new PrivilegedGetClassLoaderForClass(anInterface)); }catch (PrivilegedActionException ex){ throw (RuntimeException) ex.getCause(); } }else{ classLoader = PrivilegedAccessHelper.getClassLoaderForClass(anInterface); } return Proxy.newProxyInstance(classLoader, interfaces, new ProxyIndirectionHandler(valueHolder)); } // ===================================================================== /** * INTERNAL: * * Get the ValueHolder associated with this handler. */ public ValueHolderInterface getValueHolder() { return this.valueHolder; } // ===================================================================== /** * INTERNAL: * * Set the ValueHolder associated with this handler. */ public void setValueHolder(ValueHolderInterface value) { this.valueHolder = value; } } ././@LongLink0000000000000000000000000000015400000000000011565 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/WeavedObjectBasicIndirectionPolicy.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/WeavedObjectBasicIndirectionPoli0000664000000000000000000002436412216173126031007 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.beans.PropertyChangeListener; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.indirection.WeavedAttributeValueHolderInterface; import org.eclipse.persistence.internal.helper.Helper; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedMethodInvoker; import org.eclipse.persistence.mappings.ForeignReferenceMapping; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.descriptors.changetracking.ChangeTracker; /** * A WeavedObjectBasicIndirectionPolicy is used by OneToOne mappings that are LAZY through weaving * and which use Property(method) access. * * It extends BasicIndirection by providing the capability of calling the set method that was initially * mapped in addition to the set method for the weaved valueholder in order to coordinate the value of the * underlying property with the value stored in the valueholder * * @author Tom Ware */ public class WeavedObjectBasicIndirectionPolicy extends BasicIndirectionPolicy { /** Name of the initial set method. */ protected String setMethodName = null; /** Lazily initialized set method based on the set method name. */ protected transient Method setMethod = null; /** Name of the initial get method. */ protected String getMethodName; /** indicates whether the mapping has originally used method access */ protected boolean hasUsedMethodAccess; /** Stores the actual type of the mapping if different from the reference type. 
Used for set method invocation*/ protected String actualTypeClassName = null; public WeavedObjectBasicIndirectionPolicy(String getMethodName, String setMethodName, String actualTypeClassName, boolean hasUsedMethodAccess) { super(); this.setMethodName = setMethodName; this.getMethodName = getMethodName; this.hasUsedMethodAccess = hasUsedMethodAccess; this.actualTypeClassName = actualTypeClassName; } public String getActualTypeClassName() { return actualTypeClassName; } /** * Return the "real" attribute value, as opposed to any wrapper. * This will trigger the wrapper to instantiate the value. In a weaved policy, this will * also call the initial setter method to coordinate the values of the valueholder with * the underlying data. */ public Object getRealAttributeValueFromObject(Object object, Object attribute) { Object value = super.getRealAttributeValueFromObject(object, attribute); // Provide the indirection policy with a callback that allows it to do any updates it needs as the result of getting the value. if (value != attribute) { //if the attribute was already unwrapped then do not call this method updateValueInObject(object, value, attribute); } return value; } /** * This method will lazily initialize the set method * Lazy initialization occurs to that we are not required to have a handle on * the actual class that we are using until runtime. This helps to satisfy the * weaving requirement that demands that we avoid loading domain classes into * the main class loader until after weaving occurs. */ protected Method getSetMethod() { if (setMethod == null) { ForeignReferenceMapping sourceMapping = (ForeignReferenceMapping)mapping; // The parameter type for the set method must always be the return type of the get method. 
Class[] parameterTypes = new Class[1]; parameterTypes[0] = sourceMapping.getReferenceClass(); try { setMethod = Helper.getDeclaredMethod(sourceMapping.getDescriptor().getJavaClass(), setMethodName, parameterTypes); } catch (NoSuchMethodException e) { if (actualTypeClassName != null){ try{ // try the actual class of the field or property parameterTypes[0] = Helper.getClassFromClasseName(actualTypeClassName, sourceMapping.getReferenceClass().getClassLoader()); setMethod = Helper.getDeclaredMethod(sourceMapping.getDescriptor().getJavaClass(), setMethodName, parameterTypes); } catch (NoSuchMethodException nsme) {} if (setMethod != null){ return setMethod; } } // As a last ditch effort, change the parameter type to Object.class. // If the model uses generics: // public T getStuntDouble() // public void setStuntDouble(T) // The weaved methods will be: // public Object getStuntDouble() and // public void setStuntDouble(Object) try { parameterTypes[0] = Object.class; setMethod = Helper.getDeclaredMethod(sourceMapping.getDescriptor().getJavaClass(), setMethodName, parameterTypes); } catch (NoSuchMethodException ee) { // Throw the original exception. throw DescriptorException.errorAccessingSetMethodOfEntity(sourceMapping.getDescriptor().getJavaClass(), setMethodName, sourceMapping.getDescriptor(), e); } } } return setMethod; } /** * Coordinate the valueholder for this mapping with the underlying property by calling the * initial setter method. */ public void updateValueInObject(Object object, Object value, Object attributeValue){ setRealAttributeValueInObject(object, value); ((WeavedAttributeValueHolderInterface)attributeValue).setIsCoordinatedWithProperty(true); } /** * Set the value of the appropriate attribute of target to attributeValue. * In this case, place the value inside the target's ValueHolder. 
* Change tracking will be turned off when this method is called */ public void setRealAttributeValueInObject(Object target, Object attributeValue) { setRealAttributeValueInObject(target, attributeValue, false); } /** * Set the value of the appropriate attribute of target to attributeValue. * In this case, place the value inside the target's ValueHolder. * if trackChanges is true, set the value in the object as if the user was setting it. Allow change tracking to pick up the change. */ public void setRealAttributeValueInObject(Object target, Object attributeValue, boolean trackChanges) { // If the target object is using change tracking, it must be disable first to avoid thinking the value changed. PropertyChangeListener listener = null; ChangeTracker trackedObject = null; if (!trackChanges && target instanceof ChangeTracker) { trackedObject = (ChangeTracker)target; listener = trackedObject._persistence_getPropertyChangeListener(); trackedObject._persistence_setPropertyChangeListener(null); } Object[] parameters = new Object[1]; parameters[0] = attributeValue; try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()) { try { AccessController.doPrivileged(new PrivilegedMethodInvoker(getSetMethod(), target, parameters)); } catch (PrivilegedActionException exception) { Exception throwableException = exception.getException(); if (throwableException instanceof IllegalAccessException) { throw DescriptorException.illegalAccessWhileSettingValueThruMethodAccessor(setMethod.getName(), attributeValue, throwableException); } else { throw DescriptorException.targetInvocationWhileSettingValueThruMethodAccessor(setMethod.getName(), attributeValue, throwableException); } } } else { PrivilegedAccessHelper.invokeMethod(getSetMethod(), target, parameters); } } catch (IllegalAccessException exception) { throw DescriptorException.illegalAccessWhileSettingValueThruMethodAccessor(setMethod.getName(), attributeValue, exception); } catch (IllegalArgumentException exception) { throw 
DescriptorException.illegalArgumentWhileSettingValueThruMethodAccessor(setMethod.getName(), attributeValue, exception); } catch (InvocationTargetException exception) { throw DescriptorException.targetInvocationWhileSettingValueThruMethodAccessor(setMethod.getName(), attributeValue, exception); } finally { if (!trackChanges && trackedObject != null) { trackedObject._persistence_setPropertyChangeListener(listener); } } } public String getGetMethodName() { return this.getMethodName; } public String getSetMethodName() { return this.setMethodName; } public boolean hasUsedMethodAccess() { return this.hasUsedMethodAccess; } public boolean isWeavedObjectBasicIndirectionPolicy() { return true; } } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/TransparentIndirectionPolicy.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/TransparentIndirectionPolicy.jav0000664000000000000000000010074512216173126031115 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.util.*; import org.eclipse.persistence.descriptors.changetracking.*; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.sessions.remote.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.queries.*; import org.eclipse.persistence.internal.sessions.remote.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.MergeManager; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; /** *

Purpose

: * TransparentIndirectionPolicy implements the behavior necessary for a * a CollectionMapping to use * IndirectContainers to delay the reading of objects from the database * until they are actually needed. * * @see CollectionMapping * @see IndirectContainer * @author Big Country * @since TOPLink/Java 2.5 */ public class TransparentIndirectionPolicy extends IndirectionPolicy { //3732 protected static Integer defaultContainerSize; /** PERF: Cache the mappings container policy. */ protected ContainerPolicy containerPolicy; /** * IndirectList and IndirectSet can be configured not to instantiate the list from the * database when you add and remove from them. IndirectList defaults to this behavior. When * Set to true, the collection associated with this TransparentIndirection will be setup so as * not to instantiate for adds and removes. The weakness of this setting for an IndirectSet is * that when the set is not instantiated, if a duplicate element is added, it will not be * detected until commit time. */ protected Boolean useLazyInstantiation; /** * INTERNAL: * Construct a new indirection policy. */ public TransparentIndirectionPolicy() { super(); } /** * INTERNAL: * Return a backup clone of the attribute. */ public Object backupCloneAttribute(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { // delay instantiation until absolutely necessary if ((!(attributeValue instanceof IndirectContainer)) || objectIsInstantiated(attributeValue)) { return super.backupCloneAttribute(attributeValue, clone, backup, unitOfWork); } else { return buildBackupClone((IndirectContainer)attributeValue); } } /** * INTERNAL: * Return a backup clone of a clone container that has not been * read from the database yet. 
* This is used by the indirection policy to hook together a UOW * clone with its backup clone - only when the clone (the working * copy returned to the user) instantiates its contents from the * database will these contents be copied to the backup clone. */ protected Object buildBackupClone(IndirectContainer container) { UnitOfWorkValueHolder containerValueHolder = (UnitOfWorkValueHolder)container.getValueHolder(); // CR#2852176 Use a BackupValueHolder to handle replacing of the original. BackupValueHolder backupValueHolder = new BackupValueHolder(containerValueHolder); containerValueHolder.setBackupValueHolder(backupValueHolder); return this.buildIndirectContainer(backupValueHolder); } /** * Construct and return an instance of the specified * indirect container class. */ protected IndirectContainer buildIndirectContainer() { IndirectContainer container = null; //3732 if (defaultContainerSize != null) { container = (IndirectContainer)getContainerPolicy().containerInstance(getDefaultContainerSize()); } else { container = (IndirectContainer)getContainerPolicy().containerInstance(); } if (container instanceof IndirectCollection){ if (this.useLazyInstantiation != null){ ((IndirectCollection)container).setUseLazyInstantiation(this.useLazyInstantiation.booleanValue()); } } return container; } /** * Return a new IndirectContainer with the specified value holder. */ protected Object buildIndirectContainer(ValueHolderInterface valueHolder) { IndirectContainer result = buildIndirectContainer(); result.setValueHolder(valueHolder); return result; } /** * INTERNAL: This method can be used when an Indirection Object is required * to be built from a provided ValueHolderInterface object. This may be used * for custom value holder types. Certain policies like the * TransparentIndirectionPolicy may wrap the valueholder in another object. 
*/ public Object buildIndirectObject(ValueHolderInterface valueHolder){ return buildIndirectContainer(valueHolder); } /** * Return a clone of the attribute. * @param buildDirectlyFromRow indicates that we are building the clone directly * from a row as opposed to building the original from the row, putting it in * the shared cache, and then cloning the original. */ public Object cloneAttribute(Object attributeValue, Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession, boolean buildDirectlyFromRow) { ValueHolderInterface valueHolder = null; Object container = null; IndirectList indirectList = null; IndirectContainer indirectContainer = null; if (attributeValue instanceof IndirectContainer) { indirectContainer = (IndirectContainer)attributeValue; valueHolder = indirectContainer.getValueHolder(); if (indirectContainer instanceof IndirectList) { indirectList = (IndirectList)indirectContainer; } } if (!buildDirectlyFromRow && cloningSession.isUnitOfWork() && ((UnitOfWorkImpl)cloningSession).isOriginalNewObject(original)) { // CR#3156435 Throw a meaningful exception if a serialized/dead value holder is detected. // This can occur if an existing serialized object is attempt to be registered as new. if ((valueHolder instanceof DatabaseValueHolder) && (! ((DatabaseValueHolder) valueHolder).isInstantiated()) && (((DatabaseValueHolder) valueHolder).getSession() == null) && (! 
((DatabaseValueHolder) valueHolder).isSerializedRemoteUnitOfWorkValueHolder())) { throw DescriptorException.attemptToRegisterDeadIndirection(original, this.mapping); } if (this.mapping.getRelationshipPartner() == null) { container = this.mapping.buildCloneForPartObject(attributeValue, original, cacheKey, clone, cloningSession, refreshCascade, false, false); } else { if (indirectContainer == null) { valueHolder = new ValueHolder(attributeValue); } AbstractRecord row = null; if (valueHolder instanceof DatabaseValueHolder) { row = ((DatabaseValueHolder)valueHolder).getRow(); } //If a new object is being cloned then we must build a new UOWValueHolder // this is so that new clones can also have their relationships managed // here the code instantiates the valueholder in a privledged manner because a // UOWValueHolder will assume the objects in the collection are existing if the valueholder // Goes through it's own instantiation process. DatabaseValueHolder newValueHolder = this.mapping.createCloneValueHolder(valueHolder, original, clone, row, cloningSession, buildDirectlyFromRow); container = buildIndirectContainer(newValueHolder); Object cloneCollection = this.mapping.buildCloneForPartObject(attributeValue, original, cacheKey, clone, cloningSession, refreshCascade, false, false); newValueHolder.privilegedSetValue(cloneCollection); newValueHolder.setInstantiated(); } } else { if (indirectContainer == null) { valueHolder = new ValueHolder(attributeValue); } AbstractRecord row = null; if (valueHolder instanceof DatabaseValueHolder) { row = ((DatabaseValueHolder)valueHolder).getRow(); } DatabaseValueHolder uowValueHolder = this.mapping.createCloneValueHolder(valueHolder, original, clone, row, cloningSession, buildDirectlyFromRow); if ((indirectContainer == null) || !buildDirectlyFromRow) { container = buildIndirectContainer(uowValueHolder); } else { // PERF: If building from rows inside uow, there is no original, // so just use the already built indirect collection. 
indirectContainer.setValueHolder(uowValueHolder); container = indirectContainer; } } if (cloningSession.isUnitOfWork()){ // Set the change listener. if ((this.mapping.getDescriptor().getObjectChangePolicy().isObjectChangeTrackingPolicy()) && (((ChangeTracker)clone)._persistence_getPropertyChangeListener() != null) && (container instanceof CollectionChangeTracker) ) { ((CollectionChangeTracker)container).setTrackedAttributeName(this.mapping.getAttributeName()); ((CollectionChangeTracker)container)._persistence_setPropertyChangeListener(((ChangeTracker)clone)._persistence_getPropertyChangeListener()); } if (indirectList != null) { ((IndirectList)container).setIsListOrderBrokenInDb(indirectList.isListOrderBrokenInDb()); } } return container; } /** * INTERNAL: * Return whether the container policy is valid for the indirection policy. * In this case, the container policy MUST be configured * for an IndirectContainer. */ protected boolean containerPolicyIsValid() { if (Helper.classImplementsInterface(this.getContainerClass(), ClassConstants.IndirectContainer_Class)) { return true; } return false; } /** * INTERNAL: * Return the primary key for the reference object (i.e. the object * object referenced by domainObject and specified by mapping). * This key will be used by a RemoteValueHolder. * OneToOneMappings should not be using transparent direction. */ @Override public Object extractPrimaryKeyForReferenceObject(Object referenceObject, AbstractSession session) { throw DescriptorException.invalidUseOfTransparentIndirection(this.getMapping()); } /** * INTERNAL: * Return the reference row for the reference object. * This allows the new row to be built without instantiating * the reference object. * Return null if the object has already been instantiated. 
*/ public AbstractRecord extractReferenceRow(Object referenceObject) { if (this.objectIsInstantiated(referenceObject)) { return null; } else { return ((DatabaseValueHolder)((IndirectContainer)referenceObject).getValueHolder()).getRow(); } } /** * INTERNAL: * An object has been serialized from the server to the client. * Replace the transient attributes of the remote value holders * with client-side objects. */ public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { Object container = getMapping().getAttributeValueFromObject(object); if (container instanceof IndirectContainer && ((((IndirectContainer) container).getValueHolder() instanceof RemoteValueHolder)) ) { RemoteValueHolder valueHolder = (RemoteValueHolder)((IndirectContainer)container).getValueHolder(); valueHolder.setSession(session); valueHolder.setMapping(getMapping()); if ((!query.shouldMaintainCache()) && ((!query.shouldCascadeParts()) || (query.shouldCascadePrivateParts() && (!this.mapping.isPrivateOwned())))) { valueHolder.setQuery(null); } else { valueHolder.setQuery(query); } // set to uninstantiated since no objects are serialized past remote value holders valueHolder.setUninstantiated(); } else { this.mapping.fixRealObjectReferences(object, objectDescriptors, processedObjects, query, session); } } /** * INTERNAL: * Return the container class for the mapping. */ protected Class getContainerClass() { return this.getContainerPolicy().getContainerClass(); } /** * INTERNAL: * Return the container policy for the mapping. */ protected ContainerPolicy getContainerPolicy() { if (this.containerPolicy == null) { this.containerPolicy = getCollectionMapping().getContainerPolicy(); } return this.containerPolicy; } /** * INTERNAL: * Return the the size to of container to create. Default to using default constructor. 
*/ protected static int getDefaultContainerSize() { //3732 return defaultContainerSize.intValue(); } /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. */ @Override public Object getOriginalIndirectionObject(Object unitOfWorkIndirectionObject, AbstractSession session) { IndirectContainer container = (IndirectContainer)unitOfWorkIndirectionObject; if (container.getValueHolder() instanceof UnitOfWorkValueHolder) { return buildIndirectContainer((ValueHolderInterface) getOriginalValueHolder(unitOfWorkIndirectionObject, session)); } else { return container; } } /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. */ @Override public Object getOriginalIndirectionObjectForMerge(Object unitOfWorkIndirectionObject, AbstractSession session) { IndirectContainer container = (IndirectContainer) getOriginalIndirectionObject(unitOfWorkIndirectionObject, session); DatabaseValueHolder holder = (DatabaseValueHolder)container.getValueHolder(); if (holder != null && holder.getSession()!= null){ holder.setSession(session); } return container; } /** * INTERNAL: Return the original valueHolder object. Access to the * underlying valueholder may be required when serializing the valueholder * or converting the valueHolder to another type. */ @Override public Object getOriginalValueHolder(Object unitOfWorkIndirectionObject, AbstractSession session) { if (! 
(unitOfWorkIndirectionObject instanceof IndirectContainer)){ return new ValueHolder(); } IndirectContainer container = (IndirectContainer)unitOfWorkIndirectionObject; if (container.getValueHolder() instanceof WrappingValueHolder) { ValueHolderInterface valueHolder = ((WrappingValueHolder)container.getValueHolder()).getWrappedValueHolder(); if ((valueHolder == null) && session.isRemoteUnitOfWork()) { RemoteSessionController controller = ((RemoteUnitOfWork)session).getParentSessionController(); valueHolder = controller.getRemoteValueHolders().get(((UnitOfWorkValueHolder)container.getValueHolder()).getWrappedValueHolderRemoteID()); } if (!session.isProtectedSession()){ while (valueHolder instanceof WrappingValueHolder && ((WrappingValueHolder)valueHolder).getWrappedValueHolder() != null){ valueHolder = ((WrappingValueHolder)valueHolder).getWrappedValueHolder(); } } return valueHolder; } else { return container.getValueHolder(); } } /** * INTERNAL: * Return the "real" attribute value, as opposed to any wrapper. * This will trigger the wrapper to instantiate the value. */ public Object getRealAttributeValueFromObject(Object object, Object attribute) { // PERF: do not instantiate - this.getContainerPolicy().sizeFor(object);// forgive me for this hack: but we have to do something to trigger the database read return attribute; } /** * INTERNAL: * Trigger the instantiation of the value. */ public void instantiateObject(Object object, Object attribute) { getContainerPolicy().sizeFor(attribute); } /** * INTERNAL: * The method validateAttributeOfInstantiatedObject(Object attributeValue) fixes the value of the attributeValue * in cases where it is null and indirection requires that it contain some specific data structure. Return whether this will happen. 
* This method is used to help determine if indirection has been triggered * @param attributeValue * @return * @see validateAttributeOfInstantiatedObject(Object attributeValue) */ @Override public boolean isAttributeValueFullyBuilt(Object attributeValue){ return attributeValue != null; } public Boolean getUseLazyInstantiation() { return useLazyInstantiation; } /** * INTERNAL: * Extract and return the appropriate value from the * specified remote value holder. */ public Object getValueFromRemoteValueHolder(RemoteValueHolder remoteValueHolder) { Object result = remoteValueHolder.getServerIndirectionObject(); this.getContainerPolicy().sizeFor(result);// forgive me for this hack: but we have to do something to trigger the database read return result; } /** * INTERNAL: * Set the value of the appropriate attribute of target to attributeValue. * If the Target has yet to be instantiated then we need to instantiate the target to ensure that * the backup clone is instantiated for comparison. */ public void setRealAttributeValueInObject(Object target, Object attributeValue) { /* Bug 3573808 - do NOT trigger the valueholder; SPECj benchmark deadlocks in this method. Re-ran the original testcase IndirectContainerTestDatabase testMergeCloneWithSerializedTransparentIndirection and it passes without triggering the valueholder. MWN //cr 3788 // Trigger the valueholder when setting the value in an object Object object = this.getMapping().getAttributeValueFromObject(target); if (object instanceof IndirectContainer){ ((IndirectContainer)object).getValueHolder().getValue(); } */ super.setRealAttributeValueInObject(target, attributeValue); } /** * INTERNAL: * set the source object into QueryBasedValueHolder. 
*/ public void setSourceObject(Object sourceObject, Object attributeValue) { if( attributeValue instanceof IndirectContainer) { ValueHolderInterface valueHolder = ((IndirectContainer)attributeValue).getValueHolder(); if (valueHolder instanceof QueryBasedValueHolder) { ((QueryBasedValueHolder)valueHolder).setSourceObject(sourceObject); } } } /** * ADVANCED: * IndirectList and IndirectSet can be configured not to instantiate the list from the * database when you add and remove from them. IndirectList defaults to this behavior. When * Set to true, the collection associated with this TransparentIndirection will be setup so as * not to instantiate for adds and removes. The weakness of this setting for an IndirectSet is * that when the set is not instantiated, if a duplicate element is added, it will not be * detected until commit time. */ @Override public void setUseLazyInstantiation(Boolean useLazyInstantiation) { this.useLazyInstantiation = useLazyInstantiation; } /** * ADVANCED: * IndirectList and IndirectSet can be configured not to instantiate the list from the * database when you add and remove from them. IndirectList defaults to this behavior. When * Set to true, the collection associated with this TransparentIndirection will be setup so as * not to instantiate for adds and removes. The weakness of this setting for an IndirectSet is * that when the set is not instantiated, if a duplicate element is added, it will not be * detected until commit time. */ @Override public Boolean shouldUseLazyInstantiation() { return useLazyInstantiation; } /** * INTERNAL: * Iterate over the specified attribute value. 
*/ public void iterateOnAttributeValue(DescriptorIterator iterator, Object attributeValue) { if (attributeValue instanceof IndirectContainer) { iterator.iterateIndirectContainerForMapping((IndirectContainer)attributeValue, this.getMapping()); } else {// it must be a "real" collection super.iterateOnAttributeValue(iterator, attributeValue); } } /** * INTERNAL * Replace the client value holder with the server value holder, * after copying some of the settings from the client value holder. */ public void mergeRemoteValueHolder(Object clientSideDomainObject, Object serverSideDomainObject, MergeManager mergeManager) { // This will always be a transparent with a remote. IndirectContainer serverContainer = (IndirectContainer)getMapping().getAttributeValueFromObject(serverSideDomainObject); RemoteValueHolder serverValueHolder = (RemoteValueHolder)serverContainer.getValueHolder(); mergeClientIntoServerValueHolder(serverValueHolder, mergeManager); getMapping().setAttributeValueInObject(clientSideDomainObject, serverContainer); } /** * INTERNAL: * Return the null value of the appropriate attribute. That is, the * field from the database is NULL, return what should be * placed in the object's attribute as a result. * OneToOneMappings should not be using transparent direction. */ public Object nullValueFromRow() { throw DescriptorException.invalidUseOfTransparentIndirection(this.getMapping()); } /** * INTERNAL: * Return whether the specified object is instantiated. */ public boolean objectIsInstantiated(Object object) { if (object instanceof IndirectContainer) { return ((IndirectContainer)object).isInstantiated(); } else { return true;// it must be a "real" collection } } /** * INTERNAL: * Return whether the specified object can be instantiated without database access. 
*/ public boolean objectIsEasilyInstantiated(Object object) { if (object instanceof IndirectContainer) { ValueHolderInterface valueHolder = ((IndirectContainer)object).getValueHolder(); if (valueHolder instanceof DatabaseValueHolder) { return ((DatabaseValueHolder)valueHolder).isEasilyInstantiated(); } } return true; } /** * INTERNAL: * Return whether the specified object is instantiated, or if it has changes. */ public boolean objectIsInstantiatedOrChanged(Object object) { return objectIsInstantiated(object) || ((object instanceof IndirectCollection) && ((IndirectCollection)object).hasDeferredChanges()); } /** * ADVANCED: * Set the size to of container to create. Default to using default constructor. */ public static void setDefaultContainerSize(int defaultSize) { //3732 defaultContainerSize = Integer.valueOf(defaultSize); } /** * INTERNAL: * Return whether the type is appropriate for the indirection policy. * In this case, the attribute type MUST be * compatible with the one specified by the ContainerPolicy * (i.e. either the container policy class is a subclass of the * declared type [jdk1.1] or the container policy class implements * the declared interface [jdk1.2]). */ protected boolean typeIsValid(Class declaredType) { if (Helper.classIsSubclass(this.getContainerClass(), declaredType)) { return true; } if (Helper.classImplementsInterface(this.getContainerClass(), declaredType)) { return true; } return false; } /** * INTERNAL: * Return whether the indirection policy uses transparent indirection. */ public boolean usesTransparentIndirection(){ return true; } /** * INTERNAL: * Verify that the value of the attribute within an instantiated object * is of the appropriate type for the indirection policy. * In this case, the attribute must be non-null and it must be at least a * subclass or implementor of the container type. * If the value is null return a new indirection object to be used for the attribute. 
*/ public Object validateAttributeOfInstantiatedObject(Object attributeValue) { // PERF: If the value is null, create a new value holder instance for the attribute value, // this allows for indirection attributes to not be instantiated in the constructor as they // are typically replaced when reading or cloning so is very inefficient to initialize. if (attributeValue == null) { return buildIndirectContainer(); } return attributeValue; } /** * INTERNAL: * Verify that the container policy is compatible with the * indirection policy. If it is incorrect, add an exception to the * integrity checker. */ public void validateContainerPolicy(IntegrityChecker checker) throws DescriptorException { super.validateContainerPolicy(checker); if (!this.containerPolicyIsValid()) { checker.handleError(DescriptorException.invalidContainerPolicyWithTransparentIndirection(this.getMapping(), this.getContainerPolicy())); } // Bug 2618982 if (getContainerPolicy().isMapPolicy() && ((((ForeignReferenceMapping)getMapping()).getRelationshipPartnerAttributeName() != null) || (getMapping().getRelationshipPartner() != null))) { checker.handleError(DescriptorException.unsupportedTypeForBidirectionalRelationshipMaintenance(this.getMapping(), this.getContainerPolicy())); } } /** * INTERNAL: * Verify that attributeType is correct for the * indirection policy. If it is incorrect, add an exception to the * integrity checker. * In this case, the attribute type MUST be * compatible with the one specified by the ContainerPolicy. */ public void validateDeclaredAttributeType(Class attributeType, IntegrityChecker checker) throws DescriptorException { super.validateDeclaredAttributeType(attributeType, checker); if (!this.typeIsValid(attributeType)) { checker.handleError(DescriptorException.attributeAndMappingWithTransparentIndirectionMismatch(this.getMapping(), attributeType, this.validTypeName())); } } /** * INTERNAL: * Verify that getter returnType is correct for the * indirection policy. 
If it is incorrect, add an exception * to the integrity checker. * In this case, the attribute type MUST be * compatible with the one specified by the ContainerPolicy. */ public void validateGetMethodReturnType(Class returnType, IntegrityChecker checker) throws DescriptorException { super.validateGetMethodReturnType(returnType, checker); if (!this.typeIsValid(returnType)) { checker.handleError(DescriptorException.returnAndMappingWithTransparentIndirectionMismatch(this.getMapping(), returnType, this.validTypeName())); } } /** * INTERNAL: * Verify that setter parameterType is correct for the * indirection policy. If it is incorrect, add an exception * to the integrity checker. * In this case, the attribute type MUST be * compatible with the one specified by the ContainerPolicy. */ public void validateSetMethodParameterType(Class parameterType, IntegrityChecker checker) throws DescriptorException { super.validateSetMethodParameterType(parameterType, checker); if (!this.typeIsValid(parameterType)) { checker.handleError(DescriptorException.parameterAndMappingWithTransparentIndirectionMismatch(this.getMapping(), parameterType, this.validTypeName())); } } /** * INTERNAL: * Return the type that is appropriate for the indirection policy. */ protected String validTypeName() { return Helper.getShortClassName(this.getContainerClass()); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the batchQuery. * In this case, wrap the query in an IndirectContainer for later invocation. */ public Object valueFromBatchQuery(ReadQuery batchQuery, AbstractRecord row, ObjectLevelReadQuery originalQuery, CacheKey parentCacheKey) { return this.buildIndirectContainer(new BatchValueHolder(batchQuery, row, getForeignReferenceMapping(), originalQuery, parentCacheKey)); } /** * INTERNAL: * Return the value to be stored in the object's attribute. 
* This value is determined by invoking the appropriate * method on the object and passing it the row and session. * TransformationMappings should not be using transparent direction. */ public Object valueFromMethod(Object object, AbstractRecord row, AbstractSession session) { throw DescriptorException.invalidUseOfTransparentIndirection(this.getMapping()); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the query. * In this case, wrap the query in an IndirectContainer for later invocation. */ public Object valueFromQuery(ReadQuery query, AbstractRecord row, AbstractSession session) { return this.buildIndirectContainer(new QueryBasedValueHolder(query, row, session)); } /** * INTERNAL: * A combination of valueFromQuery and valueFromRow(object). * Sometimes the attribute is known (joining) but we still need to hang on * to the query (pessimistic locking). */ public Object valueFromQuery(ReadQuery query, AbstractRecord row, Object object, AbstractSession session) { return this.buildIndirectContainer(new QueryBasedValueHolder(query, object, row, session)); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This allows wrapping of the real value, none is required for transparent. */ public Object valueFromRow(Object object) { return object; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/UnitOfWorkValueHolder.java0000664000000000000000000003230412216173126027602 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.rmi.server.ObjID; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.internal.sessions.remote.RemoteValueHolder; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.logging.SessionLog; /** * A UnitOfWorkValueHolder is put in a clone object. * It wraps the value holder in the original object to delay * cloning the attribute in a unit of work until it is * needed by the application. * This value holder is used only in the unit of work. * * @author Sati */ public abstract class UnitOfWorkValueHolder extends DatabaseValueHolder implements WrappingValueHolder{ /** The value holder in the original object. */ protected transient ValueHolderInterface wrappedValueHolder; /** The mapping for the attribute. */ protected transient DatabaseMapping mapping; /** The value holder stored in the backup copy, should not be transient. */ protected ValueHolder backupValueHolder; /** These cannot be transient because they are required for a remote unit of work. When the remote uow is serialized to the server to be committed, these are used to reconstruct the value holder on the server. They should be null for non-remote sessions. */ protected UnitOfWorkImpl remoteUnitOfWork; protected Object sourceObject; /** This attribute is used specifically for relationship support. 
It mimics the * sourceObject attribute which is used for RemoteValueholder */ protected transient Object relationshipSourceObject; protected String sourceAttributeName; protected ObjID wrappedValueHolderRemoteID; protected UnitOfWorkValueHolder() { super(); } protected UnitOfWorkValueHolder(ValueHolderInterface attributeValue, Object clone, DatabaseMapping mapping, UnitOfWorkImpl unitOfWork) { this.wrappedValueHolder = attributeValue; this.mapping = mapping; this.session = unitOfWork; this.sourceAttributeName = mapping.getAttributeName(); this.relationshipSourceObject = clone; if (unitOfWork.isRemoteUnitOfWork()) { if (attributeValue instanceof RemoteValueHolder) { this.wrappedValueHolderRemoteID = ((RemoteValueHolder)attributeValue).getID(); } this.remoteUnitOfWork = unitOfWork; this.sourceObject = clone; } } /** * Backup the clone attribute value. */ protected abstract Object buildBackupCloneFor(Object cloneAttributeValue); /** * Clone the original attribute value. */ public abstract Object buildCloneFor(Object originalAttributeValue); protected ValueHolder getBackupValueHolder() { return backupValueHolder; } public DatabaseMapping getMapping() { return mapping; } protected UnitOfWorkImpl getRemoteUnitOfWork() { return remoteUnitOfWork; } protected String getSourceAttributeName() { return sourceAttributeName; } protected Object getSourceObject() { return sourceObject; } protected Object getRelationshipSourceObject() { return this.relationshipSourceObject; } protected UnitOfWorkImpl getUnitOfWork() { return (UnitOfWorkImpl)this.session; } /** * This is used for a remote unit of work. * If the value holder is sent back to the server uninstantiated and * it needs to be instantiated, then we must find the original * object and get the appropriate attribute from it. 
*/ protected Object getValueFromServerObject() { setSession(getRemoteUnitOfWork()); Object primaryKey = getSession().getId(getSourceObject()); Object originalObject = getUnitOfWork().getParent().getIdentityMapAccessor().getFromIdentityMap(primaryKey, getSourceObject().getClass()); if (originalObject == null) { originalObject = getUnitOfWork().getParent().readObject(getSourceObject()); } ClassDescriptor descriptor = getSession().getDescriptor(originalObject); DatabaseMapping mapping = descriptor.getObjectBuilder().getMappingForAttributeName(getSourceAttributeName()); setMapping(mapping); return getMapping().getRealAttributeValueFromObject(originalObject, getSession()); } /** * a.k.a getValueFromWrappedValueholder. * The old name is no longer correct, as query based valueholders are now * sometimes triggered directly without triggering the underlying valueholder. */ protected Object instantiateImpl() { if (this.wrappedValueHolder instanceof DatabaseValueHolder) { // Bug 3835202 - Ensure access to valueholders is thread safe. Several of the methods // called below are not threadsafe alone. synchronized (this.wrappedValueHolder) { DatabaseValueHolder wrapped = (DatabaseValueHolder)this.wrappedValueHolder; UnitOfWorkImpl unitOfWork = getUnitOfWork(); if (!wrapped.isEasilyInstantiated()) { if (wrapped.isPessimisticLockingValueHolder()) { if (!unitOfWork.getCommitManager().isActive() && !unitOfWork.wasTransactionBegunPrematurely()) { unitOfWork.beginEarlyTransaction(); } unitOfWork.log(SessionLog.FINEST, SessionLog.TRANSACTION, "instantiate_pl_relationship"); } if (unitOfWork.getCommitManager().isActive() || unitOfWork.wasTransactionBegunPrematurely()) { // At this point the wrapped valueholder is not triggered, // and we are in transaction. So just trigger the // UnitOfWork valueholder on the UnitOfWork only. 
return wrapped.instantiateForUnitOfWorkValueHolder(this); } } } if (!((DatabaseValueHolder)this.wrappedValueHolder).isInstantiated()){ //if not instantiated then try and load the UOW versions to prevent the whole loading from the cache and cloning //process Object result = ((DatabaseValueHolder)this.wrappedValueHolder).getValue((UnitOfWorkImpl) this.session); if (result != null){ return result; } } } return buildCloneFor(this.wrappedValueHolder.getValue()); } /** * INTERNAL: * Answers if this valueholder is easy to instantiate. * @return true if getValue() won't trigger a database read. */ public boolean isEasilyInstantiated() { return this.isInstantiated || ((this.wrappedValueHolder != null) && (!(this.wrappedValueHolder instanceof DatabaseValueHolder) || ((DatabaseValueHolder)this.wrappedValueHolder).isEasilyInstantiated())); } /** * INTERNAL: * Answers if this valueholder is a pessimistic locking one. Such valueholders * are special in that they can be triggered multiple times by different * UnitsOfWork. Each time a lock query will be issued. Hence even if * instantiated it may have to be instantiated again, and once instantatiated * all fields can not be reset. */ public boolean isPessimisticLockingValueHolder() { // This abstract method needs to be implemented but is not meaningfull for // this subclass. return ((this.wrappedValueHolder != null) && (this.wrappedValueHolder instanceof DatabaseValueHolder) && ((DatabaseValueHolder)this.wrappedValueHolder).isPessimisticLockingValueHolder()); } @Override public ValueHolderInterface getWrappedValueHolder() { return wrappedValueHolder; } /** * returns wrapped ValueHolder ObjID if available */ public ObjID getWrappedValueHolderRemoteID() { return this.wrappedValueHolderRemoteID; } /** * Used to determine if this is a remote uow value holder that was serialized to the server. * It has no reference to its wrapper value holder, so must find its original object to be able to instantiate. 
*/ public boolean isSerializedRemoteUnitOfWorkValueHolder() { return (this.remoteUnitOfWork != null) && (this.remoteUnitOfWork.getParent() != null) && (this.wrappedValueHolder == null); } /** * Get the value from the wrapped value holder, instantiating it * if necessary, and clone it. */ protected Object instantiate() { Object originalAttributeValue; Object cloneAttributeValue; if (isSerializedRemoteUnitOfWorkValueHolder()) { originalAttributeValue = getValueFromServerObject(); cloneAttributeValue = buildCloneFor(originalAttributeValue); } else { if (getUnitOfWork() == null) { throw ValidationException.instantiatingValueholderWithNullSession(); } cloneAttributeValue = instantiateImpl(); } // Set the value in the backup clone also. // In some cases we may want to force instantiation before the backup is built if (this.backupValueHolder != null) { this.backupValueHolder.setValue(buildBackupCloneFor(cloneAttributeValue)); } return cloneAttributeValue; } /** * Triggers UnitOfWork valueholders directly without triggering the wrapped * valueholder (this). *

* When in transaction and/or for pessimistic locking the UnitOfWorkValueHolder * needs to be triggered directly without triggering the wrapped valueholder. * However only the wrapped valueholder knows how to trigger the indirection, * i.e. it may be a batchValueHolder, and it stores all the info like the row * and the query. */ public Object instantiateForUnitOfWorkValueHolder(UnitOfWorkValueHolder unitOfWorkValueHolder) { // This abstract method needs to be implemented but is not meaningful for // this subclass. return instantiate(); } /** * Releases a wrapped valueholder privately owned by a particular unit of work. *

* When unit of work clones are built directly from rows no object in the shared * cache points to this valueholder, so it can store the unit of work as its * session. However once that UnitOfWork commits and the valueholder is merged * into the shared cache, the session needs to be reset to the root session, ie. * the server session. */ @Override public void releaseWrappedValueHolder(AbstractSession targetSession) { // On UnitOfWork dont want to do anything. return; } /** * Reset all the fields that are not needed after instantiation. */ protected void resetFields() { //do nothing. nothing should be reset to null; } public void setBackupValueHolder(ValueHolder backupValueHolder) { this.backupValueHolder = backupValueHolder; } protected void setMapping(DatabaseMapping mapping) { this.mapping = mapping; } protected void setRemoteUnitOfWork(UnitOfWorkImpl remoteUnitOfWork) { this.remoteUnitOfWork = remoteUnitOfWork; } protected void setSourceAttributeName(String name) { sourceAttributeName = name; } protected void setSourceObject(Object sourceObject) { this.sourceObject = sourceObject; } protected void setRelationshipSourceObject(Object relationshipSourceObject) { this.relationshipSourceObject = relationshipSourceObject; } protected void setWrappedValueHolder(DatabaseValueHolder valueHolder) { wrappedValueHolder = valueHolder; } /** * INTERNAL: * Return if add/remove should trigger instantiation or avoid. * Current instantiation is avoided is using change tracking. */ public boolean shouldAllowInstantiationDeferral() { return ((WeavedAttributeValueHolderInterface)this.wrappedValueHolder).shouldAllowInstantiationDeferral(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/ProtectedValueHolder.java0000664000000000000000000000564412216173126027473 0ustar /******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Gordon Yorke - initial API and implementation ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import org.eclipse.persistence.exceptions.DatabaseException; import org.eclipse.persistence.exceptions.ValidationException; import org.eclipse.persistence.indirection.ValueHolderInterface; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.mappings.DatabaseMapping; /** *

Purpose: provides a value holder type that can be used in Protected instances of Entities. * Similar to UnitOfWork Value Holder it wraps the original cache version. * * @author Gordon Yorke * @since EclipseLink 2.2 */ public class ProtectedValueHolder extends DatabaseValueHolder implements WrappingValueHolder{ protected transient ValueHolderInterface wrappedValueHolder; protected transient DatabaseMapping mapping; public ProtectedValueHolder(ValueHolderInterface attributeValue, DatabaseMapping mapping, AbstractSession cloningSession) { this.wrappedValueHolder = attributeValue; this.mapping = mapping; this.session = cloningSession; } @Override protected Object instantiate() throws DatabaseException { if (this.session == null){ throw ValidationException.instantiatingValueholderWithNullSession(); } //no need for original or cachekey here as the relationship must be cacheable and present in // wrapped valueholder or the ProtectedValueHolder would not have been created. Integer refreshCascade = null; if (wrappedValueHolder instanceof QueryBasedValueHolder){ refreshCascade = ((QueryBasedValueHolder)getWrappedValueHolder()).getRefreshCascadePolicy(); } return mapping.buildCloneForPartObject(this.wrappedValueHolder.getValue(),null, null, null, this.session, refreshCascade, true, true); } @Override public Object instantiateForUnitOfWorkValueHolder(UnitOfWorkValueHolder unitOfWorkValueHolder) { throw ValidationException.operationNotSupported("instantiateForUnitOfWorkValueHolder"); } @Override public boolean isPessimisticLockingValueHolder() { return false; } public ValueHolderInterface getWrappedValueHolder() { return wrappedValueHolder; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/ContainerIndirectionPolicy.java0000664000000000000000000004257312216173126030703 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.lang.reflect.*; import java.security.AccessController; import java.security.PrivilegedActionException; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.sessions.remote.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.security.PrivilegedGetConstructorFor; import org.eclipse.persistence.internal.security.PrivilegedInvokeConstructor; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; /** * Purpose: Provide ability for developers to wrap ValueHolders (Basic Indirection)

* Responsibilities:

    *
  • Wrap & un-wrap a ValueHolder within an IndirectContainer *
  • Reflectivly instantiate the containers as required *
* @see org.eclipse.persistence.indirection.IndirectContainer * @author Doug Clarke (TOP) * @since 2.5.0.5 */ public class ContainerIndirectionPolicy extends BasicIndirectionPolicy { private Class containerClass; private String containerClassName; private transient Constructor containerConstructor; /** * INTERNAL: * Construct a new indirection policy. */ public ContainerIndirectionPolicy() { super(); } /** * INTERNAL: * Return a backup clone of the attribute. */ public Object backupCloneAttribute(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { IndirectContainer container = (IndirectContainer)attributeValue; ValueHolderInterface valueHolder = container.getValueHolder(); ValueHolderInterface newValueHolder = (ValueHolderInterface)super.backupCloneAttribute(valueHolder, clone, backup, unitOfWork); return buildContainer(newValueHolder); } /** * Build a conatiner with the initialized constructor. * @see initializeContainer */ protected IndirectContainer buildContainer(ValueHolderInterface valueHolder) { try { IndirectContainer container = null; if (getContainerConstructor().getParameterTypes().length == 0) { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ container = (IndirectContainer)AccessController.doPrivileged(new PrivilegedInvokeConstructor(getContainerConstructor(), new Object[0])); }else{ container = (IndirectContainer)PrivilegedAccessHelper.invokeConstructor(getContainerConstructor(), new Object[0]); } container.setValueHolder(valueHolder); } else { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ container = (IndirectContainer)AccessController.doPrivileged(new PrivilegedInvokeConstructor(getContainerConstructor(), new Object[] { valueHolder })); }else{ container = (IndirectContainer)PrivilegedAccessHelper.invokeConstructor(getContainerConstructor(), new Object[] { valueHolder }); } } return container; } catch (Exception exception) { throw DescriptorException.invalidIndirectionPolicyOperation(this, 
"buildContainer constructor (" + getContainerConstructor() + ") Failed: " + exception); } } /** * INTERNAL: This method can be used when an Indirection Object is required * to be built from a provided ValueHolderInterface object. This may be used * for custom value holder types. Certain policies like the * TransparentIndirectionPolicy may wrap the valueholder in another object. */ public Object buildIndirectObject(ValueHolderInterface valueHolder){ return buildContainer(valueHolder); } /** * INTERNAL: * Return a clone of the attribute. * @param buildDirectlyFromRow indicates that we are building the clone directly * from a row as opposed to building the original from the row, putting it in * the shared cache, and then cloning the original. */ public Object cloneAttribute(Object attributeValue, Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession, boolean buildDirectlyFromRow) { IndirectContainer container = (IndirectContainer)attributeValue; ValueHolderInterface valueHolder = container.getValueHolder(); ValueHolderInterface newValueHolder = (ValueHolderInterface)super.cloneAttribute(valueHolder, original, cacheKey, clone, refreshCascade, cloningSession, buildDirectlyFromRow); return buildContainer(newValueHolder); } /** * INTERNAL: * Return the reference row for the reference object. * This allows the new row to be built without instantiating * the reference object. * Return null if the object has already been instantiated. */ public AbstractRecord extractReferenceRow(Object referenceObject) { if (this.objectIsInstantiated(referenceObject)) { return null; } else { return ((DatabaseValueHolder)((IndirectContainer)referenceObject).getValueHolder()).getRow(); } } /** * Returns the Container class which implements IndirectContainer. */ public Class getContainerClass() { return containerClass; } /** * INTERNAL: * Used by MW. 
*/ public String getContainerClassName() { if ((containerClassName == null) && (containerClass != null)) { containerClassName = containerClass.getName(); } return containerClassName; } /** * * @return java.lang.reflect.Constructor */ protected Constructor getContainerConstructor() { return containerConstructor; } /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. */ public Object getOriginalIndirectionObject(Object unitOfWorkIndirectionObject, AbstractSession session) { IndirectContainer container = (IndirectContainer)unitOfWorkIndirectionObject; if (container.getValueHolder() instanceof UnitOfWorkValueHolder) { ValueHolderInterface valueHolder = ((UnitOfWorkValueHolder)container.getValueHolder()).getWrappedValueHolder(); if ((valueHolder == null) && session.isRemoteUnitOfWork()) { RemoteSessionController controller = ((RemoteUnitOfWork)session).getParentSessionController(); valueHolder = controller.getRemoteValueHolders().get(((UnitOfWorkValueHolder)container.getValueHolder()).getWrappedValueHolderRemoteID()); } return buildContainer(valueHolder); } else { return container; } } /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. */ public Object getOriginalIndirectionObjectForMerge(Object unitOfWorkIndirectionObject, AbstractSession session) { IndirectContainer container = (IndirectContainer) getOriginalIndirectionObject(unitOfWorkIndirectionObject, session); DatabaseValueHolder holder = (DatabaseValueHolder)container.getValueHolder(); if (holder != null && holder.getSession()!= null){ holder.setSession(session); } return container; } /** * INTERNAL: * Return the "real" attribute value, as opposed to any wrapper. * This will trigger the wrapper to instantiate the value. 
*/ public Object getRealAttributeValueFromObject(Object object, Object attribute) { return ((IndirectContainer)attribute).getValueHolder().getValue(); } /** * INTERNAL: * Ensure the container class implements IndirectContainer and that it * has a constructor which can be used. */ public void initialize() { // Verify that the provided class implements IndirectContainer if (!ClassConstants.IndirectContainer_Class.isAssignableFrom(containerClass)) { throw DescriptorException.invalidIndirectionContainerClass(this, containerClass); } // Try to find constructor which takes a ValueHolderInterface try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try{ this.containerConstructor = (Constructor)AccessController.doPrivileged(new PrivilegedGetConstructorFor(getContainerClass(), new Class[] { ClassConstants.ValueHolderInterface_Class }, false)); }catch (PrivilegedActionException ex){ if (ex.getCause() instanceof NoSuchMethodException){ throw (NoSuchMethodException) ex.getCause(); } throw (RuntimeException)ex.getCause(); } }else{ this.containerConstructor = PrivilegedAccessHelper.getConstructorFor(getContainerClass(), new Class[] { ClassConstants.ValueHolderInterface_Class }, false); } return; } catch (NoSuchMethodException nsme) {// DO NOTHING, exception thrown at end } // Try to find the default constructor try { if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){ try{ this.containerConstructor = (Constructor)AccessController.doPrivileged(new PrivilegedGetConstructorFor(getContainerClass(), new Class[0], false)); }catch (PrivilegedActionException ex){ if (ex.getCause() instanceof NoSuchMethodException){ throw (NoSuchMethodException) ex.getCause(); } throw (RuntimeException)ex.getCause(); } }else{ this.containerConstructor = PrivilegedAccessHelper.getConstructorFor(getContainerClass(), new Class[0], false); } return; } catch (NoSuchMethodException nsme) {// DO NOTHING, exception thrown at end } // If no constructor is found then we throw an initialization 
exception throw DescriptorException.noConstructorIndirectionContainerClass(this, containerClass); } /** * INTERNAL: * The method validateAttributeOfInstantiatedObject(Object attributeValue) fixes the value of the attributeValue * in cases where it is null and indirection requires that it contain some specific data structure. Return whether this will happen. * This method is used to help determine if indirection has been triggered * @param attributeValue * @return * @see validateAttributeOfInstantiatedObject(Object attributeValue) */ @Override public boolean isAttributeValueFullyBuilt(Object attributeValue){ return true; } /** * INTERNAL: * Iterate over the specified attribute value, */ public void iterateOnAttributeValue(DescriptorIterator iterator, Object attributeValue) { super.iterateOnAttributeValue(iterator, ((IndirectContainer)attributeValue).getValueHolder()); } /** * INTERNAL: * Return the null value of the appropriate attribute. That is, the * field from the database is NULL, return what should be * placed in the object's attribute as a result. * In this case, return an empty ValueHolder. */ public Object nullValueFromRow() { return buildContainer(new ValueHolder()); } /** * Reset the wrapper used to store the value. */ public void reset(Object target) { getMapping().setAttributeValueInObject(target, buildContainer(new ValueHolder())); } /** * INTERNAL: * Return whether the specified object is instantiated. */ public boolean objectIsInstantiated(Object object) { return ((IndirectContainer)object).getValueHolder().isInstantiated(); } /** * INTERNAL: * Return whether the specified object can be instantiated without database access. 
*/ public boolean objectIsEasilyInstantiated(Object object) { ValueHolderInterface valueHolder = ((IndirectContainer)object).getValueHolder(); if (valueHolder instanceof DatabaseValueHolder) { return ((DatabaseValueHolder)valueHolder).isEasilyInstantiated(); } else { return true; } } /** * Sets the Container class which implements IndirectContainer */ public void setContainerClass(Class containerClass) { this.containerClass = containerClass; } /** * Set the container classname for the MW */ public void setContainterClassName(String containerClassName) { this.containerClassName = containerClassName; } /** * INTERNAL: * Set the value of the appropriate attribute of target to attributeValue. * In this case, place the value inside the target's ValueHolder. */ public void setRealAttributeValueInObject(Object target, Object attributeValue) { IndirectContainer container = (IndirectContainer)this.getMapping().getAttributeValueFromObject(target); container.getValueHolder().setValue(attributeValue); this.getMapping().setAttributeValueInObject(target, container); } /** * INTERNAL: * Return whether the type is appropriate for the indirection policy. * In this case, the type must either be assignable from IndirectContainer or * allow the conatinerClass to be assigned to it. */ protected boolean typeIsValid(Class attributeType) { return ClassConstants.IndirectContainer_Class.isAssignableFrom(attributeType) || attributeType.isAssignableFrom(getContainerClass()); } /** * INTERNAL: * Verify that the value of the attribute within an instantiated object * is of the appropriate type for the indirection policy. * In this case, the attribute must be non-null and it must be a * ValueHolderInterface. 
*/ public Object validateAttributeOfInstantiatedObject(Object attributeValue) { if (!(getContainerClass().isInstance(attributeValue))) { throw DescriptorException.valueHolderInstantiationMismatch(attributeValue, this.getMapping()); } return attributeValue; } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the batchQuery. * In this case, wrap the query in a ValueHolder for later invocation. */ public Object valueFromBatchQuery(ReadQuery batchQuery, AbstractRecord row, ObjectLevelReadQuery originalQuery, CacheKey parentCacheKey) { ValueHolderInterface valueHolder = (ValueHolderInterface)super.valueFromBatchQuery(batchQuery, row, originalQuery, parentCacheKey); return buildContainer(valueHolder); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by invoking the appropriate * method on the object and passing it the row and session. * In this case, wrap the row in a ValueHolder for later use. */ public Object valueFromMethod(Object object, AbstractRecord row, AbstractSession session) { ValueHolderInterface valueHolder = (ValueHolderInterface)super.valueFromMethod(object, row, session); return buildContainer(valueHolder); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the query. * In this case, wrap the query in a ValueHolder for later invocation. */ public Object valueFromQuery(ReadQuery query, AbstractRecord row, AbstractSession session) { ValueHolderInterface valueHolder = (ValueHolderInterface)super.valueFromQuery(query, row, session); return buildContainer(valueHolder); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the row. * In this case, simply wrap the object in a ValueHolder. 
*/ public Object valueFromRow(Object object) { return buildContainer(new ValueHolder(object)); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/BasicIndirectionPolicy.java0000664000000000000000000005626712216173126030007 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.rmi.server.ObjID; import java.util.*; import org.eclipse.persistence.mappings.DatabaseMapping.WriteType; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.DistributedSession; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.helper.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.sessions.remote.RemoteSessionController; import org.eclipse.persistence.internal.sessions.remote.RemoteUnitOfWork; import org.eclipse.persistence.internal.sessions.remote.RemoteValueHolder; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.MergeManager; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; /** *

<b>Purpose</b>

: * BasicIndirectionPolicy implements the behavior necessary for a * a ForeignReferenceMapping (or TransformationMapping) to * use ValueHolders to delay the reading of objects from the database * until they are actually needed. * * @see ForeignReferenceMapping * @author Mike Norman * @since TOPLink/Java 2.5 */ public class BasicIndirectionPolicy extends IndirectionPolicy { /** * INTERNAL: * Construct a new indirection policy. */ public BasicIndirectionPolicy() { super(); } /** * INTERNAL: * Return a backup clone of the attribute. */ public Object backupCloneAttribute(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { //no need to check if the attribute is a valueholder because closeAttribute // should always be called first ValueHolderInterface valueHolder = (ValueHolderInterface)attributeValue;// cast the value ValueHolder result = new ValueHolder(); // delay instantiation until absolutely necessary if ((!(valueHolder instanceof UnitOfWorkValueHolder)) || valueHolder.isInstantiated()) { result.setValue(super.backupCloneAttribute(valueHolder.getValue(), clone, backup, unitOfWork)); } else { ((UnitOfWorkValueHolder)valueHolder).setBackupValueHolder(result); } return result; } /** * INTERNAL: This method can be used when an Indirection Object is required * to be built from a provided ValueHolderInterface object. This may be used * for custom value holder types. Certain policies like the * TransparentIndirectionPolicy may wrap the valueholder in another object. */ public Object buildIndirectObject(ValueHolderInterface valueHolder){ return valueHolder; } /** * INTERNAL: * Return a clone of the attribute. * @param buildDirectlyFromRow indicates that we are building the clone * directly from a row as opposed to building the original from the * row, putting it in the shared cache, and then cloning the original. 
*/ public Object cloneAttribute(Object attributeValue, Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession, boolean buildDirectlyFromRow) { ValueHolderInterface valueHolder = (ValueHolderInterface) attributeValue; ValueHolderInterface result; if (!buildDirectlyFromRow && cloningSession.isUnitOfWork() && ((UnitOfWorkImpl)cloningSession).isOriginalNewObject(original)) { // CR#3156435 Throw a meaningful exception if a serialized/dead value holder is detected. // This can occur if an existing serialized object is attempt to be registered as new. if ((valueHolder instanceof DatabaseValueHolder) && (! ((DatabaseValueHolder) valueHolder).isInstantiated()) && (((DatabaseValueHolder) valueHolder).getSession() == null) && (! ((DatabaseValueHolder) valueHolder).isSerializedRemoteUnitOfWorkValueHolder())) { throw DescriptorException.attemptToRegisterDeadIndirection(original, this.mapping); } if (this.mapping.getRelationshipPartner() == null) { result = new ValueHolder(); result.setValue(this.mapping.buildCloneForPartObject(valueHolder.getValue(), original, null, clone, cloningSession, refreshCascade, false, false)); } else { //if I have a relationship partner trigger the indirection so that the value will be inserted // because of this call the entire tree should be recursively cloned AbstractRecord row = null; if (valueHolder instanceof DatabaseValueHolder) { row = ((DatabaseValueHolder)valueHolder).getRow(); } result = this.mapping.createCloneValueHolder(valueHolder, original, clone, row, cloningSession, buildDirectlyFromRow); Object newObject = this.mapping.buildCloneForPartObject(valueHolder.getValue(), original, cacheKey, clone, cloningSession, refreshCascade, false, false); ((UnitOfWorkValueHolder)result).privilegedSetValue(newObject); ((UnitOfWorkValueHolder)result).setInstantiated(); } } else { AbstractRecord row = null; if (valueHolder instanceof DatabaseValueHolder) { row = 
((DatabaseValueHolder)valueHolder).getRow(); } result = this.mapping.createCloneValueHolder(valueHolder, original, clone, row, cloningSession, buildDirectlyFromRow); } return result; } /** * INTERNAL: * Return the primary key for the reference object (i.e. the object * object referenced by domainObject and specified by mapping). * This key will be used by a RemoteValueHolder. */ @Override public Object extractPrimaryKeyForReferenceObject(Object referenceObject, AbstractSession session) { if (objectIsEasilyInstantiated(referenceObject)) { return super.extractPrimaryKeyForReferenceObject(((ValueHolderInterface)referenceObject).getValue(), session); } else { return getOneToOneMapping().extractPrimaryKeysForReferenceObjectFromRow(extractReferenceRow(referenceObject)); } } /** * INTERNAL: * Return the reference row for the reference object. * This allows the new row to be built without instantiating * the reference object. * Return null if the object has already been instantiated. */ public AbstractRecord extractReferenceRow(Object referenceObject) { if (this.objectIsInstantiated(referenceObject)) { return null; } else { return ((DatabaseValueHolder)referenceObject).getRow(); } } /** * INTERNAL: * An object has been serialized from the server to the client. * Replace the transient attributes of the remote value holders * with client-side objects. */ public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session) { Object attributeValue = this.mapping.getAttributeValueFromObject(object); //bug 4147755 if it is not a Remote Valueholder then treat as if there was no VH... 
if (attributeValue instanceof RemoteValueHolder){ RemoteValueHolder rvh = (RemoteValueHolder)this.mapping.getAttributeValueFromObject(object); rvh.setSession(session); rvh.setMapping(this.mapping); if ((!query.shouldMaintainCache()) && ((!query.shouldCascadeParts()) || (query.shouldCascadePrivateParts() && (!this.mapping.isPrivateOwned())))) { rvh.setQuery(null); } else { rvh.setQuery(query); } // set to uninstantiated since no objects are serialized past remote value holders rvh.setUninstantiated(); }else{ this.mapping.fixRealObjectReferences(object, objectDescriptors, processedObjects, query, session); } } /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. * This is used when building a new object from the unit of work when the original fell out of the cache. */ @Override public Object getOriginalIndirectionObject(Object unitOfWorkIndirectionObject, AbstractSession session) { return this.getOriginalValueHolder(unitOfWorkIndirectionObject, session); } /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. */ @Override public Object getOriginalIndirectionObjectForMerge(Object unitOfWorkIndirectionObject, AbstractSession session) { DatabaseValueHolder holder = (DatabaseValueHolder)getOriginalIndirectionObject(unitOfWorkIndirectionObject, session); if (holder != null && holder.getSession()!= null){ holder.setSession(session); } return holder; } /** * INTERNAL: Return the original valueHolder object. Access to the * underlying valueholder may be required when serializing the valueholder * or converting the valueHolder to another type. 
*/ @Override public Object getOriginalValueHolder(Object unitOfWorkIndirectionObject, AbstractSession session) { if ((unitOfWorkIndirectionObject instanceof UnitOfWorkValueHolder) && (((UnitOfWorkValueHolder)unitOfWorkIndirectionObject).getRemoteUnitOfWork() != null)) { ValueHolderInterface valueHolder = ((UnitOfWorkValueHolder) unitOfWorkIndirectionObject).getWrappedValueHolder(); if (valueHolder == null) { // For remote session the original value holder is transient, // so the value must be found in the registry or created. RemoteUnitOfWork remoteUnitOfWork = (RemoteUnitOfWork)((UnitOfWorkValueHolder)unitOfWorkIndirectionObject).getRemoteUnitOfWork(); RemoteSessionController controller = remoteUnitOfWork.getParentSessionController(); ObjID id = ((UnitOfWorkValueHolder) unitOfWorkIndirectionObject).getWrappedValueHolderRemoteID(); if (id != null) { // This value holder may be on the server, or the client, // on the server, the controller should exists, so can lock up in it, // on the client, the id should be enough to create a new remote value holder. if (controller != null) { valueHolder = controller.getRemoteValueHolders().get(id); } else if (session.isRemoteSession()) { valueHolder = new RemoteValueHolder(id); ((RemoteValueHolder)valueHolder).setSession(session); } } if (valueHolder == null) { // Must build a new value holder. 
Object object = ((UnitOfWorkValueHolder) unitOfWorkIndirectionObject).getSourceObject(); AbstractRecord row = this.mapping.getDescriptor().getObjectBuilder().buildRow(object, session, WriteType.UNDEFINED); ReadObjectQuery query = new ReadObjectQuery(); query.setSession(session); valueHolder = (ValueHolderInterface) this.mapping.valueFromRow(row, null, query, true); } return valueHolder; } } if (unitOfWorkIndirectionObject instanceof WrappingValueHolder) { ValueHolderInterface valueHolder = ((WrappingValueHolder)unitOfWorkIndirectionObject).getWrappedValueHolder(); if (!session.isProtectedSession()){ while (valueHolder instanceof WrappingValueHolder && ((WrappingValueHolder)valueHolder).getWrappedValueHolder() != null){ valueHolder = ((WrappingValueHolder)valueHolder).getWrappedValueHolder(); } } if ((valueHolder != null) && (valueHolder instanceof DatabaseValueHolder)) { ((DatabaseValueHolder) valueHolder).releaseWrappedValueHolder(session); } return valueHolder; } else { return unitOfWorkIndirectionObject; } } /** * Reset the wrapper used to store the value. */ public void reset(Object target) { this.mapping.setAttributeValueInObject(target, new ValueHolder()); } /** * INTERNAL: * Return the "real" attribute value, as opposed to any wrapper. * This will trigger the wrapper to instantiate the value. */ public Object getRealAttributeValueFromObject(Object object, Object attribute) { if (attribute instanceof ValueHolderInterface) { return ((ValueHolderInterface)attribute).getValue(); } else { return attribute; } } /** * INTERNAL: * Extract and return the appropriate value from the * specified remote value holder. */ public Object getValueFromRemoteValueHolder(RemoteValueHolder remoteValueHolder) { return remoteValueHolder.getValue(); } /** * INTERNAL: * The method validateAttributeOfInstantiatedObject(Object attributeValue) fixes the value of the attributeValue * in cases where it is null and indirection requires that it contain some specific data structure. 
Return whether this will happen. * This method is used to help determine if indirection has been triggered * @param attributeValue * @return * @see validateAttributeOfInstantiatedObject(Object attributeValue) */ @Override public boolean isAttributeValueFullyBuilt(Object attributeValue){ return attributeValue != null; } /** * INTERNAL: * Iterate over the specified attribute value, */ public void iterateOnAttributeValue(DescriptorIterator iterator, Object attributeValue) { iterator.iterateValueHolderForMapping((ValueHolderInterface)attributeValue, this.mapping); } /** * INTERNAL * Replace the client value holder with the server value holder, * after copying some of the settings from the client value holder. */ public void mergeRemoteValueHolder(Object clientSideDomainObject, Object serverSideDomainObject, MergeManager mergeManager) { // This will always be a remote value holder coming from the server, RemoteValueHolder serverValueHolder = (RemoteValueHolder)this.mapping.getAttributeValueFromObject(serverSideDomainObject); mergeClientIntoServerValueHolder(serverValueHolder, mergeManager); this.mapping.setAttributeValueInObject(clientSideDomainObject, serverValueHolder); } /** * INTERNAL: * Return the null value of the appropriate attribute. That is, the * field from the database is NULL, return what should be * placed in the object's attribute as a result. * In this case, return an empty ValueHolder. */ public Object nullValueFromRow() { return new ValueHolder(); } /** * INTERNAL: * Return whether the specified object is instantiated. */ public boolean objectIsInstantiated(Object object) { return ((ValueHolderInterface)object).isInstantiated(); } /** * INTERNAL: * Return whether the specified object can be instantiated without database access. 
*/ public boolean objectIsEasilyInstantiated(Object object) { if (object instanceof DatabaseValueHolder) { return ((DatabaseValueHolder)object).isEasilyInstantiated(); } else { return true; } } /** * INTERNAL: * Set the value of the appropriate attribute of target to attributeValue. * In this case, place the value inside the target's ValueHolder. */ public void setRealAttributeValueInObject(Object target, Object attributeValue) { ValueHolderInterface holder = (ValueHolderInterface)this.mapping.getAttributeValueFromObject(target); if (holder == null) { holder = new ValueHolder(attributeValue); } else { holder.setValue(attributeValue); } super.setRealAttributeValueInObject(target, holder); } /** * INTERNAL: * set the source object into QueryBasedValueHolder. * Used only by transparent indirection. */ public void setSourceObject(Object sourceObject, Object attributeValue) { if (attributeValue instanceof QueryBasedValueHolder) { ((QueryBasedValueHolder)attributeValue).setSourceObject(sourceObject); } } /** * INTERNAL: * Return whether the type is appropriate for the indirection policy. * In this case, the attribute type MUST be ValueHolderInterface. */ protected boolean typeIsValid(Class attributeType) { return attributeType == ClassConstants.ValueHolderInterface_Class || attributeType == ClassConstants.WeavedAttributeValueHolderInterface_Class; } /** * INTERNAL: * Verify that the value of the attribute within an instantiated object * is of the appropriate type for the indirection policy. * In this case, the attribute must be non-null and it must be a * ValueHolderInterface. * If the value is null return a new indirection object to be used for the attribute. 
*/ public Object validateAttributeOfInstantiatedObject(Object attributeValue) { // PERF: If the value is null, create a new value holder instance for the attribute value, // this allows for indirection attributes to not be instantiated in the constructor as they // are typically replaced when reading or cloning so is very inefficient to initialize. if (attributeValue == null) { return new ValueHolder(); } if (!(attributeValue instanceof ValueHolderInterface)) { throw DescriptorException.valueHolderInstantiationMismatch(attributeValue, this.mapping); } return attributeValue; } /** * INTERNAL: * Verify that attributeType is correct for the * indirection policy. If it is incorrect, add an exception to the * integrity checker. * In this case, the attribute type MUST be ValueHolderInterface. */ public void validateDeclaredAttributeType(Class attributeType, IntegrityChecker checker) throws DescriptorException { super.validateDeclaredAttributeType(attributeType, checker); if (!this.typeIsValid(attributeType)) { checker.handleError(DescriptorException.attributeAndMappingWithIndirectionMismatch(this.mapping)); } } /** * INTERNAL: * Verify that getter returnType is correct for the * indirection policy. If it is incorrect, add an exception * to the integrity checker. * In this case, the return type MUST be ValueHolderInterface. */ public void validateGetMethodReturnType(Class returnType, IntegrityChecker checker) throws DescriptorException { super.validateGetMethodReturnType(returnType, checker); if (!this.typeIsValid(returnType)) { checker.handleError(DescriptorException.returnAndMappingWithIndirectionMismatch(this.mapping)); } } /** * INTERNAL: * Verify that setter parameterType is correct for the * indirection policy. If it is incorrect, add an exception * to the integrity checker. * In this case, the parameter type MUST be ValueHolderInterface. 
*/ public void validateSetMethodParameterType(Class parameterType, IntegrityChecker checker) throws DescriptorException { super.validateSetMethodParameterType(parameterType, checker); if (!this.typeIsValid(parameterType)) { checker.handleError(DescriptorException.parameterAndMappingWithIndirectionMismatch(this.mapping)); } } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the batchQuery. * In this case, wrap the query in a ValueHolder for later invocation. */ public Object valueFromBatchQuery(ReadQuery batchQuery, AbstractRecord row, ObjectLevelReadQuery originalQuery, CacheKey parentCacheKey) { return new BatchValueHolder(batchQuery, row, this.getForeignReferenceMapping(), originalQuery, parentCacheKey); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by invoking the appropriate * method on the object and passing it the row and session. * In this case, wrap the row in a ValueHolder for later use. */ public Object valueFromMethod(Object object, AbstractRecord row, AbstractSession session) { return new TransformerBasedValueHolder(this.getTransformationMapping().getAttributeTransformer(), object, row, session); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the query. * In this case, wrap the query in a ValueHolder for later invocation. */ public Object valueFromQuery(ReadQuery query, AbstractRecord row, Object sourceObject, AbstractSession session) { return new QueryBasedValueHolder(query, sourceObject, row, session); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the query. * In this case, wrap the query in a ValueHolder for later invocation. 
*/ public Object valueFromQuery(ReadQuery query, AbstractRecord row, AbstractSession session) { return new QueryBasedValueHolder(query, row, session); } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the row. * In this case, simply wrap the object in a ValueHolder. */ public Object valueFromRow(Object object) { return new ValueHolder(object); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/QueryBasedValueHolder.java0000664000000000000000000002241112216173126027575 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.ForeignReferenceMapping; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; /** * QueryBasedValueHolder wraps a database-stored object and implements behavior * to access it. The object is read from the database by invoking a * user-specified query. 
* * @see ObjectLevelReadQuery * @author Dorin Sandu */ public class QueryBasedValueHolder extends DatabaseValueHolder { /** * Stores the query to be executed. */ protected transient ReadQuery query; protected transient Object sourceObject; protected Integer refreshCascade; protected QueryBasedValueHolder() { } /** * Initialize the query-based value holder. */ public QueryBasedValueHolder(ReadQuery query, AbstractRecord row, AbstractSession session) { this(query, null, row, session); } /** /** * Store the uow identity so that it can be used to determine new * transaction logic */ /** * Initialize the query-based value holder. */ public QueryBasedValueHolder(ReadQuery query, Object sourceObject, AbstractRecord row, AbstractSession session) { this.row = row; this.session = session; // Make sure not to put a ClientSession or IsolatedClientSession in // the shared cache (indirectly). // Skip this if unitOfWork, for we use session.isUnitOfWork() to // implement // isTransactionalValueholder(), saving us from needing a boolean // instance variable. // If unitOfWork this safety measure is deferred until merge time with // releaseWrappedValuehHolder. // Note that if isolated session & query will return itself, which is // safe // for if isolated it will not go in the shared cache. if (!session.isUnitOfWork()) { this.session = session.getRootSession(query); } this.query = query; this.sourceObject = sourceObject; } /** * INTERNAL: * Returns the refresh cascade policy that was set on the query that was used to instantiate the valueholder * a null value means that a non refresh query was used. */ public Integer getRefreshCascadePolicy(){ return this.refreshCascade; } /** * Process against the UOW and attempt to load a local copy before going to the shared cache * If null is returned then the calling UOW will instantiate as normal. 
*/ @Override public Object getValue(UnitOfWorkImpl uow) { if (this.query.isReadObjectQuery()){ return this.query.getQueryMechanism().checkCacheForObject(this.query.getTranslationRow(), uow); } //not able to shortcircuit cache lookup to UOW return null; return null; } /** * Return the query. The query for a QueryBasedValueHolder is constructed * dynamically based on the original query on the parent object and the * mapping configuration. If modifying a query the developer must be careful * not to change the results returned as that may cause the application to * see incorrect results. */ public ReadQuery getQuery() { return query; } protected Object instantiate() throws DatabaseException { return instantiate(this.session); } /** * Instantiate the object by executing the query on the session. */ protected Object instantiate(AbstractSession session) throws DatabaseException { if (session == null) { throw ValidationException.instantiatingValueholderWithNullSession(); } if (this.query.isObjectBuildingQuery() && ((ObjectBuildingQuery)this.query).shouldRefreshIdentityMapResult()){ this.refreshCascade = ((ObjectBuildingQuery)this.query).getCascadePolicy(); } return session.executeQuery(getQuery(), getRow()); } /** * Triggers UnitOfWork valueholders directly without triggering the wrapped * valueholder (this). *

* When in transaction and/or for pessimistic locking the * UnitOfWorkValueHolder needs to be triggered directly without triggering * the wrapped valueholder. However only the wrapped valueholder knows how * to trigger the indirection, i.e. it may be a batchValueHolder, and it * stores all the info like the row and the query. Note: This method is not * thread-safe. It must be used in a synchronized manner */ public Object instantiateForUnitOfWorkValueHolder(UnitOfWorkValueHolder unitOfWorkValueHolder) { return instantiate(unitOfWorkValueHolder.getUnitOfWork()); } /** * INTERNAL: * Run any extra code required after the valueholder instantiates * For query based VH, we notify the cache that the valueholder has been triggered */ public void postInstantiate(){ DatabaseMapping mapping = query.getSourceMapping(); if (mapping != null && mapping.isForeignReferenceMapping()){ ClassDescriptor descriptor = mapping.getDescriptor(); if (descriptor == null || descriptor.isAggregateDescriptor()){ descriptor = session.getDescriptor(sourceObject); } if (descriptor != null){ session.getIdentityMapAccessorInstance().getIdentityMap(descriptor).lazyRelationshipLoaded(sourceObject, this, (ForeignReferenceMapping)query.getSourceMapping()); } } } /** * Releases a wrapped valueholder privately owned by a particular unit of * work. *

* When unit of work clones are built directly from rows no object in the * shared cache points to this valueholder, so it can store the unit of work * as its session. However once that UnitOfWork commits and the valueholder * is merged into the shared cache, the session needs to be reset to the * root session, ie. the server session. */ @Override public void releaseWrappedValueHolder(AbstractSession targetSession) { AbstractSession session = getSession(); if ((session != null) && session.isUnitOfWork()) { setSession(targetSession); } } /** * Reset all the fields that are not needed after instantiation. */ protected void resetFields() { super.resetFields(); this.query = null; } /** * Set the query. */ protected void setQuery(ReadQuery theQuery) { query = theQuery; } /** * INTERNAL: Answers if this valueholder is a pessimistic locking one. Such * valueholders are special in that they can be triggered multiple times by * different UnitsOfWork. Each time a lock query will be issued. Hence even * if instantiated it may have to be instantiated again, and once * instantiated all fields can not be reset. *

* Since locks will be issued each time this valueholder is triggered, * triggering this directly on the session in auto commit mode will generate * an exception. This only UnitOfWorkValueHolder's wrapping this can trigger * it. Note: This method is not thread-safe. It must be used in a * synchronized manner */ public boolean isPessimisticLockingValueHolder() { // Get the easy checks out of the way first. if ((this.query == null) || !this.query.isObjectLevelReadQuery()) { return false; } ObjectLevelReadQuery query = (ObjectLevelReadQuery) this.query; // Note even if the reference class is not locked, but the valueholder // query // has joined attributes, then this may count as a lock query. // This means it is possible to trigger a valueholder to get an object // which // is not to be pess. locked and get an exception for triggering it on // the // session outside a transaction. return query.isLockQuery(this.session); } public void setSourceObject(Object sourceObject) { this.sourceObject = sourceObject; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/IndirectionPolicy.java0000664000000000000000000004655112216173126027040 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import java.io.*; import java.util.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.indirection.ValueHolderInterface; import org.eclipse.persistence.internal.descriptors.*; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.sessions.remote.*; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.MergeManager; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.sessions.remote.*; /** *

Purpose

: * An IndirectionPolicy acts as a 'rules' holder that determines * the behavior of a ForeignReferenceMapping (or TransformationMapping) * with respect to indirection, or lack thereof. *

*

Description

: * IndirectionPolicy is an abstract class that defines the protocol to be implemented by * subclasses so that the assorted DatabaseMappings can use an assortment of * indirection policies:
    *
  • no indirection policy (read everything from database) *
  • basic indirection policy (use ValueHolders) *
  • transparent indirection policy (collections only) *
  • proxy indirection policy (transparent 1:1 indirection using JDK 1.3's Proxy) *
* *

*

Responsibilities

: *
    *
  • instantiate the various IndirectionPolicies *
*

* * @see ForeignReferenceMapping * @author Mike Norman * @since TOPLink/Java 2.5 */ public abstract class IndirectionPolicy implements Cloneable, Serializable { protected DatabaseMapping mapping; /** * INTERNAL: * Construct a new indirection policy. */ public IndirectionPolicy() { super(); } /** * INTERNAL: * Return a backup clone of the attribute. */ public Object backupCloneAttribute(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { return this.mapping.buildBackupCloneForPartObject(attributeValue, clone, backup, unitOfWork); } /** * INTERNAL * Return true if the refresh should refresh on this mapping or not. */ protected ReadObjectQuery buildCascadeQuery(MergeManager mergeManager) { ReadObjectQuery query = new ReadObjectQuery(); if (this.mapping.getReferenceDescriptor() != null) { query.setReferenceClass(this.mapping.getReferenceDescriptor().getJavaClass()); } if (mergeManager.shouldCascadeAllParts()) { query.cascadeAllParts(); query.refreshIdentityMapResult(); } if (mergeManager.shouldCascadePrivateParts() && getForeignReferenceMapping().isPrivateOwned()) { query.cascadePrivateParts(); query.refreshIdentityMapResult(); } return query; } /** * INTERNAL: This method can be used when an Indirection Object is required * to be built from a provided ValueHolderInterface object. This may be used * for custom value holder types. Certain policies like the * TransparentIndirectionPolicy may wrap the valueholder in another object. */ public abstract Object buildIndirectObject(ValueHolderInterface valueHolder); /** * INTERNAL: * Clones itself. */ public Object clone() { try { return super.clone(); } catch (CloneNotSupportedException e) { throw new InternalError(); } } /** * INTERNAL: * Return a clone of the attribute. * @param builtDirectlyFromRow indicates that we are building the clone * directly from a row as opposed to building the original from the * row, putting it in the shared cache, and then cloning the original. 
*/ public abstract Object cloneAttribute(Object attributeValue, Object original, CacheKey cacheKey, Object clone, Integer refreshCascade, AbstractSession cloningSession, boolean buildDirectlyFromRow); /** * INTERNAL: * Return the primary key for the reference object (i.e. the object * object referenced by domainObject and specified by mapping). * This key will be used by a RemoteValueHolder. */ public Object extractPrimaryKeyForReferenceObject(Object referenceObject, AbstractSession session) { return getOneToOneMapping().extractPrimaryKeysFromRealReferenceObject(referenceObject, session); } /** * INTERNAL: * Return the reference row for the reference object. * This allows the new row to be built without instantiating * the reference object. * Return null if the object has already been instantiated. */ public abstract AbstractRecord extractReferenceRow(Object referenceObject); /** * INTERNAL: * An object has been serialized from the server to the client. * Replace the transient attributes of the remote value holders * with client-side objects. */ public abstract void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, DistributedSession session); /** * INTERNAL: * Reduce casting clutter.... */ protected CollectionMapping getCollectionMapping() { return (CollectionMapping)this.mapping; } /** * INTERNAL: * Reduce casting clutter.... */ protected ForeignReferenceMapping getForeignReferenceMapping() { return (ForeignReferenceMapping)this.mapping; } /** * INTERNAL: * Return the database mapping that uses the indirection policy. */ public DatabaseMapping getMapping() { return mapping; } /** * INTERNAL: * Reduce casting clutter.... */ protected ObjectReferenceMapping getOneToOneMapping() { return (ObjectReferenceMapping)this.mapping; } /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. 
*/ public abstract Object getOriginalIndirectionObject(Object unitOfWorkIndirectionObject, AbstractSession session); /** * INTERNAL: * Return the original indirection object for a unit of work indirection object. */ public Object getOriginalIndirectionObjectForMerge(Object unitOfWorkIndirectionObject, AbstractSession session){ return getOriginalIndirectionObject(unitOfWorkIndirectionObject, session); } /** * INTERNAL: Return the original valueHolder object. Access to the * underlying valueholder may be required when serializing the valueholder * or converting the valueHolder to another type. */ public abstract Object getOriginalValueHolder(Object unitOfWorkIndirectionObject, AbstractSession session); /** * INTERNAL: * Return the "real" attribute value, as opposed to any wrapper. * This will trigger the wrapper to instantiate the value. */ public abstract Object getRealAttributeValueFromObject(Object object, Object attribute); /** * INTERNAL: * Trigger the instantiation of the value. */ public void instantiateObject(Object object, Object attribute) { getRealAttributeValueFromObject(object, attribute); } /** * INTERNAL: * Reduce casting clutter.... */ protected AbstractTransformationMapping getTransformationMapping() { return (AbstractTransformationMapping)this.mapping; } /** * INTERNAL: * Extract and return the appropriate value from the * specified remote value holder. */ public abstract Object getValueFromRemoteValueHolder(RemoteValueHolder remoteValueHolder); /** * INTERNAL: * The method validateAttributeOfInstantiatedObject(Object attributeValue) fixes the value of the attributeValue * in cases where it is null and indirection requires that it contain some specific data structure. Return whether this will happen. 
* This method is used to help determine if indirection has been triggered * @param attributeValue * @return * @see validateAttributeOfInstantiatedObject(Object attributeValue) */ public boolean isAttributeValueFullyBuilt(Object attributeValue){ return true; } /** * INTERNAL: * Initialize the indirection policy (Do nothing by default) */ public void initialize() { } /** * INTERNAL: */ public boolean isWeavedObjectBasicIndirectionPolicy() { return false; } /** * INTERNAL: * Iterate over the specified attribute value, * heeding the settings in the iterator. */ public void iterateOnAttributeValue(DescriptorIterator iterator, Object attributeValue) { if (attributeValue != null) { this.mapping.iterateOnRealAttributeValue(iterator, attributeValue); } } /** * INTERNAL * Replace the client value holder with the server value holder, * after copying some of the settings from the client value holder. */ protected void mergeClientIntoServerValueHolder(RemoteValueHolder serverValueHolder, MergeManager mergeManager) { serverValueHolder.setMapping(this.mapping); serverValueHolder.setSession(mergeManager.getSession()); if (this.mapping.isForeignReferenceMapping()) { ObjectLevelReadQuery query = buildCascadeQuery(mergeManager); serverValueHolder.setQuery(query); } } /** * INTERNAL * Replace the client value holder with the server value holder, * after copying some of the settings from the client value holder. */ public abstract void mergeRemoteValueHolder(Object clientSideDomainObject, Object serverSideDomainObject, MergeManager mergeManager); /** * INTERNAL: * Return the null value of the appropriate attribute. That is, the * field from the database is NULL, return what should be * placed in the object's attribute as a result. */ public abstract Object nullValueFromRow(); /** * INTERNAL: * Return whether the specified object is instantiated. 
*/ public abstract boolean objectIsInstantiated(Object object); /** * INTERNAL: * Return whether the specified object can be instantiated without database access. */ public abstract boolean objectIsEasilyInstantiated(Object object); /** * INTERNAL: * Return whether the specified object is instantiated, or if it has changes. */ public boolean objectIsInstantiatedOrChanged(Object object) { return objectIsInstantiated(object); } /** * INTERNAL: * set the database mapping that uses the indirection policy. */ public void setMapping(DatabaseMapping mapping) { this.mapping = mapping; } /** * INTERNAL: * Set the value of the appropriate attribute of target to attributeValue. * In this case, simply place the value inside the target. */ public void setRealAttributeValueInObject(Object target, Object attributeValue) { this.mapping.setAttributeValueInObject(target, attributeValue); } /** * INTERNAL: * Same functionality as setRealAttributeValueInObject(Object target, Object attributeValue) but allows * overridden behavior for IndirectionPolicies that track changes * @param target * @param attributeValue * @param allowChangeTracking */ public void setRealAttributeValueInObject(Object target, Object attributeValue, boolean allowChangeTracking) { setRealAttributeValueInObject(target, attributeValue); } /** * INTERNAL: * set the source object into QueryBasedValueHolder. * Used only by transparent indirection. */ public void setSourceObject(Object sourceObject, Object attributeValue) { } /** * ADVANCED: * This method will only change the behavior of TransparentIndirectionPolicy. * * IndirectList and IndirectSet can be configured not to instantiate the list from the * database when you add and remove from them. IndirectList defaults to this behavior. When * Set to true, the collection associated with this TransparentIndirection will be setup so as * not to instantiate for adds and removes. 
The weakness of this setting for an IndirectSet is * that when the set is not instantiated, if a duplicate element is added, it will not be * detected until commit time. * */ public void setUseLazyInstantiation(Boolean useLazyInstantiation) { } /** * ADVANCED: * Returns false unless this is a transparent indirection policy * * IndirectList and IndirectSet can be configured not to instantiate the list from the * database when you add and remove from them. IndirectList defaults to this behavior. When * Set to true, the collection associated with this TransparentIndirection will be setup so as * not to instantiate for adds and removes. The weakness of this setting for an IndirectSet is * that when the set is not instantiated, if a duplicate element is added, it will not be * detected until commit time. */ public Boolean shouldUseLazyInstantiation() { return false; } /** * Reset the wrapper used to store the value. * This is only required if a wrapper is used. */ public void reset(Object target) { // Nothing by default. } /** * INTERNAL: * Return whether the indirection policy actually uses indirection. * The default is true. */ public boolean usesIndirection() { return true; } /** * INTERNAL: * Return whether the indirection policy uses transparent indirection. * The default is false. */ public boolean usesTransparentIndirection(){ return false; } /** * INTERNAL: * Verify that the value of the attribute within an instantiated object * is of the appropriate type for the indirection policy. * If it is incorrect, throw an exception. * If the value is null return a new indirection object to be used for the attribute. */ public Object validateAttributeOfInstantiatedObject(Object attributeValue) throws DescriptorException { return attributeValue; } /** * INTERNAL: * Verify that the container policy is compatible with the * indirection policy. If it is incorrect, add an exception to the * integrity checker. 
*/ public void validateContainerPolicy(IntegrityChecker checker) throws DescriptorException { // by default, do nothing } /** * INTERNAL: * Verify that attributeType is correct for the * indirection policy. If it is incorrect, add an exception to the * integrity checker. */ public void validateDeclaredAttributeType(Class attributeType, IntegrityChecker checker) throws DescriptorException { // by default, do nothing } /** * INTERNAL: * Verify that attributeType is an appropriate collection type for the * indirection policy. If it is incorrect, add an exception to the integrity checker. */ public void validateDeclaredAttributeTypeForCollection(Class attributeType, IntegrityChecker checker) throws DescriptorException { // by default, do nothing } /** * INTERNAL: * Verify that getter returnType is correct for the * indirection policy. If it is incorrect, add an exception * to the integrity checker. */ public void validateGetMethodReturnType(Class returnType, IntegrityChecker checker) throws DescriptorException { // by default, do nothing } /** * INTERNAL: * Verify that getter returnType is an appropriate collection type for the * indirection policy. If it is incorrect, add an exception to the integrity checker. */ public void validateGetMethodReturnTypeForCollection(Class returnType, IntegrityChecker checker) throws DescriptorException { // by default, do nothing } /** * INTERNAL: * Verify that setter parameterType is correct for the * indirection policy. If it is incorrect, add an exception * to the integrity checker. */ public void validateSetMethodParameterType(Class parameterType, IntegrityChecker checker) throws DescriptorException { // by default, do nothing } /** * INTERNAL: * Verify that setter parameterType is an appropriate collection type for the * indirection policy. If it is incorrect, add an exception to the integrity checker. 
*/ public void validateSetMethodParameterTypeForCollection(Class parameterType, IntegrityChecker checker) throws DescriptorException { // by default, do nothing } /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the batchQuery. */ public abstract Object valueFromBatchQuery(ReadQuery batchQuery, AbstractRecord row, ObjectLevelReadQuery originalQuery, CacheKey parentCacheKey); /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by invoking the appropriate * method on the object and passing it the row and session. */ public abstract Object valueFromMethod(Object object, AbstractRecord row, AbstractSession session); /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the query. */ public abstract Object valueFromQuery(ReadQuery query, AbstractRecord row, Object sourceObject, AbstractSession session); /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the query. */ public abstract Object valueFromQuery(ReadQuery query, AbstractRecord row, AbstractSession session); /** * INTERNAL: * Return the value to be stored in the object's attribute. * This value is determined by the row. */ public abstract Object valueFromRow(Object object); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/BackupValueHolder.java0000664000000000000000000000374712216173126026751 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import org.eclipse.persistence.indirection.*; /** * Used as the backup value holder in the unit of work for transparent indirection. * This ensure that a reference to the original value holder is held in case the * transparent collection or proxy is replace without first instantiating the original. * * @since 10.1.3 * @author James Sutherland */ public class BackupValueHolder extends ValueHolder { /** Stores the original uow clone's value holder. */ protected ValueHolderInterface unitOfWorkValueHolder; public BackupValueHolder(ValueHolderInterface unitOfWorkValueHolder) { this.unitOfWorkValueHolder = unitOfWorkValueHolder; } /** * If the original value holder was not instantiated, * then first instantiate it to obtain the backup value. */ public Object getValue() { // Ensures instantiation of the original, and setting of this back value holder's value. getUnitOfWorkValueHolder().getValue(); return value; } /** * Return the original uow clone's value holder. */ public ValueHolderInterface getUnitOfWorkValueHolder() { return unitOfWorkValueHolder; } } ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/UnitOfWorkTransformerValueHolder.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/UnitOfWorkTransformerValueHolder0000664000000000000000000000606312216173126031110 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import org.eclipse.persistence.indirection.*; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.foundation.AbstractTransformationMapping; /** * UnitOfWorkTransformerValueHolder wraps a database-stored object and * implements behavior to access it. The object is read from * the database by invoking a user-specified transformer object. * This value holder is used only in the unit of work. * * Most of the methods ignore the actual attribute values and are * simply used to trigger instantiation. * * @author Sati */ public class UnitOfWorkTransformerValueHolder extends UnitOfWorkValueHolder { protected transient Object cloneOfObject; protected transient Object object; public UnitOfWorkTransformerValueHolder(ValueHolderInterface attributeValue, Object original, Object clone, AbstractTransformationMapping mapping, UnitOfWorkImpl unitOfWork) { this(attributeValue, clone, mapping, unitOfWork); this.object = original; this.cloneOfObject = clone; } protected UnitOfWorkTransformerValueHolder(ValueHolderInterface attributeValue, Object clone, DatabaseMapping mapping, UnitOfWorkImpl unitOfWork) { super(attributeValue, clone, mapping, unitOfWork); } /** * Backup the clone attribute value. 
*/ protected Object buildBackupCloneFor(Object cloneAttributeValue) { return buildCloneFor(cloneAttributeValue); } /** * Clone the original attribute value. */ public Object buildCloneFor(Object originalAttributeValue) { return getMapping().buildCloneForPartObject(originalAttributeValue, getObject(), null, getCloneOfObject(), getUnitOfWork(), null, true, true); } protected Object getCloneOfObject() { return cloneOfObject; } protected Object getObject() { return object; } /** * Ensure that the backup value holder is populated. */ public void setValue(Object theValue) { // Must force instantiation to be able to compare with the old value. if (!this.isInstantiated) { instantiate(); } super.setValue(theValue); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/indirection/BatchValueHolder.java0000664000000000000000000001170712216173126026560 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.indirection; import org.eclipse.persistence.internal.identitymaps.CacheKey; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.queries.*; import org.eclipse.persistence.exceptions.*; /** * BatchValueHolder is used by the mappings that use indirection when using query optimization (batch reading). * This value holder is different from QueryBasedValueHolder in that its value must be extracted from one of the * results return by the query, not the entire result. * The query is also shared by all other value holders within the batch and it must be ensured that the query is only * executed once. Concurrency must also be maintained across all of the value holders in the batch. */ public class BatchValueHolder extends QueryBasedValueHolder { protected transient ForeignReferenceMapping mapping; protected transient ObjectLevelReadQuery originalQuery; protected transient CacheKey parentCacheKey; /** * Initialize the query-based value holder. * @param query The query that returns the object when executed. * @param row The row representation of the object. * @param mapping The mapping that is uses batch reading. */ public BatchValueHolder(ReadQuery query, AbstractRecord row, ForeignReferenceMapping mapping, ObjectLevelReadQuery originalQuery, CacheKey parentCacheKey) { super(query, row, originalQuery.getSession()); this.mapping = mapping; this.originalQuery = originalQuery; this.parentCacheKey = parentCacheKey; } protected ForeignReferenceMapping getMapping() { return mapping; } /** * Instantiate the object by having the mapping extract its value from the query. 
* Concurrency must be maintained across all of the value holders, * since they all share the same query, the extractResultFromBatchQuery method must be synchronized. */ protected Object instantiate(AbstractSession session) throws EclipseLinkException { return this.mapping.extractResultFromBatchQuery(this.query, this.parentCacheKey, this.row, session, this.originalQuery); } /** * Triggers UnitOfWork valueholders directly without triggering the wrapped * valueholder (this). *

* When in transaction and/or for pessimistic locking the * UnitOfWorkValueHolder needs to be triggered directly without triggering * the wrapped valueholder. However only the wrapped valueholder knows how * to trigger the indirection, i.e. it may be a batchValueHolder, and it * stores all the info like the row and the query. Note: This method is not * thread-safe. It must be used in a synchronized manner. * The batch value holder must use a batch query relative to the unit of work, * as the batch is local to the unit of work. */ public Object instantiateForUnitOfWorkValueHolder(UnitOfWorkValueHolder unitOfWorkValueHolder) { UnitOfWorkImpl unitOfWork = unitOfWorkValueHolder.getUnitOfWork(); ReadQuery localQuery = unitOfWork.getBatchQueries().get(this.query); if (localQuery == null) { localQuery = (ReadQuery)this.query.clone(); unitOfWork.getBatchQueries().put(this.query, localQuery); } return this.mapping.extractResultFromBatchQuery(localQuery, this.parentCacheKey, this.row, unitOfWorkValueHolder.getUnitOfWork(), this.originalQuery); } /** * INTERNAL: * Answers if this valueholder is easy to instantiate. * @return true if getValue() won't trigger a database read. */ public boolean isEasilyInstantiated() { return this.isInstantiated; } /** * Reset all the fields that are not needed after instantiation. */ protected void resetFields() { super.resetFields(); this.mapping = null; this.originalQuery = null; } protected void setMapping(ForeignReferenceMapping mapping) { this.mapping = mapping; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/0000775000000000000000000000000012216174372021033 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/TypeNodeValue.java0000664000000000000000000001530012216173126024415 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.List; import javax.xml.namespace.QName; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.DirectMapping; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.UnionField; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; public class TypeNodeValue extends NodeValue { private DirectMapping directMapping; public boolean isOwningNode(XPathFragment xPathFragment) { return (null != xPathFragment) && xPathFragment.isAttribute(); } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { return this.marshal(xPathFragment, marshalRecord, object, session, namespaceResolver, ObjectMarshalContext.getInstance()); } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { Object objectValue = 
directMapping.getAttributeValueFromObject(object); return this.marshalSingleValue(xPathFragment, marshalRecord, object, objectValue, session, namespaceResolver, marshalContext); } public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object objectValue, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { Object fieldValue = directMapping.getFieldValue(objectValue, session, marshalRecord); if ((null == fieldValue) || (null == namespaceResolver)) { return false; } Field xmlField = (Field) directMapping.getField(); QName schemaType = getSchemaType(xmlField, fieldValue, session); if (null == schemaType) { return false; } if(xmlField.getSchemaType() == null){ if(schemaType.equals(Constants.STRING_QNAME)){ return false; } }else{ if(xmlField.isSchemaType(schemaType)){ return false; } } XPathFragment groupingFragment = marshalRecord.openStartGroupingElements(namespaceResolver); String typeQName = namespaceResolver.resolveNamespaceURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI) + Constants.COLON + Constants.SCHEMA_TYPE_ATTRIBUTE; String schemaTypePrefix = namespaceResolver.resolveNamespaceURI(schemaType.getNamespaceURI()); if(schemaTypePrefix == null){ if(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI.equals(schemaType.getNamespaceURI())){ schemaTypePrefix = namespaceResolver.generatePrefix(Constants.SCHEMA_PREFIX); }else{ schemaTypePrefix = namespaceResolver.generatePrefix(); } marshalRecord.namespaceDeclaration(schemaTypePrefix, schemaType.getNamespaceURI()); } marshalRecord.attribute(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, Constants.SCHEMA_TYPE_ATTRIBUTE, typeQName, schemaTypePrefix + Constants.COLON + schemaType.getLocalPart()); marshalRecord.closeStartGroupingElements(groupingFragment); return true; } public DirectMapping getDirectMapping() { return directMapping; } public void setDirectMapping(DirectMapping directMapping) { this.directMapping = 
directMapping; } private QName getSchemaType(Field xmlField, Object value, CoreAbstractSession session) { QName schemaType = null; if (xmlField.isTypedTextField()) { schemaType = xmlField.getXMLType(value.getClass()); } else if (xmlField.isUnionField()) { return getSchemaTypeForUnion((UnionField) xmlField, value, session); } else if (xmlField.getSchemaType() != null) { schemaType = xmlField.getSchemaType(); } return schemaType; } private QName getSchemaTypeForUnion(UnionField xmlField, Object value, CoreAbstractSession session) { List schemaTypes = xmlField.getSchemaTypes(); QName schemaType = null; QName nextQName; Class javaClass; for (int i = 0; i < schemaTypes.size(); i++) { nextQName = (QName) xmlField.getSchemaTypes().get(i); try { if (nextQName != null) { javaClass = xmlField.getJavaClass(nextQName); value = ((XMLConversionManager) session.getDatasourcePlatform().getConversionManager()).convertObject(value, javaClass, nextQName); schemaType = nextQName; break; } } catch (ConversionException ce) { if (i == (schemaTypes.size() - 1)) { schemaType = nextQName; } } } return schemaType; } public void attribute(UnmarshalRecord unmarshalRecord, String namespaceURI, String localName, String value) { //assume this is being called for xsi:type field if (value != null) { String namespace = null; int colonIndex = value.indexOf(Constants.COLON); if (colonIndex > -1) { String prefix = value.substring(0, colonIndex); namespace = unmarshalRecord.resolveNamespacePrefix(prefix); value = value.substring(colonIndex + 1); } unmarshalRecord.setTypeQName(new QName(namespace, value)); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLConversionPair.java0000664000000000000000000000326312216173126025220 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import javax.xml.namespace.QName; /** * INTERNAL: *

Purpose: This class represents a XMLConversionPair. It is used to * persist a collection of XMLConversionPair to Deployment XML. An * XMLConversionPair has a QName representing a schema type and a String * representing a class name.

*/ public class XMLConversionPair { protected QName xmlType; protected String javaType; public XMLConversionPair() { } public XMLConversionPair(QName xmlType, String javaType) { this.xmlType = xmlType; this.javaType = javaType; } public void setXmlType(QName xmlType) { this.xmlType = xmlType; } public QName getXmlType() { return xmlType; } public void setJavaType(String javaType) { this.javaType = javaType; } public String getJavaType() { return javaType; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/ContainerValue.java0000664000000000000000000000651012216173126024613 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.Mapping; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; /** * INTERNAL: *

Purpose: If an implementation of NodeValue is capable of returning * a collection value then it must implement this interface to be handled * correctly by the TreeObjectBuilder.

*/ public interface ContainerValue { public Object getContainerInstance(); public void setContainerInstance(Object object, Object containerInstance); public CoreContainerPolicy getContainerPolicy(); /** * Marshal only one of the values from the collection. * @param xPathFragment * @param marshalRecord * @param object * @param value * @param session * @param namespaceResolver * @param marshalContext */ public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object value, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext); public Mapping getMapping(); /** * Return true if the original container on the object should be used if * present. If it is not present then the container policy will be used to * create the container. */ public boolean getReuseContainer(); /** * INTERNAL: * Used to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord */ public int getIndex(); /** * INTERNAL * Return true if an empty container should be set on the object if there * is no presence of the collection in the XML document. * @since EclipseLink 2.3.3 */ public boolean isDefaultEmptyContainer(); /** * For media types that provide a native representation of collections (such * as JSON arrays), can the representation be simplified so that the * grouping element can be used as the collection name. */ public boolean isWrapperAllowedAsCollectionName(); /** * INTERNAL: * Set to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord * Set during TreeObjectBuilder initialization */ public void setIndex(int index); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/Reference.java0000664000000000000000000000715112216173126023574 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. 
All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.HashMap; import org.eclipse.persistence.internal.oxm.mappings.Mapping; import org.eclipse.persistence.oxm.sequenced.Setting; /** * INTERNAL: *

Purpose: Holds mapping reference info. The info held in this class * will be used after unmarshal to resolve 1-1 and 1-M mapping references. This * is necessary to ensure that all related objects have been created before * attempting to set instance values in related objects.

*/ public class Reference { protected Mapping mapping; // mapping associated with this reference protected Object sourceObject; // the source object instance protected Class targetClass; // the reference class protected Object primaryKey; // primary key values for cache lookup - used in single case protected HashMap primaryKeyMap; // map of primary key values for cache lookup - used in collection case private Setting setting; private Object container; /** * Get the container this reference should be added to if it was from a collection mapping * @return */ public Object getContainer() { return container; } public Setting getSetting() { return setting; } public void setSetting(Setting setting) { this.setting = setting; } /** * Constructor typically used in the collection case. */ public Reference(Mapping mapping, Object source, Class target, HashMap primaryKeyMap, Object container) { this.mapping = mapping; sourceObject = source; targetClass = target; this.primaryKeyMap = primaryKeyMap; this.container = container; } /** * Constructor typically used in the single case. */ public Reference(Mapping mapping, Object source, Class target, Object primaryKey) { this.mapping = mapping; sourceObject = source; targetClass = target; this.primaryKey = primaryKey; } /** * Return the XMLMapping associated with this reference. * * @return */ public Mapping getMapping() { return mapping; } /** * Return the map of primary key/values required to lookup * the reference class in the cache. * * @return */ public HashMap getPrimaryKeyMap() { return primaryKeyMap; } /** * Return the list of primary key values required to lookup * the reference class in the cache. * * @return */ public Object getPrimaryKey() { return primaryKey; } /** * Return the source object for this reference. * * @return */ public Object getSourceObject() { return sourceObject; } /** * Return the target (reference) class for this reference. 
* * @return */ public Class getTargetClass() { return targetClass; } /** * Set the primary key value required to lookup * the reference class in the cache. */ public void setPrimaryKey(Object primaryKey) { this.primaryKey = primaryKey; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/0000775000000000000000000000000012216174372022651 5ustar ././@LongLink0000000000000000000000000000014700000000000011567 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/UnmarshalKeepAsElementPolicy.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/UnmarshalKeepAsElementPolicy.ja0000664000000000000000000000167012216173126030702 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; public interface UnmarshalKeepAsElementPolicy { public boolean isKeepAllAsElement(); public boolean isKeepNoneAsElement(); public boolean isKeepUnknownAsElement(); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/Mapping.java0000664000000000000000000001104712216173126025106 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import java.util.Map; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface Mapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, XML_RECORD extends XMLRecord> { public void convertClassNamesToClasses(ClassLoader classLoader); public ATTRIBUTE_ACCESSOR getAttributeAccessor(); /** * The classification type for the attribute this mapping represents */ public abstract Class getAttributeClassification(); /** * Return the name of the attribute set in the mapping. */ public abstract String getAttributeName(); public Object getAttributeValueFromObject(Object object); public abstract CONTAINER_POLICY getContainerPolicy(); /** * Return the descriptor to which this mapping belongs */ public DESCRIPTOR getDescriptor(); public FIELD getField(); public DESCRIPTOR getReferenceDescriptor(); /** * Related mapping should implement this method to return true. 
*/ public abstract boolean isAbstractCompositeCollectionMapping(); /** * Related mapping should implement this method to return true. */ public abstract boolean isAbstractCompositeDirectCollectionMapping(); /** * Related mapping should implement this method to return true. */ public abstract boolean isAbstractCompositeObjectMapping(); /** * Related mapping should implement this method to return true. */ public boolean isAbstractDirectMapping(); public boolean isCollectionMapping(); public boolean isReadOnly(); public boolean isReferenceMapping(); /** * Related mapping should implement this method to return true. */ public abstract boolean isTransformationMapping(); /** * ADVANCED: * Set the attributeAccessor. * The attribute accessor is responsible for setting and retrieving the attribute value * from the object for this mapping. * This can be set to an implementor of AttributeAccessor if the attribute * requires advanced conversion of the mapping value, or a real attribute does not exist. */ public void setAttributeAccessor(ATTRIBUTE_ACCESSOR attributeAccessor); /** * Sets the name of the attribute in the mapping. */ public void setAttributeName(String attributeName); public void setAttributeValueInObject(Object object, Object value); public void writeSingleValue(Object value, Object object, XML_RECORD record, ABSTRACT_SESSION session); /** * This method is invoked reflectively on the reference object to return the value of the * attribute in the object. This method sets the name of the getMethodName. */ public void setGetMethodName(String methodName); /** * Set this mapping to be read only. * Read-only mappings can be used if two attributes map to the same field. * Read-only mappings cannot be used for the primary key or other required fields. */ public void setIsReadOnly(boolean aBoolean); /** * INTERNAL: * Allow user defined properties. 
*/ public void setProperties(Map properties); /** * Set the methodName used to set the value for the mapping's attribute into the object. */ public void setSetMethodName(String methodName); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/MimeTypePolicy.java0000664000000000000000000000231312216173126026420 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; /** *

Purpose:

A MimeTypePolicy is used in conjunction with an BinaryData/CollectionMapping * in order to allow customization of the mime type for a specific property at runtime rather than * at design time. */ public interface MimeTypePolicy { /** * return a MIME type string * @param anObject - fixed non-dynamic implementors will ignore this parameter * @return String */ public String getMimeType(Object object); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/Login.java0000664000000000000000000000233312216173126024561 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.sessions.CoreLogin; import org.eclipse.persistence.internal.core.databaseaccess.CorePlatform; import org.eclipse.persistence.oxm.documentpreservation.DocumentPreservationPolicy; public interface Login extends CoreLogin { public DocumentPreservationPolicy getDocumentPreservationPolicy(); public void setDocumentPreservationPolicy(DocumentPreservationPolicy documentPreservationPolicy); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/ChoiceObjectMapping.java0000664000000000000000000000707412216173126027355 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import java.util.List; import java.util.Map; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.XMLChoiceFieldToClassAssociation; import org.eclipse.persistence.internal.oxm.record.AbstractMarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface ChoiceObjectMapping< ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, ABSTRACT_SESSION extends CoreAbstractSession, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHALLER extends Unmarshaller, XML_FIELD extends Field, XML_MAPPING extends Mapping, XML_RECORD extends XMLRecord> extends Mapping, XMLConverterMapping { public void addChoiceElement(List srcFields, String elementTypeName, List tgtFields); public void addChoiceElement(String srcXpath, String elementTypeName, String tgtXpath); public void addChoiceElement(XML_FIELD field, String elementTypeName); public void addConverter(XML_FIELD field, CONVERTER converter); public Map getChoiceElementMappings(); public Map getChoiceElementMappingsByClass(); public List getChoiceFieldToClassAssociations(); public Map 
getClassNameToFieldMappings(); public Map getClassToFieldMappings(); public Map> getClassToSourceFieldsMappings(); public CONVERTER getConverter(); public CONVERTER getConverter(XML_FIELD field); public List getFields(); public Map getFieldToClassMappings(); public Object getFieldValue(Object object, CoreAbstractSession session, AbstractMarshalRecord marshalRecord); /** * Set the converter on the mapping. * A converter can be used to convert between the object's value and database value of the attribute. */ public void setConverter(CONVERTER converter); public void setIsWriteOnly(boolean b); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/Descriptor.java0000664000000000000000000002507212216173126025634 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Denise Smith - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import java.util.List; import java.util.Map; import java.util.Vector; import javax.xml.namespace.QName; import org.eclipse.persistence.core.descriptors.CoreInheritancePolicy; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.CoreMapping; import org.eclipse.persistence.core.queries.CoreAttributeGroup; import org.eclipse.persistence.internal.core.descriptors.CoreInstantiationPolicy; import org.eclipse.persistence.internal.core.descriptors.CoreObjectBuilder; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.helper.DatabaseTable; import org.eclipse.persistence.internal.oxm.NamespaceResolver; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.oxm.schema.XMLSchemaReference; public interface Descriptor < ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CORE_MAPPING extends CoreMapping, FIELD extends CoreField, INHERITANCE_POLICY extends CoreInheritancePolicy, INSTANTIATION_POLICY extends CoreInstantiationPolicy, NAMESPACE_RESOLVER extends NamespaceResolver, OBJECT_BUILDER extends CoreObjectBuilder, UNMARSHAL_RECORD extends UnmarshalRecord, UNMARSHALLER extends Unmarshaller> { /** * Add a database mapping to the receiver. Perform any required * initialization of both the mapping and the receiving descriptor * as a result of adding the new mapping. */ public CORE_MAPPING addMapping(CORE_MAPPING mapping); /** * ADVANCED: * Specify the primary key field of the descriptors table. * This should be called for each field that makes up the primary key of the table. * This can be used for advanced field types, such as XML nodes, or to set the field type. 
*/ public void addPrimaryKeyField(FIELD field); /** * Add a root element name for the Descriptor * This value is stored in place of a table name * @param rootElementName a root element to specify on this Descriptor */ public void addRootElement(String rootElementName); /** * Get the alias */ public String getAlias(); /** * Return the default root element name for the ClassDescriptor * This value is stored in place of a table name * This value is mandatory for all root objects * @return the default root element specified on this ClassDescriptor */ public String getDefaultRootElement(); public QName getDefaultRootElementType(); public Field getDefaultRootElementField(); /** * The inheritance policy is used to define how a descriptor takes part in inheritance. * All inheritance properties for both child and parent classes is configured in inheritance policy. * Caution must be used in using this method as it lazy initializes an inheritance policy. * Calling this on a descriptor that does not use inheritance will cause problems, #hasInheritance() must always first be called. */ public INHERITANCE_POLICY getInheritancePolicy(); /** * INTERNAL: * Return the inheritance policy. */ public INHERITANCE_POLICY getInheritancePolicyOrNull(); /** * INTERNAL: * Returns the instantiation policy. */ public INSTANTIATION_POLICY getInstantiationPolicy(); /** * Return the java class. */ public Class getJavaClass(); /** * Return the class name, used by the MW. */ public String getJavaClassName(); /** * INTERNAL: * Returns this Descriptor's location accessor, if one is defined. */ public ATTRIBUTE_ACCESSOR getLocationAccessor(); /** * Returns the mapping associated with a given attribute name. * This can be used to find a descriptors mapping in a amendment method before the descriptor has been initialized. 
*/ public CORE_MAPPING getMappingForAttributeName(String attributeName); /** * Returns mappings */ public Vector getMappings(); /** * Return the NamespaceResolver associated with this descriptor * @return the NamespaceResolver associated with this descriptor */ public NAMESPACE_RESOLVER getNamespaceResolver(); public NAMESPACE_RESOLVER getNonNullNamespaceResolver(); /** * INTERNAL: * Return the object builder */ public OBJECT_BUILDER getObjectBuilder(); /** * Return the names of all the primary keys. */ public Vector getPrimaryKeyFieldNames(); /** * INTERNAL: * Return all the primary key fields */ public List getPrimaryKeyFields(); /** * Return the SchemaReference associated with this descriptor * @return the SchemaReference associated with this descriptor * @see org.eclipse.persistence.oxm.schema */ public XMLSchemaReference getSchemaReference(); /** * Return the table names. */ public Vector getTableNames(); /** * INTERNAL: * Return all the tables. */ public Vector getTables(); /** * INTERNAL: * searches first descriptor than its ReturningPolicy for an equal field */ public FIELD getTypedField(FIELD field); /** * INTERNAL: * Return if this descriptor is involved in inheritance, (is child or parent). * Note: If this class is part of table per class inheritance strategy this * method will return false. * @see hasTablePerClassPolicy() */ public boolean hasInheritance(); /** * If true, the descriptor may be lazily initialized. This is useful if the * descriptor may not get used. */ public boolean isLazilyInitialized(); public boolean isResultAlwaysXMLRoot(); /** * INTERNAL: *

Indicates if the Object mapped by this descriptor is a sequenced data object * and should be marshalled accordingly. */ public boolean isSequencedObject(); public boolean isWrapper(); /** * Return the default root element name for the ClassDescriptor * This value is stored in place of a table name * This value is mandatory for all root objects * @param newDefaultRootElement the default root element to specify on this ClassDescriptor */ public void setDefaultRootElement(String newDefaultRootElement); /** * INTERNAL: * Sets the instantiation policy. */ public void setInstantiationPolicy(INSTANTIATION_POLICY instantiationPolicy); /** * Set the Java class that this descriptor maps. * Every descriptor maps one and only one class. */ public void setJavaClass(Class theJavaClass); /** * INTERNAL: * Return the java class name, used by the MW. */ public void setJavaClassName(String theJavaClassName); /** * INTERNAL: * Set this Descriptor's location accessor. */ public void setLocationAccessor(ATTRIBUTE_ACCESSOR attributeAccessor); /** * Set the NamespaceResolver to associate with this descriptor * @param newNamespaceResolver the NamespaceResolver to associate with this descriptor */ public void setNamespaceResolver(NAMESPACE_RESOLVER newNamespaceResolver); /** * INTERNAL: * Set the user defined properties. */ public void setProperties(Map properties); public void setResultAlwaysXMLRoot(boolean resultAlwaysXMLRoot); /** * Set the SchemaReference to associate with this descriptor * @param newSchemaReference the SchemaReference to associate with this descriptor * @see org.eclipse.persistence.oxm.schema */ public void setSchemaReference(XMLSchemaReference newSchemaReference); /** * Return if unmapped information from the XML document should be maintained for this * descriptor * By default unmapped data is not preserved. 
* @return if this descriptor should preserve unmapped data */ public boolean shouldPreserveDocument(); /** * INTERNAL: * Determines the appropriate object to return from the unmarshal * call. The method will either return the object created in the * xmlReader.parse() call or an instance of XMLRoot. An XMLRoot * instance will be returned if the DOMRecord element being * unmarshalled does not equal the descriptor's default root * element. * * @param object * @param elementNamespaceUri * @param elementLocalName * @param elementPrefix * @return object */ public Object wrapObjectInXMLRoot(Object object, String elementNamespaceUri, String elementLocalName, String elementPrefix, boolean forceWrap, boolean isNamespaceAware, UNMARSHALLER xmlUnmarshaller); /** * INTERNAL: */ public Object wrapObjectInXMLRoot(Object object, String elementNamespaceUri, String elementLocalName, String elementPrefix, String encoding, String version, boolean forceWrap, boolean isNamespaceAware, UNMARSHALLER unmarshaller); /** * INTERNAL: * Determines the appropriate object to return from the unmarshal * call. The method will either return the object created in the * xmlReader.parse() call or an instance of XMLRoot. An XMLRoot * instance will be returned if the DOMRecord element being * unmarshalled does not equal the descriptor's default root * element. 
* * @param unmarshalRecord * @return object */ public Object wrapObjectInXMLRoot(UNMARSHAL_RECORD unmarshalRecord, boolean forceWrap); public CoreAttributeGroup getAttributeGroup(String subgraph); } ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/VariableXPathCollectionMapping.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/VariableXPathCollectionMapping.0000664000000000000000000000403212216173126030667 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Denise Smith - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface VariableXPathCollectionMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHALLER extends Unmarshaller, XML_RECORD extends XMLRecord> extends VariableXPathObjectMapping, XMLContainerMapping, XMLConverterMapping{ } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/BinaryDataCollectionMapping.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/BinaryDataCollectionMapping.jav0000664000000000000000000001004512216173126030715 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 
1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.mappings.MimeTypePolicy; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; public interface BinaryDataCollectionMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, MIME_TYPE_POLICY extends MimeTypePolicy, SESSION extends CoreSession, UNMARSHALLER extends Unmarshaller, XML_RECORD extends XMLRecord> extends Mapping, XMLContainerMapping, XMLConverterMapping { public Class getAttributeElementClass(); /** * INTERNAL */ public String getMimeType(); public String getMimeType(Object object); public MIME_TYPE_POLICY getMimeTypePolicy(); public AbstractNullPolicy getNullPolicy(); public boolean isSwaRef(); public boolean 
isWriteOnly(); /** * Set the class each element in the object's * collection should be converted to, before the collection * is inserted into the object. * This is optional - if left null, the elements will be added * to the object's collection unconverted. */ public void setAttributeElementClass(Class attributeElementClass); /** * ADVANCED: * Set the field in the mapping. * This can be used for advanced field types, such as XML nodes, or to set the field type. */ public void setField(FIELD theField); public void setIsWriteOnly(boolean b); /** * Allow implementer to set the MimeTypePolicy class FixedMimeTypePolicy or AttributeMimeTypePolicy (dynamic) * @param aPolicy MimeTypePolicy */ public void setMimeTypePolicy(MIME_TYPE_POLICY aPolicy); /** * Set the AbstractNullPolicy on the mapping
* The default policy is NullPolicy.
* * @param aNullPolicy */ public void setNullPolicy(AbstractNullPolicy aNullPolicy); public void setShouldInlineBinaryData(boolean b); public void setSwaRef(boolean swaRef); /** * Set the converter on the mapping. * A converter can be used to convert between the direct collection's object value and database value. */ public void setValueConverter(CONVERTER valueConverter); public boolean shouldInlineBinaryData(); public void useCollectionClassName(String concreteContainerClassName); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/DirectCollectionMapping.java0000664000000000000000000001303412216173126030253 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; public interface DirectCollectionMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHALLER extends Unmarshaller, XML_RECORD extends XMLRecord> extends Mapping, XMLContainerMapping, XMLConverterMapping { /** * Return the class each element in the object's * collection should be converted to, before the collection * is inserted into the object. * This is optional - if left null, the elements will be added * to the object's collection unconverted. */ public Class getAttributeElementClass(); public AbstractNullPolicy getNullPolicy(); /** * Return the converter on the mapping. * A converter can be used to convert between the direct collection's object value and database value. 
*/ public CONVERTER getValueConverter(); public boolean isCDATA(); public boolean isCollapsingStringValues(); public boolean isNormalizingStringValues(); /** * Set the class each element in the object's * collection should be converted to, before the collection * is inserted into the object. * This is optional - if left null, the elements will be added * to the object's collection unconverted. */ public void setAttributeElementClass(Class attributeElementClass); /** * Indicates that this mapping should collapse all string values before adding them * to the collection on unmarshal. Collapse removes leading and trailing whitespaces, and replaces * any sequence of whitespace characters with a single space. * @param normalize */ public void setCollapsingStringValues(boolean collapse); /** * Set the field that holds the nested collection. */ public void setField(FIELD field); /** * PUBLIC: * Set the class each element in the database row's * collection should be converted to, before the collection * is inserted into the database. * This is optional - if left null, the elements will be added * to the database row's collection unconverted. */ public void setFieldElementClass(Class fieldElementClass); public void setIsCDATA(boolean CDATA); public void setIsWriteOnly(boolean b); /** * Indicates that this mapping should normalize all string values before adding them * to the collection on unmarshal. Normalize replaces any CR, LF or Tab characters with a * single space character. * @param normalize */ public void setNormalizingStringValues(boolean normalize); public void setNullPolicy(AbstractNullPolicy nullPolicyFromProperty); /** * Sets whether the mapping uses a single node. * @param True if the items in the collection are in a single node or false if each of the items in the collection is in its own node */ public void setUsesSingleNode(boolean usesSingleNode); /** * Set the converter on the mapping. 
* A converter can be used to convert between the direct collection's object value and database value. */ public void setValueConverter(CONVERTER valueConverter); /** * Set the Mapping field name attribute to the given XPath String * @param xpathString String */ public void setXPath(String xpathString); public void useCollectionClassName(String concreteContainerClassName); /** * Checks whether the mapping uses a single node. * * @returns True if the items in the collection are in a single node or false if each of the items in the collection is in its own node. */ public boolean usesSingleNode(); public void setNullValue(Object nullValue); public Object getNullValue(); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/BinaryDataMapping.java0000664000000000000000000001074312216173126027047 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.mappings.MimeTypePolicy; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; public interface BinaryDataMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, MIME_TYPE_POLICY extends MimeTypePolicy, SESSION extends CoreSession, UNMARSHALLER extends Unmarshaller, XML_RECORD extends XMLRecord> extends Mapping, XMLConverterMapping { /** * INTERNAL */ public String getMimeType(); public String getMimeType(Object object); public AbstractNullPolicy getNullPolicy(); public Object getObjectValue(Object object, SESSION session); /** * Get the XPath String * @return String the XPath String associated with this Mapping */ public String getXPath(); public boolean isSwaRef(); /** * Some databases do not properly support all of the base data types. 
For these databases, * the base data type must be explicitly specified in the mapping to tell EclipseLink to force * the instance variable value to that data type */ public void setAttributeClassification(Class attributeClassification); /** * INTERNAL: * Set the name of the class for MW usage. */ public void setAttributeClassificationName(String attributeClassificationName); /** * Set the converter on the mapping. * A converter can be used to convert between the object's value and database value of the attribute. */ public void setConverter(CONVERTER converter); /** * ADVANCED: * Set the field in the mapping. * This can be used for advanced field types, such as XML nodes, or to set the field type. */ public void setField(FIELD theField); public void setIsWriteOnly(boolean b); /** * Force mapping to set default FixedMimeTypePolicy using the MimeType string as argument * @param mimeTypeString */ public void setMimeType(String mimeTypeString); /** * Allow implementer to set the MimeTypePolicy class FixedMimeTypePolicy or AttributeMimeTypePolicy (dynamic) * @param aPolicy MimeTypePolicy */ public void setMimeTypePolicy(MIME_TYPE_POLICY aPolicy); /** * Set the AbstractNullPolicy on the mapping
* The default policy is NullPolicy.
* * @param aNullPolicy */ public void setNullPolicy(AbstractNullPolicy aNullPolicy); public void setShouldInlineBinaryData(boolean b); public void setSwaRef(boolean swaRef); /** * Set the Mapping field name attribute to the given XPath String * @param xpathString String */ public void setXPath(String xpathString); public boolean shouldInlineBinaryData(); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/CompositeObjectMapping.java0000664000000000000000000000642312216173126030122 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; public interface CompositeObjectMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHAL_KEEP_AS_ELEMENT_POLICY extends UnmarshalKeepAsElementPolicy, UNMARSHALLER extends Unmarshaller, XML_RECORD extends XMLRecord> extends Mapping, XMLConverterMapping { public InverseReferenceMapping getInverseReferenceMapping(); public UNMARSHAL_KEEP_AS_ELEMENT_POLICY getKeepAsElementPolicy(); public AbstractNullPolicy getNullPolicy(); /** * PUBLIC: * Returns the reference class */ public Class getReferenceClass(); public String getReferenceClassName(); public boolean hasConverter(); public void setConverter(CONVERTER converter); public void setIsWriteOnly(boolean b); public void setKeepAsElementPolicy(UNMARSHAL_KEEP_AS_ELEMENT_POLICY keepAsElementPolicy); /** * Set the AbstractNullPolicy on the mapping
* The default policy is NullPolicy.
* * @param aNullPolicy */ public void setNullPolicy(AbstractNullPolicy aNullPolicy); /** * This is a reference class whose instances this mapping will store in the domain objects. */ public void setReferenceClass(Class aClass); public void setReferenceClassName(String aClassName); public void setXPath(String string); /** * ADVANCED: * Set the field in the mapping. */ public void setField(FIELD theField); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/FragmentMapping.java0000664000000000000000000000334012216173126026567 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface FragmentMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, SESSION extends CoreSession, XML_RECORD extends XMLRecord> extends Mapping { public Object getObjectValue(Object object, SESSION session); public String getXPath(); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/AnyCollectionMapping.java0000664000000000000000000000610612216173126027572 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface AnyCollectionMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHAL_KEEP_AS_ELEMENT_POLICY extends UnmarshalKeepAsElementPolicy, UNMARSHALLER extends Unmarshaller, XML_RECORD extends XMLRecord> extends Mapping, XMLContainerMapping, XMLConverterMapping { public UNMARSHAL_KEEP_AS_ELEMENT_POLICY getKeepAsElementPolicy(); public boolean isMixedContent(); public boolean isWhitespacePreservedForMixedContent(); public void setConverter(CONVERTER conv); public void setKeepAsElementPolicy(UNMARSHAL_KEEP_AS_ELEMENT_POLICY unmarshalKeepAsElementPolicy); public void setField(FIELD field); public void setIsWriteOnly(boolean b); public void setMixedContent(boolean mixed); public void setPreserveWhitespaceForMixedContent(boolean preserveWhitespace); public void setUseXMLRoot(boolean useXMLRoot); /** * Configure the mapping to use an instance of the specified container class * 
to hold the target objects. *

jdk1.2.x: The container class must implement (directly or indirectly) the Collection interface. *

jdk1.1.x: The container class must be a subclass of Vector. */ public void useCollectionClass(Class concreteContainerClass); public boolean usesXMLRoot(); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/Field.java0000664000000000000000000001523412216173126024540 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Denise Smith - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import javax.xml.namespace.QName; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.NamespaceResolver; import org.eclipse.persistence.internal.oxm.XMLConversionManager; import org.eclipse.persistence.internal.oxm.XPathFragment; import org.eclipse.persistence.internal.oxm.record.AbstractUnmarshalRecord; public interface Field extends CoreField{ /** * INTERNAL: * Called from DOMRecord and XMLReader. MappingNodeValues call XMLReader which calls this method so that other XMLReader subclasses can override. 
*/ public Object convertValueBasedOnSchemaType(Object value, XMLConversionManager xmlConversionManager, AbstractUnmarshalRecord record); /** * Return the class for a given qualified XML Schema type * @param qname The qualified name of the XML Schema type to use as a key in the lookup * @return The class corresponding to the specified schema type, if no corresponding match found returns null */ public Class getJavaClass(QName qname); /** * INTERNAL: * Return the last XPathFragment. */ public XPathFragment getLastXPathFragment(); public QName getLeafElementType(); /** * Return the unqualified name of the field. */ public String getName(); /** * Get the NamespaceResolver associated with this XMLField * @return The NamespaceResolver associated with this XMLField */ public NAMESPACE_RESOLVER getNamespaceResolver(); /** * Return the schema type associated with this field * @return the schema type */ public QName getSchemaType(); /** * INTERNAL: */ public QName getSchemaTypeForValue(Object value, CoreAbstractSession session); public Class getType(); /** * Return the qualified XML Schema type for a given class * @param javaClass The class to use as a key in the lookup * @return QName The qualified XML Schema type, if no corresponding match found returns null */ public QName getXMLType(Class javaClass); /** * Returns the xpath statement associated with this XMLField * @return The xpath statement associated with this XMLField */ public String getXPath(); /** * INTERNAL: * Maintain a direct pointer to the first XPathFragment. For example given * the following XPath first/middle/@last, first is the first XPathFragment. */ public XPathFragment getXPathFragment(); /** * INTERNAL: * @return */ public boolean hasLastXPathFragment(); public void initialize(); /** * INTERNAL: * @return */ public boolean isCDATA(); /** * Indicates if this XMLField represents a "required" XML element or attribute * ([minOccurs="1"] for elements, [use="required"] for attributes). 
NOTE: This * API is used only for Schema Generation. * * @see org.eclipse.persistence.internal.oxm.schema.SchemaModelGenerator */ public boolean isRequired(); /** * INTERNAL */ public boolean isSchemaType(QName schemaType); /** * INTERNAL: * Indicates if the xpath for this field is "." * * @return true if the xpath is ".", false otherwise */ public boolean isSelfField(); /** * Returns if the field is a typed text field * True when we should base conversions on the "type" attribute on elements * @return True when we should base conversions on the "type" attribute on elements, otherwise false */ public boolean isTypedTextField(); /** * INTERNAL: * Returns false since this is a union field * The subclass XMLUnionField returns true for this */ public boolean isUnionField(); /** * INTERNAL: *@param CDATA */ public void setIsCDATA(boolean CDATA); /** * Set if the field is a typed text field * True when we should base conversions on the "type" attribute on elements * @param value The boolean value specifiy if this is a typed text field */ public void setIsTypedTextField(boolean value); /** * Set the NamespaceResolver associated with this XMLField * @param newNamespaceResolver The namespaceResolver to be associated with this XMLField */ public void setNamespaceResolver(NAMESPACE_RESOLVER newNamespaceResolver); /** * Set whether this XMLField represents a "required" XML element or attribute * ([minOccurs="1"] for elements, [use="required"] for attributes). NOTE: This * API is used only for Schema Generation. * * @see org.eclipse.persistence.internal.oxm.schema.SchemaModelGenerator */ public void setRequired(boolean isRequired); /** * Sets the schematype associated with this XMLField * This is an optional setting; when set the schema type will be used to format the XML appropriately * @param value QName to be added to the list of schema types */ public void setSchemaType(QName value); /** * Sets whether the mapping uses a single node. 
* @param usesSingleNode True if the items in the collection are in a single node or false if each of the items in the collection is in its own node */ public void setUsesSingleNode(boolean usesSingleNode); /** * Set the xpath statment for this XMLField. * @param xPath The xpath statement to be associated with this XMLField */ public void setXPath(String xPath); /** * Checks whether the mapping uses a single node. * * @return True if the items in the collection are in a single node or false if each of the items in the collection is in its own node. */ public boolean usesSingleNode(); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/ObjectReferenceMapping.java0000664000000000000000000000523212216173126030053 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import java.util.List; import java.util.Map; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface ObjectReferenceMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, UNMARSHAL_RECORD extends UnmarshalRecord, XML_FIELD extends Field, XML_RECORD extends XMLRecord > extends Mapping { /** * Add a source-target xpath pair to the map. * * @param srcXPath * @param tgtXPath */ public void addSourceToTargetKeyFieldAssociation(String srcXPath, String tgtXPath); public Object buildFieldValue(Object targetObject, XML_FIELD xmlField, ABSTRACT_SESSION session); public void buildReference(UNMARSHAL_RECORD unmarshalRecord, XML_FIELD xmlField, Object realValue, ABSTRACT_SESSION session); public List getFields(); public InverseReferenceMapping getInverseReferenceMapping(); public Class getReferenceClass(); /** * INTERNAL: */ public String getReferenceClassName(); /** * Return a list of source-target xmlfield pairs. 
*/ public Map getSourceToTargetKeyFieldAssociations(); public boolean isWriteOnly(); public void setIsWriteOnly(boolean b); public void setReferenceClassName(String aClassName); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/UnionField.java0000664000000000000000000000377512216173126025560 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import java.util.List; import javax.xml.namespace.QName; import org.eclipse.persistence.internal.oxm.NamespaceResolver; /** *

Subclass of Field for fields that are mapped to unions. Maintains a list * of schema types instead of just one single schema type. Schema types can be * added using the addSchemaType API. * * Constants has a list of useful constants including a list of QNames for * built-in schema types that can be used when adding schema types. * *

When reading and writing an element that is mapped with an UnionField, a * conversion to each of the schema types on the field (in the order they are * specified ) is tried until a conversion is successful. The Java type to * convert to is based on the list of schema type to Java conversion pairs * specified on the field. * @see Field * @see Constants */ public interface UnionField< NAMESPACE_RESOLVER extends NamespaceResolver> extends Field { /** * Adds the new type value to the list of types * @param value QName to be added to the list of schema types */ public void addSchemaType(QName value); /** * Return the list of schema types * @return the list of types */ public List getSchemaTypes(); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/CompositeCollectionMapping.java0000664000000000000000000000510212216173126031000 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface CompositeCollectionMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHAL_KEEP_AS_ELEMENT_POLICY extends UnmarshalKeepAsElementPolicy, UNMARSHALLER extends Unmarshaller, XML_RECORD extends XMLRecord> extends CompositeObjectMapping, XMLContainerMapping { /** * Configure the mapping to use an instance of the specified container class * to hold the target objects. *

jdk1.2.x: The container class must implement (directly or indirectly) the Collection interface. *

jdk1.1.x: The container class must be a subclass of Vector. */ public void useCollectionClass(Class concreteContainerClass); public void useCollectionClassName(String concreteContainerClassName); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/InverseReferenceMapping.java0000664000000000000000000000452312216173126030262 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.CoreMapping; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.internal.queries.ContainerPolicy; public interface InverseReferenceMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MAPPING extends CoreMapping, XML_RECORD extends XMLRecord> extends Mapping { /** * This method is invoked reflectively on 
the reference object to return the value of the * attribute in the object. This method returns the name of the getMethodName or null if not using method access. */ public String getGetMethodName(); public MAPPING getInlineMapping(); public String getReferenceClassName(); public void setContainerPolicy(ContainerPolicy containerPolicy); public void setInlineMapping(MAPPING inlineMapping); public void setMappedBy(String mappedBy); public void setReferenceClassName(String aClassName); public void useCollectionClass(Class concreteClass); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/FragmentCollectionMapping.java0000664000000000000000000000323512216173126030606 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface FragmentCollectionMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, SESSION extends CoreSession, XML_RECORD extends XMLRecord> extends Mapping, XMLContainerMapping { }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/XMLContainerMapping.java0000664000000000000000000000444012216173126027331 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.3.3 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; /** * This interface represents behaviour that is common to all XML mappings that * involve containers: Collection, Map, Arrays, etc. */ public interface XMLContainerMapping { /** * This is the default setting for the defaultEmptyContainer property. This * can be overridden at the mapping level. */ static final boolean EMPTY_CONTAINER_DEFAULT = true; /** * Return true if a pre-initialized container already set on the * field/property should be used. If false a new container will always be * created. */ boolean getReuseContainer(); /** * Get the Wrapper NullPolicy from the Mapping. */ public AbstractNullPolicy getWrapperNullPolicy(); /** * Return true if an empty container should be set on the object if there * is no presence of the collection in the XML document. */ boolean isDefaultEmptyContainer(); /** * Indicate whether by default an empty container should be set on the * field/property if the collection is not present in the XML document. */ void setDefaultEmptyContainer(boolean defaultEmptyContainer); /** * Specify if a pre-existing container on the field/property should be used. */ void setReuseContainer(boolean reuseContainer); /** * Set the Wrapper NullPolicy on the Mapping. */ void setWrapperNullPolicy(AbstractNullPolicy policy); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/ChoiceCollectionMapping.java0000664000000000000000000001073212216173126030235 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. 
* This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import java.util.List; import java.util.Map; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.XMLChoiceFieldToClassAssociation; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface ChoiceCollectionMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHALLER extends Unmarshaller, XML_FIELD extends Field, XML_MAPPING extends Mapping, XML_RECORD extends XMLRecord > extends Mapping, XMLContainerMapping, XMLConverterMapping { public void addChoiceElement(List srcFields, String elementTypeName, List tgtFields); public void 
addChoiceElement(String xpath, String elementTypeName); public void addChoiceElement(String srcXpath, String elementTypeName, String tgtXpath); public void addChoiceElement(XML_FIELD field, String elementTypeName); public void addConverter(XML_FIELD field, CONVERTER converter); public Map getChoiceElementMappings(); public Map getChoiceElementMappingsByClass(); public List getChoiceFieldToClassAssociations(); public Map getClassNameToFieldMappings(); public Map getClassToFieldMappings(); public Map> getClassToSourceFieldsMappings(); public CONVERTER getConverter(); public CONVERTER getConverter(XML_FIELD field); public Map getFieldToClassMappings(); public XML_MAPPING getMixedContentMapping(); public boolean isMixedContent(); /** * Set the converter on the mapping. * A converter can be used to convert between the object's value and database value of the attribute. */ public void setConverter(CONVERTER converter); public void setIsWriteOnly(boolean b); /** * Allows the user to indicate that this mapping should also allow for mixed content in addition to * any of the elements in the choice. * @since EclipseLink 2.3.1 */ public void setMixedContent(boolean mixed); /** * Allows the user to indicate that this mapping should also allow for mixed content in addition to * any of the elements in the choice. The grouping element parameter is used in the case that there is * a common grouping element to all the other elements in this choice. If so, that grouping element can * be specified here to allow the mixed content to be written/detected inside the wrapper element. 
* @since EclipseLink 2.3.1 */ public void setMixedContent(String groupingElement); public void useCollectionClassName(String concreteContainerClassName); public boolean isAny(); public AnyCollectionMapping getAnyMapping(); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/VariableXPathObjectMapping.java0000664000000000000000000000551312216173126030651 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Denise Smith - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.converters.CoreConverter; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.NamespaceResolver; import org.eclipse.persistence.internal.oxm.Unmarshaller; import org.eclipse.persistence.internal.oxm.XPathFragment; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.mappings.AttributeAccessor; public interface 
VariableXPathObjectMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, CONVERTER extends CoreConverter, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHALLER extends Unmarshaller, XML_RECORD extends XMLRecord> extends Mapping, XMLConverterMapping { public AttributeAccessor getVariableAttributeAccessor(); public XPathFragment getXPathFragmentForValue(Object obj, NamespaceResolver nr, boolean isNamespaceAware,char sep); public boolean isAttribute(); public void setAttribute(boolean isAttribute); public void setConverter(CONVERTER converter); public void setIsWriteOnly(boolean isWriteOnly); public void setReferenceClassName(String aClassName); public void setVariableAttributeAccessor(AttributeAccessor variableAttributeAccessor); public void setVariableAttributeName(String variableAttributeName); public void setVariableGetMethodName(String variableGetMethodName); public void setVariableSetMethodName(String variableSetMethodName); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/AnyAttributeMapping.java0000664000000000000000000000417012216173126027441 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface AnyAttributeMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, XML_RECORD extends XMLRecord> extends Mapping, XMLContainerMapping { public boolean isNamespaceDeclarationIncluded(); public boolean isSchemaInstanceIncluded(); public void setField(FIELD field); public void setIsWriteOnly(boolean b); public void setNamespaceDeclarationIncluded(boolean isNamespaceDeclarationIncluded); public void setSchemaInstanceIncluded(boolean isSchemaInstanceIncluded); /** * INTERNAL: * Indicates the name of the Map class to be used. * * @param concreteMapClassName */ public void useMapClassName(String concreteMapClassName); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/XMLConverterMapping.java0000664000000000000000000000241512216173126027356 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.oxm.Marshaller; import org.eclipse.persistence.internal.oxm.Unmarshaller; public interface XMLConverterMapping< MARSHALLER extends Marshaller, SESSION extends CoreSession, UNMARSHALLER extends Unmarshaller> { Object convertDataValueToObjectValue(Object fieldValue, SESSION session, UNMARSHALLER unmarshaller); Object convertObjectValueToDataValue(Object value, SESSION session, MARSHALLER marshaller); } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/CollectionReferenceMapping.java0000664000000000000000000000453112216173126030741 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.mappings; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLRecord; public interface CollectionReferenceMapping< ABSTRACT_SESSION extends CoreAbstractSession, ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor, CONTAINER_POLICY extends CoreContainerPolicy, DESCRIPTOR extends CoreDescriptor, FIELD extends CoreField, UNMARSHAL_RECORD extends UnmarshalRecord, XML_FIELD extends Field, XML_RECORD extends XMLRecord > extends ObjectReferenceMapping, XMLContainerMapping { public void buildReference(UNMARSHAL_RECORD unmarshalRecord, XML_FIELD xmlField, Object value, ABSTRACT_SESSION session, Object container); public void setUsesSingleNode(boolean useSingleNode); public void useCollectionClassName(String concreteContainerClassName); /** * For the purpose of XMLCollectionReferenceMappings, 'usesSingleNode' * refers to the fact that the source key xpath fields should all be written as * space-separated lists. Would be used for mapping to an IDREFS field in a schema */ public boolean usesSingleNode(); }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/AnyObjectMapping.java0000664000000000000000000000503712216173126026707 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. 
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Blaise Doughan - 2.5 - initial implementation
 ******************************************************************************/
package org.eclipse.persistence.internal.oxm.mappings;

import org.eclipse.persistence.core.descriptors.CoreDescriptor;
import org.eclipse.persistence.core.mappings.CoreAttributeAccessor;
import org.eclipse.persistence.core.mappings.converters.CoreConverter;
import org.eclipse.persistence.core.sessions.CoreSession;
import org.eclipse.persistence.internal.core.helper.CoreField;
import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy;
import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession;
import org.eclipse.persistence.internal.oxm.Marshaller;
import org.eclipse.persistence.internal.oxm.Unmarshaller;
import org.eclipse.persistence.internal.oxm.record.XMLRecord;

/**
 * Internal abstraction of a mapping that can hold any single object
 * (xs:any-style content).  The type parameters bind the core-layer
 * artifacts used by a concrete implementation.
 */
public interface AnyObjectMapping<
    ABSTRACT_SESSION extends CoreAbstractSession,
    ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor,
    CONTAINER_POLICY extends CoreContainerPolicy,
    CONVERTER extends CoreConverter,
    DESCRIPTOR extends CoreDescriptor,
    FIELD extends CoreField,
    MARSHALLER extends Marshaller,
    SESSION extends CoreSession,
    UNMARSHAL_KEEP_AS_ELEMENT_POLICY extends UnmarshalKeepAsElementPolicy,
    UNMARSHALLER extends Unmarshaller,
    XML_RECORD extends XMLRecord> extends Mapping, XMLConverterMapping {

    /** Policy controlling whether unmapped elements are kept as DOM elements. */
    public UNMARSHAL_KEEP_AS_ELEMENT_POLICY getKeepAsElementPolicy();

    public boolean isMixedContent();

    /** Converter applied between object value and XML value, if any. */
    public void setConverter(CONVERTER converter);

    public void setKeepAsElementPolicy(UNMARSHAL_KEEP_AS_ELEMENT_POLICY unmarshalKeepAsElementPolicy);

    public void setField(FIELD field);

    public void setIsWriteOnly(boolean b);

    public void setMixedContent(boolean mixed);

    public void setUseXMLRoot(boolean useXMLRoot);

    public boolean usesXMLRoot();

}eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/TransformationMapping.java0000664000000000000000000000642312216173126030037 0ustar /*******************************************************************************
 * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Blaise Doughan - 2.5 - initial implementation
 ******************************************************************************/
package org.eclipse.persistence.internal.oxm.mappings;

import java.util.List;

import org.eclipse.persistence.core.descriptors.CoreDescriptor;
import org.eclipse.persistence.core.mappings.CoreAttributeAccessor;
import org.eclipse.persistence.internal.core.helper.CoreField;
import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy;
import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession;
import org.eclipse.persistence.internal.oxm.record.XMLRecord;

/**
 * Internal abstraction of a transformation mapping: field values are computed
 * by user-supplied transformer methods/classes rather than read directly.
 */
public interface TransformationMapping<
    ABSTRACT_SESSION extends CoreAbstractSession,
    ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor,
    CONTAINER_POLICY extends CoreContainerPolicy,
    DESCRIPTOR extends CoreDescriptor,
    FIELD extends CoreField,
    XML_RECORD extends XMLRecord> extends Mapping {

    /**
     * Add the name of a field and the name of the method
     * that returns the value to be placed in said field
     * when the object is written to the database.
     * The method may take zero arguments, or it may
     * take a single argument of type
     * org.eclipse.persistence.sessions.Session.
     */
    public void addFieldTransformation(String fieldName, String methodName);

    /**
     * INTERNAL:
     * Add the name of a field and the name of a class which implements
     * the FieldTransformer interface. When the object is written, the transform
     * method will be called on the FieldTransformer to acquire the value to put
     * in the field.
     */
    public void addFieldTransformerClassName(String fieldName, String className);

    /**
     * INTERNAL:
     * @return a vector which stores fields and their respective transformers.
     */
    public List getFieldToTransformers();

    /**
     * To set the attribute method name. The method is invoked internally by TopLink
     * to retrieve the value to store in the domain object. The method receives Record
     * as its parameter and optionally Session, and should extract the value from the
     * record to set into the object, but should not set the value on the object, only return it.
     */
    public void setAttributeTransformation(String methodName);

    /**
     * INTERNAL:
     * Set the Attribute Transformer Class Name
     * @param className
     */
    public void setAttributeTransformerClassName(String className);

    /**
     * Used to specify whether the value of this mapping may be null.
     * This is used when generating DDL.
     */
    public void setIsOptional(boolean isOptional);

}eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/mappings/DirectMapping.java0000664000000000000000000001332512216173126026242 0ustar /*******************************************************************************
 * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Blaise Doughan - 2.5 - initial implementation
 ******************************************************************************/
package org.eclipse.persistence.internal.oxm.mappings;

import org.eclipse.persistence.core.descriptors.CoreDescriptor;
import org.eclipse.persistence.core.mappings.CoreAttributeAccessor;
import org.eclipse.persistence.core.mappings.converters.CoreConverter;
import org.eclipse.persistence.core.sessions.CoreSession;
import org.eclipse.persistence.internal.core.helper.CoreField;
import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy;
import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession;
import org.eclipse.persistence.internal.oxm.Marshaller;
import org.eclipse.persistence.internal.oxm.Unmarshaller;
import org.eclipse.persistence.internal.oxm.record.AbstractMarshalRecord;
import org.eclipse.persistence.internal.oxm.record.AbstractUnmarshalRecord;
import org.eclipse.persistence.internal.oxm.record.XMLRecord;
import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy;

/**
 * Internal abstraction of a direct mapping: a single XPath-addressed value
 * mapped to a single object attribute, with optional conversion, null policy
 * and whitespace handling.
 */
public interface DirectMapping<
    ABSTRACT_SESSION extends CoreAbstractSession,
    ATTRIBUTE_ACCESSOR extends CoreAttributeAccessor,
    CONTAINER_POLICY extends CoreContainerPolicy,
    CONVERTER extends CoreConverter,
    DESCRIPTOR extends CoreDescriptor,
    FIELD extends CoreField,
    MARSHALLER extends Marshaller,
    SESSION extends CoreSession,
    UNMARSHALLER extends Unmarshaller,
    XML_RECORD extends XMLRecord> extends Mapping, XMLConverterMapping {

    public Object getAttributeValue(Object object, ABSTRACT_SESSION session, AbstractUnmarshalRecord record);

    /**
     * Return the converter on the mapping.
     * A converter can be used to convert between the object's value and database value of the attribute.
     */
    public CONVERTER getConverter();

    public Object getFieldValue(Object object, CoreAbstractSession session, AbstractMarshalRecord record);

    public AbstractNullPolicy getNullPolicy();

    public Object getNullValue();

    public Object getObjectValue(Object object, SESSION session);

    /**
     * Get the XPath String
     * @return String the XPath String associated with this Mapping
     */
    public String getXPath();

    public boolean hasConverter();

    public boolean isCDATA();

    /**
     * Some databases do not properly support all of the base data types. For these databases,
     * the base data type must be explicitly specified in the mapping to tell EclipseLink to force
     * the instance variable value to that data type
     */
    public void setAttributeClassification(Class attributeClassification);

    /**
     * INTERNAL:
     * Set the name of the class for MW usage.
     */
    public void setAttributeClassificationName(String attributeClassificationName);

    /**
     * Indicates that this mapping should collapse all string values before setting them
     * in the object on unmarshal. Collapse removes leading and trailing whitespaces, and replaces
     * any sequence of whitespace characters with a single space.
     * @param collapse
     */
    public void setCollapsingStringValues(boolean collapse);

    /**
     * Set the converter on the mapping.
     * A converter can be used to convert between the object's value and database value of the attribute.
     */
    public void setConverter(CONVERTER converter);

    /**
     * ADVANCED:
     * Set the field in the mapping.
     * This can be used for advanced field types, such as XML nodes, or to set the field type.
     */
    public void setField(FIELD theField);

    public void setIsCDATA(boolean CDATA);

    public void setIsWriteOnly(boolean b);

    /**
     * Indicates that this mapping should normalize all string values before setting them
     * in the object on unmarshal. Normalize replaces any CR, LF or Tab characters with a
     * single space character.
     * @param normalize
     */
    public void setNormalizingStringValues(boolean normalize);

    /**
     * Set the AbstractNullPolicy on the mapping.
     * The default policy is NullPolicy.
     * @param aNullPolicy
     */
    public void setNullPolicy(AbstractNullPolicy aNullPolicy);

    /**
     * Allow for the value used for null to be specified.
     * This can be used to convert database null values to application specific values, when null values
     * are not allowed by the application (such as in primitives).
     * Note: the default value for NULL is used on reads, writes, and query SQL generation
     */
    public void setNullValue(Object nullValue);

    /**
     * Set whether this mapping's value should be marshalled, in the case that
     * it is equal to the default null value.
     */
    public void setNullValueMarshalled(boolean value);

    /**
     * Set the Mapping field name attribute to the given XPath String
     * @param xpathString String
     */
    public void setXPath(String xpathString);

    public Object valueFromObject(Object object, FIELD field, ABSTRACT_SESSION abstractSession);

}eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/NamespaceResolver.java0000664000000000000000000002606112216173126025315 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.Vector; import org.eclipse.persistence.internal.oxm.Namespace; import org.eclipse.persistence.platform.xml.XMLNamespaceResolver; import org.eclipse.persistence.platform.xml.XMLPlatformFactory; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; /** *

 * <p>It is common for an XML document to include one or more namespaces.
 * TopLink supports this using its NamespaceResolver. The namespace resolver maintains
 * pairs of namespace prefixes and URIs. TopLink uses these prefixes in conjunction with the
 * XPath statements you specify on EIS mappings to XML records and XML mappings.</p>
 *
 * <p>Although TopLink captures namespace prefixes in the XPath statements for mappings (if applicable),
 * the input document is not required to use the same namespace prefixes. TopLink will use the namespace
 * prefixes specified in the mapping when creating new documents.</p>
 *
 * <p><em>Code Sample</em><br>
 * <code>
 * NamespaceResolver resolver = new NamespaceResolver();<br>
 * resolver.put("ns", "urn:namespace-example");<br>
 * <br>
 * XMLDescriptor descriptor = new XMLDescriptor();<br>
 * descriptor.setJavaClass(Customer.class);<br>
 * descriptor.setDefaultRootElement("ns:customer");<br>
 * descriptor.setNamespaceResolver(resolver);<br>
 * <br>
 * XMLDirectMapping mapping = new XMLDirectMapping();<br>
 * mapping.setAttributeName("id");<br>
 * mapping.setXPath("ns:id/text()");<br>
 * descriptor.addMapping(mapping);
 * </code></p>
* * @see org.eclipse.persistence.oxm.XMLDescriptor * @see org.eclipse.persistence.eis.EISDescriptor * */ public class NamespaceResolver implements XMLNamespaceResolver { private static final String BASE_PREFIX = "ns"; private String defaultNamespaceURI; private Map prefixesToNamespaces; int prefixCounter; private Node dom; /** * Default constructor, creates a new NamespaceResolver. */ public NamespaceResolver() { super(); } /** * Copy Constructor * @since EclipseLink 2.5.0 */ public NamespaceResolver(NamespaceResolver namespaceResolver) { this.defaultNamespaceURI = namespaceResolver.defaultNamespaceURI; Map namespaceResolverPrefixesToNamespaces = namespaceResolver.prefixesToNamespaces; if(namespaceResolverPrefixesToNamespaces != null) { this.prefixesToNamespaces = new HashMap(namespaceResolverPrefixesToNamespaces.size()); this.prefixesToNamespaces.putAll(namespaceResolver.prefixesToNamespaces); } this.prefixCounter = namespaceResolver.prefixCounter; this.dom = namespaceResolver.dom; } public Map getPrefixesToNamespaces() { if(null == prefixesToNamespaces) { prefixesToNamespaces = new HashMap(); } return prefixesToNamespaces; } public boolean hasPrefixesToNamespaces() { return null != prefixesToNamespaces; } public void setDOM(Node dom) { this.dom = dom; } /** * Returns the namespace URI associated with a specified namespace prefix * * @param prefix The prefix to lookup a namespace URI for * @return The namespace URI associated with the specified prefix */ public String resolveNamespacePrefix(String prefix) { if (null == prefix || prefix.length() == 0) { return defaultNamespaceURI; } String uri = null; if(null != prefixesToNamespaces) { uri = prefixesToNamespaces.get(prefix); } if(null != uri) { return uri; } else if (javax.xml.XMLConstants.XML_NS_PREFIX.equals(prefix)) { return javax.xml.XMLConstants.XML_NS_URI; } else if (javax.xml.XMLConstants.XMLNS_ATTRIBUTE.equals(prefix)) { return javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI; } if(dom != null) { return 
XMLPlatformFactory.getInstance().getXMLPlatform().resolveNamespacePrefix(dom, prefix); } return null; } /** * Return the namespace prefix associated with a namespace URI. * @param uri A namespace URI. * @return The prefix associated with the namespace URI. */ public String resolveNamespaceURI(String uri) { if(null == uri) { return null; } if(null != prefixesToNamespaces) { for(Entry entry : prefixesToNamespaces.entrySet()) { if(uri.equals(entry.getValue())) { return entry.getKey(); } } } if (uri.equalsIgnoreCase(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI)) { return javax.xml.XMLConstants.XMLNS_ATTRIBUTE; } else if (uri.equalsIgnoreCase(javax.xml.XMLConstants.XML_NS_URI)) { return javax.xml.XMLConstants.XML_NS_PREFIX; } return resolveNamespaceURI(dom, uri); } private String resolveNamespaceURI(Node node, String uri) { if(null == node) { return null; } // If the element is of the same namespace URI, then return the prefix. if(uri.equals(node.getNamespaceURI())) { return node.getPrefix(); } // Check the namespace URI declarations. 
NamedNodeMap namedNodeMap = node.getAttributes(); if(null != namedNodeMap) { int namedNodeMapSize = namedNodeMap.getLength(); for(int x=0; x entry: prefixesToNamespaces.entrySet()) { Namespace namespace = new Namespace(entry.getKey(), entry.getValue()); names.addElement(namespace); } return names; } /** * INTERNAL: * Set the namespaces on the namespace resolver based on the specified Vector of Namespace objects * Used for deployment XML * @param names A Vector of namespace URIs */ public void setNamespaces(Vector names) { prefixesToNamespaces = new HashMap(names.size()); for(Namespace namespace : (Vector) names) { if ((namespace.getPrefix() != null) && (namespace.getNamespaceURI() != null)) { prefixesToNamespaces.put(namespace.getPrefix(), namespace.getNamespaceURI()); } } } public String generatePrefix() { return generatePrefix(getNextPrefix()); } private String getNextPrefix() { return BASE_PREFIX + prefixCounter++; } public String generatePrefix(String defaultPrefix) { String lookup = resolveNamespacePrefix(defaultPrefix); while (lookup != null) { defaultPrefix = getNextPrefix(); lookup = resolveNamespacePrefix(defaultPrefix); } return defaultPrefix; } public void removeNamespace(String prefix) { if(null != prefixesToNamespaces) { prefixesToNamespaces.remove(prefix); } } public void setDefaultNamespaceURI(String namespaceUri) { if(namespaceUri == null){ defaultNamespaceURI = namespaceUri; }else{ defaultNamespaceURI = namespaceUri.intern(); } } public String getDefaultNamespaceURI() { if(null != defaultNamespaceURI) { return defaultNamespaceURI; } else if(dom != null) { return XMLPlatformFactory.getInstance().getXMLPlatform().resolveNamespacePrefix(dom, null); } return null; } private static class IteratorEnumeration implements Enumeration { private Iterator iterator; public IteratorEnumeration(Iterator iterator) { this.iterator = iterator; } public boolean hasMoreElements() { if(null == iterator) { return false; } return iterator.hasNext(); } public Object 
nextElement() { return iterator.next(); } } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/conversion/0000775000000000000000000000000012216174372023220 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/conversion/Base64.java0000664000000000000000000001553412216173126025113 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm.conversion; import java.util.BitSet; /** * INTERNAL: *

/**
 * INTERNAL:
 * <p><b>Purpose</b>: Convert to/from XML base64Binary as specified in
 * RFC-2045 (Section 6.8).</p>
 */
public class Base64 {
    // rfc-2046 & rfc-2045: (bcharsnospace & token), used for multipart codings.
    // Retained for compatibility; no method in this class currently reads it.
    private static BitSet BoundChar;
    // EBCDIC unsafe characters to be quoted in quoted-printable
    // (see first NOTE in section 6.7 of rfc-2045). Retained; unreferenced here.
    private static BitSet EBCDICUnsafeChar;
    private static byte[] Base64EncMap;  // 6-bit value -> base64 alphabet byte
    private static byte[] Base64DecMap;  // base64 alphabet byte -> 6-bit value
    private static char[] UUEncMap;      // uuencode maps; retained, unreferenced here
    private static byte[] UUDecMap;

    // Class Initializer
    static {
        // rfc-2046 & rfc-2045: (bcharsnospace & token)
        // used for multipart codings
        BoundChar = new BitSet(256);
        for (int ch = '0'; ch <= '9'; ch++) {
            BoundChar.set(ch);
        }
        for (int ch = 'A'; ch <= 'Z'; ch++) {
            BoundChar.set(ch);
        }
        for (int ch = 'a'; ch <= 'z'; ch++) {
            BoundChar.set(ch);
        }
        BoundChar.set('+');
        BoundChar.set('_');
        BoundChar.set('-');
        BoundChar.set('.');

        // EBCDIC unsafe characters to be quoted in quoted-printable
        // See first NOTE in section 6.7 of rfc-2045
        EBCDICUnsafeChar = new BitSet(256);
        EBCDICUnsafeChar.set('!');
        EBCDICUnsafeChar.set('"');
        EBCDICUnsafeChar.set('#');
        EBCDICUnsafeChar.set('$');
        EBCDICUnsafeChar.set('@');
        EBCDICUnsafeChar.set('[');
        EBCDICUnsafeChar.set('\\');
        EBCDICUnsafeChar.set(']');
        EBCDICUnsafeChar.set('^');
        EBCDICUnsafeChar.set('`');
        EBCDICUnsafeChar.set('{');
        EBCDICUnsafeChar.set('|');
        EBCDICUnsafeChar.set('}');
        EBCDICUnsafeChar.set('~');

        // rfc-2045: Base64 Alphabet
        byte[] map = {
            (byte)'A', (byte)'B', (byte)'C', (byte)'D', (byte)'E', (byte)'F', (byte)'G', (byte)'H',
            (byte)'I', (byte)'J', (byte)'K', (byte)'L', (byte)'M', (byte)'N', (byte)'O', (byte)'P',
            (byte)'Q', (byte)'R', (byte)'S', (byte)'T', (byte)'U', (byte)'V', (byte)'W', (byte)'X',
            (byte)'Y', (byte)'Z', (byte)'a', (byte)'b', (byte)'c', (byte)'d', (byte)'e', (byte)'f',
            (byte)'g', (byte)'h', (byte)'i', (byte)'j', (byte)'k', (byte)'l', (byte)'m', (byte)'n',
            (byte)'o', (byte)'p', (byte)'q', (byte)'r', (byte)'s', (byte)'t', (byte)'u', (byte)'v',
            (byte)'w', (byte)'x', (byte)'y', (byte)'z', (byte)'0', (byte)'1', (byte)'2', (byte)'3',
            (byte)'4', (byte)'5', (byte)'6', (byte)'7', (byte)'8', (byte)'9', (byte)'+', (byte)'/'
        };
        Base64EncMap = map;
        Base64DecMap = new byte[128];
        for (int idx = 0; idx < Base64EncMap.length; idx++) {
            Base64DecMap[Base64EncMap[idx]] = (byte)idx;
        }

        // uuencode'ing maps
        UUEncMap = new char[64];
        for (int idx = 0; idx < UUEncMap.length; idx++) {
            UUEncMap[idx] = (char)(idx + 0x20);
        }
        UUDecMap = new byte[128];
        for (int idx = 0; idx < UUEncMap.length; idx++) {
            UUDecMap[UUEncMap[idx]] = (byte)idx;
        }
    }

    /**
     * Base64 constructor comment.
     */
    public Base64() {
        super();
    }

    /**
     * This method decodes the given byte[] using the base64-encoding
     * specified in RFC-2045 (Section 6.8).
     * <p>
     * Unlike the previous implementation, the caller's input array is NOT
     * modified: decoding works on an internal copy.
     *
     * @param data the base64-encoded data; may be null.
     * @return the decoded data (null for null input, empty for empty or
     *         padding-only input).
     */
    public final static byte[] base64Decode(byte[] data) {
        if (data == null) {
            return null;
        }
        if (data.length == 0) {
            return new byte[0];
        }

        // Strip trailing '=' padding; guard against degenerate all-padding input,
        // which previously caused an out-of-bounds / negative-size failure.
        int tail = data.length;
        while (tail > 0 && data[tail - 1] == '=') {
            tail--;
        }
        if (tail == 0) {
            return new byte[0];
        }

        byte[] dest = new byte[tail - (data.length / 4)];

        // ascii printable to 0-63 conversion, into a copy so the caller's
        // array is left untouched (the old code decoded in place).
        byte[] src = new byte[data.length];
        for (int idx = 0; idx < data.length; idx++) {
            src[idx] = Base64DecMap[data[idx]];
        }

        // 4-byte to 3-byte conversion
        int sidx;
        int didx;
        for (sidx = 0, didx = 0; didx < (dest.length - 2); sidx += 4, didx += 3) {
            dest[didx] = (byte)(((src[sidx] << 2) & 255) | ((src[sidx + 1] >>> 4) & 003));
            dest[didx + 1] = (byte)(((src[sidx + 1] << 4) & 255) | ((src[sidx + 2] >>> 2) & 017));
            dest[didx + 2] = (byte)(((src[sidx + 2] << 6) & 255) | (src[sidx + 3] & 077));
        }
        if (didx < dest.length) {
            dest[didx] = (byte)(((src[sidx] << 2) & 255) | ((src[sidx + 1] >>> 4) & 003));
        }
        if (++didx < dest.length) {
            dest[didx] = (byte)(((src[sidx + 1] << 4) & 255) | ((src[sidx + 2] >>> 2) & 017));
        }
        return dest;
    }

    /**
     * This method encodes the given byte[] using the base64-encoding
     * specified in RFC-2045 (Section 6.8).
     *
     * @param data the data; may be null.
     * @return the base64-encoded data, '='-padded to a multiple of 4 bytes.
     */
    public final static byte[] base64Encode(byte[] data) {
        if (data == null) {
            return null;
        }

        int sidx;
        int didx;
        byte[] dest = new byte[((data.length + 2) / 3) * 4];

        // 3-byte to 4-byte conversion + 0-63 to ascii printable conversion
        for (sidx = 0, didx = 0; sidx < (data.length - 2); sidx += 3) {
            dest[didx++] = Base64EncMap[(data[sidx] >>> 2) & 077];
            dest[didx++] = Base64EncMap[((data[sidx + 1] >>> 4) & 017) | ((data[sidx] << 4) & 077)];
            dest[didx++] = Base64EncMap[((data[sidx + 2] >>> 6) & 003) | ((data[sidx + 1] << 2) & 077)];
            dest[didx++] = Base64EncMap[data[sidx + 2] & 077];
        }
        if (sidx < data.length) {
            dest[didx++] = Base64EncMap[(data[sidx] >>> 2) & 077];
            if (sidx < (data.length - 1)) {
                dest[didx++] = Base64EncMap[((data[sidx + 1] >>> 4) & 017) | ((data[sidx] << 4) & 077)];
                dest[didx++] = Base64EncMap[(data[sidx + 1] << 2) & 077];
            } else {
                dest[didx++] = Base64EncMap[(data[sidx] << 4) & 077];
            }
        }

        // add padding
        for (; didx < dest.length; didx++) {
            dest[didx] = (byte)'=';
        }
        return dest;
    }
}
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.lang.ref.WeakReference; /** * INTENRAL: *

Purpose: This class is used for caching objects based on Object * Identity instead of a Primary Key. This class acts as the Primary Key, and * wraps the domain object itself in a weak reference. hashCode and equals * methods are implemented to insure identity is maintained.

* @author mmacivor * @since 10g */ public class WeakObjectWrapper { protected WeakReference reference; public WeakObjectWrapper(Object domainObject) { reference = new WeakReference(domainObject); } public Object getDomainObject() { return reference.get(); } public void setDomainObject(Object object) { reference = new WeakReference(object); } public int hashCode() { if (getDomainObject() == null) { return -1; } return getDomainObject().hashCode(); } public boolean equals(Object wrapper) { if (!(wrapper instanceof WeakObjectWrapper)) { return false; } return getDomainObject() == ((WeakObjectWrapper)wrapper).getDomainObject(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/SAXFragmentBuilder.java0000664000000000000000000002241512216173126025324 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.Iterator; import java.util.List; import java.util.Map; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.platform.xml.XMLPlatform; import org.eclipse.persistence.platform.xml.XMLPlatformFactory; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.Text; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.eclipse.persistence.platform.xml.SAXDocumentBuilder; /** * @version $Header: SAXFragmentBuilder.java 18-sep-2007.14:36:11 dmahar Exp $ * @author mmacivor * @since release specific (what release of product did this appear in) */ public class SAXFragmentBuilder extends SAXDocumentBuilder { private UnmarshalRecord owningRecord; public SAXFragmentBuilder(UnmarshalRecord unmarshalRecord) { super(); owningRecord = unmarshalRecord; } public void startElement(String namespaceURI, String localName, String qName, Attributes atts) throws SAXException { boolean bufferContainsOnlyWhitespace = stringBuffer.toString().trim().length() == 0; if (bufferContainsOnlyWhitespace) { stringBuffer.reset(); } if ((stringBuffer.length() > 0) && !(nodes.size() == 1)) { Text text = getInitializedDocument().createTextNode(stringBuffer.toString()); Node parent = this.nodes.get(nodes.size() - 1); parent.appendChild(text); processNamespacesForText(text.getTextContent(), (Element)parent); stringBuffer.reset(); } if (null != namespaceURI && namespaceURI.length() == 0) { namespaceURI = null; } if(qName == null){ qName = localName; if(namespaceURI != null){ if(owningRecord != null){ String prefix = owningRecord.resolveNamespaceUri(namespaceURI); if(prefix != null && prefix.length() > 0){ qName = prefix +Constants.COLON+ qName; } } } } int qNameColonIndex = 
qName.indexOf(Constants.COLON); if ((namespaceURI != null) && (qNameColonIndex == -1)) { //check for a prefix from the unmarshal record: String prefix = owningRecord.resolveNamespaceUri(namespaceURI); if (prefix != null && prefix.length() >0){ qName = prefix + Constants.COLON + qName; qNameColonIndex = prefix.length(); } } Element element = getInitializedDocument().createElementNS(namespaceURI, qName); Node parentNode = nodes.get(nodes.size() - 1); appendChildNode(parentNode, element); nodes.add(element); if (qNameColonIndex > -1) { String prefix = qName.substring(0, qNameColonIndex); String parentUri = null; if (element.getParentNode() != null) { parentUri = XMLPlatformFactory.getInstance().getXMLPlatform().resolveNamespacePrefix(element.getParentNode(), prefix); } if ((parentUri == null) || parentUri.length() == 0) { startPrefixMapping(prefix, namespaceURI); } } if (null != namespaceDeclarations) { Iterator namespaces = namespaceDeclarations.entrySet().iterator(); while (namespaces.hasNext()) { Map.Entry entry = (Map.Entry)namespaces.next(); addNamespaceDeclaration(element, (String)entry.getKey(), (String)entry.getValue()); } namespaceDeclarations = null; } int numberOfAttributes = atts.getLength(); String attributeNamespaceURI; for (int x = 0; x < numberOfAttributes; x++) { attributeNamespaceURI = atts.getURI(x); // Empty string will be treated as a null URI if (null != attributeNamespaceURI && attributeNamespaceURI.length() == 0) { attributeNamespaceURI = null; } // Handle case where prefix/uri are not set on an xmlns prefixed attribute if (attributeNamespaceURI == null && atts.getQName(x).startsWith(javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON)) { attributeNamespaceURI = javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI; } String value = atts.getValue(x); if (attributeNamespaceURI == null) { element.setAttribute(atts.getQName(x), value); } else { element.setAttributeNS(attributeNamespaceURI, atts.getQName(x), value == null ? 
Constants.EMPTY_STRING : value); } if(value != null) { processNamespacesForText(value, element); } } } public void endElement(String namespaceURI, String localName, String qName) throws SAXException { if (super.nodes.size() == 2) { Element endedElement = (Element)nodes.get(nodes.size() -1); if (stringBuffer.length() > 0) { Text text = getInitializedDocument().createTextNode(stringBuffer.toString()); endedElement.appendChild(text); stringBuffer.reset(); processNamespacesForText(text.getTextContent(), endedElement); } while(owningRecord.isSelfRecord() && owningRecord.getParentRecord() != null){ owningRecord = owningRecord.getParentRecord(); } //just the doc left in the stack. Finish this off. owningRecord.getXMLReader().setContentHandler(owningRecord); owningRecord.endElement(namespaceURI, localName, qName); } else { super.endElement(namespaceURI, localName, qName); } } public void endSelfElement(String namespaceURI, String localName, String qName) throws SAXException { if (super.nodes.size() == 2) { Element endedElement = (Element)nodes.get(nodes.size() -1); if (stringBuffer.length() > 0) { Text text = getInitializedDocument().createTextNode(stringBuffer.toString()); endedElement.appendChild(text); stringBuffer.reset(); } } else { super.endElement(namespaceURI, localName, qName); } } public List getNodes() { return super.nodes; } public void setOwningRecord(UnmarshalRecord record) { this.owningRecord = record; } public void appendChildNode(Node parent, Node child) { if (parent != this.getDocument()) { parent.appendChild(child); } } public Attr buildAttributeNode(String namespaceURI, String localName, String value) { try { Attr attribute = getInitializedDocument().createAttributeNS(namespaceURI, localName); attribute.setValue(value); return attribute; } catch (SAXException ex) { } return null; } public Text buildTextNode(String textValue) { try { Text text = getInitializedDocument().createTextNode(textValue); return text; } catch (SAXException ex) { } return null; } 
/** * Adds a namespace declaration to the parent element if the textValue represents a * prefixed qualified name. The determination of a qname is based on the existance of a * colon character and the ability to resolve the characters before the colon to a * namespace uri. * @param textValue * @param parentNode */ private void processNamespacesForText(String textValue, Element parentNode) { //If the text value is a qname, we may need to do namespace processing int colon = textValue.indexOf(':'); if(colon != -1) { String prefix = textValue.substring(0, colon); XMLPlatform platform = XMLPlatformFactory.getInstance().getXMLPlatform(); String uri = platform.resolveNamespacePrefix(parentNode, prefix); if(uri == null) { uri = this.owningRecord.resolveNamespacePrefix(prefix); if(uri != null) { //add namespace declaration addNamespaceDeclaration(parentNode, prefix, uri); } } } } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLSetting.java0000664000000000000000000000342012216173126023667 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import org.eclipse.persistence.internal.oxm.mappings.Mapping; /** * INTERNAL: *

Purpose:This class defines an interface that provides information about mappings * and values, used to marshal Sequenced objects. *

Responsibilities:

    *
  • Provide access to a Mapping
  • *
  • Provide access to the a value to be marshalled for the specific mapping
  • *
*

When marshalling a sequenced Data Object, that Object will provide TopLink OXM with an * Ordered list of TopLinkSetting objects. These will be used to marshal the appropriate values * in the correct order. * * @author mmacivor * @since Oracle TopLink 11.1.1.0.0 */ public interface XMLSetting { /** * @return The TopLink OXM mapping associated with this setting */ public Mapping getMapping(); /** * Gets the value to be marshalled using the specified mapping. * @return The value to be marshalled. */ public Object getValue(); } ././@LongLink0000000000000000000000000000015600000000000011567 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLChoiceCollectionMappingUnmarshalNodeValue.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLChoiceCollectionMappingUnmarshalNodeV0000664000000000000000000002476512216173126030702 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.Collection; import java.util.Map; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.AnyCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.BinaryDataCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.ChoiceCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.CollectionReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.CompositeCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.DirectCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.Mapping; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.UnmarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.xml.sax.Attributes; /** * INTERNAL: *

Purpose: This is how the XML Choice Collection Mapping is * handled when used with the TreeObjectBuilder.

* @author mmacivor */ public class XMLChoiceCollectionMappingUnmarshalNodeValue extends MappingNodeValue implements ContainerValue { private NodeValue choiceElementNodeValue; private NodeValue choiceElementMarshalNodeValue; private ChoiceCollectionMapping xmlChoiceCollectionMapping; private Mapping nestedMapping; private Map fieldToNodeValues; private Field xmlField; private ContainerValue containerNodeValue; private boolean isMixedNodeValue; private boolean isAny; private int index = -1; public XMLChoiceCollectionMappingUnmarshalNodeValue(ChoiceCollectionMapping mapping, Field xmlField) { this.xmlChoiceCollectionMapping = mapping; this.xmlField = xmlField; if(xmlField == null && mapping.isAny()){ isAny = true; }else{ this.nestedMapping = (Mapping)mapping.getChoiceElementMappings().get(xmlField); } initializeNodeValue(); } public XMLChoiceCollectionMappingUnmarshalNodeValue(ChoiceCollectionMapping mapping, Field xmlField, Mapping nestedMapping) { this.xmlChoiceCollectionMapping = mapping; this.xmlField = xmlField; this.nestedMapping = nestedMapping; initializeNodeValue(); } public boolean isOwningNode(XPathFragment xPathFragment) { if(isMixedNodeValue) { if(xPathFragment.nameIsText()) { return true; } else { return false; } } return choiceElementNodeValue.isOwningNode(xPathFragment); } private void initializeNodeValue() { if(nestedMapping == null && isAny){ nestedMapping = xmlChoiceCollectionMapping.getAnyMapping(); } Mapping xmlMapping = this.nestedMapping; if(xmlMapping instanceof BinaryDataCollectionMapping) { choiceElementNodeValue = new XMLBinaryDataCollectionMappingNodeValue((BinaryDataCollectionMapping)xmlMapping); choiceElementMarshalNodeValue = choiceElementNodeValue; } else if(xmlMapping instanceof DirectCollectionMapping) { choiceElementNodeValue = new XMLCompositeDirectCollectionMappingNodeValue((DirectCollectionMapping)xmlMapping); choiceElementMarshalNodeValue = choiceElementNodeValue; } else if(xmlMapping instanceof CompositeCollectionMapping){ 
choiceElementNodeValue = new XMLCompositeCollectionMappingNodeValue((CompositeCollectionMapping)xmlMapping); choiceElementMarshalNodeValue = choiceElementNodeValue; }else if(xmlMapping instanceof AnyCollectionMapping){ choiceElementNodeValue = new XMLAnyCollectionMappingNodeValue((AnyCollectionMapping)xmlMapping); choiceElementMarshalNodeValue = choiceElementNodeValue; } else { choiceElementNodeValue = new XMLCollectionReferenceMappingNodeValue((CollectionReferenceMapping)xmlMapping, xmlField); CollectionReferenceMapping refMapping = ((CollectionReferenceMapping)xmlMapping); if(refMapping.usesSingleNode() || refMapping.getFields().size() == 1) { choiceElementMarshalNodeValue = new XMLCollectionReferenceMappingNodeValue(refMapping, xmlField); } else { choiceElementMarshalNodeValue = new XMLCollectionReferenceMappingMarshalNodeValue((CollectionReferenceMapping)xmlMapping); } } } public void setContainerNodeValue(XMLChoiceCollectionMappingUnmarshalNodeValue nodeValue) { this.containerNodeValue = nodeValue; } public void setNullValue(Object object, CoreSession session) { xmlChoiceCollectionMapping.setAttributeValueInObject(object, null); } public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) { Object collection = unmarshalRecord.getContainerInstance(this.containerNodeValue); if(null != xmlChoiceCollectionMapping.getConverter()) { UnmarshalContext unmarshalContext = unmarshalRecord.getUnmarshalContext(); unmarshalRecord.setUnmarshalContext(new ChoiceUnmarshalContext(unmarshalContext, xmlChoiceCollectionMapping)); this.choiceElementNodeValue.endElement(xPathFragment, unmarshalRecord, collection); unmarshalRecord.setUnmarshalContext(unmarshalContext); } else { this.choiceElementNodeValue.endElement(xPathFragment, unmarshalRecord, collection); } } public boolean startElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Attributes atts) { return this.choiceElementNodeValue.startElement(xPathFragment, unmarshalRecord, atts); } 
public void setXPathNode(XPathNode xPathNode) { super.setXPathNode(xPathNode); this.choiceElementNodeValue.setXPathNode(xPathNode); } public Object getContainerInstance() { return getContainerPolicy().containerInstance(); } public void setContainerInstance(Object object, Object containerInstance) { xmlChoiceCollectionMapping.setAttributeValueInObject(object, containerInstance); } public CoreContainerPolicy getContainerPolicy() { return xmlChoiceCollectionMapping.getContainerPolicy(); } public boolean isContainerValue() { return true; } public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object value, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { //empty impl in the unmarshal node value return false; } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { //dummy impl in the unmarshal node value return false; } public NodeValue getChoiceElementNodeValue() { return this.choiceElementNodeValue; } public NodeValue getChoiceElementMarshalNodeValue() { return this.choiceElementMarshalNodeValue; } public boolean isUnmarshalNodeValue() { return true; } public boolean isWrapperAllowedAsCollectionName() { return false; } public boolean isMarshalNodeValue() { return false; } public ChoiceCollectionMapping getMapping() { return xmlChoiceCollectionMapping; } public boolean getReuseContainer() { return getMapping().getReuseContainer(); } public void setFieldToNodeValues(Map fieldToNodeValues) { this.fieldToNodeValues = fieldToNodeValues; } public Collection getAllNodeValues() { return this.fieldToNodeValues.values(); } /** * The underlying choice element node value will handle attributes. 
* */ public void attribute(UnmarshalRecord unmarshalRecord, String URI, String localName, String value) { this.choiceElementNodeValue.attribute(unmarshalRecord, URI, localName, value); } /** * INTERNAL: * Indicates that this is the choice mapping node value that represents the mixed content. */ public void setIsMixedNodeValue(boolean isMixed) { this.isMixedNodeValue = isMixed; } /** * INTERNAL: * Return true if this is the node value representing mixed content. */ public boolean isMixedContentNodeValue() { return this.isMixedNodeValue; } /** * INTERNAL: * Used to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord */ public void setIndex(int index){ this.index = index; } /** * INTERNAL: * Set to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord * Set during TreeObjectBuilder initialization */ public int getIndex(){ return index; } /** * INTERNAL * Return true if an empty container should be set on the object if there * is no presence of the collection in the XML document. * @since EclipseLink 2.3.3 */ public boolean isDefaultEmptyContainer() { return getMapping().isDefaultEmptyContainer(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/FieldTransformerNodeValue.java0000664000000000000000000001352612216173126026752 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLTransformationRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.mappings.transformers.FieldTransformer; import org.eclipse.persistence.oxm.XMLField; /** * INTERNAL: *

Purpose: This is how the Field Transformer is handled when used * with the TreeObjectBuilder. Field Transformers are used with the XML * Transformation Mapping.

*/ public class FieldTransformerNodeValue extends NodeValue { private FieldTransformer fieldTransformer; private Field xmlField; public FieldTransformer getFieldTransformer() { return fieldTransformer; } public void setFieldTransformer(FieldTransformer fieldTransformer) { this.fieldTransformer = fieldTransformer; } public Field getXMLField() { return xmlField; } public void setXMLField(Field xmlField) { this.xmlField = xmlField; } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { return marshal(xPathFragment, marshalRecord, object, session, namespaceResolver, ObjectMarshalContext.getInstance()); } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { Object value = fieldTransformer.buildFieldValue(object, getXMLField().getXPath(), (AbstractSession) session); return this.marshalSingleValue(xPathFragment, marshalRecord, object, value, session, namespaceResolver, marshalContext); } public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object value, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { if(value == null) { return false; } XPathFragment groupingFragment = marshalRecord.openStartGroupingElements(namespaceResolver); if (getXMLField().getLastXPathFragment().isAttribute()) { marshalRecord.add(getXMLField(), value); marshalRecord.closeStartGroupingElements(groupingFragment); } else { marshalRecord.closeStartGroupingElements(groupingFragment); marshalRecord.add(getXMLField(), value); } return true; } public void attribute(UnmarshalRecord unmarshalRecord, String namespaceURI, String localName, String value) { XMLConversionManager xmlConversionManager = (XMLConversionManager) 
unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager(); Object objectValue = unmarshalRecord.getXMLReader().convertValueBasedOnSchemaType(xmlField, value, xmlConversionManager, unmarshalRecord); // PUT VALUE INTO A RECORD KEYED ON XMLFIELD if (null == unmarshalRecord.getTransformationRecord()) { unmarshalRecord.setTransformationRecord(new XMLTransformationRecord("ROOT", unmarshalRecord)); } unmarshalRecord.getTransformationRecord().put(xmlField, objectValue); } public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) { Object value = unmarshalRecord.getCharacters().toString(); boolean isCDATA = unmarshalRecord.isBufferCDATA(); unmarshalRecord.resetStringBuffer(); Field toWrite = xmlField; if(xmlField.isCDATA() != isCDATA) { toWrite = new XMLField(xmlField.getName()); toWrite.setNamespaceResolver(xmlField.getNamespaceResolver()); toWrite.setIsCDATA(isCDATA); } //xmlField.setIsCDATA(isCDATA); XMLConversionManager xmlConversionManager = (XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager(); if (unmarshalRecord.getTypeQName() != null) { Class typeClass = xmlField.getJavaClass(unmarshalRecord.getTypeQName()); value = xmlConversionManager.convertObject(value, typeClass, unmarshalRecord.getTypeQName()); } else { value = unmarshalRecord.getXMLReader().convertValueBasedOnSchemaType(xmlField, value, xmlConversionManager, unmarshalRecord); } // PUT VALUE INTO A RECORD KEYED ON XMLFIELD if (null == unmarshalRecord.getTransformationRecord()) { unmarshalRecord.setTransformationRecord(new XMLTransformationRecord("ROOT", unmarshalRecord)); } unmarshalRecord.getTransformationRecord().put(toWrite, value); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLConversionManager.java0000664000000000000000000025071412216173126025704 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. 
All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.math.BigDecimal; import java.math.BigInteger; import java.sql.Time; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.StringTokenizer; import java.util.TimeZone; import javax.xml.datatype.DatatypeConfigurationException; import javax.xml.datatype.DatatypeConstants; import javax.xml.datatype.DatatypeFactory; import javax.xml.datatype.Duration; import javax.xml.datatype.XMLGregorianCalendar; import javax.xml.namespace.QName; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.exceptions.XMLConversionException; import org.eclipse.persistence.internal.core.helper.CoreClassConstants; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.helper.ConversionManager; import org.eclipse.persistence.internal.helper.Helper; import org.eclipse.persistence.internal.helper.TimeZoneHolder; import org.eclipse.persistence.internal.oxm.conversion.Base64; import org.eclipse.persistence.internal.oxm.record.AbstractUnmarshalRecord; import 
org.eclipse.persistence.internal.queries.ContainerPolicy; /** * INTERNAL: *

Purpose: Primarily used to convert objects from a given XML Schema type to a different type in Java. * Uses a singleton instance

* @since OracleAS TopLink 10g */ public class XMLConversionManager extends ConversionManager implements TimeZoneHolder { protected static final String GMT_ID = "GMT"; protected static final String GMT_SUFFIX = "Z"; protected static XMLConversionManager defaultXMLManager; // Static hash tables for the default conversion pairs protected static HashMap defaultXMLTypes; protected static HashMap defaultJavaTypes; protected boolean timeZoneQualified; protected TimeZone timeZone; protected static int TOTAL_MS_DIGITS = 3; // total digits for millisecond formatting protected static int TOTAL_NS_DIGITS = 9; // total digits for nanosecond formatting protected static long YEAR_ONE_AD_TIME = -62135769600000L; // time of 1 AD private static final char PLUS = '+'; protected DatatypeFactory datatypeFactory; public XMLConversionManager() { super(); timeZoneQualified = false; } /** * INTERNAL: * * Return the DatatypeFactory instance. * * @return */ protected DatatypeFactory getDatatypeFactory() { if (datatypeFactory == null) { try { datatypeFactory = DatatypeFactory.newInstance(); } catch (DatatypeConfigurationException e) { throw new RuntimeException(e); } } return datatypeFactory; } public static XMLConversionManager getDefaultXMLManager() { if (defaultXMLManager == null) { defaultXMLManager = new XMLConversionManager(); } return defaultXMLManager; } /** * Return the specified TimeZone used for all String to date object * conversions. */ public TimeZone getTimeZone() { if (timeZone == null) { return TimeZone.getDefault(); } else { return timeZone; } } /** * The specified TimeZone will be used for all String to date object * conversions. By default the TimeZone from the JVM is used. 
*/ public void setTimeZone(TimeZone timeZone) { this.timeZone = timeZone; } /** * */ public boolean isTimeZoneQualified() { return timeZoneQualified; } /** * Specify if * Specify if when date objects are converted to Strings in the XML Schema * time or dateTime format */ public void setTimeZoneQualified(boolean timeZoneQualified) { this.timeZoneQualified = timeZoneQualified; } /** * Convert the given object to the appropriate type by invoking the appropriate * ConversionManager method. * * @param sourceObject - will always be a string if read from XML * @param javaClass - the class that the object must be converted to * @return - the newly converted object */ public Object convertObject(Object sourceObject, Class javaClass) throws ConversionException { if (sourceObject == null) {//Let the parent handle default null values return super.convertObject(sourceObject, javaClass); } else if (javaClass == null || javaClass == CoreClassConstants.OBJECT || sourceObject.getClass() == javaClass) { return sourceObject; } else if (javaClass == CoreClassConstants.STRING) { if(sourceObject instanceof List){ return convertListToString(sourceObject, null); }else{ return convertObjectToString(sourceObject); } } else if ((javaClass == Constants.QNAME_CLASS) && (sourceObject != null)) { return convertObjectToQName(sourceObject); } else if ((javaClass == CoreClassConstants.List_Class) && (sourceObject instanceof String)) { return convertStringToList(sourceObject); } else if ((javaClass == CoreClassConstants.CALENDAR)) { return convertObjectToCalendar(sourceObject); } else if ((javaClass == CoreClassConstants.UTILDATE)) { return convertObjectToUtilDate(sourceObject, Constants.DATE_TIME_QNAME); } else if ((javaClass == CoreClassConstants.SQLDATE)) { return convertObjectToSQLDate(sourceObject, Constants.DATE_QNAME); } else if ((javaClass == CoreClassConstants.TIME)) { return convertObjectToSQLTime(sourceObject, Constants.TIME_QNAME); } else if ((javaClass == CoreClassConstants.TIMESTAMP)) 
{ return convertObjectToTimestamp(sourceObject, Constants.DATE_TIME_QNAME); } else if ((javaClass == java.net.URI.class)) { return convertObjectToURI(sourceObject); } else if ((javaClass == CoreClassConstants.XML_GREGORIAN_CALENDAR)) { return convertObjectToXMLGregorianCalendar(sourceObject); } else if ((javaClass == CoreClassConstants.DURATION)) { return convertObjectToDuration(sourceObject); } else { try { return super.convertObject(sourceObject, javaClass); } catch (ConversionException ex) { if (sourceObject.getClass() == CoreClassConstants.STRING) { return super.convertObject(((String) sourceObject).trim(), javaClass); } throw ex; } } } /** * Convert the given object to the appropriate type by invoking the appropriate * ConversionManager method. * * @param sourceObject - will always be a string if read from XML * @param javaClass - the class that the object must be converted to * @param schemaTypeQName - the XML schema that the object is being converted from * @return - the newly converted object */ public Object convertObject(Object sourceObject, Class javaClass, QName schemaTypeQName) throws ConversionException { if (schemaTypeQName == null) { return convertObject(sourceObject, javaClass); } if (sourceObject == null) { return super.convertObject(sourceObject, javaClass); } else if ((sourceObject.getClass() == javaClass) || (javaClass == null) || (javaClass == CoreClassConstants.OBJECT)) { return sourceObject; } else if ((javaClass == CoreClassConstants.CALENDAR) || (javaClass == CoreClassConstants.GREGORIAN_CALENDAR)) { return convertObjectToCalendar(sourceObject, schemaTypeQName); } else if (javaClass == CoreClassConstants.ABYTE) { if (schemaTypeQName.getLocalPart().equalsIgnoreCase(Constants.HEX_BINARY)) { return super.convertObjectToByteObjectArray(sourceObject); } else if (schemaTypeQName.getLocalPart().equalsIgnoreCase(Constants.BASE_64_BINARY)) { return convertSchemaBase64ToByteObjectArray(sourceObject); } } else if (javaClass == 
CoreClassConstants.APBYTE) { if (schemaTypeQName.getLocalPart().equalsIgnoreCase(Constants.HEX_BINARY)) { return super.convertObjectToByteArray(sourceObject); } else if (schemaTypeQName.getLocalPart().equalsIgnoreCase(Constants.BASE_64_BINARY)) { return convertSchemaBase64ToByteArray(sourceObject); } } else if ((javaClass == CoreClassConstants.List_Class) && (sourceObject instanceof String)) { return convertStringToList(sourceObject); } else if ((javaClass == CoreClassConstants.STRING) && (sourceObject instanceof List)) { return convertListToString(sourceObject, schemaTypeQName); } else if (sourceObject instanceof byte[]) { if (schemaTypeQName.getLocalPart().equalsIgnoreCase(Constants.BASE_64_BINARY)) { return buildBase64StringFromBytes((byte[]) sourceObject); } return Helper.buildHexStringFromBytes((byte[]) sourceObject); } else if (sourceObject instanceof Byte[]) { if (schemaTypeQName.getLocalPart().equalsIgnoreCase(Constants.BASE_64_BINARY)) { return buildBase64StringFromObjectBytes((Byte[]) sourceObject); } return buildHexStringFromObjectBytes((Byte[]) sourceObject); } else if ((javaClass == CoreClassConstants.UTILDATE)) { return convertObjectToUtilDate(sourceObject, schemaTypeQName); } else if (javaClass == CoreClassConstants.SQLDATE) { return convertObjectToSQLDate(sourceObject, schemaTypeQName); } else if (javaClass == CoreClassConstants.TIME) { return convertObjectToSQLTime(sourceObject, schemaTypeQName); } else if (javaClass == CoreClassConstants.TIMESTAMP) { return convertObjectToTimestamp(sourceObject, schemaTypeQName); } else if ((javaClass == Constants.QNAME_CLASS) && (sourceObject != null)) { return convertObjectToQName(sourceObject); } else if (javaClass == CoreClassConstants.STRING) { return convertObjectToString(sourceObject, schemaTypeQName); } else if ((javaClass == java.net.URI.class)) { return convertObjectToURI(sourceObject); } else if ((javaClass == CoreClassConstants.XML_GREGORIAN_CALENDAR)) { return 
convertObjectToXMLGregorianCalendar(sourceObject, schemaTypeQName); } else if ((javaClass == CoreClassConstants.DURATION)) { return convertObjectToDuration(sourceObject); } else if ((javaClass == CoreClassConstants.CHAR)) { return convertObjectToChar(sourceObject, schemaTypeQName); } else { try { return super.convertObject(sourceObject, javaClass); } catch (ConversionException ex) { if (sourceObject.getClass() == CoreClassConstants.STRING) { return super.convertObject(((String) sourceObject).trim(), javaClass); } throw ex; } } throw ConversionException.couldNotBeConverted(sourceObject, javaClass); } /** * Build a valid instance of XMLGregorianCalendar from the provided sourceObject. * * @param sourceObject * @param schemaTypeQName */ protected XMLGregorianCalendar convertObjectToXMLGregorianCalendar(Object sourceObject, QName schemaTypeQName) throws ConversionException { if (sourceObject instanceof XMLGregorianCalendar) { return (XMLGregorianCalendar) sourceObject; } if (sourceObject instanceof String) { return convertStringToXMLGregorianCalendar((String) sourceObject, schemaTypeQName); } throw ConversionException.couldNotBeConverted(sourceObject, CoreClassConstants.XML_GREGORIAN_CALENDAR); } /** * Build a valid instance of XMLGregorianCalendar from the provided sourceObject. * * @param sourceObject */ protected XMLGregorianCalendar convertObjectToXMLGregorianCalendar(Object sourceObject) throws ConversionException { if (sourceObject instanceof XMLGregorianCalendar) { return (XMLGregorianCalendar) sourceObject; } if (sourceObject instanceof String) { return convertStringToXMLGregorianCalendar((String) sourceObject); } throw ConversionException.couldNotBeConverted(sourceObject, CoreClassConstants.XML_GREGORIAN_CALENDAR); } /** * Build a valid instance of Duration from the provided sourceObject. 
* * @param sourceObject */ protected Duration convertObjectToDuration(Object sourceObject) throws ConversionException { if (sourceObject instanceof Duration) { return (Duration) sourceObject; } if (sourceObject instanceof String) { return convertStringToDuration((String) sourceObject); } throw ConversionException.couldNotBeConverted(sourceObject, CoreClassConstants.DURATION); } /** * Build a valid instance of Character from the provided sourceObject. * * @param sourceObject */ protected Character convertObjectToChar(Object sourceObject, QName schemaTypeQName) throws ConversionException { if (sourceObject == null || sourceObject.equals(Constants.EMPTY_STRING)) { return (char) 0; } if(sourceObject instanceof String && isNumericQName(schemaTypeQName)){ int integer = Integer.parseInt((String)sourceObject); return Character.valueOf((char)integer); } return super.convertObjectToChar(sourceObject); } /** * Build a valid instance of Character from the provided sourceObject. * * @param sourceObject */ protected Character convertObjectToChar(Object sourceObject) throws ConversionException { if (sourceObject == null || sourceObject.equals(Constants.EMPTY_STRING)) { return (char) 0; } return super.convertObjectToChar(sourceObject); } /** * Convert a String to a URI. 
* * @param sourceObject * @return * @throws ConversionException */ protected java.net.URI convertObjectToURI(Object sourceObject) throws ConversionException { if (sourceObject instanceof String) { try { return new java.net.URI((String) sourceObject); } catch (Exception ex) { } } throw ConversionException.couldNotBeConverted(sourceObject, java.net.URI.class); } /** * INTERNAL: * Converts given object to a QName object */ protected QName convertObjectToQName(Object sourceObject) throws ConversionException { if (sourceObject instanceof QName) { return (QName) sourceObject; } if (sourceObject instanceof String) { return qnameFromString((String) sourceObject); } throw ConversionException.couldNotBeConverted(sourceObject, Constants.QNAME_CLASS); } /** * INTERNAL: * Converts given object to a Calendar object */ protected Calendar convertObjectToCalendar(Object sourceObject) throws ConversionException { if (sourceObject instanceof String) { String sourceString = (String) sourceObject; if (sourceString.lastIndexOf('T') != -1) { return convertStringToCalendar((String) sourceObject, Constants.DATE_TIME_QNAME); } else { if (sourceString.lastIndexOf(Constants.COLON) != -1) { return convertStringToCalendar((String) sourceObject, Constants.TIME_QNAME); } else { return convertStringToCalendar((String) sourceObject, Constants.DATE_QNAME); } } } return super.convertObjectToCalendar(sourceObject); } /** * INTERNAL: * Converts objects to their string representations. 
*/ protected String convertObjectToString(Object sourceObject) throws ConversionException { if (sourceObject instanceof Calendar) { return stringFromCalendar((Calendar) sourceObject); } if (sourceObject instanceof Character && sourceObject.equals((char) 0)) { return Constants.EMPTY_STRING; } if (sourceObject instanceof QName) { return stringFromQName((QName) sourceObject); } if (sourceObject instanceof java.sql.Date) { return stringFromSQLDate((java.sql.Date) sourceObject); } if (sourceObject instanceof java.sql.Time) { return stringFromSQLTime((java.sql.Time) sourceObject); } if (sourceObject instanceof java.sql.Timestamp) { return stringFromTimestamp((Timestamp) sourceObject); } if (sourceObject instanceof java.util.Date) { return stringFromDate((java.util.Date) sourceObject); } if (sourceObject instanceof XMLGregorianCalendar) { return stringFromXMLGregorianCalendar((XMLGregorianCalendar) sourceObject); } if (sourceObject instanceof Duration) { return stringFromDuration((Duration) sourceObject); } if(sourceObject instanceof Double){ if(Double.POSITIVE_INFINITY == ((Double)sourceObject)){ return Constants.POSITIVE_INFINITY; } if(Double.NEGATIVE_INFINITY == ((Double)sourceObject)){ return Constants.NEGATIVE_INFINITY; } return ((Double)sourceObject).toString(); } if(sourceObject instanceof Float){ if(Float.POSITIVE_INFINITY == ((Float)sourceObject)){ return Constants.POSITIVE_INFINITY; } if(Float.NEGATIVE_INFINITY == ((Float)sourceObject)){ return Constants.NEGATIVE_INFINITY; } return ((Float)sourceObject).toString(); } return super.convertObjectToString(sourceObject); } protected String convertObjectToString(Object sourceObject, QName schemaTypeQName) throws ConversionException { if (sourceObject instanceof Calendar) { return stringFromCalendar((Calendar) sourceObject, schemaTypeQName); } if (sourceObject instanceof Character){ if(isNumericQName(schemaTypeQName)){ return Integer.toString((int) (Character)sourceObject); } else { if(sourceObject.equals((char) 0)) { 
return Constants.EMPTY_STRING; } super.convertObjectToString(sourceObject); } } if (sourceObject instanceof QName) { return stringFromQName((QName) sourceObject); } if (sourceObject instanceof java.sql.Date) { return stringFromSQLDate((java.sql.Date) sourceObject, schemaTypeQName); } if (sourceObject instanceof java.sql.Time) { return stringFromSQLTime((java.sql.Time) sourceObject, schemaTypeQName); } if (sourceObject instanceof java.sql.Timestamp) { return stringFromTimestamp((Timestamp) sourceObject, schemaTypeQName); } if (sourceObject instanceof java.util.Date) { return stringFromDate((java.util.Date) sourceObject, schemaTypeQName); } if (sourceObject instanceof XMLGregorianCalendar) { return stringFromXMLGregorianCalendar((XMLGregorianCalendar) sourceObject, schemaTypeQName); } if (sourceObject instanceof Duration) { return stringFromDuration((Duration) sourceObject); } if(sourceObject instanceof Double){ if(Double.POSITIVE_INFINITY == ((Double)sourceObject)){ return Constants.POSITIVE_INFINITY; } if(Double.NEGATIVE_INFINITY == ((Double)sourceObject)){ return Constants.NEGATIVE_INFINITY; } return ((Double)sourceObject).toString(); } if(sourceObject instanceof Float){ if(Float.POSITIVE_INFINITY == ((Float)sourceObject)){ return Constants.POSITIVE_INFINITY; } if(Float.NEGATIVE_INFINITY == ((Float)sourceObject)){ return Constants.NEGATIVE_INFINITY; } return ((Float)sourceObject).toString(); } return super.convertObjectToString(sourceObject); } private Calendar convertObjectToCalendar(Object sourceObject, QName schemaTypeQName) { if (sourceObject instanceof String) { return convertStringToCalendar((String) sourceObject, schemaTypeQName); } return super.convertObjectToCalendar(sourceObject); } protected java.sql.Date convertObjectToDate(Object sourceObject) throws ConversionException { Object o = sourceObject; if (sourceObject instanceof Calendar) { // Clone the calendar, because calling get() methods // later on will alter the original calendar o = ((Calendar) 
sourceObject).clone(); } return super.convertObjectToDate(o); } /** * Convert the object to an instance of Double. * @param sourceObject Object of type String or Number. * @caught exception The Double(String) constructor throws a * NumberFormatException if the String does not contain a * parsable double. */ protected Double convertObjectToDouble(Object sourceObject) throws ConversionException { if (sourceObject instanceof String) { if(((String) sourceObject).length() == 0) { return 0d; }else if(Constants.POSITIVE_INFINITY.equals(sourceObject)){ return Double.valueOf(Double.POSITIVE_INFINITY); }else if(Constants.NEGATIVE_INFINITY.equals(sourceObject)){ return Double.valueOf(Double.NEGATIVE_INFINITY); }else{ return super.convertObjectToDouble(sourceObject); } }else{ return super.convertObjectToDouble(sourceObject); } } /** * Build a valid Float instance from a String or another Number instance. * @caught exception The Float(String) constructor throws a * NumberFormatException if the String does not contain a * parsable Float. */ protected Float convertObjectToFloat(Object sourceObject) throws ConversionException { if (sourceObject instanceof String) { if(((String) sourceObject).length() == 0) { return 0f; } else if(Constants.POSITIVE_INFINITY.equals(sourceObject)){ return new Float(Float.POSITIVE_INFINITY); }else if(Constants.NEGATIVE_INFINITY.equals(sourceObject)){ return new Float(Float.NEGATIVE_INFINITY); } } return super.convertObjectToFloat(sourceObject); } /** * Build a valid Integer instance from a String or another Number instance. * @caught exception The Integer(String) constructor throws a * NumberFormatException if the String does not contain a * parsable integer. 
*/ protected Integer convertObjectToInteger(Object sourceObject) throws ConversionException { if(sourceObject instanceof String) { String sourceString = (String) sourceObject; if(sourceString.length() == 0) { return 0; } else if(sourceString.charAt(0) == PLUS) { return super.convertObjectToInteger(sourceString.substring(1)); } } return super.convertObjectToInteger(sourceObject); } /** * Build a valid Long instance from a String or another Number instance. * @caught exception The Long(String) constructor throws a * NumberFormatException if the String does not contain a * parsable long. * */ protected Long convertObjectToLong(Object sourceObject) throws ConversionException { if(sourceObject instanceof String) { String sourceString = (String) sourceObject; if(sourceString.length() == 0) { return 0l; } else if(sourceString.charAt(0) == PLUS) { return super.convertObjectToLong(sourceString.substring(1)); } } return super.convertObjectToLong(sourceObject); } /** * INTERNAL: * Build a valid Short instance from a String or another Number instance. * @caught exception The Short(String) constructor throws a * NumberFormatException if the String does not contain a * parsable short. */ protected Short convertObjectToShort(Object sourceObject) throws ConversionException { if(sourceObject instanceof String) { String sourceString = (String) sourceObject; if(sourceString.length() == 0) { return 0; } else if(sourceString.charAt(0) == PLUS) { return super.convertObjectToShort(sourceString.substring(1)); } } return super.convertObjectToShort(sourceObject); } /** * INTERNAL: * Build a valid BigDecimal instance from a String or another * Number instance. BigDecimal is the most general type so is * must be returned when an object is converted to a number. * @caught exception The BigDecimal(String) constructor throws a * NumberFormatException if the String does not contain a * parsable BigDecimal. 
*/ protected BigDecimal convertObjectToNumber(Object sourceObject) throws ConversionException { if(sourceObject instanceof String) { String sourceString = (String) sourceObject; if(sourceString.length() == 0) { return BigDecimal.ZERO; } else if(sourceString.charAt(0) == PLUS) { return super.convertObjectToNumber(sourceString.substring(1)); } } return super.convertObjectToNumber(sourceObject); } /** * Build a valid instance of BigInteger from the provided sourceObject. * @param sourceObject Valid instance of String, BigDecimal, or any Number */ protected BigInteger convertObjectToBigInteger(Object sourceObject) throws ConversionException { if(sourceObject instanceof String) { String sourceString = (String) sourceObject; if(sourceString.length() == 0) { return null; } else if(sourceString.charAt(0) == PLUS) { return super.convertObjectToBigInteger(sourceString.substring(1)); } } return super.convertObjectToBigInteger(sourceObject); } /** * Build a valid instance of BigDecimal from the given sourceObject * @param sourceObject Valid instance of String, BigInteger, any Number */ protected BigDecimal convertObjectToBigDecimal(Object sourceObject) throws ConversionException { if(sourceObject instanceof String) { String sourceString = (String) sourceObject; if(sourceString.length() == 0) { return null; } try { return new BigDecimal(sourceString); } catch (NumberFormatException exception) { throw ConversionException.couldNotBeConverted(sourceObject, CoreClassConstants.BIGDECIMAL, exception); } } return super.convertObjectToBigDecimal(sourceObject); } @Override protected Boolean convertObjectToBoolean(Object sourceObject) { if (sourceObject == null || sourceObject.equals(Constants.EMPTY_STRING)) { return false; } return super.convertObjectToBoolean(sourceObject); } /** * Build a valid instance of Byte from the provided sourceObject * @param sourceObject Valid instance of String or any Number * @caught exception The Byte(String) constructor throws a * NumberFormatException if 
the String does not contain a
 * parsable byte. An empty String converts to 0 and a leading '+' sign is
 * stripped before delegating to the superclass.
 */
protected Byte convertObjectToByte(Object sourceObject) throws ConversionException {
    if (sourceObject instanceof String) {
        String sourceString = (String) sourceObject;
        if (sourceString.length() == 0) {
            return 0;
        } else if (sourceString.charAt(0) == PLUS) {
            return super.convertObjectToByte(sourceString.substring(1));
        }
    }
    return super.convertObjectToByte(sourceObject);
}

/**
 * Parse a lexical date/time String into an XMLGregorianCalendar, validating the
 * result against the expected XML schema type.
 *
 * If parsing fails, the generic parse error is translated into the exception
 * specific to schemaTypeQName (incorrect date, time, gDay, ... format). If
 * parsing succeeds but the parsed value's schema type does not match
 * schemaTypeQName, the only tolerated mismatch is a dateTime value requested as
 * a date, in which case the time portion is cleared; every other mismatch
 * throws the schema-type-specific exception.
 *
 * @param sourceString lexical value to parse; may be null/empty (returns null)
 * @param schemaTypeQName the expected XML schema type
 * @return the parsed calendar, or null for an empty input
 */
public XMLGregorianCalendar convertStringToXMLGregorianCalendar(String sourceString, QName schemaTypeQName) {
    XMLGregorianCalendar xmlGregorianCalender = null;
    try {
        xmlGregorianCalender = convertStringToXMLGregorianCalendar(sourceString);
    } catch (Exception ex) {
        // Translate the generic parse failure into the schema-type-specific exception.
        if (Constants.DATE_QNAME.equals(schemaTypeQName)) {
            throw ConversionException.incorrectDateFormat(sourceString);
        } else if (Constants.TIME_QNAME.equals(schemaTypeQName)) {
            throw ConversionException.incorrectTimeFormat(sourceString);
        } else if (Constants.G_DAY_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGDayFormat(sourceString);
        } else if (Constants.G_MONTH_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGMonthFormat(sourceString);
        } else if (Constants.G_MONTH_DAY_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGMonthDayFormat(sourceString);
        } else if (Constants.G_YEAR_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGYearFormat(sourceString);
        } else if (Constants.G_YEAR_MONTH_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGYearMonthFormat(sourceString);
        } else if (Constants.DURATION_QNAME.equals(schemaTypeQName)) {
            throw new IllegalArgumentException();
        } else {
            throw ConversionException.incorrectDateTimeFormat(sourceString);
        }
    }
    if (xmlGregorianCalender == null) {
        return null;
    }
    // Validate the parsed value's actual schema type against the requested one.
    QName calendarQName = xmlGregorianCalender.getXMLSchemaType();
    if (!calendarQName.equals(schemaTypeQName)) {
        if (Constants.DATE_QNAME.equals(schemaTypeQName)) {
            if (calendarQName.equals(Constants.DATE_TIME_QNAME)) {
                // A dateTime is accepted where a date was requested:
                // clear out the time portion.
                xmlGregorianCalender.setHour(DatatypeConstants.FIELD_UNDEFINED);
                xmlGregorianCalender.setMinute(DatatypeConstants.FIELD_UNDEFINED);
                xmlGregorianCalender.setSecond(DatatypeConstants.FIELD_UNDEFINED);
                xmlGregorianCalender.setMillisecond(DatatypeConstants.FIELD_UNDEFINED);
                return xmlGregorianCalender;
            } else {
                throw ConversionException.incorrectDateFormat(sourceString);
            }
        } else if (Constants.TIME_QNAME.equals(schemaTypeQName)) {
            throw ConversionException.incorrectTimeFormat(sourceString);
        } else if (Constants.G_DAY_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGDayFormat(sourceString);
        } else if (Constants.G_MONTH_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGMonthFormat(sourceString);
        } else if (Constants.G_MONTH_DAY_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGMonthDayFormat(sourceString);
        } else if (Constants.G_YEAR_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGYearFormat(sourceString);
        } else if (Constants.G_YEAR_MONTH_QNAME.equals(schemaTypeQName)) {
            throw XMLConversionException.incorrectGYearMonthFormat(sourceString);
        } else if (Constants.DURATION_QNAME.equals(schemaTypeQName)) {
            throw new IllegalArgumentException();
        } else if (Constants.DATE_TIME_QNAME.equals(schemaTypeQName)) {
            throw ConversionException.incorrectDateTimeFormat(sourceString);
        }
        // NOTE(review): any other mismatched schema type falls through and the
        // parsed value is returned as-is.
    }
    return xmlGregorianCalender;
}

/**
 * Return an XMLGregorianCalander created with a given date string.
 *
 * Handles the two gMonth lexical forms that differ between JDKs
 * (JDK 1.5 accepted "--MM--", JDK 1.6 accepts "--MM"): if the first parse
 * fails, the string is converted to the other form and retried. On a second
 * failure the original exception is rethrown.
 *
 * @param sourceString lexical value; leading/trailing whitespace is ignored
 * @return the parsed calendar, or null for an empty (after trim) input
 */
public XMLGregorianCalendar convertStringToXMLGregorianCalendar(String sourceString) {
    // Trim in case of leading or trailing whitespace
    String trimmedSourceString = sourceString.trim();
    if (trimmedSourceString.length() == 0) {
        return null;
    }
    XMLGregorianCalendar calToReturn = null;
    try {
        calToReturn = getDatatypeFactory().newXMLGregorianCalendar(trimmedSourceString);
    } catch (IllegalArgumentException e1) {
        try {
            // GMonths may have different representations:
            // JDK 1.5: "--MM--"   JDK 1.6: "--MM"
            // If we caught an IllegalArgumentException, try the other syntax
            int length = trimmedSourceString.length();
            String retryString = null;
            if (length >= 6 && (trimmedSourceString.charAt(4) == '-') && (trimmedSourceString.charAt(5) == '-')) {
                // Try to omit the trailing dashes if any (--MM--),
                // but preserve time zone specifier, if any.
                retryString = new StringBuilder(
                        trimmedSourceString.substring(0, 4)).append(
                        length > 6 ? trimmedSourceString.substring(6) : "")
                        .toString();
            } else if (length >= 4) {
                // For "--MM" add the trailing dashes, preserving
                // any trailing time zone specifier.
                retryString = new StringBuilder(
                        trimmedSourceString.substring(0, 4)).append("--").append(
                        length > 4 ? trimmedSourceString.substring(4) : "")
                        .toString();
            }
            if (retryString != null) {
                calToReturn = getDatatypeFactory().newXMLGregorianCalendar(retryString);
            } else {
                throw e1;
            }
        } catch (IllegalArgumentException e2) {
            // The retry also failed: surface the original failure, not the retry's.
            throw e1;
        }
    }
    return calToReturn;
}

/**
 * Return a Duration created with a given date string.
 *
 * @param dateString lexical xsd:duration value; null or empty returns null
 * @return the parsed Duration, or null for null/empty input
 */
public Duration convertStringToDuration(String sourceString) {
    if (sourceString == null || sourceString.length() == 0) {
        return null;
    }
    return getDatatypeFactory().newDuration(sourceString);
}

/**
 * Parse a lexical value into a Calendar, validating it against the given
 * schema type (see convertStringToXMLGregorianCalendar for the validation rules).
 * @return the parsed Calendar, or null for an empty input
 */
public Calendar convertStringToCalendar(String sourceString, QName schemaTypeQName) {
    XMLGregorianCalendar xmlGregorianCalender = convertStringToXMLGregorianCalendar(sourceString, schemaTypeQName);
    if (xmlGregorianCalender == null) {
        return null;
    }
    return toCalendar(xmlGregorianCalender);
}

/**
 * Convert an object to a java.util.Date, parsing Strings against the given
 * schema type; non-Strings are delegated to the superclass.
 */
private Date convertObjectToUtilDate(Object sourceObject, QName schemaTypeQName) {
    if (sourceObject instanceof String) {
        return convertStringToDate((String) sourceObject, schemaTypeQName);
    }
    return super.convertObjectToUtilDate(sourceObject);
}

/**
 * Convert an object to a java.sql.Date, parsing Strings against the given
 * schema type. The millisecond portion is truncated (java.sql.Date carries
 * no sub-second precision).
 */
protected java.sql.Date convertObjectToSQLDate(Object sourceObject, QName schemaTypeQName) {
    if (sourceObject instanceof String) {
        Date date = convertStringToDate((String) sourceObject, schemaTypeQName);
        // Truncate to whole seconds.
        return new java.sql.Date((date.getTime() / 1000) * 1000);
    }
    return convertObjectToDate(sourceObject);
}

/**
 * Convert an object to a java.sql.Time, parsing Strings against the given
 * schema type. The millisecond portion is truncated.
 */
protected Time convertObjectToSQLTime(Object sourceObject, QName schemaTypeQName) {
    if (sourceObject instanceof String) {
        Date date = convertStringToDate((String) sourceObject, schemaTypeQName);
        // Truncate to whole seconds.
        return new java.sql.Time((date.getTime() / 1000) * 1000);
    }
    return super.convertObjectToTime(sourceObject);
}

/**
 * Parse a lexical dateTime value into a Timestamp.
 */
protected Timestamp convertStringToTimestamp(String sourceObject) {
    return convertStringToTimestamp(sourceObject, Constants.DATE_TIME_QNAME);
}

/**
 * Convert an object to a Timestamp, parsing Strings against the given schema
 * type; non-Strings are delegated to the superclass.
 */
protected Timestamp convertObjectToTimestamp(Object sourceObject, QName schemaTypeQName) {
    if (sourceObject instanceof String) {
        return convertStringToTimestamp((String) sourceObject, schemaTypeQName);
    }
    return super.convertObjectToTimestamp(sourceObject);
}

/**
 * Parse a lexical value into a Timestamp, validating it against the given
 * schema type. Parse/mismatch errors are translated into schema-type-specific
 * exceptions (same pattern as convertStringToXMLGregorianCalendar, but using
 * the Timestamp-flavored time/dateTime exceptions).
 *
 * After building the Timestamp the fractional-second digits are re-read from
 * the source string so that full nanosecond precision (beyond the calendar's
 * milliseconds) is preserved via Timestamp.setNanos().
 *
 * @param sourceString lexical value to parse; empty returns null
 * @param schemaType the expected XML schema type
 * @return the parsed Timestamp, or null for an empty input
 */
public java.sql.Timestamp convertStringToTimestamp(String sourceString, QName schemaType) {
    XMLGregorianCalendar xmlGregorianCalender = null;
    try {
        xmlGregorianCalender = convertStringToXMLGregorianCalendar(sourceString);
    } catch (Exception ex) {
        // Translate the generic parse failure into the schema-type-specific exception.
        if (Constants.DATE_QNAME.equals(schemaType)) {
            throw ConversionException.incorrectDateFormat(sourceString);
        } else if (Constants.TIME_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectTimestampTimeFormat(sourceString);
        } else if (Constants.G_DAY_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGDayFormat(sourceString);
        } else if (Constants.G_MONTH_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGMonthFormat(sourceString);
        } else if (Constants.G_MONTH_DAY_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGMonthDayFormat(sourceString);
        } else if (Constants.G_YEAR_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGYearFormat(sourceString);
        } else if (Constants.G_YEAR_MONTH_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGYearMonthFormat(sourceString);
        } else if (Constants.DURATION_QNAME.equals(schemaType)) {
            throw new IllegalArgumentException();
        } else {
            throw XMLConversionException.incorrectTimestampDateTimeFormat(sourceString);
        }
    }
    if (xmlGregorianCalender == null) {
        return null;
    }
    GregorianCalendar cal = xmlGregorianCalender.toGregorianCalendar();
    if (xmlGregorianCalender.getTimezone() == DatatypeConstants.FIELD_UNDEFINED) {
        // No zone in the lexical value: interpret it in the session time zone.
        cal.setTimeZone(getTimeZone());
    }
    // Validate the parsed value's actual schema type against the requested one.
    QName calendarQName = xmlGregorianCalender.getXMLSchemaType();
    if (!calendarQName.equals(schemaType)) {
        if (Constants.DATE_QNAME.equals(schemaType)) {
            if (calendarQName.equals(Constants.DATE_TIME_QNAME)) {
                // A dateTime is accepted where a date was requested:
                // clear out the time portion.
                cal.clear(Calendar.HOUR_OF_DAY);
                cal.clear(Calendar.MINUTE);
                cal.clear(Calendar.SECOND);
                cal.clear(Calendar.MILLISECOND);
                return Helper.timestampFromCalendar(cal);
            } else {
                throw ConversionException.incorrectDateFormat(sourceString);
            }
        } else if (Constants.TIME_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectTimestampTimeFormat(sourceString);
        } else if (Constants.G_DAY_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGDayFormat(sourceString);
        } else if (Constants.G_MONTH_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGMonthFormat(sourceString);
        } else if (Constants.G_MONTH_DAY_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGMonthDayFormat(sourceString);
        } else if (Constants.G_YEAR_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGYearFormat(sourceString);
        } else if (Constants.G_YEAR_MONTH_QNAME.equals(schemaType)) {
            throw XMLConversionException.incorrectGYearMonthFormat(sourceString);
        } else if (Constants.DURATION_QNAME.equals(schemaType)) {
            throw new IllegalArgumentException();
        } else if (Constants.DATE_TIME_QNAME.equals(schemaType)) { //&& Constants.DATE_QNAME.equals(calendarQName)) {
            throw XMLConversionException.incorrectTimestampDateTimeFormat(sourceString);
        }
    }
    Timestamp timestamp = Helper.timestampFromCalendar(cal);
    // Re-read the fractional seconds from the source to preserve nanosecond
    // precision that the millisecond-based calendar cannot carry.
    String trimmedSourceString = sourceString.trim();
    int decimalIndex = trimmedSourceString.lastIndexOf('.');
    if (-1 == decimalIndex) {
        return timestamp;
    } else {
        // Find where the fraction ends: at the time-zone designator
        // ('Z', '-hh:mm' or '+hh:mm'), if one follows the decimal point.
        int timeZoneIndex = trimmedSourceString.lastIndexOf(GMT_SUFFIX);
        if (-1 == timeZoneIndex) {
            timeZoneIndex = trimmedSourceString.lastIndexOf('-');
            if (timeZoneIndex < decimalIndex) {
                // That '-' was a date separator, not a zone offset.
                timeZoneIndex = -1;
            }
            if (-1 == timeZoneIndex) {
                timeZoneIndex = trimmedSourceString.lastIndexOf('+');
            }
        }
        String nsString;
        if (-1 == timeZoneIndex) {
            nsString = trimmedSourceString.substring(decimalIndex + 1);
        } else {
            nsString = trimmedSourceString.substring((decimalIndex + 1), timeZoneIndex);
        }
        // Scale the digits to nanoseconds, e.g. ".123" -> 123000000ns.
        double ns = Long.valueOf(nsString).doubleValue();
        ns = ns * Math.pow(10, 9 - nsString.length());
        timestamp.setNanos((int) ns);
        return timestamp;
    }
}

/**
 * Format a Calendar as the lexical representation of the given schema type
 * (gDay, gMonth, gMonthDay, gYear, gYearMonth, date, time; anything else is
 * treated as dateTime). Time zone information set on the source calendar is
 * carried onto the output; for time/dateTime output a zone is also appended
 * when the conversion manager is time-zone qualified.
 */
public String stringFromCalendar(Calendar sourceCalendar, QName schemaTypeQName) {
    // Clone the calendar, because calling get() methods will
    // alter the original calendar
    Calendar cal = (Calendar) sourceCalendar.clone();
    XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
    // use the timezone info on source calendar, if any
    boolean isTimeZoneSet = sourceCalendar.isSet(Calendar.ZONE_OFFSET);
    if (isTimeZoneSet) {
        if (sourceCalendar.isSet(Calendar.DST_OFFSET)) {
            xgc.setTimezone((cal.get(Calendar.ZONE_OFFSET) + cal.get(Calendar.DST_OFFSET)) / 60000);
        } else {
            xgc.setTimezone((cal.get(Calendar.ZONE_OFFSET)) / 60000);
        }
    }
    // gDay
    if (Constants.G_DAY_QNAME.equals(schemaTypeQName)) {
        xgc.setDay(cal.get(Calendar.DATE));
        return xgc.toXMLFormat();
    }
    // gMonth
    if (Constants.G_MONTH_QNAME.equals(schemaTypeQName)) {
        //There was previously some workaround in the method for handling gMonth and the older/invalid
        //--MM-- format. Output should now always be in the --MM format
        //bug #410084
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        String xmlFormat = xgc.toXMLFormat();
        int lastDashIndex = xmlFormat.lastIndexOf('-');
        if (lastDashIndex > 1) {
            //this means the format is the --MM--, --MM--Z, --MM--+03:00 and we need to trim the String
            String pre = xmlFormat.substring(0, 4);
            if (xmlFormat.length() > 6) {
                // Keep any zone designator that follows the trailing dashes.
                String post = xmlFormat.substring(6, xmlFormat.length());
                return pre + post;
            } else {
                return pre;
            }
        }
        return xmlFormat;
    }
    // gMonthDay
    if (Constants.G_MONTH_DAY_QNAME.equals(schemaTypeQName)) {
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        xgc.setDay(cal.get(Calendar.DATE));
        return xgc.toXMLFormat();
    }
    // gYear
    if (Constants.G_YEAR_QNAME.equals(schemaTypeQName)) {
        // BC years are written as negative years.
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        return xgc.toXMLFormat();
    }
    // gYearMonth
    if (Constants.G_YEAR_MONTH_QNAME.equals(schemaTypeQName)) {
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        return xgc.toXMLFormat();
    }
    // Date
    if (Constants.DATE_QNAME.equals(schemaTypeQName)) {
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        xgc.setDay(cal.get(Calendar.DATE));
        return xgc.toXMLFormat();
    }
    // Time
    if (Constants.TIME_QNAME.equals(schemaTypeQName)) {
        int milliseconds = cal.get(Calendar.MILLISECOND);
        if (0 == milliseconds) {
            // Omit a zero millisecond field from the output entirely.
            milliseconds = DatatypeConstants.FIELD_UNDEFINED;
        }
        xgc.setTime(
                cal.get(Calendar.HOUR_OF_DAY),
                cal.get(Calendar.MINUTE),
                cal.get(Calendar.SECOND),
                milliseconds);
        if (!isTimeZoneSet && isTimeZoneQualified()) {
            xgc.setTimezone(getTimeZone().getOffset(sourceCalendar.getTimeInMillis()) / 60000);
        }
        return truncateMillis(xgc.toXMLFormat());
    }
    // DateTime
    if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
        xgc.setYear(-cal.get(Calendar.YEAR));
    } else {
        xgc.setYear(cal.get(Calendar.YEAR));
    }
    xgc.setMonth(cal.get(Calendar.MONTH) + 1);
    xgc.setDay(cal.get(Calendar.DATE));
    int milliseconds = cal.get(Calendar.MILLISECOND);
    if (0 == milliseconds) {
        milliseconds = DatatypeConstants.FIELD_UNDEFINED;
    }
    xgc.setTime(
            cal.get(Calendar.HOUR_OF_DAY),
            cal.get(Calendar.MINUTE),
            cal.get(Calendar.SECOND),
            milliseconds);
    if (!isTimeZoneSet && isTimeZoneQualified()) {
        xgc.setTimezone(getTimeZone().getOffset(sourceCalendar.getTimeInMillis()) / 60000);
    }
    return truncateMillis(xgc.toXMLFormat());
}

/**
 * Truncate any trailing zeros from the millisecond portion of a given string.
 * The string is assumed to be in dateTime or time format, as returned by
 * XMLGregorianCalendar.toXMLFormat().
 *
 * @param xmlFormat lexical time/dateTime string, possibly with a ".SSS" fraction
 * @return the string with trailing zero millisecond digits removed; if all
 *         three digits are zero the '.' and the fraction are removed entirely
 */
private String truncateMillis(String xmlFormat) {
    String result = xmlFormat;
    int dotIdx = xmlFormat.indexOf('.');
    if (dotIdx > 0) {
        String pre = xmlFormat.substring(0, dotIdx);
        String post = Constants.EMPTY_STRING;
        if (xmlFormat.length() > (dotIdx + 4)) {
            // Anything after the 3 millisecond digits (e.g. a zone designator).
            post = xmlFormat.substring(dotIdx + 4, xmlFormat.length());
        }
        // NOTE(review): assumes exactly 3 fractional digits follow the '.',
        // as produced by XMLGregorianCalendar.toXMLFormat() for milliseconds.
        String milliStr = xmlFormat.substring(dotIdx + 1, dotIdx + 4);
        char[] numbChar = new char[milliStr.length()];
        milliStr.getChars(0, milliStr.length(), numbChar, 0);
        // Walk back over trailing '0' digits (always keep at least one digit).
        int truncIndex = 2;
        while (truncIndex >= 1 && numbChar[truncIndex] == '0') {
            truncIndex--;
        }
        milliStr = new String(numbChar, 0, truncIndex + 1);
        if (milliStr.length() > 0 && !"0".equals(milliStr)) {
            milliStr = '.' + milliStr;
            result = pre + milliStr + post;
        } else {
            // Fraction was all zeros: drop the '.' as well.
            result = pre + post;
        }
    }
    return result;
}

/**
 * Format a Calendar, inferring the schema type from which fields are set:
 * no time fields set -> date; no date fields set -> time; otherwise dateTime.
 */
private String stringFromCalendar(Calendar sourceCalendar) {
    if (!(sourceCalendar.isSet(Calendar.HOUR) || sourceCalendar.isSet(Calendar.MINUTE) || sourceCalendar.isSet(Calendar.SECOND) || sourceCalendar.isSet(Calendar.MILLISECOND))) {
        return stringFromCalendar(sourceCalendar, Constants.DATE_QNAME);
    } else if (!(sourceCalendar.isSet(Calendar.YEAR) || sourceCalendar.isSet(Calendar.MONTH) || sourceCalendar.isSet(Calendar.DATE))) {
        return stringFromCalendar(sourceCalendar, Constants.TIME_QNAME);
    } else {
        return stringFromCalendar(sourceCalendar, Constants.DATE_TIME_QNAME);
    }
}

/**
 * Parse a lexical value into a java.util.Date, validating it against the
 * given schema type.
 * @return the parsed Date, or null for an empty input
 */
public java.util.Date convertStringToDate(String sourceString, QName schemaType) {
    XMLGregorianCalendar xmlGregorianCalender = convertStringToXMLGregorianCalendar(sourceString, schemaType);
    if (xmlGregorianCalender == null) {
        return null;
    }
    Calendar cal = toCalendar(xmlGregorianCalender);
    Date returnDate = cal.getTime();
    return returnDate;
}

/**
 * Convert an XMLGregorianCalendar into a Calendar, copying only the fields
 * that are defined; undefined fields are left cleared. A value without a
 * time zone is interpreted in the session time zone (getTimeZone()).
 */
private Calendar toCalendar(XMLGregorianCalendar xgc) {
    TimeZone tz = null;
    if (xgc.getTimezone() == DatatypeConstants.FIELD_UNDEFINED) {
        tz = getTimeZone();
    } else {
        tz = xgc.getTimeZone(xgc.getTimezone());
    }
    Calendar cal = Calendar.getInstance(tz, Locale.getDefault());
    cal.clear();
    if (xgc.getTimezone() != DatatypeConstants.FIELD_UNDEFINED) {
        // getTimezone() is in minutes; ZONE_OFFSET is in milliseconds.
        cal.set(Calendar.ZONE_OFFSET, xgc.getTimezone() * 60000);
    }
    BigInteger year = xgc.getEonAndYear();
    if (year != null) {
        // Negative years map to the BC era with a positive YEAR field.
        cal.set(Calendar.ERA, year.signum() < 0 ? GregorianCalendar.BC : GregorianCalendar.AD);
        cal.set(Calendar.YEAR, year.abs().intValue());
    }
    if (xgc.getDay() != DatatypeConstants.FIELD_UNDEFINED)
        cal.set(Calendar.DAY_OF_MONTH, xgc.getDay());
    if (xgc.getMonth() != DatatypeConstants.FIELD_UNDEFINED)
        cal.set(Calendar.MONTH, xgc.getMonth() - 1);
    if (xgc.getHour() != DatatypeConstants.FIELD_UNDEFINED)
        cal.set(Calendar.HOUR_OF_DAY, xgc.getHour());
    if (xgc.getMinute() != DatatypeConstants.FIELD_UNDEFINED)
        cal.set(Calendar.MINUTE, xgc.getMinute());
    if (xgc.getSecond() != DatatypeConstants.FIELD_UNDEFINED)
        cal.set(Calendar.SECOND, xgc.getSecond());
    if (xgc.getFractionalSecond() != null)
        cal.set(Calendar.MILLISECOND, xgc.getMillisecond());
    return cal;
}

/**
 * This method returns a dateTime string representing a given
 * java.util.Date.
 *
 * BC dates (sourceDate.getTime() < YEAR_ONE_AD_TIME) are handled
 * as follows: '2007 BC' --> '-2006 AD'
 *
 * @param sourceDate
 * @return
 */
private String stringFromDate(java.util.Date sourceDate) {
    GregorianCalendar cal = new GregorianCalendar(getTimeZone());
    cal.setTime(sourceDate);
    XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar(cal);
    String string = xgc.toXMLFormat();
    string = truncateMillis(string);
    return string;
}

/**
 * This method returns a string representing a given java.util.Date
 * based on a given schema type QName.
 *
 * BC dates (sourceDate.getTime() < YEAR_ONE_AD_TIME) are handled
 * as follows: '2007 BC' --> '-2006 AD'.
 *
 * @param sourceDate the date to format
 * @param schemaType the target XML schema type; unrecognized types format as dateTime
 * @return the lexical representation for the given schema type
 * @throws IllegalArgumentException if schemaType is xsd:duration
 */
public String stringFromDate(java.util.Date sourceDate, QName schemaType) {
    XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
    if (Constants.DATE_QNAME.equals(schemaType)) {
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setTime(sourceDate);
        xgc.setDay(cal.get(Calendar.DAY_OF_MONTH));
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        // BC years are written as negative years.
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        return xgc.toXMLFormat();
    }
    if (Constants.TIME_QNAME.equals(schemaType)) {
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setTime(sourceDate);
        xgc.setHour(cal.get(Calendar.HOUR_OF_DAY));
        xgc.setMinute(cal.get(Calendar.MINUTE));
        xgc.setSecond(cal.get(Calendar.SECOND));
        String string = xgc.toXMLFormat();
        string = appendMillis(string, sourceDate.getTime());
        return appendTimeZone(string, sourceDate);
    }
    if (Constants.G_DAY_QNAME.equals(schemaType)) {
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setTime(sourceDate);
        xgc.setDay(cal.get(Calendar.DATE));
        return xgc.toXMLFormat();
    }
    if (Constants.G_MONTH_QNAME.equals(schemaType)) {
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setTime(sourceDate);
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        // Delegate so the gMonth "--MM" formatting rules apply.
        return stringFromXMLGregorianCalendar(xgc, schemaType);
    }
    if (Constants.G_MONTH_DAY_QNAME.equals(schemaType)) {
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setTime(sourceDate);
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        xgc.setDay(cal.get(Calendar.DAY_OF_MONTH));
        return xgc.toXMLFormat();
    }
    if (Constants.G_YEAR_QNAME.equals(schemaType)) {
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setTime(sourceDate);
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        return xgc.toXMLFormat();
    }
    if (Constants.G_YEAR_MONTH_QNAME.equals(schemaType)) {
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setTime(sourceDate);
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        return xgc.toXMLFormat();
    }
    if (Constants.DURATION_QNAME.equals(schemaType)) {
        // A Date cannot be formatted as a duration.
        throw new IllegalArgumentException();
    }
    // default is dateTime
    return stringFromDate(sourceDate);
}

/**
 * Format a java.sql.Date as an xsd:date string. The calendar is made fully
 * proleptic (setGregorianChange to the minimum date) so dates before the
 * Gregorian cutover format consistently.
 */
private String stringFromSQLDate(java.sql.Date sourceDate) {
    XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
    GregorianCalendar cal = new GregorianCalendar(getTimeZone());
    cal.setGregorianChange(new Date(Long.MIN_VALUE));
    cal.setTime(sourceDate);
    // BC years are written as negative years.
    if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
        xgc.setYear(-cal.get(Calendar.YEAR));
    } else {
        xgc.setYear(cal.get(Calendar.YEAR));
    }
    xgc.setMonth(cal.get(Calendar.MONTH) + 1);
    xgc.setDay(cal.get(Calendar.DAY_OF_MONTH));
    return xgc.toXMLFormat();
}

/**
 * Format a java.sql.Date, defaulting to xsd:date when no schema type is given.
 */
private String stringFromSQLDate(java.sql.Date sourceDate, QName schemaType) {
    if (null == schemaType) {
        return stringFromSQLDate(sourceDate);
    } else {
        return stringFromDate(sourceDate, schemaType);
    }
}

/**
 * Format a java.sql.Time as an xsd:time string, appending milliseconds and
 * (if qualified) a time zone designator.
 */
private String stringFromSQLTime(Time sourceTime) {
    XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
    GregorianCalendar cal = new GregorianCalendar(getTimeZone());
    cal.setGregorianChange(new Date(Long.MIN_VALUE));
    cal.setTime(sourceTime);
    xgc.setHour(cal.get(Calendar.HOUR_OF_DAY));
    xgc.setMinute(cal.get(Calendar.MINUTE));
    xgc.setSecond(cal.get(Calendar.SECOND));
    String string = xgc.toXMLFormat();
    string = appendMillis(string, sourceTime.getTime());
    return appendTimeZone(string, sourceTime);
}

/**
 * Format a java.sql.Time, defaulting to xsd:time when no schema type is given.
 */
private String stringFromSQLTime(Time sourceTime, QName schemaType) {
    if (null == schemaType) {
        return stringFromSQLTime(sourceTime);
    } else {
        return stringFromDate(sourceTime, schemaType);
    }
}

/**
 * This method returns a dateTime string representing a given
Timestamp.
 *
 * BC dates (sourceDate.getTime() < YEAR_ONE_AD_TIME) are handled
 * as follows: '2007 BC' --> '-2006 AD'
 *
 * @param sourceDate
 * @return
 */
private String stringFromTimestamp(Timestamp sourceDate) {
    // Fully proleptic calendar so pre-cutover dates format consistently.
    GregorianCalendar cal = new GregorianCalendar(getTimeZone());
    cal.setGregorianChange(new Date(Long.MIN_VALUE));
    cal.setTime(sourceDate);
    XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
    // BC years are written as negative years.
    if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
        xgc.setYear(-cal.get(Calendar.YEAR));
    } else {
        xgc.setYear(cal.get(Calendar.YEAR));
    }
    xgc.setMonth(cal.get(Calendar.MONTH) + 1);
    xgc.setDay(cal.get(Calendar.DAY_OF_MONTH));
    xgc.setHour(cal.get(Calendar.HOUR_OF_DAY));
    xgc.setMinute(cal.get(Calendar.MINUTE));
    xgc.setSecond(cal.get(Calendar.SECOND));
    String string = xgc.toXMLFormat();
    // Append nanosecond precision from the Timestamp, then the zone designator.
    string = appendNanos(string, sourceDate);
    return appendTimeZone(string, sourceDate);
}

/**
 * This method returns a string representing a given Timestamp
 * based on a given schema type QName.
 *
 * BC dates (sourceDate.getTime() < YEAR_ONE_AD_TIME) are handled
 * as follows: '2007 BC' --> '-2006 AD'.
 *
 * @param sourceDate the timestamp to format
 * @param schemaType the target XML schema type; unrecognized types format as dateTime
 * @return the lexical representation for the given schema type
 * @throws IllegalArgumentException if schemaType is xsd:duration
 */
private String stringFromTimestamp(Timestamp sourceDate, QName schemaType) {
    if (Constants.DATE_QNAME.equals(schemaType)) {
        // Fully proleptic calendar so pre-cutover dates format consistently.
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setGregorianChange(new Date(Long.MIN_VALUE));
        cal.setTime(sourceDate);
        XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
        // BC years are written as negative years.
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        xgc.setDay(cal.get(Calendar.DAY_OF_MONTH));
        return xgc.toXMLFormat();
    }
    if (Constants.TIME_QNAME.equals(schemaType)) {
        Calendar cal = Calendar.getInstance(getTimeZone());
        cal.setTimeInMillis(sourceDate.getTime());
        XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
        xgc.setHour(cal.get(Calendar.HOUR_OF_DAY));
        xgc.setMinute(cal.get(Calendar.MINUTE));
        xgc.setSecond(cal.get(Calendar.SECOND));
        String string = xgc.toXMLFormat();
        string = appendNanos(string, sourceDate);
        return appendTimeZone(string, sourceDate);
    }
    if (Constants.G_DAY_QNAME.equals(schemaType)) {
        XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setGregorianChange(new Date(Long.MIN_VALUE));
        cal.setTime(sourceDate);
        xgc.setDay(cal.get(Calendar.DAY_OF_MONTH));
        return xgc.toXMLFormat();
    }
    if (Constants.G_MONTH_QNAME.equals(schemaType)) {
        XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setGregorianChange(new Date(Long.MIN_VALUE));
        cal.setTime(sourceDate);
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        // Delegate so the gMonth "--MM" formatting rules apply.
        return stringFromXMLGregorianCalendar(xgc, schemaType);
    }
    if (Constants.G_MONTH_DAY_QNAME.equals(schemaType)) {
        XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setGregorianChange(new Date(Long.MIN_VALUE));
        cal.setTime(sourceDate);
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        xgc.setDay(cal.get(Calendar.DAY_OF_MONTH));
        return xgc.toXMLFormat();
    }
    if (Constants.G_YEAR_QNAME.equals(schemaType)) {
        XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setGregorianChange(new Date(Long.MIN_VALUE));
        cal.setTime(sourceDate);
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        return xgc.toXMLFormat();
    }
    if (Constants.G_YEAR_MONTH_QNAME.equals(schemaType)) {
        XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
        GregorianCalendar cal = new GregorianCalendar(getTimeZone());
        cal.setGregorianChange(new Date(Long.MIN_VALUE));
        cal.setTime(sourceDate);
        if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
            xgc.setYear(-cal.get(Calendar.YEAR));
        } else {
            xgc.setYear(cal.get(Calendar.YEAR));
        }
        xgc.setMonth(cal.get(Calendar.MONTH) + 1);
        return xgc.toXMLFormat();
    }
    if (Constants.DURATION_QNAME.equals(schemaType)) {
        // A Timestamp cannot be formatted as a duration.
        throw new IllegalArgumentException();
    }
    // default is dateTime
    XMLGregorianCalendar xgc = getDatatypeFactory().newXMLGregorianCalendar();
    GregorianCalendar cal = new GregorianCalendar(getTimeZone());
    cal.setGregorianChange(new Date(Long.MIN_VALUE));
    cal.setTime(sourceDate);
    if (cal.get(Calendar.ERA) == GregorianCalendar.BC) {
        xgc.setYear(-cal.get(Calendar.YEAR));
    } else {
        xgc.setYear(cal.get(Calendar.YEAR));
    }
    xgc.setMonth(cal.get(Calendar.MONTH) + 1);
    xgc.setDay(cal.get(Calendar.DAY_OF_MONTH));
    xgc.setHour(cal.get(Calendar.HOUR_OF_DAY));
    xgc.setMinute(cal.get(Calendar.MINUTE));
    xgc.setSecond(cal.get(Calendar.SECOND));
    String string = xgc.toXMLFormat();
    string = appendNanos(string, sourceDate);
    return appendTimeZone(string, sourceDate);
}
private String stringFromXMLGregorianCalendar(XMLGregorianCalendar cal, QName schemaTypeQName) { if(schemaTypeQName !=null
&& schemaTypeQName.equals(cal.getXMLSchemaType()) && schemaTypeQName != Constants.G_MONTH_QNAME){ return cal.toXMLFormat(); } GregorianCalendar gCal = cal.toGregorianCalendar(); if(cal.getTimezone() == DatatypeConstants.FIELD_UNDEFINED) { gCal.clear(Calendar.ZONE_OFFSET); } return stringFromCalendar(gCal, schemaTypeQName); } private String stringFromXMLGregorianCalendar(XMLGregorianCalendar cal) { return cal.toXMLFormat(); } private String stringFromDuration(Duration dur) { return dur.toString(); } private String stringFromQName(QName sourceQName) { // String will be formatted as: {namespaceURI}localPart return sourceQName.toString(); } private QName qnameFromString(String sourceString) { // String will be formatted as: {namespaceURI}localPart if (sourceString.indexOf('{') != -1) { String uri = sourceString.substring(sourceString.indexOf('{') + 1, sourceString.indexOf('}')); String localpart = sourceString.substring(sourceString.indexOf('}') + 1); return new QName(uri, localpart); } else { return new QName(sourceString); } } /** * INTERNAL: * Converts a String which is in Base64 format to a Byte[] */ public byte[] convertSchemaBase64ToByteArray(Object sourceObject) throws ConversionException { if (sourceObject instanceof String) { //the base64 string may have contained embedded whitespaces. Try again after //Removing any whitespaces. 
StringTokenizer tokenizer = new StringTokenizer((String)sourceObject); StringBuilder builder = new StringBuilder(); while(tokenizer.hasMoreTokens()) { builder.append(tokenizer.nextToken()); } byte[] bytes = Base64.base64Decode(builder.toString().getBytes()); return bytes; } return convertObjectToByteArray(sourceObject); } public Object convertSchemaBase64ListToByteArrayList(Object sourceObject, CoreContainerPolicy containerPolicy, CoreAbstractSession session) throws ConversionException { if (sourceObject instanceof String) { StringTokenizer tokenizer = new StringTokenizer((String) sourceObject, " "); Object container = containerPolicy.containerInstance(); while (tokenizer.hasMoreElements()) { String token = tokenizer.nextToken(); byte[] bytes = Base64.base64Decode(token.getBytes()); containerPolicy.addInto(bytes, container, session); } return container; } throw ConversionException.couldNotBeConverted(sourceObject, CoreClassConstants.ABYTE); } protected Byte[] convertSchemaBase64ToByteObjectArray(Object sourceObject) throws ConversionException { byte[] bytes = convertSchemaBase64ToByteArray(sourceObject); Byte[] objectBytes = new Byte[bytes.length]; for (int index = 0; index < bytes.length; index++) { objectBytes[index] = bytes[index]; } return objectBytes; } public String buildBase64StringFromBytes(byte[] bytes) { byte[] convertedBytes = Base64.base64Encode(bytes); StringBuffer buffer = new StringBuffer(); for (int i = 0; i < convertedBytes.length; i++) { buffer.append((char) convertedBytes[i]); } return buffer.toString(); } public String buildBase64StringFromObjectBytes(Byte[] bytes) { byte[] primitiveBytes = new byte[bytes.length]; for (int i = 0; i < bytes.length; i++) { primitiveBytes[i] = bytes[i].byteValue(); } return buildBase64StringFromBytes(primitiveBytes); } protected String buildHexStringFromObjectBytes(Byte[] bytes) { byte[] primitiveBytes = new byte[bytes.length]; for (int i = 0; i < bytes.length; i++) { primitiveBytes[i] = bytes[i].byteValue(); } 
return Helper.buildHexStringFromBytes(primitiveBytes); } protected List convertStringToList(Object sourceObject) throws ConversionException { ArrayList list = new ArrayList(); if (sourceObject instanceof String) { StringTokenizer tokenizer = new StringTokenizer((String) sourceObject, " "); while (tokenizer.hasMoreElements()) { String token = tokenizer.nextToken(); list.add(token); } } return list; } /** * Convert the given sourceObject (String) to the appropriate collection type specified by the * containerPolicy, using the elementType to properly convert each element of the list. * * @param sourceObject - will always be a string if read from XML * @param elementType - the type of the elements contained in the list * @return - the newly converted object */ public Object convertStringToList(Object sourceObject, Class elementType, ContainerPolicy containerPolicy, QName schemaType) throws ConversionException { Collection collection = (Collection) containerPolicy.containerInstance(); if (sourceObject instanceof String) { StringTokenizer tokenizer = new StringTokenizer((String) sourceObject, " "); while (tokenizer.hasMoreElements()) { String token = tokenizer.nextToken(); collection.add(convertObject(token, elementType,schemaType )); } } return collection; } public String convertListToString(Object sourceObject, QName schemaType) throws ConversionException { StringBuilder returnStringBuilder = new StringBuilder(); if (sourceObject instanceof List) { List list = (List) sourceObject; for (int i = 0, listSize = list.size(); i < listSize; i++) { Object next = list.get(i); if (i > 0) { returnStringBuilder.append(' '); } returnStringBuilder.append((String)convertObject(next, String.class, schemaType)); } } return returnStringBuilder.toString(); } public static HashMap getDefaultXMLTypes() { if (defaultXMLTypes == null) { defaultXMLTypes = buildXMLTypes(); } return defaultXMLTypes; } public static HashMap getDefaultJavaTypes() { if (defaultJavaTypes == null) { defaultJavaTypes 
= buildJavaTypes(); } return defaultJavaTypes; } /** * Build and return a Hashtable containing the default XML to Java conversion pairs */ private static HashMap buildXMLTypes() { HashMap XMLTypes = new HashMap(); //jaxb 1.0 spec pairs XMLTypes.put(Constants.ANY_SIMPLE_TYPE_QNAME, CoreClassConstants.STRING); XMLTypes.put(Constants.BASE_64_BINARY_QNAME, CoreClassConstants.APBYTE); XMLTypes.put(Constants.BOOLEAN_QNAME, CoreClassConstants.PBOOLEAN); XMLTypes.put(Constants.BYTE_QNAME, CoreClassConstants.PBYTE); XMLTypes.put(Constants.DATE_QNAME, CoreClassConstants.CALENDAR); XMLTypes.put(Constants.DATE_TIME_QNAME, CoreClassConstants.CALENDAR); XMLTypes.put(Constants.DECIMAL_QNAME, CoreClassConstants.BIGDECIMAL); XMLTypes.put(Constants.DOUBLE_QNAME, CoreClassConstants.PDOUBLE); XMLTypes.put(Constants.FLOAT_QNAME, CoreClassConstants.PFLOAT); XMLTypes.put(Constants.HEX_BINARY_QNAME, CoreClassConstants.APBYTE); XMLTypes.put(Constants.INT_QNAME, CoreClassConstants.PINT); XMLTypes.put(Constants.INTEGER_QNAME, CoreClassConstants.BIGINTEGER); XMLTypes.put(Constants.LONG_QNAME, CoreClassConstants.PLONG); XMLTypes.put(Constants.NAME_QNAME, CoreClassConstants.STRING); XMLTypes.put(Constants.NCNAME_QNAME, CoreClassConstants.STRING); XMLTypes.put(Constants.QNAME_QNAME, Constants.QNAME_CLASS); XMLTypes.put(Constants.SHORT_QNAME, CoreClassConstants.PSHORT); XMLTypes.put(Constants.STRING_QNAME, CoreClassConstants.STRING); XMLTypes.put(Constants.TIME_QNAME, CoreClassConstants.CALENDAR); XMLTypes.put(Constants.UNSIGNED_BYTE_QNAME, CoreClassConstants.PSHORT); XMLTypes.put(Constants.UNSIGNED_INT_QNAME, CoreClassConstants.PLONG); XMLTypes.put(Constants.UNSIGNED_SHORT_QNAME, CoreClassConstants.PINT); XMLTypes.put(Constants.DURATION_QNAME, CoreClassConstants.DURATION); XMLTypes.put(Constants.G_DAY_QNAME, CoreClassConstants.XML_GREGORIAN_CALENDAR); XMLTypes.put(Constants.G_MONTH_QNAME, CoreClassConstants.XML_GREGORIAN_CALENDAR); XMLTypes.put(Constants.G_MONTH_DAY_QNAME, 
CoreClassConstants.XML_GREGORIAN_CALENDAR); XMLTypes.put(Constants.G_YEAR_QNAME, CoreClassConstants.XML_GREGORIAN_CALENDAR); XMLTypes.put(Constants.G_YEAR_MONTH_QNAME, CoreClassConstants.XML_GREGORIAN_CALENDAR); XMLTypes.put(Constants.NEGATIVE_INTEGER_QNAME, CoreClassConstants.BIGINTEGER); XMLTypes.put(Constants.NOTATION_QNAME, Constants.QNAME_CLASS); XMLTypes.put(Constants.NON_NEGATIVE_INTEGER_QNAME, CoreClassConstants.BIGINTEGER); XMLTypes.put(Constants.NON_POSITIVE_INTEGER_QNAME,CoreClassConstants.BIGINTEGER); XMLTypes.put(Constants.NORMALIZEDSTRING_QNAME, CoreClassConstants.STRING); XMLTypes.put(Constants.POSITIVE_INTEGER_QNAME, CoreClassConstants.BIGINTEGER); XMLTypes.put(Constants.UNSIGNED_LONG_QNAME, CoreClassConstants.BIGINTEGER); return XMLTypes; } /** * Build and return a Hashtable containing the default Java to XML conversion pairs */ private static HashMap buildJavaTypes() { HashMap javaTypes = new HashMap(); //jaxb 1.0 spec pairs javaTypes.put(CoreClassConstants.APBYTE, Constants.HEX_BINARY_QNAME); javaTypes.put(CoreClassConstants.BIGDECIMAL, Constants.DECIMAL_QNAME); javaTypes.put(CoreClassConstants.BIGINTEGER, Constants.INTEGER_QNAME); javaTypes.put(CoreClassConstants.PBOOLEAN, Constants.BOOLEAN_QNAME); javaTypes.put(CoreClassConstants.PBYTE, Constants.BYTE_QNAME); javaTypes.put(CoreClassConstants.CALENDAR, Constants.DATE_TIME_QNAME); javaTypes.put(CoreClassConstants.PDOUBLE, Constants.DOUBLE_QNAME); javaTypes.put(CoreClassConstants.PFLOAT, Constants.FLOAT_QNAME); javaTypes.put(CoreClassConstants.PINT, Constants.INT_QNAME); javaTypes.put(CoreClassConstants.PLONG, Constants.LONG_QNAME); javaTypes.put(CoreClassConstants.PSHORT, Constants.SHORT_QNAME); javaTypes.put(Constants.QNAME_CLASS, Constants.QNAME_QNAME); javaTypes.put(CoreClassConstants.STRING, Constants.STRING_QNAME); //other pairs javaTypes.put(CoreClassConstants.ABYTE, Constants.HEX_BINARY_QNAME); javaTypes.put(CoreClassConstants.BOOLEAN, Constants.BOOLEAN_QNAME); 
javaTypes.put(CoreClassConstants.BYTE, Constants.BYTE_QNAME); javaTypes.put(CoreClassConstants.GREGORIAN_CALENDAR, Constants.DATE_TIME_QNAME); javaTypes.put(CoreClassConstants.DOUBLE, Constants.DOUBLE_QNAME); javaTypes.put(CoreClassConstants.FLOAT, Constants.FLOAT_QNAME); javaTypes.put(CoreClassConstants.INTEGER, Constants.INT_QNAME); javaTypes.put(CoreClassConstants.LONG, Constants.LONG_QNAME); javaTypes.put(CoreClassConstants.SHORT, Constants.SHORT_QNAME); javaTypes.put(CoreClassConstants.UTILDATE, Constants.DATE_TIME_QNAME); javaTypes.put(CoreClassConstants.CHAR, Constants.UNSIGNED_INT_QNAME); javaTypes.put(CoreClassConstants.PCHAR, Constants.UNSIGNED_INT_QNAME); javaTypes.put(CoreClassConstants.DURATION, Constants.DURATION_QNAME); javaTypes.put(Constants.UUID, Constants.STRING_QNAME); javaTypes.put(Constants.URI, Constants.STRING_QNAME); javaTypes.put(CoreClassConstants.URL_Class, Constants.ANY_URI_QNAME); return javaTypes; } private String appendTimeZone(String string, Date date) { StringBuilder stringBuilder = new StringBuilder(string); // GMT Time Zone int rawMinuteOffset = getTimeZone().getOffset(date.getTime()) / 60000; if (0 == rawMinuteOffset) { stringBuilder.append(GMT_SUFFIX); return stringBuilder.toString(); } // +HH:MM if (rawMinuteOffset < 0) { stringBuilder.append('-'); rawMinuteOffset = Math.abs(rawMinuteOffset); } else { stringBuilder.append('+'); } int hourOffset = rawMinuteOffset / 60; if (hourOffset < 10) { stringBuilder.append('0'); } stringBuilder.append(hourOffset); stringBuilder.append(Constants.COLON); int minuteOffset = rawMinuteOffset % 60; if (minuteOffset < 10) { stringBuilder.append('0'); } stringBuilder.append(minuteOffset); return stringBuilder.toString(); } /** * INTERNAL: */ public Object clone() { XMLConversionManager clone = (XMLConversionManager) super.clone(); return clone; } /** * Convenience method that appends nanosecond values from a given * time to a given string. 
* * @param string * @param time * @return */ private String appendNanos(String string, Timestamp ts) { StringBuilder strBldr = new StringBuilder(string); int nanos = ts.getNanos(); strBldr.append(nanos==0 ? "" : '.' + Helper.buildZeroPrefixAndTruncTrailZeros(nanos, TOTAL_NS_DIGITS)).toString(); return strBldr.toString(); } /** * Convenience method that appends millisecond values from a given * time to a given string. * * @param string * @param time * @return */ private String appendMillis(String string, long time) { StringBuilder strBldr = new StringBuilder(string); int msns = (int) (time % 1000); if (msns < 0) { // adjust for negative time values, i.e. before Epoch msns = msns + 1000; } strBldr.append(msns==0 ? "" : '.' + Helper.buildZeroPrefixAndTruncTrailZeros(msns, TOTAL_MS_DIGITS)).toString(); return strBldr.toString(); } public QName buildQNameFromString(String stringValue, AbstractUnmarshalRecord record){ stringValue = stringValue.trim(); int index = stringValue.lastIndexOf(Constants.COLON); if(index > -1) { String prefix = stringValue.substring(0, index); String localName = stringValue.substring(index + 1); if(record.isNamespaceAware()){ String namespaceURI = record.resolveNamespacePrefix(prefix); return new QName(namespaceURI, localName, prefix); }else{ return new QName(null, localName, prefix); } } else { String namespaceURI = record.resolveNamespacePrefix(Constants.EMPTY_STRING); if(namespaceURI == null){ namespaceURI = record.resolveNamespacePrefix(null); } return new QName(namespaceURI, stringValue); } } /** * Replaces any CR, Tab or LF characters in the string with a single ' ' character. 
*/ public String normalizeStringValue(String value) { int i = 0; int length = value.length(); //check for the first whitespace while(i < length) { if(isWhitespace(value.charAt(i), false)) { break; } i++; } if(i == length) { return value; } char[] buffer = value.toCharArray(); buffer[i] = ' '; i++; for(; i < length; i++) { if(isWhitespace(buffer[i], false)) { buffer[i] = ' '; } } return new String(buffer); } /** * Removes all leading and trailing whitespaces, and replaces any sequences of whitespaces * that occur in the string with a single ' ' character. */ public String collapseStringValue(String value) { int length = value.length(); int start = 0; while(start < length) { if(isWhitespace(value.charAt(start), true)) { break; } start++; } if(start == length) { return value; } StringBuffer collapsedString = new StringBuffer(length); if(start != 0) { for(int i = 0; i < start; i++) { collapsedString.append(value.charAt(i)); } collapsedString.append(' '); } boolean inSequence = true; for(int i = start + 1; i < length; i++) { char nextCharacter = value.charAt(i); if(!isWhitespace(nextCharacter, true)) { collapsedString.append(nextCharacter); inSequence = false; } else { if(inSequence) { continue; } else { collapsedString.append(' '); inSequence = true; } } } length = collapsedString.length(); if(length > 0 && collapsedString.charAt(length -1) == ' ') { collapsedString.setLength(length - 1); } return collapsedString.toString(); } private boolean isWhitespace(char character, boolean includeSpace) { if(character > 0x20) { return false; } if(character == 0x9 || character == 0xA || character == 0xD) { return true; } if(includeSpace) { return character == 0x20; } return false; } private boolean isNumericQName(QName schemaTypeQName){ if(schemaTypeQName == null){ return false; } return(schemaTypeQName.equals(Constants.BYTE_QNAME )) ||(schemaTypeQName.equals(Constants.DECIMAL_QNAME )) ||(schemaTypeQName.equals(Constants.INT_QNAME )) 
||(schemaTypeQName.equals(Constants.INTEGER_QNAME )) ||(schemaTypeQName.equals(Constants.FLOAT_QNAME )) ||(schemaTypeQName.equals(Constants.LONG_QNAME )) ||(schemaTypeQName.equals(Constants.NEGATIVE_INTEGER_QNAME)) ||(schemaTypeQName.equals(Constants.NON_NEGATIVE_INTEGER_QNAME)) ||(schemaTypeQName.equals(Constants.NON_POSITIVE_INTEGER_QNAME)) ||(schemaTypeQName.equals(Constants.POSITIVE_INTEGER_QNAME)) ||(schemaTypeQName.equals(Constants.SHORT_QNAME )) ||(schemaTypeQName.equals(Constants.UNSIGNED_SHORT_QNAME )) ||(schemaTypeQName.equals(Constants.UNSIGNED_LONG_QNAME )) ||(schemaTypeQName.equals(Constants.UNSIGNED_INT_QNAME )) ||(schemaTypeQName.equals(Constants.UNSIGNED_BYTE_QNAME )); } public Object convertHexBinaryListToByteArrayList(Object sourceObject, CoreContainerPolicy containerPolicy, CoreAbstractSession session) { if (sourceObject instanceof String) { StringTokenizer tokenizer = new StringTokenizer((String) sourceObject, " "); Object container = containerPolicy.containerInstance(); while (tokenizer.hasMoreElements()) { String token = tokenizer.nextToken(); byte[] bytes = Helper.buildBytesFromHexString(token); containerPolicy.addInto(bytes, container, session); } return container; } throw ConversionException.couldNotBeConverted(sourceObject, CoreClassConstants.ABYTE); } }././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLVariableXPathObjectMappingNodeValue.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLVariableXPathObjectMappingNodeValue.j0000664000000000000000000000475312216173126030534 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Denise Smith - 2.5.1 - Initial Implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.VariableXPathObjectMapping; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; public class XMLVariableXPathObjectMappingNodeValue extends XMLVariableXPathMappingNodeValue { VariableXPathObjectMapping mapping; public XMLVariableXPathObjectMappingNodeValue(VariableXPathObjectMapping mapping) { this.mapping = mapping; } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { return marshal(xPathFragment, marshalRecord, object, session, namespaceResolver, ObjectMarshalContext.getInstance()); } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { if (mapping.isReadOnly()) { return false; } Object objectValue = marshalContext.getAttributeValue(object, mapping); return this.marshalSingleValue(xPathFragment, marshalRecord, object, objectValue, session, namespaceResolver, marshalContext); } @Override public VariableXPathObjectMapping getMapping() { return mapping; } protected void setOrAddAttributeValue(UnmarshalRecord unmarshalRecord, Object value, XPathFragment xPathFragment, Object collection){ 
unmarshalRecord.setAttributeValue(value, mapping); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLAnyAttributeMappingNodeValue.java0000664000000000000000000001765312216173126030021 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.xml.namespace.QName; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.queries.CoreMappedKeyMapContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.AnyAttributeMapping; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; /** * INTERNAL: *

Purpose: This is how the XML Any Attribute Mapping is handled when * used with the TreeObjectBuilder.

*/ public class XMLAnyAttributeMappingNodeValue extends MappingNodeValue implements ContainerValue { private AnyAttributeMapping xmlAnyAttributeMapping; private int index = -1; public XMLAnyAttributeMappingNodeValue(AnyAttributeMapping xmlAnyAttributeMapping) { super(); this.xmlAnyAttributeMapping = xmlAnyAttributeMapping; } public boolean isOwningNode(XPathFragment xPathFragment) { return xPathFragment == null; } @Override public boolean isWrapperAllowedAsCollectionName() { return false; } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { if (xmlAnyAttributeMapping.isReadOnly()) { return false; } Object collection = xmlAnyAttributeMapping.getAttributeValueFromObject(object); if (collection == null) { return false; } CoreContainerPolicy cp = getContainerPolicy(); Object iter = cp.iteratorFor(collection); if (!cp.hasNext(iter)) { return false; } XPathFragment groupingElements = marshalRecord.openStartGroupingElements(namespaceResolver); List extraNamespaces = new ArrayList(); NamespaceResolver nr = marshalRecord.getNamespaceResolver(); while (cp.hasNext(iter)) { Map.Entry entry = (Map.Entry)cp.nextEntry(iter, session); Object key = entry.getKey(); if (key instanceof QName) { QName name = (QName) key; String value = entry.getValue().toString(); String qualifiedName = name.getLocalPart(); if (nr != null) { String prefix = nr.resolveNamespaceURI(name.getNamespaceURI()); if ((prefix != null) && prefix.length() > 0) { qualifiedName = prefix + Constants.COLON+ qualifiedName; } else if (name.getNamespaceURI() != null && name.getNamespaceURI().length() > 0) { String generatedPrefix = nr.generatePrefix(); if(marshalRecord.hasCustomNamespaceMapper()) { String customPrefix = marshalRecord.getMarshaller().getNamespacePrefixMapper().getPreferredPrefix(name.getNamespaceURI(), generatedPrefix, true); if(customPrefix != null && customPrefix.length() > 0) { 
generatedPrefix = customPrefix; } } qualifiedName = generatedPrefix + Constants.COLON + qualifiedName; nr.put(generatedPrefix, name.getNamespaceURI()); extraNamespaces.add(generatedPrefix); marshalRecord.namespaceDeclaration(generatedPrefix, name.getNamespaceURI()); } } if(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI.equals(name.getNamespaceURI())){ marshalRecord.namespaceDeclaration(name.getLocalPart(), name.getNamespaceURI()); }else{ marshalRecord.attribute(name.getNamespaceURI(), name.getLocalPart(), qualifiedName, value); } } } for (int i = 0; i < extraNamespaces.size(); i++) { marshalRecord.getNamespaceResolver().removeNamespace((String) extraNamespaces.get(i)); } marshalRecord.closeStartGroupingElements(groupingElements); return true; } public void attribute(UnmarshalRecord unmarshalRecord, String namespaceURI, String localName, String value) { boolean includeAttribute = true; if(!xmlAnyAttributeMapping.isNamespaceDeclarationIncluded() && javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI.equals(namespaceURI)){ includeAttribute = false; }else if(!xmlAnyAttributeMapping.isSchemaInstanceIncluded() && javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI.equals(namespaceURI)){ includeAttribute = false; } if(includeAttribute){ CoreContainerPolicy cp = xmlAnyAttributeMapping.getContainerPolicy(); Object containerInstance = unmarshalRecord.getContainerInstance(this); QName key = new QName(namespaceURI, localName); cp.addInto(key, value, containerInstance, unmarshalRecord.getSession()); } } public Object getContainerInstance() { return xmlAnyAttributeMapping.getContainerPolicy().containerInstance(); } public void setContainerInstance(Object object, Object container) { xmlAnyAttributeMapping.setAttributeValueInObject(object, container); } public CoreMappedKeyMapContainerPolicy getContainerPolicy() { return (CoreMappedKeyMapContainerPolicy) xmlAnyAttributeMapping.getContainerPolicy(); } public boolean isContainerValue() { return true; } public boolean 
marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object value, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { return true; } public AnyAttributeMapping getMapping() { return xmlAnyAttributeMapping; } public boolean getReuseContainer() { return getMapping().getReuseContainer(); } /** * INTERNAL: * Used to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord */ public void setIndex(int index){ this.index = index; } /** * INTERNAL: * Set to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord * Set during TreeObjectBuilder initialization */ public int getIndex(){ return index; } /** * INTERNAL * Return true if an empty container should be set on the object if there * is no presence of the collection in the XML document. * @since EclipseLink 2.3.3 */ public boolean isDefaultEmptyContainer() { return getMapping().isDefaultEmptyContainer(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/NullCapableValue.java0000664000000000000000000000242012216173126025047 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import org.eclipse.persistence.core.sessions.CoreSession; /** * INTERNAL: *

Purpose: If an implementation of NodeValue is capable of returning * a null value then it must implement this interface to be handled correctly by * the TreeObjectBuilder.

*/ public interface NullCapableValue { /** * INTERNAL: * Set the null representation of the (object). * @param object * @param session */ public void setNullValue(Object object, CoreSession session); } ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLBinaryDataCollectionMappingNodeValue.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLBinaryDataCollectionMappingNodeValue.0000664000000000000000000004032012216173126030561 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import javax.activation.DataHandler; import javax.xml.namespace.QName; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.helper.CoreClassConstants; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.BinaryDataCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLReader; import org.eclipse.persistence.internal.oxm.record.deferred.BinaryMappingContentHandler; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; import org.eclipse.persistence.oxm.mappings.nullpolicy.XMLNullRepresentationType; /** * INTERNAL: *

Purpose: This is how the XML Binary Data Collection Mapping is * handled when used with the TreeObjectBuilder.

*/
public class XMLBinaryDataCollectionMappingNodeValue extends MappingNodeValue implements ContainerValue {

    // The mapping this node value marshals/unmarshals; assigned once in the constructor.
    private BinaryDataCollectionMapping xmlBinaryDataCollectionMapping;
    // Index into the containerInstances Object[] on UnmarshalRecord; -1 until set by TreeObjectBuilder.
    private int index = -1;

    public XMLBinaryDataCollectionMappingNodeValue(BinaryDataCollectionMapping mapping) {
        this.xmlBinaryDataCollectionMapping = mapping;
    }

    /** Stores the fully built collection on the mapped attribute of the given object. */
    public void setContainerInstance(Object object, Object containerInstance) {
        xmlBinaryDataCollectionMapping.setAttributeValueInObject(object, containerInstance);
    }

    /** Returns a new, empty container created by the mapping's container policy. */
    public Object getContainerInstance() {
        return getContainerPolicy().containerInstance();
    }

    public CoreContainerPolicy getContainerPolicy() {
        return xmlBinaryDataCollectionMapping.getContainerPolicy();
    }

    /** Converts one collection entry to its lexical (String) form via the session's conversion manager. */
    protected String getValueToWrite(QName schemaType, Object value, CoreAbstractSession session) {
        return (String) ((XMLConversionManager) session.getDatasourcePlatform().getConversionManager()).convertObject(value, CoreClassConstants.STRING, schemaType);
    }

    /** This node owns the fragment when it is the last step in the XPath, or an attribute. */
    public boolean isOwningNode(XPathFragment xPathFragment) {
        return xPathFragment.getNextFragment() == null || xPathFragment.isAttribute();
    }

    @Override
    public boolean isWrapperAllowedAsCollectionName() {
        return true;
    }

    /**
     * Marshals the whole collection: handles the read-only, null-collection and
     * empty-collection cases, then delegates each entry to marshalSingleValue.
     * Returns false when nothing was written.
     */
    public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) {
        if (xmlBinaryDataCollectionMapping.isReadOnly()) {
            return false;
        }
        Object collection = xmlBinaryDataCollectionMapping.getAttributeAccessor().getAttributeValueFromObject(object);
        if (null == collection) {
            // A null collection is only written out when the wrapper null policy asks for xsi:nil.
            AbstractNullPolicy wrapperNP = xmlBinaryDataCollectionMapping.getWrapperNullPolicy();
            if (wrapperNP != null && wrapperNP.getMarshalNullRepresentation() == XMLNullRepresentationType.XSI_NIL) {
                marshalRecord.nilSimple(namespaceResolver);
                return true;
            } else {
                return false;
            }
        }
        CoreContainerPolicy cp = getContainerPolicy();
        Object iterator = cp.iteratorFor(collection);
        if (!cp.hasNext(iterator)) {
            // Empty collection: presence of a wrapper null policy decides whether an empty element is emitted.
            return marshalRecord.emptyCollection(xPathFragment, namespaceResolver, xmlBinaryDataCollectionMapping.getWrapperNullPolicy() != null);
        }
        XPathFragment groupingFragment = marshalRecord.openStartGroupingElements(namespaceResolver);
        marshalRecord.closeStartGroupingElements(groupingFragment);
        marshalRecord.startCollection();
        while (cp.hasNext(iterator)) {
            Object objectValue = cp.next(iterator, session);
            marshalSingleValue(xPathFragment, marshalRecord, object, objectValue, session, namespaceResolver, ObjectMarshalContext.getInstance());
        }
        marshalRecord.endCollection();
        return true;
    }

    /**
     * SAX start-element hook. For element content, installs a
     * BinaryMappingContentHandler to consume the (possibly xop:Include) element
     * events; for attribute content, resolves the swaRef attachment or decodes
     * the inline base64 value immediately.
     */
    public boolean startElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Attributes atts) {
        try {
            Field xmlField = (Field) xmlBinaryDataCollectionMapping.getField();
            XPathFragment lastFragment = xmlField.getLastXPathFragment();
            if (!lastFragment.isAttribute()) {
                // Set a new content handler to deal with the Include element's events.
                BinaryMappingContentHandler handler = new BinaryMappingContentHandler(unmarshalRecord, this, this.xmlBinaryDataCollectionMapping);
                String qnameString = xPathFragment.getLocalName();
                if (xPathFragment.getPrefix() != null) {
                    qnameString = xPathFragment.getPrefix() + Constants.COLON + qnameString;
                }
                // Replay this start-element on the delegate before handing over the reader.
                handler.startElement(xPathFragment.getNamespaceURI(), xPathFragment.getLocalName(), qnameString, atts);
                XMLReader xmlReader = unmarshalRecord.getXMLReader();
                xmlReader.setContentHandler(handler);
                xmlReader.setLexicalHandler(handler);
            } else if (lastFragment.isAttribute()) {
                // Handle swaRef and inline attribute cases here:
                String value = atts.getValue(lastFragment.getNamespaceURI(), lastFragment.getLocalName());
                Object fieldValue = null;
                if (xmlBinaryDataCollectionMapping.isSwaRef()) {
                    // The attribute value is a swaRef content id; without an
                    // AttachmentUnmarshaller nothing can be resolved, so nothing is set.
                    if (unmarshalRecord.getUnmarshaller().getAttachmentUnmarshaller() != null) {
                        if (xmlBinaryDataCollectionMapping.getAttributeClassification() == XMLBinaryDataHelper.getXMLBinaryDataHelper().DATA_HANDLER) {
                            fieldValue = unmarshalRecord.getUnmarshaller().getAttachmentUnmarshaller().getAttachmentAsDataHandler(value);
                        } else {
                            fieldValue = unmarshalRecord.getUnmarshaller().getAttachmentUnmarshaller().getAttachmentAsByteArray(value);
                        }
                        xmlBinaryDataCollectionMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), XMLBinaryDataHelper.getXMLBinaryDataHelper().convertObject(fieldValue, xmlBinaryDataCollectionMapping.getAttributeClassification(), unmarshalRecord.getSession(), xmlBinaryDataCollectionMapping.getContainerPolicy()));
                    }
                } else {
                    // Value should be base64 binary string.
                    fieldValue = ((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).convertSchemaBase64ToByteArray(value);
                    xmlBinaryDataCollectionMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), XMLBinaryDataHelper.getXMLBinaryDataHelper().convertObject(fieldValue, xmlBinaryDataCollectionMapping.getAttributeClassification(), unmarshalRecord.getSession(),xmlBinaryDataCollectionMapping.getContainerPolicy()));
                }
            }
            return true;
        } catch(SAXException ex) {
            throw XMLMarshalException.unmarshalException(ex);
        }
    }

    public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) {
        // Discard any character data accumulated for this element.
        unmarshalRecord.resetStringBuffer();
    }

    public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Object container) {
        this.endElement(xPathFragment, unmarshalRecord);
    }

    /** Returns the value cast to DataHandler when that is the declared classification, otherwise null. */
    public DataHandler getDataHandlerForObjectValue(Object obj, Class classification) {
        if (classification == DataHandler.class) {
            return (DataHandler) obj;
        }
        return null;
    }

    public boolean isContainerValue() {
        return true;
    }

    /**
     * Marshals one entry of the collection. Cases, in order: null value
     * (delegated to the null policy), swaRef attachment, XOP/MTOM attachment
     * (xop:Include element), or inline characters.
     */
    @Override
    public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object objectValue, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) {
        if(objectValue == null) {
            AbstractNullPolicy nullPolicy = xmlBinaryDataCollectionMapping.getNullPolicy();
            if (nullPolicy.getMarshalNullRepresentation() != XMLNullRepresentationType.ABSENT_NODE) {
                // Temporarily install a grouping element so the null policy writes inside this fragment.
                XPathNode holderXPathNode = new XPathNode();
                holderXPathNode.setXPathFragment(xPathFragment);
                marshalRecord.addGroupingElement(holderXPathNode);
                boolean returnVal = xmlBinaryDataCollectionMapping.getNullPolicy().directMarshal(xPathFragment, marshalRecord, object, session, namespaceResolver);
                if(returnVal){
                    marshalRecord.endElement(xPathFragment, namespaceResolver);
                }
                marshalRecord.removeGroupingElement(holderXPathNode);
                return returnVal;
            }
            return true;
        }
        String mimeType = this.xmlBinaryDataCollectionMapping.getMimeType(object);
        String attachmentType = mimeType;
        if(mimeType == null) {
            // No declared MIME type: write an empty mimeType string but attach as generic binary.
            mimeType = Constants.EMPTY_STRING;
            attachmentType = "application/octet-stream";
        }
        Marshaller marshaller = marshalRecord.getMarshaller();
        objectValue = xmlBinaryDataCollectionMapping.convertObjectValueToDataValue(objectValue, session, marshaller);
        marshalRecord.openStartElement(xPathFragment, namespaceResolver);
        marshalRecord.closeStartElement();
        if (xmlBinaryDataCollectionMapping.isSwaRef() && marshaller.getAttachmentMarshaller() != null) {
            // Object value should be a DataHandler (or raw bytes to be encoded below).
            String c_id = null;
            byte[] bytes = null;
            if (xmlBinaryDataCollectionMapping.getAttributeElementClass() == XMLBinaryDataHelper.getXMLBinaryDataHelper().DATA_HANDLER) {
                c_id = marshaller.getAttachmentMarshaller().addSwaRefAttachment((DataHandler) objectValue);
            } else {
                XMLBinaryDataHelper.EncodedData data = XMLBinaryDataHelper.getXMLBinaryDataHelper().getBytesForBinaryValue(
                        objectValue, marshaller, xmlBinaryDataCollectionMapping.getMimeType(object));
                bytes = data.getData();
                c_id = marshaller.getAttachmentMarshaller().addSwaRefAttachment(bytes, 0, bytes.length);
            }
            if(c_id != null) {
                marshalRecord.characters(c_id);
            } else {
                // Attachment marshaller declined: fall back to writing the value inline.
                marshalRecord.characters(((Field) xmlBinaryDataCollectionMapping.getField()).getSchemaType(), objectValue, mimeType, false);
            }
        } else {
            if (marshalRecord.isXOPPackage() && !xmlBinaryDataCollectionMapping.shouldInlineBinaryData()) {
                XPathFragment lastFrag = ((Field) xmlBinaryDataCollectionMapping.getField()).getLastXPathFragment();
                String c_id = Constants.EMPTY_STRING;
                byte[] bytes = null;
                if (objectValue.getClass() == CoreClassConstants.APBYTE) {
                    bytes = (byte[]) objectValue;
                    c_id = marshaller.getAttachmentMarshaller().addMtomAttachment(bytes, 0, bytes.length, attachmentType, lastFrag.getLocalName(), lastFrag.getNamespaceURI());
                } else if (xmlBinaryDataCollectionMapping.getAttributeElementClass() == XMLBinaryDataHelper.getXMLBinaryDataHelper().DATA_HANDLER) {
                    c_id = marshaller.getAttachmentMarshaller().addMtomAttachment((DataHandler) objectValue, lastFrag.getLocalName(), lastFrag.getNamespaceURI());
                } else {
                    XMLBinaryDataHelper.EncodedData data = XMLBinaryDataHelper.getXMLBinaryDataHelper().getBytesForBinaryValue(
                            objectValue, marshaller, xmlBinaryDataCollectionMapping.getMimeTypePolicy().getMimeType(object));
                    bytes = data.getData();
                    c_id = marshaller.getAttachmentMarshaller().addMtomAttachment(bytes, 0, bytes.length,
                            data.getMimeType(), lastFrag.getLocalName(), lastFrag.getNamespaceURI());
                }
                if(c_id == null) {
                    // Attachment marshaller declined: write the value inline instead of an xop:Include.
                    marshalRecord.characters(((Field) xmlBinaryDataCollectionMapping.getField()).getSchemaType(), objectValue, mimeType, false);
                } else {
                    // Marshal as an attachment reference: <xop:Include href="...cid..."/>.
                    boolean addDeclaration = false;
                    String xopPrefix = null;
                    if(marshalRecord.getNamespaceResolver() != null){
                        xopPrefix = marshalRecord.getNamespaceResolver().resolveNamespaceURI(Constants.XOP_URL);
                    }
                    if (xopPrefix == null) {
                        // XOP namespace not yet in scope; declare it for the duration of this element.
                        addDeclaration = true;
                        xopPrefix = marshalRecord.getNamespaceResolver().generatePrefix(Constants.XOP_PREFIX);
                        marshalRecord.getNamespaceResolver().put(xopPrefix, Constants.XOP_URL);
                        namespaceResolver = marshalRecord.getNamespaceResolver();
                    }
                    XPathFragment xopInclude = new XPathFragment(xopPrefix + ":Include");
                    xopInclude.setNamespaceURI(Constants.XOP_URL);
                    marshalRecord.openStartElement(xopInclude, namespaceResolver);
                    marshalRecord.attribute(Constants.EMPTY_STRING, "href", "href", c_id);
                    if (addDeclaration) {
                        marshalRecord.namespaceDeclaration(xopPrefix,
Constants.XOP_URL); } marshalRecord.closeStartElement(); marshalRecord.endElement(xPathFragment, namespaceResolver); //marshal as an attachment. if (addDeclaration) { marshalRecord.getNamespaceResolver().removeNamespace(Constants.XOP_PREFIX); } } } else { marshalRecord.characters(((Field)xmlBinaryDataCollectionMapping.getField()).getSchemaType(), objectValue, mimeType, false); } } marshalRecord.endElement(xPathFragment, namespaceResolver); return true; } public BinaryDataCollectionMapping getMapping() { return xmlBinaryDataCollectionMapping; } public boolean getReuseContainer() { return getMapping().getReuseContainer(); } /** * INTERNAL: * Used to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord */ public void setIndex(int index){ this.index = index; } /** * INTERNAL: * Set to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord * Set during TreeObjectBuilder initialization */ public int getIndex(){ return index; } /** * INTERNAL * Return true if an empty container should be set on the object if there * is no presence of the collection in the XML document. * @since EclipseLink 2.3.3 */ public boolean isDefaultEmptyContainer() { return getMapping().isDefaultEmptyContainer(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLBinaryAttachmentHandler.java0000664000000000000000000001637312216173126027020 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import javax.activation.DataHandler; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.oxm.mappings.BinaryDataCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.BinaryDataMapping; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.Mapping; import org.eclipse.persistence.internal.oxm.mappings.XMLConverterMapping; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLReader; import org.eclipse.persistence.oxm.attachment.XMLAttachmentUnmarshaller; import org.xml.sax.Attributes; import org.xml.sax.SAXException; /** * INTERNAL: *

Purpose: This class is a content handler that specifically handles the "Include" element in an MTOM style
* attachment.
* @author mmacivor */
public class XMLBinaryAttachmentHandler extends org.eclipse.persistence.internal.oxm.record.UnmarshalRecordImpl {

    // The parent record control is returned to once the attachment is processed.
    UnmarshalRecord record;
    Mapping mapping;
    // Content id taken from the xop:Include href attribute; null until that element is seen.
    String c_id = null;
    XMLConverterMapping converter;
    NodeValue nodeValue;
    boolean isCollection = false;
    private static final String INCLUDE_ELEMENT_NAME = "Include";
    private static final String HREF_ATTRIBUTE_NAME = "href";

    public XMLBinaryAttachmentHandler(UnmarshalRecord unmarshalRecord, NodeValue nodeValue, Mapping mapping, XMLConverterMapping converter, boolean isCollection) {
        super(null);
        record = unmarshalRecord;
        this.mapping = mapping;
        this.nodeValue = nodeValue;
        this.converter = converter;
        this.isCollection = isCollection;
    }

    @Override
    public void characters(char[] ch, int offset, int length) throws SAXException {
        // We don't care about characters here. Probably a whitespace.
    }

    /**
     * Records the href content id when the element is xop:Include; any other
     * element seen before an Include means this is not an attachment after
     * all, so the reader is handed back to the parent UnmarshalRecord and the
     * event is replayed on it.
     */
    @Override
    public void startElement(String namespaceURI, String localName, String qName, Attributes atts) throws SAXException {
        if(INCLUDE_ELEMENT_NAME.equals(localName) || INCLUDE_ELEMENT_NAME.equals(qName)) {
            if(record.isNamespaceAware()){
                // Only honour Include when it is in the XOP namespace.
                if(Constants.XOP_URL.equals(namespaceURI)){
                    this.c_id = atts.getValue(Constants.EMPTY_STRING, HREF_ATTRIBUTE_NAME);
                }
            }else{
                this.c_id = atts.getValue(Constants.EMPTY_STRING, HREF_ATTRIBUTE_NAME);
            }
        } else if(c_id == null ){
            // Return control to the UnmarshalRecord.
            XMLReader xmlReader = record.getXMLReader();
            xmlReader.setContentHandler(record);
            xmlReader.setLexicalHandler(record);
            record.startElement(namespaceURI, localName, qName, atts);
        }
    }

    /**
     * On the closing Include tag: fetches the attachment for the recorded
     * content id, converts it to the mapped attribute type, stores it on the
     * current object, and hands the reader back to the parent record.
     * @throws XMLMarshalException (unchecked) when no AttachmentUnmarshaller is set.
     */
    @Override
    public void endElement(String namespaceURI, String localName, String qName) throws SAXException {
        Field xmlField = null;
        if(isCollection) {
            xmlField = (Field)((BinaryDataCollectionMapping)mapping).getField();
        } else {
            xmlField = (Field)((BinaryDataMapping)mapping).getField();
        }
        if(INCLUDE_ELEMENT_NAME.equals(localName) || INCLUDE_ELEMENT_NAME.equals(qName)) {
            if(record.isNamespaceAware() && !Constants.XOP_URL.equals(namespaceURI)){
                return;
            }
            // Get the attachment and set it in the object.
            XMLAttachmentUnmarshaller attachmentUnmarshaller = record.getUnmarshaller().getAttachmentUnmarshaller();
            Object data = null;
            Class attributeClassification = null;
            if(isCollection) {
                attributeClassification = ((BinaryDataCollectionMapping)mapping).getAttributeElementClass();
            } else {
                attributeClassification = mapping.getAttributeClassification();
            }
            if(attachmentUnmarshaller == null) {
                // If there's no attachment unmarshaller, it isn't possible to retrieve
                // the attachment. Throw an exception.
                throw XMLMarshalException.noAttachmentUnmarshallerSet(this.c_id);
            }
            if(attributeClassification.equals(XMLBinaryDataHelper.getXMLBinaryDataHelper().DATA_HANDLER)) {
                data = attachmentUnmarshaller.getAttachmentAsDataHandler(this.c_id);
            } else {
                data = attachmentUnmarshaller.getAttachmentAsByteArray(this.c_id);
            }
            CoreContainerPolicy cp = null;
            if(isCollection){
                cp = mapping.getContainerPolicy();
            }
            // NOTE(review): the locally computed attributeClassification (element class for
            // the collection case) is not used here -- mapping.getAttributeClassification()
            // is passed instead. Confirm this is intentional for collection mappings.
            data = XMLBinaryDataHelper.getXMLBinaryDataHelper().convertObject(data, mapping.getAttributeClassification(), record.getSession(), cp);
            data = converter.convertDataValueToObjectValue(data, record.getSession(), unmarshaller);
            // Check for collection case.
            if (isCollection) {
                if(data != null) {
                    record.addAttributeValue((ContainerValue)nodeValue, data);
                }
            } else {
                record.setAttributeValue(data, mapping);
            }
            // Return control to the UnmarshalRecord.
            if(!xmlField.isSelfField()){
                XMLReader xmlReader = record.getXMLReader();
                xmlReader.setContentHandler(record);
                xmlReader.setLexicalHandler(record);
            }
        } else if(c_id == null){
            if(!xmlField.isSelfField()){
                // Return control to the parent record.
                XMLReader xmlReader = record.getXMLReader();
                xmlReader.setContentHandler(record);
                xmlReader.setLexicalHandler(record);
                record.endElement(namespaceURI, localName, qName);
            }
        }
    }

    public void
processingInstruction(String target, String data) throws SAXException { } public String getCID() { return this.c_id; } public Object getObjectValueFromDataHandler(DataHandler handler, Class cls) { CoreContainerPolicy cp = null; if(isCollection){ cp = mapping.getContainerPolicy(); } return XMLBinaryDataHelper.getXMLBinaryDataHelper().convertObject(handler, cls, record.getSession(), cp); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XPathObjectBuilder.java0000664000000000000000000010352112216173126025356 0ustar /******************************************************************************* * Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; import javax.xml.namespace.QName; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.mappings.CoreMapping; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.descriptors.CoreObjectBuilder; import org.eclipse.persistence.internal.core.helper.CoreField; import org.eclipse.persistence.internal.core.sessions.CoreAbstractRecord; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.AnyAttributeMapping; import org.eclipse.persistence.internal.oxm.mappings.AnyCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.AnyObjectMapping; import org.eclipse.persistence.internal.oxm.mappings.BinaryDataCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.BinaryDataMapping; import org.eclipse.persistence.internal.oxm.mappings.ChoiceCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.ChoiceObjectMapping; import org.eclipse.persistence.internal.oxm.mappings.CollectionReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.CompositeCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.CompositeObjectMapping; import org.eclipse.persistence.internal.oxm.mappings.Descriptor; import org.eclipse.persistence.internal.oxm.mappings.DirectCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.DirectMapping; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.FragmentCollectionMapping; import 
org.eclipse.persistence.internal.oxm.mappings.FragmentMapping; import org.eclipse.persistence.internal.oxm.mappings.InverseReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.Mapping; import org.eclipse.persistence.internal.oxm.mappings.ObjectReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.TransformationMapping; import org.eclipse.persistence.internal.oxm.mappings.VariableXPathCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.VariableXPathObjectMapping; import org.eclipse.persistence.internal.oxm.record.AbstractMarshalRecord; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; import org.eclipse.persistence.internal.oxm.record.SequencedMarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.mappings.transformers.FieldTransformer; import org.eclipse.persistence.oxm.XMLField; import org.eclipse.persistence.oxm.mappings.XMLCompositeObjectMapping; import org.eclipse.persistence.oxm.sequenced.SequencedObject; public class XPathObjectBuilder extends CoreObjectBuilder implements ObjectBuilder { public static final String CYCLE_RECOVERABLE = "com.sun.xml.bind.CycleRecoverable"; public static final String CYCLE_RECOVERABLE_CONTEXT = "com.sun.xml.bind.CycleRecoverable$Context"; public static final String ON_CYCLE_DETECTED = "onCycleDetected"; private List containerValues; private int counter = 0; private Class cycleRecoverableClass = null; private Class cycleRecoverableContextClass = null; private List defaultEmptyContainerValues; //a list of container values that have isDefaultEmptyContainer() set to true private CoreDescriptor descriptor; private volatile boolean 
initialized = false; private List nullCapableValues; private XPathNode rootXPathNode; private List transformationMappings; private boolean xsiTypeIndicatorField; public XPathObjectBuilder(CoreDescriptor descriptor) { this.descriptor = descriptor; this.rootXPathNode = new XPathNode(); } private XPathNode addChild(XPathFragment xPathFragment, NodeValue nodeValue, NamespaceResolver namespaceResolver) { return rootXPathNode.addChild(xPathFragment, nodeValue, namespaceResolver); } @Override public boolean addClassIndicatorFieldToRow(AbstractMarshalRecord abstractMarshalRecord) { if (descriptor.hasInheritance() && !xsiTypeIndicatorField) { descriptor.getInheritancePolicy().addClassIndicatorFieldToRow((CoreAbstractRecord) abstractMarshalRecord); return true; } return false; } private void addContainerValue(ContainerValue containerValue) { if (null == this.containerValues) { this.containerValues = new ArrayList(); } containerValue.setIndex(counter++); this.containerValues.add(containerValue); if(containerValue.isDefaultEmptyContainer()){ addDefaultEmptyContainerValue(containerValue); } } private void addDefaultEmptyContainerValue(ContainerValue containerValue){ if (null == this.defaultEmptyContainerValues) { this.defaultEmptyContainerValues = new ArrayList(); } this.defaultEmptyContainerValues.add(containerValue); } @Override public List addExtraNamespacesToNamespaceResolver(Descriptor desc, AbstractMarshalRecord marshalRecord, CoreAbstractSession session, boolean allowOverride, boolean ignoreEqualResolvers) { if (rootXPathNode.getNonAttributeChildren() == null) { return null; } else { return marshalRecord.addExtraNamespacesToNamespaceResolver(desc, session, allowOverride, ignoreEqualResolvers); } } private void addNullCapableValue(NullCapableValue nullCapableValue) { if (null == this.nullCapableValues) { this.nullCapableValues = new ArrayList(); } this.nullCapableValues.add(nullCapableValue); } public void addTransformationMapping(TransformationMapping 
transformationMapping) { if (null == this.transformationMappings) { this.transformationMappings = new ArrayList(); } transformationMappings.add(transformationMapping); } /** * Return a new instance of the receiver's javaClass. */ @Override public Object buildNewInstance() { return this.descriptor.getInstantiationPolicy().buildNewInstance(); } @Override public XMLRecord buildRow(XMLRecord record, Object object, CoreAbstractSession session, Marshaller marshaller, XPathFragment rootFragment) { lazyInitialize(); XPathNode textNode = rootXPathNode.getTextNode(); List nonAttributeChildren = rootXPathNode.getNonAttributeChildren(); if (null == textNode && null == nonAttributeChildren) { return record; } Descriptor xmlDescriptor = (Descriptor) descriptor; XPathNode node = rootXPathNode; MarshalRecord marshalRecord = (MarshalRecord) record; QName schemaType = null; if (marshalRecord.getCycleDetectionStack().contains(object, marshaller.isEqualUsingIdenity())) { if (cycleRecoverableClass == null) { initCycleRecoverableClasses(); } if (cycleRecoverableClass != null && cycleRecoverableClass.isAssignableFrom(object.getClass())) { try { Object jaxbMarshaller = marshaller.getProperty(Constants.JAXB_MARSHALLER); // Create a proxy instance of CycleRecoverable$Context, a parameter to // the onCycleDetected method Object contextProxy = CycleRecoverableContextProxy.getProxy(cycleRecoverableContextClass, jaxbMarshaller); // Invoke onCycleDetected method, passing in proxy, and reset // 'object' to the returned value Method onCycleDetectedMethod = object.getClass().getMethod(ON_CYCLE_DETECTED, new Class[] { cycleRecoverableContextClass }); object = PrivilegedAccessHelper.invokeMethod(onCycleDetectedMethod, object, new Object[] { contextProxy }); } catch (Exception e) { throw XMLMarshalException.marshalException(e); } // Returned object might have a different descriptor xmlDescriptor = (Descriptor) session.getDescriptor(object.getClass()); if (xmlDescriptor != null) { node = 
((TreeObjectBuilder) xmlDescriptor.getObjectBuilder()).getRootXPathNode(); } else { node = null; } // Push new object marshalRecord.getCycleDetectionStack().push(object); // Write xsi:type if onCycleDetected returned an object of a type different than the one mapped if (xmlDescriptor != descriptor) { if (xmlDescriptor == null) { schemaType = (QName) XMLConversionManager.getDefaultJavaTypes().get(object.getClass()); } else { schemaType = xmlDescriptor.getSchemaReference().getSchemaContextAsQName(); } marshalRecord.writeXsiTypeAttribute(xmlDescriptor, schemaType.getNamespaceURI(), schemaType.getLocalPart(), schemaType.getPrefix(), false); } } else { // Push the duplicate object anyway, so that we can get the complete cycle string marshalRecord.getCycleDetectionStack().push(object); throw XMLMarshalException.objectCycleDetected(marshalRecord.getCycleDetectionStack().getCycleString()); } } else { marshalRecord.getCycleDetectionStack().push(object); } NamespaceResolver namespaceResolver = null; if (xmlDescriptor != null) { namespaceResolver = xmlDescriptor.getNamespaceResolver(); } MarshalContext marshalContext = null; if (xmlDescriptor != null && xmlDescriptor.isSequencedObject()) { SequencedObject sequencedObject = (SequencedObject) object; marshalContext = new SequencedMarshalContext(sequencedObject.getSettings()); } else { marshalContext = ObjectMarshalContext.getInstance(); } if (null == nonAttributeChildren) { textNode.marshal((MarshalRecord) record, object, session, namespaceResolver, marshaller, marshalContext, rootFragment); } else { if (node == null) { // No descriptor for this object, so manually create a MappingNodeValue and marshal it XPathNode n = new XPathNode(); CompositeObjectMapping m = new XMLCompositeObjectMapping(); m.setXPath("."); XMLCompositeObjectMappingNodeValue nv = new XMLCompositeObjectMappingNodeValue(m); n.setMarshalNodeValue(nv); nv.marshalSingleValue(new XPathFragment("."), marshalRecord, null, object, session, namespaceResolver, 
marshalContext);
            } else {
                // Descriptor found: marshal each non-attribute child node in context order.
                for (int x = 0, size = marshalContext.getNonAttributeChildrenSize(node); x < size; x++) {
                    XPathNode xPathNode = (XPathNode) marshalContext.getNonAttributeChild(x, node);
                    xPathNode.marshal((MarshalRecord) record, object, session, namespaceResolver, marshaller, marshalContext.getMarshalContext(x), rootFragment);
                }
            }
        }
        // Balance the push done at method entry for cycle detection.
        marshalRecord.getCycleDetectionStack().pop();
        return record;
    }

    @Override
    public Class classFromRow(UnmarshalRecord record, CoreAbstractSession session) {
        return descriptor.getInheritancePolicy().classFromRow((CoreAbstractRecord) record, session);
    }

    /**
     * Create a new row/record for the object builder.
     * This allows subclasses to define different record types.
     */
    @Override
    public CoreAbstractRecord createRecord(CoreAbstractSession session) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Object extractPrimaryKeyFromObject(Object object, CoreAbstractSession session) {
        throw new UnsupportedOperationException();
    }

    public List getContainerValues() {
        return this.containerValues;
    }

    public List getDefaultEmptyContainerValues() {
        return this.defaultEmptyContainerValues;
    }

    @Override
    public CoreDescriptor getDescriptor() {
        return descriptor;
    }

    @Override
    public CoreMapping getMappingForField(CoreField field) {
        throw new UnsupportedOperationException();
    }

    public List getNullCapableValues() {
        return this.nullCapableValues;
    }

    @Override
    public XPathNode getRootXPathNode() {
        // Ensure the node tree is built before exposing it.
        lazyInitialize();
        return this.rootXPathNode;
    }

    public List getTransformationMappings() {
        return this.transformationMappings;
    }

    private void initCycleRecoverableClasses() {
        try {
            this.cycleRecoverableClass = PrivilegedAccessHelper.getClassForName(CYCLE_RECOVERABLE);
            this.cycleRecoverableContextClass = PrivilegedAccessHelper.getClassForName(CYCLE_RECOVERABLE_CONTEXT);
        } catch (Exception e) {
            // Deliberately ignored: the JAXB RI CycleRecoverable classes are optional
            // at runtime; their absence just disables cycle recovery.
        }
    }

    @Override
    public boolean isXsiTypeIndicatorField() {
        return xsiTypeIndicatorField;
    }

    // One-time build of the XPath node tree, guarded by the volatile 'initialized'
    // flag (double-checked locking: unsynchronized fast path, re-check under lock).
    void lazyInitialize() {
        if(initialized) {
            return;
        }
        synchronized(this) {
if(initialized) { return; } Descriptor xmlDescriptor = (Descriptor) descriptor; // MAPPINGS Iterator mappingIterator = xmlDescriptor.getMappings().iterator(); Iterator fieldTransformerIterator; Mapping xmlMapping; // Transformation Mapping TransformationMapping transformationMapping; FieldTransformerNodeValue fieldTransformerNodeValue; Object[] nextFieldToTransformer; // Simple Type Translator TypeNodeValue typeNodeValue; NodeValue mappingNodeValue = null; Field xmlField; while (mappingIterator.hasNext()) { xmlMapping = (Mapping)mappingIterator.next(); xmlField = (Field)xmlMapping.getField(); if (xmlMapping.isTransformationMapping()) { transformationMapping = (TransformationMapping)xmlMapping; addTransformationMapping(transformationMapping); fieldTransformerIterator = transformationMapping.getFieldToTransformers().iterator(); while (fieldTransformerIterator.hasNext()) { fieldTransformerNodeValue = new FieldTransformerNodeValue(); nextFieldToTransformer = (Object[])fieldTransformerIterator.next(); xmlField = (Field)nextFieldToTransformer[0]; fieldTransformerNodeValue.setXMLField(xmlField); fieldTransformerNodeValue.setFieldTransformer((FieldTransformer)nextFieldToTransformer[1]); addChild(xmlField.getXPathFragment(), fieldTransformerNodeValue, xmlDescriptor.getNamespaceResolver()); } } else { if (xmlMapping instanceof InverseReferenceMapping) { xmlMapping = (Mapping)((InverseReferenceMapping)xmlMapping).getInlineMapping(); if(xmlMapping == null){ continue; } xmlField = (Field)xmlMapping.getField(); if(xmlMapping.isAbstractCompositeCollectionMapping()){ mappingNodeValue=new XMLCompositeCollectionMappingNodeValue((CompositeCollectionMapping)xmlMapping, true); } if(xmlMapping.isAbstractCompositeObjectMapping()){ mappingNodeValue=new XMLCompositeObjectMappingNodeValue((CompositeObjectMapping)xmlMapping, true); } } else if (xmlMapping.isAbstractDirectMapping()) { mappingNodeValue = new XMLDirectMappingNodeValue((DirectMapping)xmlMapping); } else if 
(xmlMapping.isAbstractCompositeObjectMapping()) { mappingNodeValue = new XMLCompositeObjectMappingNodeValue((CompositeObjectMapping)xmlMapping); } else if (xmlMapping.isAbstractCompositeDirectCollectionMapping()) { DirectCollectionMapping collectionMapping = (DirectCollectionMapping) xmlMapping; mappingNodeValue = new XMLCompositeDirectCollectionMappingNodeValue(collectionMapping); if (collectionMapping.getWrapperNullPolicy() != null) { addChild(xmlField.getXPathFragment(), new CollectionGroupingElementNodeValue((ContainerValue) mappingNodeValue), xmlDescriptor.getNamespaceResolver()); } } else if (xmlMapping.isAbstractCompositeCollectionMapping()) { CompositeCollectionMapping collectionMapping = (CompositeCollectionMapping) xmlMapping; mappingNodeValue = new XMLCompositeCollectionMappingNodeValue(collectionMapping); if (collectionMapping.getWrapperNullPolicy() != null) { addChild(xmlField.getXPathFragment(), new CollectionGroupingElementNodeValue((ContainerValue) mappingNodeValue), xmlDescriptor.getNamespaceResolver()); } } else if (xmlMapping instanceof VariableXPathCollectionMapping) { mappingNodeValue = new XMLVariableXPathCollectionMappingNodeValue((VariableXPathCollectionMapping)xmlMapping); } else if (xmlMapping instanceof VariableXPathObjectMapping){ mappingNodeValue = new XMLVariableXPathObjectMappingNodeValue((VariableXPathObjectMapping)xmlMapping); } else if (xmlMapping instanceof AnyObjectMapping) { mappingNodeValue = new XMLAnyObjectMappingNodeValue((AnyObjectMapping)xmlMapping); } else if (xmlMapping instanceof AnyCollectionMapping) { mappingNodeValue = new XMLAnyCollectionMappingNodeValue((AnyCollectionMapping)xmlMapping); } else if (xmlMapping instanceof AnyAttributeMapping) { mappingNodeValue = new XMLAnyAttributeMappingNodeValue((AnyAttributeMapping)xmlMapping); } else if (xmlMapping instanceof BinaryDataMapping) { mappingNodeValue = new XMLBinaryDataMappingNodeValue((BinaryDataMapping)xmlMapping); } else if (xmlMapping instanceof 
BinaryDataCollectionMapping) { mappingNodeValue = new XMLBinaryDataCollectionMappingNodeValue((BinaryDataCollectionMapping)xmlMapping); } else if (xmlMapping instanceof FragmentMapping) { mappingNodeValue = new XMLFragmentMappingNodeValue((FragmentMapping)xmlMapping); } else if (xmlMapping instanceof FragmentCollectionMapping) { mappingNodeValue = new XMLFragmentCollectionMappingNodeValue((FragmentCollectionMapping)xmlMapping); } else if (xmlMapping instanceof CollectionReferenceMapping) { CollectionReferenceMapping xmlColMapping = (CollectionReferenceMapping)xmlMapping; List fields = xmlColMapping.getFields(); Field xmlColMappingField = (Field) xmlColMapping.getField(); XPathNode branchNode; if(null == xmlColMappingField) { if(fields.size() > 1 && !xmlColMapping.usesSingleNode()) { addChild(XPathFragment.SELF_FRAGMENT, new XMLCollectionReferenceMappingMarshalNodeValue(xmlColMapping), xmlDescriptor.getNamespaceResolver()); } branchNode = rootXPathNode; } else { branchNode = addChild(((Field) xmlColMapping.getField()).getXPathFragment(), new XMLCollectionReferenceMappingMarshalNodeValue(xmlColMapping), xmlDescriptor.getNamespaceResolver()); } int containerIndex = -1; for (int i = 0, size = fields.size(); i < size; i++) { Field xmlFld = (Field)fields.get(i); mappingNodeValue = new XMLCollectionReferenceMappingNodeValue(xmlColMapping, xmlFld); if(i == 0){ addContainerValue((ContainerValue)mappingNodeValue); containerIndex = ((ContainerValue)mappingNodeValue).getIndex(); }else{ ((ContainerValue)mappingNodeValue).setIndex(containerIndex); } if (mappingNodeValue.isNullCapableValue()) { addNullCapableValue((NullCapableValue)mappingNodeValue); } branchNode.addChild(xmlFld.getXPathFragment(), mappingNodeValue, xmlDescriptor.getNamespaceResolver()); } continue; } else if (xmlMapping instanceof ObjectReferenceMapping) { ObjectReferenceMapping xmlORMapping = (ObjectReferenceMapping)xmlMapping; Iterator fieldIt = xmlORMapping.getFields().iterator(); while (fieldIt.hasNext()) { 
Field xmlFld = (Field)fieldIt.next(); mappingNodeValue = new XMLObjectReferenceMappingNodeValue(xmlORMapping, xmlFld); if (mappingNodeValue.isContainerValue()) { addContainerValue((ContainerValue)mappingNodeValue); } if (mappingNodeValue.isNullCapableValue()) { addNullCapableValue((NullCapableValue)mappingNodeValue); } addChild(xmlFld.getXPathFragment(), mappingNodeValue, xmlDescriptor.getNamespaceResolver()); } continue; } else if (xmlMapping instanceof ChoiceObjectMapping) { ChoiceObjectMapping xmlChoiceMapping = (ChoiceObjectMapping)xmlMapping; Iterator fields = xmlChoiceMapping.getChoiceElementMappings().keySet().iterator(); Field firstField = (Field)fields.next(); XMLChoiceObjectMappingNodeValue firstNodeValue = new XMLChoiceObjectMappingNodeValue(xmlChoiceMapping, firstField); firstNodeValue.setNullCapableNodeValue(firstNodeValue); addChild(firstField.getXPathFragment(), firstNodeValue, xmlDescriptor.getNamespaceResolver()); while(fields.hasNext()) { Field next = (Field)fields.next(); XMLChoiceObjectMappingNodeValue nodeValue = new XMLChoiceObjectMappingNodeValue(xmlChoiceMapping, next); nodeValue.setNullCapableNodeValue(firstNodeValue); addChild(next.getXPathFragment(), nodeValue, xmlDescriptor.getNamespaceResolver()); } continue; } else if(xmlMapping instanceof ChoiceCollectionMapping) { ChoiceCollectionMapping xmlChoiceMapping = (ChoiceCollectionMapping)xmlMapping; Iterator> fields = xmlChoiceMapping.getChoiceElementMappings().entrySet().iterator(); Entry firstEntry = fields.next(); Field firstField = firstEntry.getKey(); XMLChoiceCollectionMappingUnmarshalNodeValue unmarshalValue = new XMLChoiceCollectionMappingUnmarshalNodeValue(xmlChoiceMapping, firstField); XMLChoiceCollectionMappingMarshalNodeValue marshalValue = new XMLChoiceCollectionMappingMarshalNodeValue(xmlChoiceMapping, firstField); HashMap fieldToNodeValues = new HashMap(); unmarshalValue.setContainerNodeValue(unmarshalValue); unmarshalValue.setFieldToNodeValues(fieldToNodeValues); 
if(xmlChoiceMapping.isMixedContent() && (xmlChoiceMapping.getMixedContentMapping() == firstEntry.getValue())) { unmarshalValue.setIsMixedNodeValue(true); marshalValue.setIsMixedNodeValue(true); } this.addContainerValue(unmarshalValue); ((ContainerValue)unmarshalValue.getChoiceElementNodeValue()).setIndex(unmarshalValue.getIndex()); fieldToNodeValues.put(firstField, unmarshalValue); addChild(firstField.getXPathFragment(), unmarshalValue, xmlDescriptor.getNamespaceResolver()); addChild(firstField.getXPathFragment(), marshalValue, xmlDescriptor.getNamespaceResolver()); while(fields.hasNext()) { Entry nextEntry = fields.next(); Field nextField = nextEntry.getKey(); XMLChoiceCollectionMappingUnmarshalNodeValue nodeValue = new XMLChoiceCollectionMappingUnmarshalNodeValue(xmlChoiceMapping, nextField); nodeValue.setContainerNodeValue(unmarshalValue); nodeValue.setIndex(unmarshalValue.getIndex()); ((ContainerValue)nodeValue.getChoiceElementNodeValue()).setIndex(unmarshalValue.getIndex()); addChild(nextField.getXPathFragment(), nodeValue, xmlDescriptor.getNamespaceResolver()); fieldToNodeValues.put(nextField, nodeValue); if(xmlChoiceMapping.isMixedContent() && (xmlChoiceMapping.getMixedContentMapping() == nextEntry.getValue())) { nodeValue.setIsMixedNodeValue(true); } } if(xmlChoiceMapping.isAny()) { XMLChoiceCollectionMappingUnmarshalNodeValue nodeValue = new XMLChoiceCollectionMappingUnmarshalNodeValue(xmlChoiceMapping, null, xmlChoiceMapping.getAnyMapping()); nodeValue.setContainerNodeValue(unmarshalValue); nodeValue.setIndex(unmarshalValue.getIndex()); ((ContainerValue)nodeValue.getChoiceElementNodeValue()).setIndex(unmarshalValue.getIndex()); addChild(null, nodeValue, xmlDescriptor.getNamespaceResolver()); fieldToNodeValues.put(null, nodeValue); if(xmlChoiceMapping.isMixedContent()) { nodeValue.setIsMixedNodeValue(true); } } marshalValue.setFieldToNodeValues(fieldToNodeValues); continue; } if (mappingNodeValue.isContainerValue()) { 
// NOTE(review): these first lines are the tail of a lazy-initialization method whose
// opening (including the per-mapping loop and the "if (mappingNodeValue.isContainerValue()) {"
// guard) lies on earlier lines outside this block; only the visible portion is documented.
                    addContainerValue((ContainerValue)mappingNodeValue);
                }
                if (mappingNodeValue.isNullCapableValue()) {
                    addNullCapableValue((NullCapableValue)mappingNodeValue);
                }
                // Register the node value under the mapping's XPath, or as a "self"
                // child when the mapping has no field.
                if (xmlField != null) {
                    addChild(xmlField.getXPathFragment(), mappingNodeValue, xmlDescriptor.getNamespaceResolver());
                } else {
                    addChild(null, mappingNodeValue, xmlDescriptor.getNamespaceResolver());
                }
                // A typed text field on a direct mapping additionally carries an
                // xsi:type attribute; build an XPath for that attribute beside the
                // text node and register a TypeNodeValue for it.
                if (xmlMapping.isAbstractDirectMapping() && xmlField.isTypedTextField()) {
                    XPathFragment nextFragment = xmlField.getXPathFragment();
                    StringBuilder typeXPathStringBuilder = new StringBuilder();
                    // Collect every fragment up to (but excluding) the last one.
                    while (nextFragment.getNextFragment() != null) {
                        typeXPathStringBuilder.append(nextFragment.getXPath());
                        nextFragment = nextFragment.getNextFragment();
                    }
                    Field typeField = new XMLField();
                    if(typeXPathStringBuilder.length() > 0) {
                        typeXPathStringBuilder.append('/');
                    }
                    typeField.setXPath(typeXPathStringBuilder.toString() + Constants.ATTRIBUTE + xmlDescriptor.getNonNullNamespaceResolver().resolveNamespaceURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI) + Constants.COLON + Constants.SCHEMA_TYPE_ATTRIBUTE);
                    typeNodeValue = new TypeNodeValue();
                    typeNodeValue.setDirectMapping((DirectMapping)xmlMapping);
                    addChild(typeField.getXPathFragment(), typeNodeValue, xmlDescriptor.getNamespaceResolver());
                }
            }
        }
        // Remember whether the inheritance class indicator field is the xsi:type
        // attribute, so the marshal path can special-case it.
        if (descriptor.hasInheritance()) {
            Field indicatorField = (Field) descriptor.getInheritancePolicy().getClassIndicatorField();
            if (indicatorField != null) {
                if (indicatorField.getLastXPathFragment().getNamespaceURI() != null
                        && indicatorField.getLastXPathFragment().getNamespaceURI().equals(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI)
                        && indicatorField.getLastXPathFragment().getLocalName().equals(Constants.SCHEMA_TYPE_ATTRIBUTE)) {
                    xsiTypeIndicatorField = true;
                }
            }
        }
            initialized = true;
        }
    }

    /**
     * Marshals every attribute-level value of the given object onto the record:
     * the attribute children of the root XPath node, the any-attribute node (if
     * present), and the self children (for which an xsi:type/class indicator is
     * added first when the value's descriptor requires it).
     *
     * @return true if at least one value was written to the record.
     */
    @Override
    public boolean marshalAttributes(MarshalRecord marshalRecord, Object object, CoreAbstractSession session) {
        lazyInitialize();
        boolean hasValue = false;
        NamespaceResolver namespaceResolver = ((Descriptor)descriptor).getNamespaceResolver();
        List attributeChildren = rootXPathNode.getAttributeChildren();
        if (null != attributeChildren) {
            ObjectMarshalContext objectMarshalContext = ObjectMarshalContext.getInstance();
            for (int x = 0, attributeChildrenSize=attributeChildren.size(); x < attributeChildrenSize; x++) {
                // "|| hasValue" is ordered so every child is marshalled even after
                // a value has already been written.
                hasValue = attributeChildren.get(x).marshal(marshalRecord, object, session, namespaceResolver, null, objectMarshalContext, null) || hasValue;
            }
        }
        if (rootXPathNode.getAnyAttributeNode() != null) {
            hasValue = rootXPathNode.getAnyAttributeNode().marshal(marshalRecord, object, session, namespaceResolver, null, ObjectMarshalContext.getInstance(), null) || hasValue;
        }
        List selfChildren = rootXPathNode.getSelfChildren();
        if (null != selfChildren) {
            for (XPathNode selfXPathNode : selfChildren) {
                NodeValue marshalNodeValue = selfXPathNode.getMarshalNodeValue();
                if(marshalNodeValue instanceof MappingNodeValue) {
                    Mapping selfMapping = ((MappingNodeValue) marshalNodeValue).getMapping();
                    Object value = selfMapping.getAttributeValueFromObject(object);
                    Descriptor referenceDescriptor = (Descriptor)selfMapping.getReferenceDescriptor();
                    Descriptor valueDescriptor;
                    // Use the value's actual descriptor when inheritance may apply;
                    // otherwise the statically known reference descriptor suffices.
                    if(value != null && (referenceDescriptor == null || referenceDescriptor.hasInheritance())){
                        valueDescriptor = (Descriptor)session.getDescriptor(value.getClass());
                    } else {
                        valueDescriptor = referenceDescriptor;
                    }
                    if(null != valueDescriptor) {
                        marshalRecord.addXsiTypeAndClassIndicatorIfRequired(valueDescriptor, referenceDescriptor, (Field) selfMapping.getField(), false);
                    }
                }
                selfXPathNode.marshalSelfAttributes(marshalRecord, object, session, namespaceResolver, marshalRecord.getMarshaller());
            }
        }
        return hasValue;
    }
}eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/Unmarshaller.java0000664000000000000000000001206612216173126024334 0ustar /*******************************************************************************
 * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Blaise Doughan - 2.5 - initial implementation
 ******************************************************************************/
package org.eclipse.persistence.internal.oxm;

import javax.xml.validation.Schema;

import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession;
import org.eclipse.persistence.internal.oxm.mappings.Descriptor;
import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord;
import org.eclipse.persistence.oxm.XMLUnmarshalListener;
import org.eclipse.persistence.oxm.attachment.XMLAttachmentUnmarshaller;
import org.xml.sax.ErrorHandler;

/**
 * Abstract base class for EclipseLink unmarshallers. Holds the {@code Context}
 * that created the unmarshaller and declares the configuration accessors a
 * concrete unmarshaller must provide (error handling, media type, namespaces,
 * schema validation, listeners, etc.).
 */
public abstract class Unmarshaller<
    ABSTRACT_SESSION extends CoreAbstractSession,
    CONTEXT extends Context,
    DESCRIPTOR extends Descriptor,
    ID_RESOLVER extends IDResolver,
    MEDIA_TYPE extends MediaType,
    ROOT extends Root,
    UNMARSHALLER_HANDLER extends UnmarshallerHandler> {

    // The Context instance this unmarshaller was created from.
    protected CONTEXT context;

    public Unmarshaller(CONTEXT context) {
        this.context = context;
    }

    /**
     * INTERNAL
     */
    public abstract ROOT createRoot();

    /**
     * INTERNAL
     */
    public abstract UnmarshalRecord createRootUnmarshalRecord(Class clazz);

    /**
     * INTERNAL
     */
    public abstract UnmarshalRecord createUnmarshalRecord(DESCRIPTOR descriptor, ABSTRACT_SESSION session);

    public abstract XMLAttachmentUnmarshaller getAttachmentUnmarshaller();

    /**
     * Value that will be used to prefix attributes.
     * Ignored when unmarshalling XML.
     * @return the attribute prefix
     */
    public abstract String getAttributePrefix();

    /**
     * Return the Context that was used to create this instance of Unmarshaller.
     */
    public CONTEXT getContext() {
        return context;
    }

    /**
     * Get the ErrorHandler set on this Unmarshaller
     * @return the ErrorHandler set on this Unmarshaller
     */
    public abstract ErrorHandler getErrorHandler();

    /**
     * Return this Unmarshaller's custom IDResolver.
     * @see IDResolver
     * @return the custom IDResolver, or null if one has not been specified.
     */
    public abstract ID_RESOLVER getIDResolver();

    /**
     * Get the MediaType for this unmarshaller.
     * See org.eclipse.persistence.oxm.MediaType for the media types supported by EclipseLink MOXy.
     * If not set the default is MediaType.APPLICATION_XML.
     * @return MediaType
     */
    public abstract MEDIA_TYPE getMediaType();

    /**
     * NamespaceResolver to be used during unmarshal.
     * Ignored when unmarshalling XML.
     */
    public abstract NamespaceResolver getNamespaceResolver();

    /**
     * Get the namespace separator used during unmarshal operations.
     * If mediaType is application/json '.' is the default.
     * Ignored when unmarshalling XML.
     */
    public abstract char getNamespaceSeparator();

    /**
     * Return the property for a given key, if one exists.
     *
     * @param key the property key
     * @return the associated value, or null if none exists
     */
    public abstract Object getProperty(Object key);

    public abstract Schema getSchema();

    /**
     * INTERNAL:
     * This is the text handler during unmarshal operations.
     */
    public abstract StrBuffer getStringBuffer();

    /**
     * Get the class that will be instantiated to handle unmapped content.
     * Class must implement the org.eclipse.persistence.oxm.unmapped.UnmappedContentHandler interface.
     */
    public abstract Class getUnmappedContentHandlerClass();

    /**
     * INTERNAL:
     * Returns the AttributeGroup or the name of the AttributeGroup to be used to
     * unmarshal.
     */
    public abstract Object getUnmarshalAttributeGroup();

    public abstract UNMARSHALLER_HANDLER getUnmarshallerHandler();

    public abstract XMLUnmarshalListener getUnmarshalListener();

    /**
     * Name of the property to marshal/unmarshal as a wrapper on the text() mappings.
     * Ignored when unmarshalling XML.
     */
    public abstract String getValueWrapper();

    /**
     * Return if this Unmarshaller should try to automatically determine
     * the MediaType of the document (instead of using the MediaType set
     * by setMediaType).
     */
    public abstract boolean isAutoDetectMediaType();

    /**
     * Determine if the @XMLRootElement should be marshalled when present.
     * Ignored when unmarshalling XML.
     */
    public abstract boolean isIncludeRoot();

    public abstract boolean isResultAlwaysXMLRoot();

    public abstract boolean isWrapperAsCollectionName();

    public abstract void setIDResolver(ID_RESOLVER idResolver);
}
././@LongLink0000000000000000000000000000015700000000000011570 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLCollectionReferenceMappingMarshalNodeValue.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLCollectionReferenceMappingMarshalNode0000664000000000000000000002322212216173126030700 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
* * Contributors: * bdoughan - October 21/2009 - 2.0 - Initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.List; import javax.xml.namespace.QName; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.CollectionReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.Descriptor; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; public class XMLCollectionReferenceMappingMarshalNodeValue extends MappingNodeValue implements ContainerValue { private CollectionReferenceMapping xmlCollectionReferenceMapping; private XPathNode branchNode; private int index = -1; public XMLCollectionReferenceMappingMarshalNodeValue(CollectionReferenceMapping xmlCollectionReferenceMapping) { this.xmlCollectionReferenceMapping = xmlCollectionReferenceMapping; branchNode = new XPathNode(); NamespaceResolver namespaceResolver = ((Descriptor) xmlCollectionReferenceMapping.getDescriptor()).getNamespaceResolver(); List fkFields = xmlCollectionReferenceMapping.getFields(); for(int x=0, fkFieldsSize=fkFields.size(); xPurpose: This is how the XML Choice Collection Mapping is * handled when used with the TreeObjectBuilder.

* @author mmacivor
 */
/**
 * Node value for a single field of an XML choice object mapping: delegates to
 * the node value appropriate for the element mapping registered under that
 * field, with per-class overrides when several classes share the same field.
 */
public class XMLChoiceObjectMappingNodeValue extends MappingNodeValue {
    // Delegate node value for the element mapping keyed by xmlField.
    private NodeValue choiceElementNodeValue;
    // Per-class delegates for classes that share this field but use a
    // different mapping; lazily created in initializeNodeValue().
    private Map choiceElementNodeValues;
    private ChoiceObjectMapping xmlChoiceMapping;
    // The first node value of the choice will be registered as a null capable
    // value. If any of the choice elements get hit, this needs to be removed
    // as a null value.
    private XMLChoiceObjectMappingNodeValue nullCapableNodeValue;
    private Field xmlField;

    public XMLChoiceObjectMappingNodeValue(ChoiceObjectMapping mapping, Field xmlField) {
        this.xmlChoiceMapping = mapping;
        this.xmlField = xmlField;
        initializeNodeValue();
    }

    public boolean isOwningNode(XPathFragment xPathFragment) {
        return choiceElementNodeValue.isOwningNode(xPathFragment);
    }

    /**
     * Builds the delegate node value for this field's element mapping and, for
     * any other class mapped to the same field through a different mapping,
     * records a per-class delegate in choiceElementNodeValues.
     */
    public void initializeNodeValue() {
        Mapping xmlMapping = (Mapping) xmlChoiceMapping.getChoiceElementMappings().get(xmlField);
        choiceElementNodeValue = getNodeValueForMapping(xmlMapping);
        // Check for mappings to other classes with the same field.
        for(Entry entry: ((Map)xmlChoiceMapping.getChoiceElementMappingsByClass()).entrySet()) {
            Field field = (Field) xmlChoiceMapping.getClassToFieldMappings().get(entry.getKey());
            if(field != null && field.equals(this.xmlField)) {
                Mapping mappingForClass = entry.getValue();
                if(mappingForClass != xmlMapping) {
                    if(this.choiceElementNodeValues == null) {
                        choiceElementNodeValues = new HashMap();
                    }
                    choiceElementNodeValues.put(entry.getKey(), getNodeValueForMapping(mappingForClass));
                }
            }
        }
    }

    // Maps a choice element mapping to the node value type that handles it;
    // composite-object handling is the fallback.
    private NodeValue getNodeValueForMapping(Mapping xmlMapping) {
        if(xmlMapping instanceof BinaryDataMapping){
            return new XMLBinaryDataMappingNodeValue((BinaryDataMapping)xmlMapping);
        } else if(xmlMapping instanceof DirectMapping) {
            return new XMLDirectMappingNodeValue((DirectMapping)xmlMapping);
        } else if(xmlMapping instanceof ObjectReferenceMapping) {
            return new XMLObjectReferenceMappingNodeValue((ObjectReferenceMapping)xmlMapping, xmlField);
        } else {
            return new XMLCompositeObjectMappingNodeValue((CompositeObjectMapping)xmlMapping);
        }
    }

    public void setNullCapableNodeValue(XMLChoiceObjectMappingNodeValue nodeValue) {
        this.nullCapableNodeValue = nodeValue;
    }

    public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) {
        return this.marshal(xPathFragment, marshalRecord, object, session, namespaceResolver, ObjectMarshalContext.getInstance());
    }

    public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) {
        if(xmlChoiceMapping.isReadOnly()) {
            return false;
        }
        Object value = xmlChoiceMapping.getFieldValue(object, session, marshalRecord);
        return this.marshalSingleValue(xPathFragment, marshalRecord, object, value, session, namespaceResolver, marshalContext);
    }

    /**
     * Marshals the value only when it belongs to this node value's field:
     * a Root value is matched by element name/namespace against the choice's
     * fields; otherwise the value's class (walking up the superclass chain)
     * is matched against the class-to-field mappings.
     *
     * @return true if this delegate wrote the value, false if another field's
     *         node value should handle it.
     */
    public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object value, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) {
        Class valueClass = null;
        if (value instanceof Root) {
            Root root = (Root)value;
            for(CoreField next: (List) this.xmlChoiceMapping.getFields()) {
                XPathFragment fragment = ((Field)next).getXPathFragment();
                while(fragment != null && !fragment.nameIsText) {
                    if(fragment.getNextFragment() == null || fragment.getHasText()) {
                        if(fragment.getLocalName().equals(root.getLocalName())) {
                            String fragUri = fragment.getNamespaceURI();
                            String namespaceUri = root.getNamespaceURI();
                            if((namespaceUri == null && fragUri == null) || (namespaceUri != null && fragUri != null && namespaceUri.equals(fragUri))) {
                                if(next == this.xmlField) {
                                    return this.choiceElementNodeValue.marshalSingleValue(xPathFragment, marshalRecord, object, value, session, namespaceResolver, marshalContext);
                                } else {
                                    // If this root is associated with another field, then
                                    // return and let that NodeValue handle it.
                                    return false;
                                }
                            }
                        }
                    }
                    fragment = fragment.getNextFragment();
                }
            }
            valueClass = root.getObject().getClass();
        }
        if (value != null) {
            if(valueClass == null) {
                valueClass = value.getClass();
            }
            Field fieldForClass = null;
            Class theClass = valueClass;
            while(theClass != null) {
                // NOTE(review): this lookup uses valueClass while the loop
                // advances theClass — the superclass walk therefore always looks
                // up the same key. Compare the getClassToSourceFieldsMappings()
                // loop below, which uses theClass; verify intended behavior.
                fieldForClass = (Field) xmlChoiceMapping.getClassToFieldMappings().get(valueClass);
                if(fieldForClass != null) {
                    break;
                }
                theClass = theClass.getSuperclass();
            }
            if (fieldForClass != null && fieldForClass.equals(this.xmlField)) {
                if(this.choiceElementNodeValues != null) {
                    NodeValue nodeValue = this.choiceElementNodeValues.get(theClass);
                    if(nodeValue != null) {
                        return nodeValue.marshalSingleValue(xPathFragment, marshalRecord, object, value, session, namespaceResolver, marshalContext);
                    }
                }
                return this.choiceElementNodeValue.marshalSingleValue(xPathFragment, marshalRecord, object, value, session, namespaceResolver, marshalContext);
            }
            List sourceFields = null;
            theClass = valueClass;
            while(theClass != null) {
                sourceFields = (List) xmlChoiceMapping.getClassToSourceFieldsMappings().get(theClass);
                if(sourceFields != null) {
                    break;
                }
                theClass = theClass.getSuperclass();
            }
            if (sourceFields != null && sourceFields.contains(this.xmlField)) {
                return this.choiceElementNodeValue.marshalSingleValue(xPathFragment, marshalRecord, object, value, session, namespaceResolver, marshalContext);
            }
        }
        return false;
    }

    public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) {
        // When the mapping has a converter, wrap the unmarshal context so the
        // converted value is applied, then restore the original context.
        if(null != xmlChoiceMapping.getConverter()) {
            UnmarshalContext unmarshalContext = unmarshalRecord.getUnmarshalContext();
            unmarshalRecord.setUnmarshalContext(new ChoiceUnmarshalContext(unmarshalContext, xmlChoiceMapping));
            this.choiceElementNodeValue.endElement(xPathFragment, unmarshalRecord);
            unmarshalRecord.setUnmarshalContext(unmarshalContext);
        } else {
            this.choiceElementNodeValue.endElement(xPathFragment, unmarshalRecord);
        }
    }

    public boolean startElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Attributes atts) {
        return this.choiceElementNodeValue.startElement(xPathFragment, unmarshalRecord, atts);
    }

    public void setXPathNode(XPathNode xPathNode) {
        super.setXPathNode(xPathNode);
        this.choiceElementNodeValue.setXPathNode(xPathNode);
        if(this.choiceElementNodeValues != null) {
            for(NodeValue next:choiceElementNodeValues.values()) {
                next.setXPathNode(xPathNode);
            }
        }
    }

    /**
     * The underlying choice element node value will handle attributes.
     */
    public void attribute(UnmarshalRecord unmarshalRecord, String URI, String localName, String value) {
        this.choiceElementNodeValue.attribute(unmarshalRecord, URI, localName, value);
    }

    @Override
    public Mapping getMapping() {
        return this.xmlChoiceMapping;
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XPathEngine.java0000664000000000000000000016715612216173126024064 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Vector; import javax.xml.namespace.QName; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.helper.CoreClassConstants; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.documentpreservation.NoDocumentPreservationPolicy; import org.eclipse.persistence.internal.oxm.documentpreservation.XMLBinderPolicy; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.UnionField; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.oxm.NamespaceResolver; import org.eclipse.persistence.oxm.XMLField; import org.eclipse.persistence.oxm.documentpreservation.DocumentPreservationPolicy; import org.eclipse.persistence.oxm.record.XMLEntry; import org.eclipse.persistence.platform.xml.XMLNodeList; import org.w3c.dom.Attr; import org.w3c.dom.CDATASection; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.Text; /** * INTERNAL: *

Purpose: Utility class for creating and removing XML nodes using * XPath expressions.

* @author Rick Barkhouse - rick.barkhouse@oracle.com * @since OracleAS TopLink 10g (10.0.3), 03/11/2003 10:21:42 */ public class XPathEngine < XML_FIELD extends Field >{ private static XPathEngine instance = null; private UnmarshalXPathEngine unmarshalXPathEngine; private DocumentPreservationPolicy noDocPresPolicy = new NoDocumentPreservationPolicy();//handles xpath engine calls without a policy private DocumentPreservationPolicy xmlBinderPolicy = new XMLBinderPolicy();//used for adding new elements to a collection. /** * Return the XPathEngine singleton. */ public static XPathEngine getInstance() { if (instance == null) { instance = new XPathEngine(); } return instance; } private XPathEngine() { super(); unmarshalXPathEngine = new UnmarshalXPathEngine(); } /** * Create the node path specified by xpathString under element. * This method also supports creating attributes and indexed elements using the appropriate * XPath syntax ('@' and '[ ]' respectively). * * @param xmlField XMLField containing xpath expression representing the node path to create * @param element Root element under which to create path * * @return The last XMLNode in the path * * @exception org.eclipse.persistence.oxm.exceptions.XMLMarshalException Thrown if passed an invalid XPath string */ public Node create(Field xmlField, Node element, CoreAbstractSession session) throws XMLMarshalException { return create(xmlField, element, this, session); } public Node create(Field xmlField, Node element, Object value, CoreAbstractSession session) { return create(xmlField, element, value, null, noDocPresPolicy, session); } /** * Create the node path specified by xpathString under element * and initialize the leaf node with value. * This method also supports creating attributes and integer-indexed elements using the * appropriate XPath syntax ('@' and '[ ]' respectively). 
* * @param xmlField XMLField containing xpath expression representing the node path to create * @param element Root element under which to create path * @param value Initial value for the leaf node (should not be a list) * * @return The last XMLNode in the path * * @exception org.eclipse.persistence.oxm.exceptions.XMLMarshalException Thrown if passed an invalid XPath string */ public Node create(Field xmlField, Node element, Object value, Field lastUpdated, DocumentPreservationPolicy docPresPolicy, CoreAbstractSession session) throws XMLMarshalException { if (null == value) { return null; } if (docPresPolicy == null) { //EIS case and others docPresPolicy = this.noDocPresPolicy; } XPathFragment fragment = xmlField.getXPathFragment(); if (fragment.getNextFragment() == null) { if (fragment.nameIsText()) { Object textValue = getValueToWrite(value, xmlField, session); if (textValue instanceof String) { if (xmlField.isTypedTextField()) { XMLNodeList createdElements = new XMLNodeList(); createdElements.add(element); addTypeAttributes(createdElements, xmlField, value, resolveNamespacePrefixForURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, getNamespaceResolverForField(xmlField))); } return addText(xmlField, element, (String)textValue); } return null; } } NodeList created = createCollection(xmlField, element, value, lastUpdated, docPresPolicy, session); if ((created == null) || (created.getLength() == 0)) { return null; } return created.item(0); } public void create(List xmlFields, Node contextNode, List values, Field lastUpdatedField, DocumentPreservationPolicy docPresPolicy, CoreAbstractSession session) { List itemsToWrite = new ArrayList(); for(int i = 0, size = values.size(); i < size; i++) { XMLEntry nextEntry = values.get(i); itemsToWrite.add(nextEntry.getValue()); if(i == (values.size() -1) || values.get(i+1).getXMLField() != nextEntry.getXMLField()) { create(nextEntry.getXMLField(), contextNode, itemsToWrite, lastUpdatedField, docPresPolicy, session); 
itemsToWrite = new ArrayList(); lastUpdatedField = nextEntry.getXMLField(); } } } /** * Create the node path specified by xpathString under element * and initialize the leaf node with value. * This method also supports creating attributes and integer-indexed elements using the * appropriate XPath syntax ('@' and '[ ]' respectively). * * @param xmlField XMLField containing xpath expression representing the node path to create * @param element Root element under which to create path * @param value Initial value for the leaf node (this can be a value or a collection of values) * * @return The last XMLNode in the path * * @exception org.eclipse.persistence.oxm.exceptions.XMLMarshalException Thrown if passed an invalid XPath string */ private NodeList createCollection(Field xmlField, Node element, Object value, Field lastUpdated, DocumentPreservationPolicy docPresPolicy, CoreAbstractSession session) throws XMLMarshalException { XMLNodeList createdElements = new XMLNodeList(); //CR:### If the value is null, then the node(s) must not be created. if ((value == null) || (value instanceof Collection && (((Collection)value).size() == 0))) { return createdElements; } Node nextElement = element; Element sibling = null; XPathFragment siblingFragment = null; if(lastUpdated != null) { siblingFragment = lastUpdated.getXPathFragment(); } if ((lastUpdated != null) && !siblingFragment.isAttribute() && !siblingFragment.nameIsText()) { //find the sibling element. NodeList nodes = unmarshalXPathEngine.selectElementNodes(element, siblingFragment, getNamespaceResolverForField(lastUpdated)); if (nodes.getLength() > 0) { sibling = (Element)nodes.item(nodes.getLength() - 1); } } NodeList elements; XPathFragment next = xmlField.getXPathFragment(); while (next != null) { if (next.isAttribute()) { addAttribute(next, xmlField, nextElement, value, session); } else if (next.containsIndex()) { // If we are creating multiple nodes from this XPath, assume the value is for the last node. 
boolean hasMore = !(next.getHasText() || (next.getNextFragment() == null)); if (hasMore) { nextElement = addIndexedElement(next, xmlField, nextElement, this, !hasMore, session); } else { Object valueToWrite = getValueToWrite(value, xmlField, session); nextElement = addIndexedElement(next, xmlField, nextElement, valueToWrite, !hasMore, session); createdElements.add(nextElement); } } else { boolean hasMore = !(next.getHasText() || (next.getNextFragment() == null)); if (hasMore) { elements = addElements(next, xmlField, nextElement, this, !hasMore, sibling, docPresPolicy, session); } else { XPathFragment nextFragment = next.getNextFragment(); if ((nextFragment != null) && nextFragment.isAttribute() && !(value instanceof List)) { elements = addElements(next, xmlField, nextElement, this, hasMore, sibling, docPresPolicy, session); } else { Object valueToWrite = getValueToWrite(value, xmlField, session); elements = addElements(next, xmlField, nextElement, valueToWrite, !hasMore, sibling, docPresPolicy, session); createdElements.addAll(elements); } } nextElement = elements.item(elements.getLength() - 1); } if(siblingFragment != null && sibling != null && siblingFragment.equals(next)) { //if the sibling shares a grouping element, update the sibling siblingFragment = siblingFragment.getNextFragment(); if ((siblingFragment != null) && !siblingFragment.isAttribute() && !siblingFragment.nameIsText()) { //find the sibling element. 
NodeList nodes = unmarshalXPathEngine.selectElementNodes(nextElement, siblingFragment, getNamespaceResolverForField(lastUpdated)); if (nodes.getLength() > 0) { sibling = (Element)nodes.item(nodes.getLength() - 1); } else { sibling = null; } } else { sibling = null; } } else { sibling = null; } next = next.getNextFragment(); if ((next != null) && next.nameIsText()) { next = null; } } if (xmlField.isTypedTextField()) { addTypeAttributes(createdElements, xmlField, value, resolveNamespacePrefixForURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, getNamespaceResolverForField(xmlField))); } return createdElements; } private Object getNonNodeValueToWrite(Object value, Field xmlField, CoreAbstractSession session) { if (this == value) { return this; } QName schemaType = null; if(xmlField.getLeafElementType() != null){ schemaType = xmlField.getLeafElementType(); }else if (xmlField.isUnionField()) { return getValueToWriteForUnion((UnionField)xmlField, value, session); }else if (xmlField.isTypedTextField()) { schemaType = xmlField.getXMLType(value.getClass()); }else if (xmlField.getSchemaType() != null) { schemaType = xmlField.getSchemaType(); } if (value instanceof List) { if (xmlField.usesSingleNode()) { StringBuilder returnStringBuilder = new StringBuilder(); for (int i = 0; i < ((List)value).size(); i++) { Object nextItem = ((List)value).get(i); String nextConvertedItem = null; if(schemaType != null && schemaType.equals(Constants.QNAME_QNAME)){ nextConvertedItem = getStringForQName((QName)nextItem, getNamespaceResolverForField(xmlField)); }else{ nextConvertedItem = (String) ((XMLConversionManager)session.getDatasourcePlatform().getConversionManager()).convertObject(nextItem, CoreClassConstants.STRING, schemaType); } returnStringBuilder.append(nextConvertedItem); if (i < (((List)value).size() - 1)) { returnStringBuilder.append(' '); } } return returnStringBuilder.toString(); } else { ArrayList items = new ArrayList(((List)value).size()); for (int index = 0; index < 
((List)value).size(); index++) {
                    Object nextItem = ((List)value).get(index);
                    // DOM Nodes and the NIL marker are passed through untouched;
                    // everything else is converted to its String representation.
                    if (nextItem instanceof Node || nextItem == XMLRecord.NIL) {
                        items.add(nextItem);
                    } else {
                        if(schemaType != null && schemaType.equals(Constants.QNAME_QNAME)){
                            // QNames need the namespace resolver to build a prefixed string.
                            String nextConvertedItem = getStringForQName((QName)nextItem, getNamespaceResolverForField(xmlField));
                            items.add(nextConvertedItem);
                        }else{
                            String nextConvertedItem = (String) ((XMLConversionManager)session.getDatasourcePlatform().getConversionManager()).convertObject(nextItem, CoreClassConstants.STRING, schemaType);
                            items.add(nextConvertedItem);
                        }
                    }
                }
                return items;
            }
        } else {
            // Single (non-List) value: convert to String, handling QName specially.
            if(schemaType != null && schemaType.equals(Constants.QNAME_QNAME)){
                return getStringForQName((QName)value, getNamespaceResolverForField(xmlField));
            }
            return ((XMLConversionManager)session.getDatasourcePlatform().getConversionManager()).convertObject(value, CoreClassConstants.STRING, schemaType);
        }
    }

    /**
     * Return the value to be written into the DOM. DOM Nodes and the NIL
     * marker are written as-is; any other value is converted via
     * getNonNodeValueToWrite.
     */
    private Object getValueToWrite(Object value, Field xmlField, CoreAbstractSession session) {
        if (value instanceof Node || value == XMLRecord.NIL) {
            return value;
        }
        return getNonNodeValueToWrite(value, xmlField, session);
    }

    /**
     * Convert a single value for a union field. Each schema type of the union
     * is tried in order; the first successful conversion determines the schema
     * type used for the final String conversion. If every conversion attempt
     * fails, the last schema type in the union is used.
     */
    private String getSingleValueToWriteForUnion(UnionField xmlField, Object value, CoreAbstractSession session) {
        List schemaTypes = xmlField.getSchemaTypes();
        QName schemaType = null;
        for (int i = 0; i < schemaTypes.size(); i++) {
            QName nextQName = (QName)(xmlField).getSchemaTypes().get(i);
            try {
                if (nextQName != null) {
                    Class javaClass = xmlField.getJavaClass(nextQName);
                    value = ((XMLConversionManager)session.getDatasourcePlatform().getConversionManager()).convertObject(value, javaClass, nextQName);
                    schemaType = nextQName;
                    break;
                }
            } catch (ConversionException ce) {
                // Conversion to this member type failed; fall back to the last
                // type in the union if no conversion ever succeeds.
                if (i == (schemaTypes.size() - 1)) {
                    schemaType = nextQName;
                }
            }
        }
        return (String) ((XMLConversionManager)session.getDatasourcePlatform().getConversionManager()).convertObject(value, CoreClassConstants.STRING, schemaType);
    }

    private Object getValueToWriteForUnion(UnionField xmlField,
Object value, CoreAbstractSession session) { if (value instanceof List) { if (xmlField.usesSingleNode()) { StringBuilder returnStringBuilder = new StringBuilder(); Object next = null; for (int i = 0; i < ((List)value).size(); i++) { next = ((List)value).get(i); returnStringBuilder.append(getSingleValueToWriteForUnion(xmlField, next, session)); if (i < (((List)value).size() - 1)) { returnStringBuilder.append(' '); } } return returnStringBuilder.toString(); } else { ArrayList items = new ArrayList(((List)value).size()); Object next = null; for (int i = 0; i < ((List)value).size(); i++) { next = ((List)value).get(i); items.add(getSingleValueToWriteForUnion(xmlField, next, session)); } return items; } } else { return getSingleValueToWriteForUnion(xmlField, value, session); } } /** * Add a new indexed element to the parent element. * Will overwrite if an element already exists at that position. Currently only supports * integer indices. * * @param xpathString element and index to create (in the form 'element[index]') * @param namespaceResolover namespaceResolover of the element being created * @param parent Parent element * @param schemaType schemaType for the new node * @param value Value for the new node * @param forceCreate If true, create a new element even if one with the same name currently exists * * @return The XMLElement that was created/found * * @exception org.eclipse.persistence.oxm.exceptions.XMLMarshalException Thrown if passed an invalid XPath string */ private Node addIndexedElement(XPathFragment fragment, Field xmlField, Node parent, Object value, boolean forceCreate, CoreAbstractSession session) throws XMLMarshalException { String element = fragment.getShortName(); int index = fragment.getIndexValue(); if (index < 0) { throw XMLMarshalException.invalidXPathIndexString(fragment.getXPath()); } Node existingElement; NamespaceResolver namespaceResolver = getNamespaceResolverForField(xmlField); for (int i = 1; i < index; i++) { Field field = new 
XMLField(element + "[" + i + "]"); field.setNamespaceResolver(namespaceResolver); existingElement = (Node)unmarshalXPathEngine.selectSingleNode(parent, field, namespaceResolver); if (existingElement == null) { addElement(new XPathFragment(element), xmlField, parent, this, true, session); } } Field field = new XMLField(fragment.getXPath()); field.setNamespaceResolver(namespaceResolver); existingElement = (Node)unmarshalXPathEngine.selectSingleNode(parent, field, namespaceResolver); if (existingElement == null) { return addElement(new XPathFragment(element), field, parent, value, true, session); } if ((existingElement != null) && !forceCreate) { return existingElement; } String namespace = resolveNamespacePrefix(fragment, namespaceResolver); Element elementToReturn = parent.getOwnerDocument().createElementNS(namespace, element); if ((value != this) && (value != null)) { if (value instanceof String) { addText(xmlField, elementToReturn, (String)value); } } parent.replaceChild(elementToReturn, existingElement); return elementToReturn; } /** * Add a new element to the parent element. If an element with * this name already exists, return it (unless forceCreate is true). 
*
     * @param fragment XPath fragment naming the element to create
     * @param parent Parent element
     * @param value Value for the new node
     * @param forceCreate If true, create a new element even if one with the same name currently exists
     *
     * @return The XMLElement that was created/found
     */
    private Node addElement(XPathFragment fragment, Field xmlField, Node parent, Object value, boolean forceCreate, CoreAbstractSession session) {
        return addElement(fragment, xmlField, parent, null, value, forceCreate, session);
    }

    /**
     * Overload delegating to addElements; returns the first element
     * created/found, or null when none was.
     */
    private Node addElement(XPathFragment fragment, Field xmlField, Node parent, QName schemaType, Object value, boolean forceCreate, CoreAbstractSession session) {
        NodeList list = addElements(fragment, xmlField, parent, value, forceCreate, null, noDocPresPolicy, session);
        if (list.getLength() > 0) {
            return list.item(0);
        }
        return null;
    }

    /**
     * Add a new element to the parent element. If an element with
     * this name already exists, return it (unless forceCreate is true).
     *
     * @param fragment Name of element to create
     * @param xmlField field providing the namespace resolver for the new element
     * @param parent Parent element
     * @param value Value for the new node
     * @param forceCreate If true, create a new element even if one with the same name currently exists
     * @param sibling existing sibling used by the ordering policy when appending
     * @param docPresPolicy policy controlling where the new nodes are inserted
     * @return The NodeList that was created/found
     */
    private NodeList addElements(XPathFragment fragment, Field xmlField, Node parent, Object value, boolean forceCreate, Element sibling, DocumentPreservationPolicy docPresPolicy, CoreAbstractSession session) {
        if (!forceCreate) {
            // Reuse existing matching elements unless forced to create new ones.
            NodeList nodes = unmarshalXPathEngine.selectElementNodes(parent, fragment, getNamespaceResolverForField(xmlField));
            if (nodes.getLength() > 0) {
                return nodes;
            }
        }
        XMLNodeList elementsToReturn = new XMLNodeList();
        if (value == this) {
            // 'this' appears to act as a marker for "create the element with no
            // content" — see the parallel check in addAttribute. TODO confirm.
            String namespace = resolveNamespacePrefix(fragment, getNamespaceResolverForField(xmlField));
            Element newElement = parent.getOwnerDocument().createElementNS(namespace, fragment.getShortName());
            XPathPredicate
predicate = fragment.getPredicate(); if(predicate != null) { XPathFragment predicateFragment = predicate.getXPathFragment(); if(predicateFragment.isAttribute()) { if(predicateFragment.getNamespaceURI() == null || predicateFragment.getNamespaceURI().length() == 0) { newElement.setAttribute(predicateFragment.getLocalName(), fragment.getPredicate().getValue()); } else { String name = predicateFragment.getLocalName(); if(predicateFragment.getPrefix() != null && predicateFragment.getPrefix().length() != 0) { name = predicateFragment.getPrefix() + Constants.COLON + name; } newElement.setAttributeNS(predicateFragment.getNamespaceURI(), name, fragment.getPredicate().getValue()); } } } elementsToReturn.add(newElement); docPresPolicy.getNodeOrderingPolicy().appendNode(parent, newElement, sibling); } else if (value == null) { elementsToReturn.add(parent); } else { // Value may be a direct value, node, or list of values. if (value instanceof List) { List values = (List)value; for (int index = 0; index < values.size(); index++) { Element newElement = null; if (values.get(index) != XMLRecord.NIL) { newElement = (Element) createElement(parent, fragment, xmlField, values.get(index), session); } else { newElement = (Element) createElement(parent, fragment, xmlField, Constants.EMPTY_STRING, session); addXsiNilToElement(newElement, xmlField); } XPathPredicate predicate = fragment.getPredicate(); if(predicate != null) { XPathFragment predicateFragment = predicate.getXPathFragment(); if(predicateFragment.isAttribute()) { if(predicateFragment.getNamespaceURI() == null || predicateFragment.getNamespaceURI().length() == 0) { newElement.setAttribute(predicateFragment.getLocalName(), fragment.getPredicate().getValue()); } else { String name = predicateFragment.getLocalName(); if(predicateFragment.getPrefix() != null && predicateFragment.getPrefix().length() != 0) { name = predicateFragment.getPrefix() + Constants.COLON + name; } newElement.setAttributeNS(predicateFragment.getNamespaceURI(), 
name, fragment.getPredicate().getValue()); } } } docPresPolicy.getNodeOrderingPolicy().appendNode(parent, newElement, sibling); elementsToReturn.add(newElement); sibling = newElement; } } else { Element newElement = null; if (value != XMLRecord.NIL) { newElement = (Element)createElement(parent, fragment, xmlField, value, session); } else { newElement = (Element) createElement(parent, fragment, xmlField, Constants.EMPTY_STRING, session); addXsiNilToElement(newElement, xmlField); } XPathPredicate predicate = fragment.getPredicate(); if(predicate != null) { XPathFragment predicateFragment = predicate.getXPathFragment(); if(predicateFragment.isAttribute()) { if(predicateFragment.getNamespaceURI() == null || predicateFragment.getNamespaceURI().length() == 0) { newElement.setAttribute(predicateFragment.getLocalName(), fragment.getPredicate().getValue()); } else { String name = predicateFragment.getLocalName(); if(predicateFragment.getPrefix() != null && predicateFragment.getPrefix().length() != 0) { name = predicateFragment.getPrefix() + Constants.COLON + name; } newElement.setAttributeNS(predicateFragment.getNamespaceURI(), name, fragment.getPredicate().getValue()); } } } docPresPolicy.getNodeOrderingPolicy().appendNode(parent, newElement, sibling); elementsToReturn.add(newElement); } } return elementsToReturn; } /** * Creates a new Element and appends a value to an element. 
*
     * @param parent Element which will own the newly created element
     * @param fragment XPath fragment supplying the tag name for the new element
     * @param xmlField field used for namespace resolution and value handling
     * @param value Object to add
     * @param session session providing the conversion manager
     */
    private Node createElement(Node parent, XPathFragment fragment, Field xmlField, Object value, CoreAbstractSession session) {
        if (value == null) {
            return parent;
        }
        if (value instanceof Node) {
            // DOM values are imported/renamed by the Node-based overload.
            return createElement(parent, fragment, getNamespaceResolverForField(xmlField), (Node)value);
        }
        Element element = null;
        if (parent.getOwnerDocument() == null) {
            // parent has no owner document, i.e. it is the Document itself;
            // use its root element instead of creating a new one.
            element = ((Document)parent).getDocumentElement();
        } else {
            String namespace = resolveNamespacePrefix(fragment, getNamespaceResolverForField(xmlField));
            // Reuse an in-scope prefix for the namespace when one is already
            // declared on the DOM.
            NamespaceResolver domResolver = new NamespaceResolver();
            domResolver.setDOM(parent);
            String existingPrefix = domResolver.resolveNamespaceURI(namespace);
            String elementName = fragment.getShortName();
            if(existingPrefix != null) {
                if(existingPrefix.length() > 0) {
                    elementName = existingPrefix + Constants.COLON + fragment.getLocalName();
                } else {
                    // The namespace is the in-scope default namespace; no prefix needed.
                    elementName = fragment.getLocalName();
                }
            }
            element = parent.getOwnerDocument().createElementNS(namespace, elementName);
            if (fragment.isGeneratedPrefix() && existingPrefix == null) {
                // Declare the generated prefix locally since the DOM does not.
                element.setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON + fragment.getPrefix(), fragment.getNamespaceURI());
            }
            XPathPredicate predicate = fragment.getPredicate();
            if(predicate != null) {
                // Materialize an attribute predicate (e.g. foo[@bar='x']) on the new element.
                XPathFragment predicateFragment = predicate.getXPathFragment();
                if(predicateFragment.isAttribute()) {
                    element.setAttributeNS(predicateFragment.getNamespaceURI(), predicateFragment.getLocalName(), fragment.getPredicate().getValue());
                }
            }
        }
        XPathFragment nextFragment = fragment.getNextFragment();
        if ((nextFragment != null) && nextFragment.isAttribute()) {
            addAttribute(nextFragment, xmlField, element, value, session);
        } else if (value instanceof String && ((String)value).length() > 0) {
            addText(xmlField, element, (String)value);
        } else if (value ==
XMLRecord.NIL) {
            addXsiNilToElement(element, xmlField);
        }
        return element;
    }

    /**
     * Create an element for the last fragment of the given field's XPath
     * without attaching it to the parent. The element is created in the
     * parent's owner document; an in-scope prefix for the target namespace
     * is reused when one is already declared on the DOM.
     *
     * @param parent node whose owner document is used to create the element
     * @param xmlField field whose last XPath fragment names the element
     * @return the newly created, unattached Element
     */
    public Element createUnownedElement(Node parent, Field xmlField) {
        XPathFragment lastFragment = xmlField.getXPathFragment();
        while (lastFragment.getNextFragment() != null) {
            lastFragment = lastFragment.getNextFragment();
        }
        String nodeName = lastFragment.getShortName(); // NOTE(review): local is unused below
        String namespace = resolveNamespacePrefix(lastFragment, getNamespaceResolverForField(xmlField));
        // Check whether the namespace is already declared on the DOM so its
        // prefix can be reused instead of the fragment's own prefix.
        NamespaceResolver domResolver = new NamespaceResolver();
        domResolver.setDOM(parent);
        String existingPrefix = domResolver.resolveNamespaceURI(namespace);
        String elementName = lastFragment.getShortName();
        if(existingPrefix != null) {
            if(existingPrefix.length() > 0) {
                elementName = existingPrefix + Constants.COLON + lastFragment.getLocalName();
            } else {
                // The namespace is the in-scope default namespace; no prefix needed.
                elementName = lastFragment.getLocalName();
            }
        }
        Element elem = parent.getOwnerDocument().createElementNS(namespace, elementName);
        if (lastFragment.isGeneratedPrefix() && existingPrefix == null) {
            // Declare the generated prefix locally since the DOM does not.
            elem.setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON + lastFragment.getPrefix(), lastFragment.getNamespaceURI());
        }
        return elem;
    }

    /**
     * Adds a type attribute on an element, the value of the attribute is determined
     * by performing a lookup in the SimpleTypeTranslator to find the Schema type
     * for the value.
* * @param elements NodeList which will have a type attribute added to them * @param simpleTypeTranslator SimpleTypeTranslator to perform lookup in * @param value Object to base the lookup on * @param schemaInstancePrefix the prefix representing the schema instance namespace */ private void addTypeAttributes(NodeList elements, Field field, Object value, String schemaInstancePrefix) { NamespaceResolver namespaceResolver = getNamespaceResolverForField(field); if (!field.isTypedTextField()) { return; } List values; if (value instanceof List) { values = (List)value; } else { values = new ArrayList(); values.add(value); } int size = elements.getLength(); int valuesSize = values.size(); if (size != valuesSize) { return; } Node next = null; for (int i = 0; i < size; i++) { next = elements.item(i); if (next.getNodeType() == Node.ELEMENT_NODE) { Class valueClass = values.get(i).getClass(); if(valueClass != CoreClassConstants.STRING){ QName qname = field.getXMLType(valueClass); if (qname != null) { if (null == schemaInstancePrefix) { schemaInstancePrefix = namespaceResolver.generatePrefix(Constants.SCHEMA_INSTANCE_PREFIX); ((Element)next).setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON + schemaInstancePrefix, javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); } String type; String prefix = this.resolveNamespacePrefixForURI(qname.getNamespaceURI(), namespaceResolver); if (prefix == null || prefix.length() == 0) { if(qname.getNamespaceURI().equals(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI)){ prefix = namespaceResolver.generatePrefix(Constants.SCHEMA_PREFIX); }else{ prefix = namespaceResolver.generatePrefix(); } ((Element)next).setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON + prefix, qname.getNamespaceURI()); } type = prefix + Constants.COLON + qname.getLocalPart(); 
((Element)next).setAttributeNS(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, schemaInstancePrefix + Constants.COLON + Constants.SCHEMA_TYPE_ATTRIBUTE, type); } } } } } /** * Creates a new Element and appends a value to an element. * * @param parent Element which will own the newly created element * @param elementName tag name for the new element * @param value Node to add * */ private Node createElement(Node parent, XPathFragment fragment, NamespaceResolver namespaceResolver, Node value) { String elementName = fragment.getXPath(); // The case of the parent being the document (element is the root) needs to be handled. // Document have no owner document, but are the document. Document document = parent.getOwnerDocument(); if ((document == null) && (parent.getNodeType() == Node.DOCUMENT_NODE)) { document = (Document)parent; } String nodeUri = value.getNamespaceURI(); String nodeName = value.getLocalName(); //String fragUri = resolveNamespacePrefix(fragment, namespaceResolver); String fragUri = fragment.getNamespaceURI(); String fragName = fragment.getLocalName(); if ((nodeName != null) && nodeName.equals(fragName) && (((nodeUri != null) && nodeUri.equals(fragUri)) || ((nodeUri == null) && (fragUri == null)))) { if (document != value.getOwnerDocument()) { return document.importNode(value, true); } return value; } else { // Need to reset the node name. 
String namespace = resolveNamespacePrefix(fragment, namespaceResolver); Element clone = document.createElementNS(namespace, fragName); NamedNodeMap attributes = value.getAttributes(); int attributesLength = attributes.getLength(); for (int index = 0; index < attributesLength; index++) { Node attribute = document.importNode(attributes.item(index), true); clone.setAttributeNode((Attr)attribute); } NodeList elements = value.getChildNodes(); int elementsLength = elements.getLength(); for (int index = 0; index < elementsLength; index++) { Node attribute = document.importNode(elements.item(index), true); clone.appendChild(attribute); } return clone; } } /** * Add a new attribute to an element. If the attribute already exists, return the element. * * @param attributeName Name of the attribute to add * @param parent Element to create the attribute on * @param value Value for the new attribute * * @return The XMLElement that the attribute was added to (same as the parent parameter). */ private Node addAttribute(XPathFragment attributeFragment, Field xmlField, Node parent, Object value, CoreAbstractSession session) { Object valueToWrite = null; if (!(parent instanceof Element)) { return parent; } Element parentElement = (Element)parent; if (value instanceof Node) { if (((Node)value).getNodeType() == Node.ATTRIBUTE_NODE) { Attr attr = (Attr)value; if (parent.getAttributes().getNamedItemNS(attr.getNamespaceURI(), attr.getLocalName()) == null) { String pfx = null; if (xmlField.getNamespaceResolver() != null) { pfx = getNamespaceResolverForField(xmlField).resolveNamespaceURI(attr.getNamespaceURI()); } if (pfx != null) { // If the namespace resolver has a prefix for the node's URI, use it parentElement.setAttributeNS(attr.getNamespaceURI(), pfx + Constants.COLON + attr.getLocalName(), attr.getNodeValue()); } else { // No entry for the node's URI in the resolver, so use the node's // prefix/uri pair and define the URI locally parentElement.setAttributeNS(attr.getNamespaceURI(), 
attr.getName(), attr.getNodeValue()); } } return parent; } valueToWrite = value; } else { valueToWrite = getNonNodeValueToWrite(value, xmlField, session); } String attributeName = attributeFragment.getLocalName(); String attributeNamespace = resolveNamespacePrefix(attributeFragment, getNamespaceResolverForField(xmlField)); if ((valueToWrite != null) && (parent.getAttributes().getNamedItemNS(attributeNamespace, attributeName) == null)) { if (valueToWrite == this) { parentElement.setAttributeNS(attributeNamespace, attributeFragment.getShortName(), Constants.EMPTY_STRING); } else if (valueToWrite instanceof String) { parentElement.setAttributeNS(attributeNamespace, attributeFragment.getShortName(), (String)valueToWrite); } if (attributeFragment.isGeneratedPrefix()) { parentElement.setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON + attributeFragment.getPrefix(), attributeFragment.getNamespaceURI()); } } return parent; } // ========================================================================================== /** * Remove a node. If xpathString points to an indexed element, the element will not be removed, * but will instead be reinitialzed (to maintain the index of the collection). * * @param xmlField Field containing XPath query string * @param element Root element at which to begin search * * @return NodeList containing the nodes that were removed. * * @exception org.eclipse.persistence.oxm.exceptions.XMLMarshalException Thrown if passed an invalid XPath string */ public NodeList remove(Field xmlField, Node element) throws XMLMarshalException { return remove(xmlField, element, false); } /** * Remove a node. * * @param xmlField Field containing XPath query string * @param element Root element at which to begin search * @param forceRemove If true, then indexed elements will be truly deleted, otherwise they will be reinitialized * * @return NodeList containing the nodes that were removed. 
*
     * @exception org.eclipse.persistence.oxm.exceptions.XMLMarshalException Thrown if passed an invalid XPath string
     */
    public NodeList remove(Field xmlField, Node element, boolean forceRemove) throws XMLMarshalException {
        String xpathString = xmlField.getXPath();
        NodeList nodes = unmarshalXPathEngine.selectNodes(element, xmlField, getNamespaceResolverForField(xmlField));
        int numberOfNodes = nodes.getLength();
        // Indexed (positional) elements are nulled out rather than removed so
        // the indices of the remaining collection entries stay valid.
        boolean shouldNullOutNode = containsIndex(xpathString) && !forceRemove;

        // Remove the element or attribute, for positional element null-out instead of remove.
        for (int i = 0; i < numberOfNodes; i++) {
            Node node = nodes.item(i);
            if (node.getNodeType() == Node.ATTRIBUTE_NODE) {
                ((Attr)node).getOwnerElement().removeAttribute(node.getNodeName());
            } else {
                if (shouldNullOutNode) {
                    // Replace with an empty element of the same name to preserve position.
                    Node blankNode = node.getParentNode().getOwnerDocument().createElementNS(node.getNamespaceURI(), node.getNodeName());
                    node.getParentNode().replaceChild(blankNode, node);
                } else {
                    node.getParentNode().removeChild(node);
                }
            }
        }
        return nodes;
    }

    // ==========================================================================================

    /**
     * Replace the value of the nodes matching xpathString with value.
     * This method handles elements, indexed elements, and attributes.
     *
     * @param xmlField Field containing XPath query string
     * @param parent Parent element
     * @param value New value for the node
     *
     * @return NodeList containing the nodes that were replaced.
*/ public NodeList replaceValue(Field xmlField, Node parent, Object value, CoreAbstractSession session) throws XMLMarshalException { NodeList nodes = unmarshalXPathEngine.selectNodes(parent, xmlField, getNamespaceResolverForField(xmlField), null, false, false); int numberOfNodes = nodes.getLength(); if(numberOfNodes == 0 && xmlField.getLastXPathFragment().nameIsText()) { nodes = unmarshalXPathEngine.selectNodes(parent, xmlField, getNamespaceResolverForField(xmlField), null, true); XMLNodeList textNodes = new XMLNodeList(); for(int i = 0; i < nodes.getLength(); i++) { Element nextNode = (Element)nodes.item(i); Text text = nextNode.getOwnerDocument().createTextNode(""); nextNode.appendChild(text); textNodes.add(text); } numberOfNodes = textNodes.getLength(); nodes = textNodes; } XMLNodeList createdElements = new XMLNodeList(); for (int i = 0; i < numberOfNodes; i++) { Node node = nodes.item(i); // Handle Attributes and Text if (node.getNodeType() != Node.ELEMENT_NODE) { if (((node.getNodeType() == Node.TEXT_NODE) || (node.getNodeType() == Node.CDATA_SECTION_NODE)) && (value == null)) { //if parent has only text children, remove parent. If parent has non-text children, //remove all text children. 
Node parentNode = node.getParentNode(); if(parentNode != null) { Node grandParentNode = parentNode.getParentNode(); NodeList childNodes = parentNode.getChildNodes(); if(childNodes.getLength() == numberOfNodes) { grandParentNode.removeChild(parentNode); } else { for(int x = 0; x < childNodes.getLength(); x++) { Node next = childNodes.item(x); if(next.getNodeType() == Node.TEXT_NODE || next.getNodeType() == Node.CDATA_SECTION_NODE) { parentNode.removeChild(next); } } } } } else { if(value == null) { ((Attr)node).getOwnerElement().removeAttributeNode((Attr)node); } else { if(value == XMLRecord.NIL && ((node.getNodeType() == Node.TEXT_NODE) || (node.getNodeType() == Node.CDATA_SECTION_NODE))) { Element parentElement = (Element)node.getParentNode(); addXsiNilToElement(parentElement, xmlField); parentElement.removeChild(node); } else { String stringValue = (String)((XMLConversionManager)session.getDatasourcePlatform().getConversionManager()).convertObject(value, CoreClassConstants.STRING); Element parentElement = (Element)node.getParentNode(); if(parentElement == null && parent.getNodeType() == Node.ELEMENT_NODE) { parentElement = (Element)parent; } if(stringValue.length() == 0 && ((node.getNodeType() == Node.TEXT_NODE) || (node.getNodeType() == Node.CDATA_SECTION_NODE)) && parentElement != null) { parentElement.removeChild(node); } else { node.setNodeValue(stringValue); if(((node.getNodeType() == Node.TEXT_NODE) || (node.getNodeType() == Node.CDATA_SECTION_NODE)) && parentElement != null) { Attr nil = parentElement.getAttributeNodeNS(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, Constants.SCHEMA_NIL_ATTRIBUTE); if(nil != null) { parentElement.removeAttributeNode(nil); } } } } } } } else { Element element = (Element)node; Node parentNode = element.getParentNode(); if (value == null) { parentNode.removeChild(element); } else { String elementName = element.getTagName(); Element newElement = null; Object valueToWrite = getValueToWrite(value, xmlField, session); 
XPathFragment childFrag = new XPathFragment(elementName); childFrag.setNamespaceURI(element.getNamespaceURI()); newElement = (Element)createElement(parentNode, childFrag, xmlField, valueToWrite, session); createdElements.add(newElement); if (newElement != element) { parentNode.replaceChild(newElement, element); } } } } if (xmlField.isTypedTextField()) { addTypeAttributes(createdElements, xmlField, value, resolveNamespacePrefixForURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, getNamespaceResolverForField(xmlField))); } return nodes; } public List replaceCollection(List xmlFields, List values, Node contextNode, DocumentPreservationPolicy docPresPolicy, Field lastUpdatedField, CoreAbstractSession session) { List oldNodes = unmarshalXPathEngine.selectNodes(contextNode, xmlFields, getNamespaceResolverForField(xmlFields.get(0))); if(oldNodes == null || oldNodes.size() == 0) { return oldNodes; } Iterator oldValues = oldNodes.iterator(); //Remove all the old values, and then call create to add them back in. 
while(oldValues.hasNext()) { XMLEntry entry = oldValues.next(); Node nextNode = (Node)entry.getValue(); Node parent = nextNode.getParentNode(); parent.removeChild(nextNode); while(parent != contextNode) { if(parent.getChildNodes().getLength() == 0) { nextNode = parent; parent = nextNode.getParentNode(); parent.removeChild(nextNode); } else { break; } } } create(xmlFields, contextNode, values, lastUpdatedField, xmlBinderPolicy, session); return oldNodes; } public NodeList replaceCollection(Field xmlField, Node parent, Collection values, CoreAbstractSession session) throws XMLMarshalException { NodeList nodes = null; if (xmlField != null) { nodes = unmarshalXPathEngine.selectNodes(parent, xmlField, getNamespaceResolverForField(xmlField)); } else { nodes = parent.getChildNodes(); } if (nodes.getLength() == 0) { return nodes; } Iterator collectionValues = values.iterator(); int i = 0; int nodesLength = nodes.getLength(); Vector newNodes = new Vector(); // Iterate over both collections until one runs out, creating a collection of correct nodes // while removing the old ones. 
boolean performedReplace = true;
        Object value = null;
        // Walk the existing nodes and the new values in lock-step, replacing
        // each old node with a node built from the corresponding new value.
        while ((i < nodesLength) && collectionValues.hasNext()) {
            //Keep track of which nodes have been replaced
            Node oldChild = nodes.item(i);
            Element newChild = null;
            // Only advance to the next value once the previous one was consumed.
            if (performedReplace) {
                value = collectionValues.next();
            }
            Node parentNode = oldChild.getParentNode();
            // Handle Attributes and Text
            if (oldChild.getNodeType() != Node.ELEMENT_NODE) {
                if (((oldChild.getNodeType() == Node.TEXT_NODE) || (oldChild.getNodeType() == Node.CDATA_SECTION_NODE)) && (value == null)) {
                    // A null value removes the text node's parent element entirely.
                    Node grandParentNode = parentNode.getParentNode();
                    grandParentNode.removeChild(parentNode);
                } else {
                    oldChild.setNodeValue((String) ((XMLConversionManager)session.getDatasourcePlatform().getConversionManager()).convertObject(value, CoreClassConstants.STRING));
                }
            } else {
                Element element = (Element)oldChild;
                String elementName = element.getTagName();
                Object valueToWrite = getValueToWrite(value, xmlField, session);
                XPathFragment childFragment = new XPathFragment(elementName);
                childFragment.setNamespaceURI(element.getNamespaceURI());
                newChild = (Element)createElement(parentNode, childFragment, xmlField, valueToWrite, session);
                if (!newNodes.contains(oldChild)) {
                    if (newChild != oldChild) {
                        parentNode.replaceChild(newChild, oldChild);
                    }
                    newNodes.addElement(newChild);
                    performedReplace = true;
                } else {
                    // oldChild is itself a replacement added earlier; keep the
                    // current value and retry it against the next node.
                    performedReplace = false;
                }
            }
            i++;
        }

        // The values collection ran out first. Remove the left-over nodes.
while (i < nodesLength) {
            Node toRemove = nodes.item(i);
            Node removedParent = toRemove.getParentNode();
            if ((removedParent != null) && !newNodes.contains(toRemove)) {
                //only remove it, if it's not already been added back in
                removedParent.removeChild(toRemove);
            }
            i++;
        }

        //Now add the nodes back in, in the correct order
        //for (Iterator newNodesIter = newNodes.iterator(); newNodesIter.hasNext();) {
        //    Element newNode = (Element)newNodesIter.next();
        //    this.create(xmlField, parent, newNode);
        //}
        if ((value != null) && !performedReplace) {
            //If we didn't add in the last element we tried then add it now
            if ((xmlField.getXPathFragment().getNextFragment() == null) || xmlField.getXPathFragment().getHasText()) {
                //if there's no grouping element, ensure that new collection elements
                //are added inline with the others
                create(xmlField, parent, value, xmlField, xmlBinderPolicy, session);
            } else {
                create(xmlField, parent, value, session);
            }
        }

        // Now add in any others that are left in the iterator
        while (collectionValues.hasNext()) {
            value = collectionValues.next();
            //If there's a grouping element, then just do the normal create
            if ((xmlField.getXPathFragment().getNextFragment() == null) || xmlField.getXPathFragment().getHasText()) {
                //if there's no grouping element, ensure that new collection elements
                //are added inline with the others
                create(xmlField, parent, value, xmlField, xmlBinderPolicy, session);
            } else {
                create(xmlField, parent, value, session);
            }
        }
        return nodes;
    }

    // ==========================================================================================

    /**
     * Determine if xpathString contains an index (e.g. 'element[index]').
     *
     * @param xpathString XPath expression to test
     *
     * @return true if xpathString contains an index, otherwise false.
*/
    private boolean containsIndex(String xpathString) {
        // A '[' and ']' anywhere in the expression is treated as an index predicate.
        return (xpathString.lastIndexOf('[') != -1) && (xpathString.lastIndexOf(']') != -1);
    }

    /**
     * Resolve the namespace URI for an XPath fragment. Returns the fragment's
     * own URI when set; returns null for unprefixed attributes (no default
     * namespace applies to attributes) or when resolution fails.
     */
    private String resolveNamespacePrefix(XPathFragment fragment, NamespaceResolver namespaceResolver) {
        try {
            if (fragment.getNamespaceURI() != null) {
                return fragment.getNamespaceURI();
            }
            if(fragment.getPrefix() == null && fragment.isAttribute()) {
                return null;
            }
            return namespaceResolver.resolveNamespacePrefix(fragment.getPrefix());
        } catch (Exception e) {
            // Best-effort resolution: any failure is treated as "no namespace".
            return null;
        }
    }

    /**
     * Look up the prefix declared for the given namespace URI, or null when
     * no resolver is available or the URI is not declared.
     */
    private String resolveNamespacePrefixForURI(String namespaceURI, NamespaceResolver namespaceResolver) {
        if (null == namespaceResolver) {
            return null;
        }
        return namespaceResolver.resolveNamespaceURI(namespaceURI);
    }

    /**
     * Append a text node (or a CDATA section when the field is marked CDATA)
     * containing textValue to the given element.
     *
     * @return the Text or CDATASection node that was appended
     */
    private Node addText(Field xmlField, Node element, String textValue) {
        if (xmlField.isCDATA()) {
            CDATASection cdata = element.getOwnerDocument().createCDATASection(textValue);
            element.appendChild(cdata);
            return cdata;
        } else {
            Text text = element.getOwnerDocument().createTextNode(textValue);
            element.appendChild(text);
            return text;
        }
    }

    /**
     * Convert a QName to its lexical form: "prefix:localPart" when a prefix is
     * declared for the QName's namespace URI, otherwise just the local part.
     * A null QName yields null.
     *
     * @throws XMLMarshalException if the QName has a namespace URI but no
     *         resolver was supplied
     */
    private String getStringForQName(QName qName, NamespaceResolver namespaceResolver){
        if(null == qName) {
            return null;
        }
        if(null == qName.getNamespaceURI()) {
            return qName.getLocalPart();
        } else {
            String namespaceURI = qName.getNamespaceURI();
            if(namespaceResolver == null){
                throw XMLMarshalException.namespaceResolverNotSpecified(namespaceURI);
            }
            String prefix = namespaceResolver.resolveNamespaceURI(namespaceURI);
            if(null == prefix) {
                return qName.getLocalPart();
            } else {
                return prefix + Constants.COLON + qName.getLocalPart();
            }
        }
    }

    /**
     * Return the field's namespace resolver, lazily installing an empty one
     * on the field the first time it is seen without a resolver.
     */
    private NamespaceResolver getNamespaceResolverForField(Field field){
        NamespaceResolver nr = (org.eclipse.persistence.oxm.NamespaceResolver) field.getNamespaceResolver();
        if(nr == null){
            field.setNamespaceResolver(new NamespaceResolver());
        }
        return (org.eclipse.persistence.oxm.NamespaceResolver) field.getNamespaceResolver();
    }

    private void addXsiNilToElement(Element element, Field xmlField) {
NamespaceResolver nsr = new NamespaceResolver(); nsr.setDOM(element); String schemaInstancePrefix = resolveNamespacePrefixForURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, nsr); Node parentNode = element.getParentNode(); while(schemaInstancePrefix == null && parentNode != null && parentNode.getNodeType() == Node.ELEMENT_NODE){ nsr.setDOM(element); schemaInstancePrefix = resolveNamespacePrefixForURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, nsr); parentNode = parentNode.getParentNode(); } if(schemaInstancePrefix == null && element.getOwnerDocument() != null){ nsr.setDOM(element.getOwnerDocument().getDocumentElement()); schemaInstancePrefix = resolveNamespacePrefixForURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, nsr); } if(schemaInstancePrefix == null) { //Not decalred in the doc nsr = getNamespaceResolverForField(xmlField); schemaInstancePrefix = nsr.resolveNamespaceURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); if(schemaInstancePrefix == null) { schemaInstancePrefix = nsr.generatePrefix(Constants.SCHEMA_INSTANCE_PREFIX); } element.setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON + schemaInstancePrefix, javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); } element.setAttributeNS(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, Constants.SCHEMA_INSTANCE_PREFIX + Constants.COLON + Constants.SCHEMA_NIL_ATTRIBUTE, Constants.BOOLEAN_STRING_TRUE); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/NodeValue.java0000664000000000000000000002111012216173126023547 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.xml.sax.Attributes; /** * INTERNAL: *

 * <p><b>Purpose</b>: A NodeValue is responsible for performing the unmarshal
 * and marshal operation at a mapping or policy level. The operations are based
 * on a SAX ContentHandler.</p>
 * <p><b>Responsibilities</b>:
 * <ul>
 * <li>Maintain a reference to the owning XPathNode.</li>
 * <li>Given a XPathFragment recognize the node to which the mapping should be
 * applied.</li>
 * <li>Perform the unmarshal and marshal operation for the given mapping or
 * policy.</li>
 * </ul>
 * </p>
*/
public abstract class NodeValue {

    // The XPathNode that owns this NodeValue.
    protected XPathNode xPathNode;

    public XPathNode getXPathNode() {
        return xPathNode;
    }

    public void setXPathNode(XPathNode xPathNode) {
        this.xPathNode = xPathNode;
    }

    /**
     * INTERNAL:
     * Return whether we ignore this node value when marshalling its parent
     * @return true if this node value is skipped during marshal of the parent
     */
    public boolean isMarshalOnlyNodeValue() {
        return false;
    }

    /**
     * INTERNAL:
     * A fragment is "owned" when it is the last fragment in its XPath.
     * @param xPathFragment
     * @return true when the fragment has no next fragment
     */
    public boolean isOwningNode(XPathFragment xPathFragment) {
        return null == xPathFragment.getNextFragment();
    }

    /**
     * INTERNAL:
     * Perform the marshal operation for this fragment.
     * @param xPathFragment
     * @param marshalRecord
     * @param object
     * @param session
     * @param namespaceResolver
     * @return
     */
    public abstract boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver);

    /**
     * INTERNAL:
     * This method is no longer required as now MarshalRecord maintains a
     * reference to the XMLMarshaller.
     * @param xPathFragment
     * @param marshalRecord
     * @param object
     * @param session
     * @param namespaceResolver
     * @param marshaller installed on the record before delegating
     * @return
     */
    public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, Marshaller marshaller) {
        marshalRecord.setMarshaller(marshaller);
        return this.marshal(xPathFragment, marshalRecord, object, session, namespaceResolver);
    }

    /**
     * INTERNAL:
     * Default implementation ignores the root fragment and delegates.
     * @param xPathFragment
     * @param marshalRecord
     * @param object
     * @param session
     * @param namespaceResolver
     * @return
     */
    public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext, XPathFragment rootFragment) {
        return this.marshal(xPathFragment, marshalRecord, object, session, namespaceResolver, marshalContext);
    }

    /**
     * INTERNAL:
     * This method provides an optimization in cases where the value has already
     * been calculated.
     * @param xPathFragment
     * @param marshalRecord
     * @param object
     * @param objectValue
     * @param session
     * @param namespaceResolver
     * @param marshalContext
     * @return
     */
    public abstract boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object objectValue, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext);

    /**
     * INTERNAL:
     * This method provides an optimization in cases where the value has already
     * been calculated.  Default implementation ignores the root fragment.
     * @param xPathFragment
     * @param marshalRecord
     * @param object
     * @param objectValue
     * @param session
     * @param namespaceResolver
     * @param marshalContext
     * @return
     */
    public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object objectValue, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext, XPathFragment rootFragment) {
        return this.marshalSingleValue(xPathFragment, marshalRecord, object, objectValue, session, namespaceResolver, marshalContext);
    }

    /**
     * INTERNAL:
     * Override this method if the NodeValue is applicable to sequenced objects.
     * @param xPathFragment
     * @param marshalRecord
     * @param object
     * @param session
     * @param namespaceResolver
     * @return
     */
    public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) {
        return this.marshal(xPathFragment, marshalRecord, object, session, namespaceResolver);
    }

    /**
     * INTERNAL:
     * @param xPathFragment
     * @param unmarshalRecord
     * @param atts
     * @return
     */
    public boolean startElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Attributes atts) {
        return true;
    }

    /**
     * INTERNAL:
     * @param unmarshalRecord
     * @param URI
     * @param localName
     * @param value
     */
    public void attribute(UnmarshalRecord unmarshalRecord, String URI, String localName, String value) {
        // No operation for parent
    }

    /**
     * INTERNAL:
     * @param xPathFragment
     * @param unmarshalRecord
     */
    public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) {
    }

    /**
     * INTERNAL:
     * @param unmarshalRecord
     * @param atts
     * @return a nested record for a 'self' mapping, or null when not applicable
     */
    public UnmarshalRecord buildSelfRecord(UnmarshalRecord unmarshalRecord, Attributes atts) {
        return null;
    }

    public void endSelfNodeValue(UnmarshalRecord unmarshalRecord, UnmarshalRecord selfRecord, Attributes atts) {
    }

    /**
     * INTERNAL:
     * @return Returns true if the NodeValue implements ContainerValue.
     * @see org.eclipse.persistence.internal.oxm.ContainerValue
     */
    public boolean isContainerValue() {
        return false;
    }

    /**
     * INTERNAL:
     * @return Returns true if the NodeValue implements NullCapableValue.
     * @see org.eclipse.persistence.internal.oxm.NullCapableValue
     */
    public boolean isNullCapableValue() {
        return false;
    }

    public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Object collection) {
    }

    public boolean isUnmarshalNodeValue() {
        return true;
    }

    public boolean isMarshalNodeValue() {
        return true;
    }

    /**
     * INTERNAL:
     * Marshal any 'self' mapped attributes.
* * @param xPathFragment * @param marshalRecord * @param object * @param session * @param namespaceResolver * @param marshaller * @return */ public boolean marshalSelfAttributes(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, Marshaller marshaller) { return false; } public boolean isMappingNodeValue() { return false; } public boolean isWhitespaceAware() { return false; } public boolean isAnyMappingNodeValue() { return false; } /** * INTERNAL: * Return true if this is the node value representing mixed content. * @return */ public boolean isMixedContentNodeValue() { return false; } public boolean isWrapperNodeValue() { return false; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLMarshaller.java0000664000000000000000000015643412216173126024362 0ustar /******************************************************************************* * Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.lang.reflect.Array; import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; import java.util.Collection; import javax.xml.namespace.QName; import javax.xml.transform.Result; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.sax.SAXResult; import javax.xml.transform.stream.StreamResult; import javax.xml.validation.Schema; import org.eclipse.persistence.core.queries.CoreAttributeGroup; import org.eclipse.persistence.exceptions.EclipseLinkException; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.helper.ClassConstants; import org.eclipse.persistence.internal.oxm.mappings.Descriptor; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.record.AbstractMarshalRecord; import org.eclipse.persistence.internal.oxm.record.namespaces.PrefixMapperNamespaceResolver; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.oxm.JSONWithPadding; import org.eclipse.persistence.oxm.attachment.XMLAttachmentMarshaller; import org.eclipse.persistence.oxm.record.ContentHandlerRecord; import org.eclipse.persistence.oxm.record.FormattedOutputStreamRecord; import org.eclipse.persistence.oxm.record.FormattedWriterRecord; import org.eclipse.persistence.oxm.record.JSONFormattedWriterRecord; import org.eclipse.persistence.oxm.record.JSONWriterRecord; import org.eclipse.persistence.oxm.record.MarshalRecord; import 
org.eclipse.persistence.oxm.record.NodeRecord; import org.eclipse.persistence.oxm.record.OutputStreamRecord; import org.eclipse.persistence.oxm.record.ValidatingMarshalRecord; import org.eclipse.persistence.oxm.record.WriterRecord; import org.eclipse.persistence.oxm.schema.XMLSchemaReference; import org.eclipse.persistence.platform.xml.XMLPlatform; import org.eclipse.persistence.platform.xml.XMLPlatformFactory; import org.eclipse.persistence.platform.xml.XMLTransformer; import org.w3c.dom.Attr; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.xml.sax.ContentHandler; import org.xml.sax.ext.LexicalHandler; public abstract class XMLMarshaller< ABSTRACT_SESSION extends CoreAbstractSession, CONTEXT extends Context, DESCRIPTOR extends Descriptor, MEDIA_TYPE extends MediaType, NAMESPACE_PREFIX_MAPPER extends NamespacePrefixMapper, OBJECT_BUILDER extends ObjectBuilder> extends Marshaller { protected final static String DEFAULT_XML_VERSION = "1.0"; private static final String STAX_RESULT_CLASS_NAME = "javax.xml.transform.stax.StAXResult"; private static final String GET_XML_STREAM_WRITER_METHOD_NAME = "getXMLStreamWriter"; private static final String GET_XML_EVENT_WRITER_METHOD_NAME = "getXMLEventWriter"; private static final String XML_STREAM_WRITER_RECORD_CLASS_NAME = "org.eclipse.persistence.oxm.record.XMLStreamWriterRecord"; private static final String XML_EVENT_WRITER_RECORD_CLASS_NAME = "org.eclipse.persistence.oxm.record.XMLEventWriterRecord"; private static final String XML_STREAM_WRITER_CLASS_NAME = "javax.xml.stream.XMLStreamWriter"; private static final String XML_EVENT_WRITER_CLASS_NAME = "javax.xml.stream.XMLEventWriter"; private static final String DOM_TO_STREAM_WRITER_CLASS_NAME = "org.eclipse.persistence.internal.oxm.record.DomToXMLStreamWriter"; private static final String DOM_TO_EVENT_WRITER_CLASS_NAME = "org.eclipse.persistence.internal.oxm.record.DomToXMLEventWriter"; private static final String 
WRITE_TO_STREAM_METHOD_NAME = "writeToStream"; private static final String WRITE_TO_EVENT_WRITER_METHOD_NAME = "writeToEventWriter"; protected static Class staxResultClass; protected static Method staxResultGetStreamWriterMethod; protected static Method staxResultGetEventWriterMethod; private static Constructor xmlStreamWriterRecordConstructor; private static Constructor xmlEventWriterRecordConstructor; protected static Method writeToStreamMethod; protected static Method writeToEventWriterMethod; protected static Class domToStreamWriterClass; protected static Class domToEventWriterClass; static { try { staxResultClass = PrivilegedAccessHelper.getClassForName(STAX_RESULT_CLASS_NAME); if(staxResultClass != null) { staxResultGetStreamWriterMethod = PrivilegedAccessHelper.getDeclaredMethod(staxResultClass, GET_XML_STREAM_WRITER_METHOD_NAME, new Class[]{}); staxResultGetEventWriterMethod = PrivilegedAccessHelper.getDeclaredMethod(staxResultClass, GET_XML_EVENT_WRITER_METHOD_NAME, new Class[]{}); } Class streamWriterRecordClass = PrivilegedAccessHelper.getClassForName(XML_STREAM_WRITER_RECORD_CLASS_NAME); Class streamWriterClass = PrivilegedAccessHelper.getClassForName(XML_STREAM_WRITER_CLASS_NAME); xmlStreamWriterRecordConstructor = PrivilegedAccessHelper.getConstructorFor(streamWriterRecordClass, new Class[]{streamWriterClass}, true); Class eventWriterRecordClass = PrivilegedAccessHelper.getClassForName(XML_EVENT_WRITER_RECORD_CLASS_NAME); Class eventWriterClass = PrivilegedAccessHelper.getClassForName(XML_EVENT_WRITER_CLASS_NAME); xmlEventWriterRecordConstructor = PrivilegedAccessHelper.getConstructorFor(eventWriterRecordClass, new Class[]{eventWriterClass}, true); domToStreamWriterClass = PrivilegedAccessHelper.getClassForName(DOM_TO_STREAM_WRITER_CLASS_NAME); writeToStreamMethod = PrivilegedAccessHelper.getMethod(domToStreamWriterClass, WRITE_TO_STREAM_METHOD_NAME, new Class[] {ClassConstants.NODE, ClassConstants.STRING, ClassConstants.STRING, streamWriterClass}, 
true); domToEventWriterClass = PrivilegedAccessHelper.getClassForName(DOM_TO_EVENT_WRITER_CLASS_NAME); writeToEventWriterMethod = PrivilegedAccessHelper.getMethod(domToEventWriterClass, WRITE_TO_EVENT_WRITER_METHOD_NAME, new Class[] {ClassConstants.NODE, ClassConstants.STRING, ClassConstants.STRING, eventWriterClass}, true); } catch (Exception ex) { // Do nothing } } protected XMLAttachmentMarshaller attachmentMarshaller; private String attributePrefix; private boolean fragment; private boolean includeRoot = true; private boolean marshalEmptyCollections = true; protected MEDIA_TYPE mediaType; private char namespaceSeparator; private String noNamespaceSchemaLocation; private boolean reduceAnyArrays; private Schema schema; private String schemaLocation; protected XMLTransformer transformer; private String valueWrapper; private boolean wrapperAsCollectionName = false; private String xmlHeader; private Object marshalAttributeGroup; public XMLMarshaller(CONTEXT context) { super(context); this.includeRoot = true; this.marshalEmptyCollections = true; this.namespaceSeparator = Constants.DOT; this.reduceAnyArrays = false; this.valueWrapper = Constants.VALUE_WRAPPER; } /** * Copy constructor */ protected XMLMarshaller(XMLMarshaller xmlMarshaller) { super(xmlMarshaller); attachmentMarshaller = xmlMarshaller.getAttachmentMarshaller(); attributePrefix = xmlMarshaller.getAttributePrefix(); fragment = xmlMarshaller.isFragment(); includeRoot = xmlMarshaller.isIncludeRoot(); marshalEmptyCollections = xmlMarshaller.isMarshalEmptyCollections(); mediaType = (MEDIA_TYPE) xmlMarshaller.getMediaType(); namespaceSeparator = xmlMarshaller.getNamespaceSeparator(); noNamespaceSchemaLocation = xmlMarshaller.getNoNamespaceSchemaLocation(); reduceAnyArrays = xmlMarshaller.isReduceAnyArrays(); if(null != xmlMarshaller.getSchema()) { setSchema(xmlMarshaller.getSchema()); } schemaLocation = xmlMarshaller.getSchemaLocation(); valueWrapper = xmlMarshaller.getValueWrapper(); wrapperAsCollectionName = 
xmlMarshaller.isWrapperAsCollectionName(); xmlHeader = xmlMarshaller.getXmlHeader(); } protected void addDescriptorNamespacesToXMLRecord(DESCRIPTOR xmlDescriptor, AbstractMarshalRecord record) { if (null == xmlDescriptor) { return; } copyNamespaces(xmlDescriptor.getNamespaceResolver(), record.getNamespaceResolver()); } private XPathFragment buildRootFragment(Object object, DESCRIPTOR descriptor, boolean isXMLRoot, MarshalRecord marshalRecord) { XPathFragment rootFragment = null; if (isXMLRoot) { String xmlRootUri = ((Root) object).getNamespaceURI(); String xmlRootLocalName = ((Root) object).getLocalName(); rootFragment = new XPathFragment(); rootFragment.setLocalName(xmlRootLocalName); rootFragment.setNamespaceURI(xmlRootUri); rootFragment.setNamespaceAware(marshalRecord.isNamespaceAware()); rootFragment.setNamespaceSeparator(marshalRecord.getNamespaceSeparator()); if (xmlRootUri != null) { if (descriptor != null) { String xmlRootPrefix = marshalRecord.getNamespaceResolver().resolveNamespaceURI(xmlRootUri); if (xmlRootPrefix == null && !(xmlRootUri.equals(marshalRecord.getNamespaceResolver().getDefaultNamespaceURI()))) { xmlRootPrefix = marshalRecord.getNamespaceResolver().generatePrefix(); marshalRecord.getNamespaceResolver().put(xmlRootPrefix, xmlRootUri); } if(xmlRootPrefix == null) { rootFragment.setXPath(xmlRootLocalName); } else { rootFragment.setPrefix(xmlRootPrefix); } } else { if(marshalRecord.isNamespaceAware()){ String xmlRootPrefix = "ns0"; marshalRecord.getNamespaceResolver().put(xmlRootPrefix, xmlRootUri); rootFragment.setXPath(xmlRootPrefix + marshalRecord.getNamespaceSeparator() + xmlRootLocalName); }else{ rootFragment.setXPath(xmlRootLocalName); } } } } else { Field defaultRootField = (Field) descriptor.getDefaultRootElementField(); if(defaultRootField != null){ rootFragment = defaultRootField.getXPathFragment(); } } return rootFragment; } protected void copyNamespaces(NamespaceResolver source, NamespaceResolver target) { if (null != source && null 
!= target) { if(source.hasPrefixesToNamespaces()) { target.getPrefixesToNamespaces().putAll(source.getPrefixesToNamespaces()); } target.setDefaultNamespaceURI(source.getDefaultNamespaceURI()); } } @Override public XMLAttachmentMarshaller getAttachmentMarshaller() { return this.attachmentMarshaller; } /** * Value that will be used to prefix attributes. * Ignored marshalling XML. * @since 2.4 */ public String getAttributePrefix() { return attributePrefix; } /** * INTERNAL: * Return the descriptor for the root object. */ protected DESCRIPTOR getDescriptor(Class clazz, ABSTRACT_SESSION session) throws XMLMarshalException { DESCRIPTOR descriptor = (DESCRIPTOR) session.getDescriptor(clazz); if (descriptor == null) { throw XMLMarshalException.descriptorNotFoundInProject(clazz.getName()); } return descriptor; } /** * INTERNAL: * Return the descriptor for the root object. */ public DESCRIPTOR getDescriptor(Object object) throws XMLMarshalException { DESCRIPTOR descriptor = (DESCRIPTOR) context.getSession(object).getDescriptor(object); if (descriptor == null) { throw XMLMarshalException.descriptorNotFoundInProject(object.getClass().getName()); } return descriptor; } /** * INTERNAL: * Return the descriptor for the root object. 
*/
protected DESCRIPTOR getDescriptor(Object object, ABSTRACT_SESSION session) throws XMLMarshalException {
    DESCRIPTOR descriptor = (DESCRIPTOR) session.getDescriptor(object);
    if (descriptor == null) {
        throw XMLMarshalException.descriptorNotFoundInProject(object.getClass().getName());
    }
    return descriptor;
}

/**
 * INTERNAL:
 * Return the descriptor for the root object, unwrapping Root when required.
 */
protected DESCRIPTOR getDescriptor(Object object, boolean isXMLRoot) {
    if (isXMLRoot) {
        return getDescriptor((Root) object);
    } else {
        return getDescriptor(object);
    }
}

/**
 * INTERNAL:
 * Return the descriptor for the object wrapped by the given Root.  Returns
 * null when no session applies or when the wrapped object is a "simple"
 * root (see isSimpleXMLRoot).
 */
protected DESCRIPTOR getDescriptor(Root object) throws XMLMarshalException {
    DESCRIPTOR descriptor = null;
    try {
        ABSTRACT_SESSION session = context.getSession(object.getObject());
        if(null == session) {
            return null;
        }
        descriptor = (DESCRIPTOR) session.getDescriptor(object.getObject());
    } catch (XMLMarshalException marshalException) {
        // Simple roots legitimately have no descriptor; swallow the lookup
        // failure for them only.
        if ((descriptor == null) && isSimpleXMLRoot(object)) {
            return null;
        }
        throw marshalException;
    }
    if (descriptor == null) {
        // NOTE(review): this reports the Root wrapper's class name rather
        // than the wrapped object's class - possibly intended to be
        // object.getObject().getClass(); confirm before changing.
        throw XMLMarshalException.descriptorNotFoundInProject(object.getClass().getName());
    }
    return descriptor;
}

/**
 * INTERNAL:
 * Same as getDescriptor(Root) but resolves against an explicitly supplied
 * session instead of deriving one from the context.
 */
protected DESCRIPTOR getDescriptor(Root object, ABSTRACT_SESSION session) throws XMLMarshalException {
    DESCRIPTOR descriptor = null;
    try {
        if(null == session) {
            return null;
        }
        descriptor = (DESCRIPTOR) session.getDescriptor(object.getObject());
    } catch (XMLMarshalException marshalException) {
        if ((descriptor == null) && isSimpleXMLRoot(object)) {
            return null;
        }
        throw marshalException;
    }
    if (descriptor == null) {
        // NOTE(review): same Root-vs-wrapped-object class name concern as in
        // getDescriptor(Root) above.
        throw XMLMarshalException.descriptorNotFoundInProject(object.getClass().getName());
    }
    return descriptor;
}

/**
 * Get the MediaType for this xmlMarshaller.
* See org.eclipse.persistence.oxm.MediaType for the media types supported by EclipseLink MOXy
 * If not set the default is MediaType.APPLICATION_XML
 * @return MediaType
 */
@Override
public MEDIA_TYPE getMediaType(){
    return mediaType;
}

/**
 * INTERNAL:
 * When marshalling a Root that wraps a DOM Node, return that Node so it can
 * be written directly; otherwise return null.  parentNode, session and
 * descriptor are unused here but kept for subclasses overriding this hook.
 */
protected Node getNode(Object object, Node parentNode, ABSTRACT_SESSION session, DESCRIPTOR descriptor, boolean isRoot) {
    if(isRoot) {
        object = ((Root) object).getObject();
        if(object instanceof Node) {
            return (Node) object;
        }
    }
    return null;
}

/**
 * Get the no namespace schema location set on this XMLMarshaller
 * @return the no namespace schema location specified on this XMLMarshaller
 */
public String getNoNamespaceSchemaLocation() {
    return noNamespaceSchemaLocation;
}

public Schema getSchema() {
    return schema;
}

/**
 * INTERNAL
 * Lazily create the transformer, propagating this marshaller's encoding,
 * formatting and fragment settings to it.
 * @return the transformer instance for this marshaller
 */
@Override
public XMLTransformer getTransformer() {
    if(null == transformer) {
        XMLPlatform xmlPlatform = XMLPlatformFactory.getInstance().getXMLPlatform();
        transformer = xmlPlatform.newXMLTransformer();
        transformer.setEncoding(getEncoding());
        transformer.setFormattedOutput(isFormattedOutput());
        transformer.setFragment(fragment);
    }
    return transformer;
}

/**
 * Name of the property to marshal/unmarshal as a wrapper on the text() mappings
 * Ignored marshalling XML.
 * @since 2.4
 */
public String getValueWrapper() {
    return valueWrapper;
}

/**
 * Get this Marshaller's XML Header.
 * @since 2.4
 */
public String getXmlHeader() {
    return xmlHeader;
}

/**
 * Get the schema location set on this XMLMarshaller
 * @return the schema location specified on this XMLMarshaller
 */
public String getSchemaLocation() {
    return schemaLocation;
}

/**
 * PUBLIC:
 * Returns if this should marshal to a fragment.  If true an XML header string
 * is not written out.  Only effective for XML output (mediaType check below).
 * @return if this should marshal to a fragment or not
 */
public boolean isFragment() {
    return mediaType.isApplicationXML() && fragment;
}

/**
 * Determine if the @XMLRootElement should be marshalled when present.
* Ignored marshalling XML. * @return * @since 2.4 */ @Override public boolean isIncludeRoot() { if(mediaType.isApplicationJSON()){ return includeRoot; } return true; } /** * Property to determine if size 1 any collections should be treated as collections * Ignored marshalling XML. */ @Override public boolean isReduceAnyArrays() { return reduceAnyArrays; } /** * Get the namespace separator used during marshal operations. * If mediaType is application/json '.' is the default * Ignored marshalling XML. * @since 2.4 */ public char getNamespaceSeparator() { return namespaceSeparator; } /** * Name of the property to determine if empty collections should be marshalled as [] * Ignored marshalling XML. * @since 2.4 */ public boolean isMarshalEmptyCollections() { return marshalEmptyCollections; } public boolean isWrapperAsCollectionName() { return wrapperAsCollectionName; } protected boolean isSimpleXMLRoot(Root xmlRoot) { Class xmlRootObjectClass = xmlRoot.getObject().getClass(); if (XMLConversionManager.getDefaultJavaTypes().get(xmlRootObjectClass) != null || ClassConstants.List_Class.isAssignableFrom(xmlRootObjectClass) || ClassConstants.XML_GREGORIAN_CALENDAR.isAssignableFrom(xmlRootObjectClass) || ClassConstants.DURATION.isAssignableFrom(xmlRootObjectClass)) { return true; } else if(xmlRoot.getObject() instanceof org.w3c.dom.Node) { return true; } return false; } /** * PUBLIC: * Convert the given object to XML and update the given contentHandler with that XML Document * @param object the object to marshal * @param contentHandler the contentHandler which the specified object should be marshalled to * @throws XMLMarshalException if an error occurred during marshalling */ public void marshal(Object object, ContentHandler contentHandler) throws XMLMarshalException { marshal(object, contentHandler, null); } /** * PUBLIC: * Convert the given object to XML and update the given contentHandler with that XML Document * @param object the object to marshal * @param contentHandler 
the contentHandler which the specified object should be marshalled to * @throws XMLMarshalException if an error occurred during marshalling */ public void marshal(Object object, ContentHandler contentHandler, LexicalHandler lexicalHandler) throws XMLMarshalException { if(object instanceof JSONWithPadding && !mediaType.isApplicationJSON()){ object = ((JSONWithPadding)object).getObject(); } if ((object == null) || (contentHandler == null)) { throw XMLMarshalException.nullArgumentException(); } ABSTRACT_SESSION session = null; DESCRIPTOR xmlDescriptor = null; boolean isXMLRoot = (object instanceof Root); if(isXMLRoot){ try{ session = context.getSession(((Root)object).getObject()); if(session != null){ xmlDescriptor = getDescriptor(((Root)object).getObject(), session); } }catch (XMLMarshalException marshalException) { if (!isSimpleXMLRoot((Root) object)) { throw marshalException; } } }else{ Class objectClass = object.getClass(); session = context.getSession(objectClass); xmlDescriptor = getDescriptor(objectClass, session); } ContentHandlerRecord contentHandlerRecord = new ContentHandlerRecord(); contentHandlerRecord.setMarshaller(this); contentHandlerRecord.setContentHandler(contentHandler); contentHandlerRecord.setLexicalHandler(lexicalHandler); marshal(object, contentHandlerRecord, session, xmlDescriptor,isXMLRoot); } /** * Convert the given object to XML and update the given marshal record with * that XML Document. 
* @param object the object to marshal * @param marshalRecord the marshalRecord to marshal the object to */ public void marshal(Object object, MarshalRecord marshalRecord) { if(object instanceof JSONWithPadding && !mediaType.isApplicationJSON()){ object = ((JSONWithPadding)object).getObject(); } if ((object == null) || (marshalRecord == null)) { throw XMLMarshalException.nullArgumentException(); } boolean isXMLRoot = (object instanceof Root); ABSTRACT_SESSION session = null; DESCRIPTOR xmlDescriptor = null; if(isXMLRoot){ try{ session = context.getSession(((Root)object).getObject()); if(session != null){ xmlDescriptor = getDescriptor(((Root)object).getObject(), session); } }catch (XMLMarshalException marshalException) { if (!isSimpleXMLRoot((Root) object)) { throw marshalException; } } }else{ Class objectClass = object.getClass(); session = context.getSession(objectClass); xmlDescriptor = getDescriptor(objectClass, session); } marshal(object, marshalRecord, session, xmlDescriptor, isXMLRoot); } /** * Convert the given object to XML and update the given marshal record with * that XML Document. 
* @param object the object to marshal * @param marshalRecord the marshalRecord to marshal the object to * @param descriptor the XMLDescriptor for the object being marshalled */ protected void marshal(Object object, MarshalRecord marshalRecord, ABSTRACT_SESSION session, DESCRIPTOR descriptor, boolean isXMLRoot) { if(null != schema) { marshalRecord = new ValidatingMarshalRecord(marshalRecord, this); } if (this.attachmentMarshaller != null) { marshalRecord.setXOPPackage(this.attachmentMarshaller.isXOPPackage()); } marshalRecord.setMarshaller(this); Root root = null; if(isXMLRoot) { root = (Root) object; } Node node = getNode(object, marshalRecord.getDOM(), session, descriptor, isXMLRoot); if(this.mapper == null) { if(null == node) { addDescriptorNamespacesToXMLRecord(descriptor, marshalRecord); } } else { if(descriptor == null || null != node){ marshalRecord.setNamespaceResolver(new PrefixMapperNamespaceResolver(mapper, null)); }else{ marshalRecord.setNamespaceResolver(new PrefixMapperNamespaceResolver(mapper, descriptor.getNamespaceResolver())); } marshalRecord.setCustomNamespaceMapper(true); } if(this.getMarshalAttributeGroup() != null) { if(marshalAttributeGroup.getClass() == ClassConstants.STRING) { CoreAttributeGroup group = descriptor.getAttributeGroup((String)marshalAttributeGroup); if(group != null) { marshalRecord.pushAttributeGroup(group); } else { throw XMLMarshalException.invalidAttributeGroupName((String)marshalAttributeGroup, descriptor.getJavaClassName()); } } else if(marshalAttributeGroup instanceof CoreAttributeGroup) { marshalRecord.pushAttributeGroup((CoreAttributeGroup)marshalAttributeGroup); } else { //Error case } } NamespaceResolver nr = marshalRecord.getNamespaceResolver(); if(node != null) { if(isXMLRoot) { if (isFragment()) { marshalRecord.node(node, null, root.getNamespaceURI(), root.getLocalName()); } else { String encoding = root.getEncoding(); if(null == encoding) { encoding = Constants.DEFAULT_XML_ENCODING; } String version = 
root.getXMLVersion(); if(null == version) { version = DEFAULT_XML_VERSION; } marshalRecord.startDocument(encoding, version); marshalRecord.node(node, marshalRecord.getNamespaceResolver(), root.getNamespaceURI(), root.getLocalName()); marshalRecord.endDocument(); } } else { marshalRecord.node(node, nr); } marshalRecord.flush(); return; } if(isXMLRoot){ if(descriptor != null){ marshalRecord.beforeContainmentMarshal(root.getObject()); } }else{ marshalRecord.beforeContainmentMarshal(object); } if (!isFragment()) { String encoding = getEncoding(); String version = DEFAULT_XML_VERSION; if (!isXMLRoot && descriptor!= null) { marshalRecord.setLeafElementType(descriptor.getDefaultRootElementType()); } else { if (root.getEncoding() != null) { encoding = root.getEncoding(); } if (root.getXMLVersion() != null) { version = root.getXMLVersion(); } } marshalRecord.startDocument(encoding, version); } if (getXmlHeader() != null) { marshalRecord.writeHeader(); } if(isXMLRoot) { if(root.getObject() instanceof Node) { marshalRecord.node((Node)root.getObject(), new NamespaceResolver(), root.getNamespaceURI(), root.getLocalName()); marshalRecord.endDocument(); return; } } XPathFragment rootFragment = buildRootFragment(object, descriptor, isXMLRoot, marshalRecord); String schemaLocation = getSchemaLocation(); String noNsSchemaLocation = getNoNamespaceSchemaLocation(); boolean isNil = false; if (isXMLRoot) { object = root.getObject(); if (root.getSchemaLocation() != null) { schemaLocation = root.getSchemaLocation(); } if (root.getNoNamespaceSchemaLocation() != null) { noNsSchemaLocation = root.getNoNamespaceSchemaLocation(); } marshalRecord.setLeafElementType(root.getSchemaType()); isNil = root.isNil(); } String xsiPrefix = null; if ((null != getSchemaLocation()) || (null != getNoNamespaceSchemaLocation()) || (isNil)) { xsiPrefix = nr.resolveNamespaceURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); if (null == xsiPrefix) { xsiPrefix = Constants.SCHEMA_INSTANCE_PREFIX; 
nr.put(Constants.SCHEMA_INSTANCE_PREFIX, javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); } } OBJECT_BUILDER treeObjectBuilder = null; if (descriptor != null) { treeObjectBuilder = (OBJECT_BUILDER) descriptor.getObjectBuilder(); } if(session == null){ session = (ABSTRACT_SESSION) context.getSession(); } marshalRecord.setSession(session); if (null != rootFragment && !(rootFragment.getLocalName().equals(Constants.EMPTY_STRING))) { marshalRecord.startPrefixMappings(nr); if (!isXMLRoot && descriptor != null && descriptor.getNamespaceResolver() == null && rootFragment.hasNamespace()) { // throw an exception if the name has a : in it but the namespaceresolver is null throw XMLMarshalException.namespaceResolverNotSpecified(rootFragment.getShortName()); } if(isIncludeRoot()){ marshalRecord.openStartElement(rootFragment, nr); } if (null != schemaLocation) { marshalRecord.attributeWithoutQName(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, Constants.SCHEMA_LOCATION, xsiPrefix, schemaLocation); } if (null != noNsSchemaLocation) { marshalRecord.attributeWithoutQName(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, Constants.NO_NS_SCHEMA_LOCATION, xsiPrefix, noNsSchemaLocation); } if (isNil) { marshalRecord.nilSimple(nr); } marshalRecord.namespaceDeclarations(nr); if (descriptor != null && !isNil) { marshalRecord.addXsiTypeAndClassIndicatorIfRequired(descriptor, null, descriptor.getDefaultRootElementField(), root, object, isXMLRoot, true); treeObjectBuilder.marshalAttributes(marshalRecord, object, session); } if(isIncludeRoot()) { marshalRecord.closeStartElement(); } }else{ //no rootfragment marshalRecord.marshalWithoutRootElement(treeObjectBuilder,object, descriptor, root, isXMLRoot); } if (treeObjectBuilder != null && !isNil) { treeObjectBuilder.buildRow(marshalRecord, object, session, this, rootFragment); } else if (isXMLRoot) { if(object != null && !isNil) { if(root.getDeclaredType() != null && root.getObject() != null && root.getDeclaredType() != 
root.getObject().getClass()) { QName type = (QName)XMLConversionManager.getDefaultJavaTypes().get(object.getClass()); if(type != null) { xsiPrefix = nr.resolveNamespaceURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); if (null == xsiPrefix) { xsiPrefix = Constants.SCHEMA_INSTANCE_PREFIX; marshalRecord.namespaceDeclaration(xsiPrefix, javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); } marshalRecord.namespaceDeclaration(Constants.SCHEMA_PREFIX, javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI); String typeValue = type.getLocalPart(); if(marshalRecord.isNamespaceAware()){ typeValue = Constants.SCHEMA_PREFIX + marshalRecord.getNamespaceSeparator() + typeValue; } marshalRecord.attribute(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, Constants.SCHEMA_TYPE_ATTRIBUTE, xsiPrefix + Constants.COLON + Constants.SCHEMA_TYPE_ATTRIBUTE, typeValue); } } marshalRecord.characters(root.getSchemaType(), object, null, false); } } if (null != rootFragment && !(rootFragment.getLocalName().equals(Constants.EMPTY_STRING)) && isIncludeRoot()) { marshalRecord.endElement(rootFragment, nr); marshalRecord.endPrefixMappings(nr); } if (!isFragment() ) { marshalRecord.endDocument(); } if(isXMLRoot){ if(descriptor != null){ marshalRecord.afterContainmentMarshal(null, root.getObject()); } }else{ marshalRecord.afterContainmentMarshal(null, object); } } /** * PUBLIC: * @param object the object to marshal * @param node the node which the specified object should be marshalled to * @throws XMLMarshalException if an error occurred during marshalling */ public void marshal(Object object, Node node) throws XMLMarshalException { if(object instanceof JSONWithPadding && !mediaType.isApplicationJSON()){ object = ((JSONWithPadding)object).getObject(); } if ((object == null) || (node == null)) { throw XMLMarshalException.nullArgumentException(); } ABSTRACT_SESSION session = null; DESCRIPTOR xmlDescriptor = null; boolean isXMLRoot = (object instanceof Root); if(isXMLRoot){ try{ session = 
context.getSession(((Root)object).getObject()); if(session != null){ xmlDescriptor = getDescriptor(((Root)object).getObject(), session); } }catch (XMLMarshalException marshalException) { if (!isSimpleXMLRoot((Root) object)) { throw marshalException; } } }else{ Class objectClass = object.getClass(); session = context.getSession(objectClass); xmlDescriptor = getDescriptor(objectClass, session); } NodeRecord contentHandlerRecord = new NodeRecord(node); contentHandlerRecord.setMarshaller(this); if (!isXMLRoot) { if ((null == xmlDescriptor.getDefaultRootElement()) && (node.getNodeType() == Node.ELEMENT_NODE) && (xmlDescriptor.getSchemaReference() != null) && (xmlDescriptor.getSchemaReference().getType() == XMLSchemaReference.COMPLEX_TYPE)) { Attr typeAttr = ((Element) node).getAttributeNodeNS(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, Constants.SCHEMA_TYPE_ATTRIBUTE); if (typeAttr == null) { NamespaceResolver namespaceResolver = xmlDescriptor.getNonNullNamespaceResolver(); String xsiPrefix = namespaceResolver.resolveNamespaceURI(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); if (null == xsiPrefix) { xsiPrefix = namespaceResolver.generatePrefix(Constants.SCHEMA_INSTANCE_PREFIX); } String value = xmlDescriptor.getSchemaReference().getSchemaContext(); ((Element) node).setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON + xsiPrefix, javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI); ((Element) node).setAttributeNS(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, xsiPrefix + Constants.COLON + Constants.SCHEMA_TYPE_ATTRIBUTE, value); } else { String value = xmlDescriptor.getSchemaReference().getSchemaContext(); typeAttr.setValue(value); } } } marshal(object, contentHandlerRecord, session, xmlDescriptor,isXMLRoot); } /** * PUBLIC: * Convert the given object to XML and update the given outputStream with that XML Document * @param object the object to marshal * @param outputStream 
the outputStream to marshal the object to * @throws XMLMarshalException if an error occurred during marshalling */ public void marshal(Object object, OutputStream outputStream) throws XMLMarshalException { marshal (object, outputStream, null, null); } private void marshal(Object object, OutputStream outputStream, ABSTRACT_SESSION session, DESCRIPTOR xmlDescriptor) throws XMLMarshalException { if ((object == null) || (outputStream == null)) { throw XMLMarshalException.nullArgumentException(); } boolean isXMLRoot = false; String version = DEFAULT_XML_VERSION; String encoding = getEncoding(); String callbackName = null; if(object instanceof JSONWithPadding){ callbackName = ((JSONWithPadding)object).getCallbackName(); object = ((JSONWithPadding)object).getObject(); if(object == null){ throw XMLMarshalException.nullArgumentException(); } } if (object instanceof Root) { isXMLRoot = true; Root xroot = (Root) object; version = xroot.getXMLVersion() != null ? xroot.getXMLVersion() : version; encoding = xroot.getEncoding() != null ? 
xroot.getEncoding() : encoding; } if(!encoding.equals(Constants.DEFAULT_XML_ENCODING)) { try { OutputStreamWriter writer = new OutputStreamWriter(outputStream, encoding); marshal(object, writer, session, xmlDescriptor); writer.flush(); } catch(EclipseLinkException e) { throw e; } catch(Exception e) { throw XMLMarshalException.marshalException(e); } return; } MarshalRecord marshalRecord; if (isFormattedOutput()) { if(mediaType.isApplicationJSON()) { marshalRecord = new JSONFormattedWriterRecord(outputStream, callbackName); } else { marshalRecord = new FormattedOutputStreamRecord(); ((FormattedOutputStreamRecord)marshalRecord).setOutputStream(outputStream); } } else { if(mediaType.isApplicationJSON()) { marshalRecord = new JSONWriterRecord(outputStream, callbackName); } else { marshalRecord = new OutputStreamRecord(); ((OutputStreamRecord)marshalRecord).setOutputStream(outputStream); } } marshalStreamOrWriter(object, marshalRecord, session, xmlDescriptor, isXMLRoot); } /** * PUBLIC: * Convert the given object to XML and update the given result with that XML Document * @param object the object to marshal * @param result the result to marshal the object to * @throws XMLMarshalException if an error occurred during marshalling */ public void marshal(Object object, Result result) throws XMLMarshalException { if ((object == null) || (result == null)) { throw XMLMarshalException.nullArgumentException(); } DESCRIPTOR xmlDescriptor = null; ABSTRACT_SESSION session = null; boolean isXMLRoot = (object instanceof Root); if(isXMLRoot){ try{ session = context.getSession(((Root)object).getObject()); if(session != null){ xmlDescriptor = getDescriptor(((Root)object).getObject(), session); } }catch (XMLMarshalException marshalException) { if (!isSimpleXMLRoot((Root) object)) { throw marshalException; } } }else{ Class objectClass = object.getClass(); session = context.getSession(objectClass); xmlDescriptor = getDescriptor(objectClass, session); } //if this is a simple xml root, the 
session and descriptor will be null if (result instanceof StreamResult) { StreamResult streamResult = (StreamResult) result; Writer writer = streamResult.getWriter(); if (writer != null) { marshal(object, writer, session, xmlDescriptor); } else if (streamResult.getOutputStream() != null) { marshal(object, streamResult.getOutputStream(), session, xmlDescriptor); } else { try { File f; try { f = new File(new URL(streamResult.getSystemId()).toURI()); } catch(MalformedURLException malformedURLException) { try { f = new File(streamResult.getSystemId()); } catch(Exception e) { throw malformedURLException; } } writer = new FileWriter(f); try { marshal(object, writer, session, xmlDescriptor); } finally { writer.close(); } } catch (Exception e) { throw XMLMarshalException.marshalException(e); } } }else if (result instanceof DOMResult) { DOMResult domResult = (DOMResult) result; // handle case where the node is null if (domResult.getNode() == null) { domResult.setNode(this.objectToXML(object)); } else { marshal(object, domResult.getNode()); } } else if (result instanceof SAXResult) { SAXResult saxResult = (SAXResult) result; marshal(object, saxResult.getHandler()); } else { if (result.getClass().equals(staxResultClass)) { try { Object xmlStreamWriter = PrivilegedAccessHelper.invokeMethod(staxResultGetStreamWriterMethod, result); if (xmlStreamWriter != null) { MarshalRecord record = (MarshalRecord)PrivilegedAccessHelper.invokeConstructor(xmlStreamWriterRecordConstructor, new Object[]{xmlStreamWriter}); record.setMarshaller(this); marshal(object, record, session, xmlDescriptor, isXMLRoot); return; } else { Object xmlEventWriter = PrivilegedAccessHelper.invokeMethod(staxResultGetEventWriterMethod, result); if(xmlEventWriter != null) { MarshalRecord record = (MarshalRecord)PrivilegedAccessHelper.invokeConstructor(xmlEventWriterRecordConstructor, new Object[]{xmlEventWriter}); record.setMarshaller(this); marshal(object, record, session, xmlDescriptor, isXMLRoot); return; } } } 
catch (Exception e) { throw XMLMarshalException.marshalException(e); } } java.io.StringWriter writer = new java.io.StringWriter(); marshal(object, writer); javax.xml.transform.stream.StreamSource source = new javax.xml.transform.stream.StreamSource(new java.io.StringReader(writer.toString())); getTransformer().transform(source, result); } return; } /** * PUBLIC: * Convert the given object to XML and update the given writer with that XML Document * @param object the object to marshal * @param writer the writer to marshal the object to * @throws XMLMarshalException if an error occurred during marshalling */ public void marshal(Object object, Writer writer) throws XMLMarshalException { marshal(object, writer, null, null); } private void marshal(Object object, Writer writer, ABSTRACT_SESSION session, DESCRIPTOR xmlDescriptor) throws XMLMarshalException { if ((object == null) || (writer == null)) { throw XMLMarshalException.nullArgumentException(); } boolean isXMLRoot = false; String version = DEFAULT_XML_VERSION; String encoding = getEncoding(); String callbackName = null; if(object instanceof JSONWithPadding){ callbackName = ((JSONWithPadding)object).getCallbackName(); object = ((JSONWithPadding)object).getObject(); if(object == null){ throw XMLMarshalException.nullArgumentException(); } } if (object instanceof Root) { isXMLRoot = true; Root xroot = (Root) object; version = xroot.getXMLVersion() != null ? xroot.getXMLVersion() : version; encoding = xroot.getEncoding() != null ? 
xroot.getEncoding() : encoding; } MarshalRecord marshalRecord; writer = wrapWriter(writer); if (isFormattedOutput()) { if(mediaType.isApplicationJSON()) { marshalRecord = new JSONFormattedWriterRecord(writer, callbackName); } else { marshalRecord = new FormattedWriterRecord(); ((FormattedWriterRecord) marshalRecord).setWriter(writer); } } else { if(mediaType.isApplicationJSON()) { marshalRecord = new JSONWriterRecord(writer, callbackName); } else { marshalRecord = new WriterRecord(); ((WriterRecord) marshalRecord).setWriter(writer); } } marshalStreamOrWriter(object, marshalRecord, session, xmlDescriptor, isXMLRoot); } private void marshalStreamOrWriter(Object object, MarshalRecord marshalRecord, ABSTRACT_SESSION session, DESCRIPTOR descriptor, boolean isXMLRoot) { marshalRecord.setMarshaller(this); String rootName = null; String rootNamespace = null; if(isXMLRoot){ rootName = ((Root)object).getLocalName(); rootNamespace = ((Root)object).getNamespaceURI(); if(session == null || descriptor == null){ try{ session = context.getSession(((Root)object).getObject()); if(session != null){ descriptor = getDescriptor(((Root)object).getObject(), session); } }catch (XMLMarshalException marshalException) { if (!isSimpleXMLRoot((Root) object)) { throw marshalException; } } } }else{ Class objectClass = object.getClass(); if(object instanceof Collection) { marshalRecord.startCollection(); for(Object o : (Collection) object) { marshal(o, marshalRecord); } marshalRecord.endCollection(); marshalRecord.flush(); return; } else if(objectClass.isArray()) { marshalRecord.startCollection(); int arrayLength = Array.getLength(object); for(int x=0; x * Set this Marshaller's XML Header. This header string will appear after * the XML processing instruction (<?xml ...>), but before the start * of the document's data. *

* *

* This feature is only supported when marshalling to Stream, Writer, * or StreamResult. *

* @since 2.4 */ public void setXmlHeader(String xmlHeader) { this.xmlHeader = xmlHeader; } public void setMarshalAttributeGroup(Object group) { this.marshalAttributeGroup = group; } public Object getMarshalAttributeGroup() { return this.marshalAttributeGroup; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLSequencedDescriptor.java0000664000000000000000000000562212216173126026233 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.lang.reflect.Method; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.internal.security.PrivilegedAccessHelper; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.oxm.XMLDescriptor; /** *

Purpose:An extnesion of XMLDescriptor that's used for sequened * objects. *

Responsibilities: */ public class XMLSequencedDescriptor extends XMLDescriptor { private String getSettingsMethodName; private Method getSettingsMethod; public void initialize(AbstractSession session) throws DescriptorException { super.initialize(session); if(shouldPreserveDocument()) { this.objectBuilder = new XMLSequencedObjectBuilder(this); } if(getGetSettingsMethodName() != null) { try { this.getSettingsMethod = PrivilegedAccessHelper.getDeclaredMethod(this.getJavaClass(), this.getGetSettingsMethodName(), new Class[0]); } catch(Exception ex) { } } } /** * INTERNAL: * Get the method that will be used to obtain an ordered list of TopLinkSetting objects * at runtime. Only used with Sequenced objects * @return The name of the method to be invoked. */ public String getGetSettingsMethodName() { return this.getSettingsMethodName; } /** * INTERNAL: * Set the name of the method to be invoked to obtain an ordered list of TopLinkSetting * objects at runtime. Only used with Sequenced objects. * @param methodName: The name of the method. */ public void setGetSettingsMethodName(String methodName) { this.getSettingsMethodName = methodName; } /** * INTERNAL: * Return the actual method to be invoked to obtain an ordered list of TopLinkSetting objects * Only used with Sequenced Objects. Is set during initialize. * @return The method to be invoked. */ public Method getGetSettingsMethod() { return this.getSettingsMethod; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XPathPredicate.java0000664000000000000000000000332612216173126024543 0ustar /******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Blaise Doughan - 2.3 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; public class XPathPredicate { private XPathFragment xPathFragment; private String value; public XPathPredicate(XPathFragment xPathFragment, String value) { this.xPathFragment = xPathFragment; this.value = value; } public XPathFragment getXPathFragment() { return xPathFragment; } public void setXPathFragment(XPathFragment xmlFragment) { this.xPathFragment = xmlFragment; } public String getValue() { return value; } public void setValue(String value) { this.value = value; } @Override public boolean equals(Object obj) { if(null == obj || obj.getClass() != XPathPredicate.class) { return false; } XPathPredicate test = (XPathPredicate) obj; if(!xPathFragment.equals(test.getXPathFragment())) { return false; } return value.equals(test.getValue()); } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/ChoiceUnmarshalContext.java0000664000000000000000000000612312216173126026306 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * bdoughan - August 7/2009 - 2.0 - Initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import org.eclipse.persistence.internal.oxm.mappings.Mapping; import org.eclipse.persistence.internal.oxm.mappings.XMLConverterMapping; import org.eclipse.persistence.internal.oxm.record.UnmarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; /** * Allow the unmarshal context to be wrapped. This is necessary so that choice * mappings with a converter can convert the result of the nested mapping. */ public class ChoiceUnmarshalContext implements UnmarshalContext { private UnmarshalContext unmarshalContext; private XMLConverterMapping converter; public ChoiceUnmarshalContext(UnmarshalContext unmarshalContext, XMLConverterMapping converter) { this.unmarshalContext = unmarshalContext; this.converter = converter; } public void addAttributeValue(UnmarshalRecord unmarshalRecord, ContainerValue containerValue, Object value) { this.unmarshalContext.addAttributeValue(unmarshalRecord, containerValue, getValue(value, unmarshalRecord)); } public void addAttributeValue(UnmarshalRecord unmarshalRecord, ContainerValue containerValue, Object value, Object collection) { this.unmarshalContext.addAttributeValue(unmarshalRecord, containerValue, getValue(value, unmarshalRecord), collection); } public void characters(UnmarshalRecord unmarshalRecord) { unmarshalContext.characters(unmarshalRecord); } public void endElement(UnmarshalRecord unmarshalRecord) { unmarshalContext.endElement(unmarshalRecord); } public void reference(Reference reference) { unmarshalContext.reference(reference); } public void setAttributeValue(UnmarshalRecord unmarshalRecord, Object value, Mapping mapping) { unmarshalContext.setAttributeValue(unmarshalRecord, getValue(value, unmarshalRecord), mapping); } public void startElement(UnmarshalRecord unmarshalRecord) { 
unmarshalContext.startElement(unmarshalRecord); } public void unmappedContent(UnmarshalRecord unmarshalRecord) { unmarshalContext.unmappedContent(unmarshalRecord); } private Object getValue(Object value, UnmarshalRecord unmarshalRecord) { return converter.convertDataValueToObjectValue(value, unmarshalRecord.getSession(), unmarshalRecord.getUnmarshaller()); } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/StrBuffer.java0000664000000000000000000000475012216173126023602 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; /** * INTERNAL: *

Purpose: This is a non-synchronized, reusable implementation of * StringBuffer. * @author mmacivor */ public class StrBuffer implements CharSequence { private int numChar; //The number of characters currently in this buffer private char[] myBuf; public StrBuffer() { this(80); } public StrBuffer(int length) { myBuf = new char[length]; numChar = 0; } /* * Clears the StringBuffer to be reused. */ public void reset() { numChar = 0; } private void increaseCapacity(int minStorage) { int newStorage = (myBuf.length * 2) + 5; if(newStorage < minStorage) { newStorage = minStorage; } char[] newBuf = new char[newStorage]; System.arraycopy(this.myBuf, 0, newBuf, 0, this.numChar); this.myBuf = newBuf; } public StrBuffer append(String str) { int strlen = str.length(); int newLength = this.numChar + strlen; if(newLength > this.myBuf.length) { increaseCapacity(newLength); } str.getChars(0, strlen, this.myBuf, this.numChar); this.numChar = newLength; return this; } public StrBuffer append(char[] chars, int start, int length) { int newLength = this.numChar + length; if(newLength > this.myBuf.length) { increaseCapacity(newLength); } System.arraycopy(chars, start, this.myBuf, numChar, length); this.numChar = newLength; return this; } public int length() { return numChar; } public String toString() { return new String(this.myBuf, 0, this.numChar); } public char charAt(int index) { return myBuf[index]; } public CharSequence subSequence(int start, int end) { return new String(this.myBuf, start, end); } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLCompositeObjectMappingNodeValue.java0000664000000000000000000010243112216173126030464 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 
1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.lang.reflect.Modifier; import java.util.List; import javax.xml.namespace.QName; import org.eclipse.persistence.core.queries.CoreAttributeGroup; import org.eclipse.persistence.core.queries.CoreAttributeItem; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.exceptions.DescriptorException; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.CompositeObjectMapping; import org.eclipse.persistence.internal.oxm.mappings.Descriptor; import org.eclipse.persistence.internal.oxm.mappings.DirectMapping; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.InverseReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.Mapping; import org.eclipse.persistence.internal.oxm.mappings.UnmarshalKeepAsElementPolicy; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLReader; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.internal.oxm.record.deferred.CompositeObjectMappingContentHandler; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; import 
org.eclipse.persistence.platform.xml.XMLPlatformFactory; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.Text; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; /** * INTERNAL: *

Purpose: This is how the XML Composite Object Mapping is handled * when used with the TreeObjectBuilder.

*/ public class XMLCompositeObjectMappingNodeValue extends XMLRelationshipMappingNodeValue implements NullCapableValue { private CompositeObjectMapping xmlCompositeObjectMapping; private boolean isInverseReference; public XMLCompositeObjectMappingNodeValue(CompositeObjectMapping xmlCompositeObjectMapping) { this.xmlCompositeObjectMapping = xmlCompositeObjectMapping; } public XMLCompositeObjectMappingNodeValue(CompositeObjectMapping xmlCompositeObjectMapping, boolean isInverse) { this(xmlCompositeObjectMapping); isInverseReference = isInverse; } @Override public void attribute(UnmarshalRecord unmarshalRecord, String namespaceURI, String localName, String value) { unmarshalRecord.removeNullCapableValue(this); Descriptor referenceDescriptor = (Descriptor) getMapping().getReferenceDescriptor(); ObjectBuilder treeObjectBuilder = (ObjectBuilder) referenceDescriptor.getObjectBuilder(); MappingNodeValue textMappingNodeValue = (MappingNodeValue) treeObjectBuilder.getRootXPathNode().getTextNode().getNodeValue(); Mapping textMapping = textMappingNodeValue.getMapping(); Object childObject = referenceDescriptor.getInstantiationPolicy().buildNewInstance(); if(textMapping.isAbstractDirectMapping()) { DirectMapping xmlDirectMapping = (DirectMapping) textMappingNodeValue.getMapping(); Field xmlField = (Field) xmlDirectMapping.getField(); Object realValue = unmarshalRecord.getXMLReader().convertValueBasedOnSchemaType(xmlField, value, (XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager(), unmarshalRecord); Object convertedValue = xmlDirectMapping.getAttributeValue(realValue, unmarshalRecord.getSession(), unmarshalRecord); xmlDirectMapping.setAttributeValueInObject(childObject, convertedValue); } else { Object oldChildObject = unmarshalRecord.getCurrentObject(); CompositeObjectMapping nestedXMLCompositeObjectMapping = (CompositeObjectMapping) textMappingNodeValue.getMapping(); unmarshalRecord.setCurrentObject(childObject); 
textMappingNodeValue.attribute(unmarshalRecord, namespaceURI, localName, value); unmarshalRecord.setCurrentObject(oldChildObject); } setAttributeValue(childObject, unmarshalRecord); } /** * Marshal any 'self' mapped attributes. * * @param xPathFragment * @param marshalRecord * @param object * @param session * @param namespaceResolver * @param marshaller * @return */ @Override public boolean marshalSelfAttributes(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, Marshaller marshaller) { Object objectValue = xmlCompositeObjectMapping.getAttributeValueFromObject(object); objectValue = xmlCompositeObjectMapping.convertObjectValueToDataValue(objectValue, session, marshaller); Descriptor descriptor = (Descriptor)session.getDescriptor(objectValue); if(descriptor != null){ ObjectBuilder objectBuilder = (ObjectBuilder)descriptor.getObjectBuilder(); return objectBuilder.marshalAttributes(marshalRecord, objectValue, session); } else { UnmarshalKeepAsElementPolicy keepAsElementPolicy = getMapping().getKeepAsElementPolicy(); if(null != keepAsElementPolicy && (keepAsElementPolicy.isKeepAllAsElement() || keepAsElementPolicy.isKeepUnknownAsElement())) { if(objectValue instanceof Node) { Node rootNode = (Node)objectValue; NamedNodeMap attributes = rootNode.getAttributes(); for(int i = 0; i < attributes.getLength(); i++) { Attr next = (Attr)attributes.item(i); if(!(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI.equals(next.getNamespaceURI()))) { marshalRecord.node(next, namespaceResolver); } } } } } return false; } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { return marshal(xPathFragment, marshalRecord, object, session, namespaceResolver, ObjectMarshalContext.getInstance()); } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, 
CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { if (xmlCompositeObjectMapping.isReadOnly()) { return false; } int size =marshalRecord.getCycleDetectionStack().size(); Object objectValue = marshalContext.getAttributeValue(object, xmlCompositeObjectMapping); if((isInverseReference || xmlCompositeObjectMapping.getInverseReferenceMapping() !=null)&& objectValue !=null && size >= 2){ Object owner = marshalRecord.getCycleDetectionStack().get(size - 2); if(owner.equals(objectValue)){ return false; } } return this.marshalSingleValue(xPathFragment, marshalRecord, object, objectValue, session, namespaceResolver, marshalContext); } public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object objectValue, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { objectValue = xmlCompositeObjectMapping.convertObjectValueToDataValue(objectValue, session, marshalRecord.getMarshaller()); if (null == objectValue) { return xmlCompositeObjectMapping.getNullPolicy().compositeObjectMarshal(xPathFragment, marshalRecord, object, session, namespaceResolver); } XPathFragment groupingFragment = marshalRecord.openStartGroupingElements(namespaceResolver); if(xPathFragment.hasAttribute) { ObjectBuilder tob = (ObjectBuilder) xmlCompositeObjectMapping.getReferenceDescriptor().getObjectBuilder(); MappingNodeValue textMappingNodeValue = (MappingNodeValue) tob.getRootXPathNode().getTextNode().getMarshalNodeValue(); Mapping textMapping = textMappingNodeValue.getMapping(); if(textMapping.isAbstractDirectMapping()) { DirectMapping xmlDirectMapping = (DirectMapping) textMapping; Object fieldValue = xmlDirectMapping.getFieldValue(xmlDirectMapping.valueFromObject(objectValue, xmlDirectMapping.getField(), session), session, marshalRecord); QName schemaType = ((Field) xmlDirectMapping.getField()).getSchemaTypeForValue(fieldValue, session); if(fieldValue != null) { 
marshalRecord.attribute(xPathFragment, namespaceResolver, fieldValue, schemaType); } else { XMLMarshalException ex = XMLMarshalException.nullValueNotAllowed(this.xmlCompositeObjectMapping.getAttributeName(), this.xmlCompositeObjectMapping.getDescriptor().getJavaClass().getName()); try { marshalRecord.getMarshaller().getErrorHandler().warning(new SAXParseException(null, null, ex)); } catch(Exception saxException) { throw ex; } } marshalRecord.closeStartGroupingElements(groupingFragment); return true; } else { return textMappingNodeValue.marshalSingleValue(xPathFragment, marshalRecord, objectValue, textMapping.getAttributeValueFromObject(objectValue), session, namespaceResolver, marshalContext); } } boolean isSelfFragment = xPathFragment.isSelfFragment; marshalRecord.closeStartGroupingElements(groupingFragment); UnmarshalKeepAsElementPolicy keepAsElementPolicy = xmlCompositeObjectMapping.getKeepAsElementPolicy(); if (null != keepAsElementPolicy && (keepAsElementPolicy.isKeepUnknownAsElement() || keepAsElementPolicy.isKeepAllAsElement()) && objectValue instanceof Node) { if (isSelfFragment) { NodeList children = ((org.w3c.dom.Element) objectValue).getChildNodes(); for (int i = 0, childrenLength = children.getLength(); i < childrenLength ; i++) { Node next = children.item(i); short nodeType = next.getNodeType(); if (nodeType == Node.ELEMENT_NODE) { marshalRecord.node(next, marshalRecord.getNamespaceResolver()); return true; } else if (nodeType == Node.TEXT_NODE) { marshalRecord.characters(((Text) next).getNodeValue()); return true; } } return false; } else { marshalRecord.node((Node) objectValue, marshalRecord.getNamespaceResolver()); return true; } } Descriptor descriptor = (Descriptor)xmlCompositeObjectMapping.getReferenceDescriptor(); if(descriptor == null){ descriptor = (Descriptor) session.getDescriptor(objectValue.getClass()); }else if(descriptor.hasInheritance()){ Class objectValueClass = objectValue.getClass(); if(!(objectValueClass == 
descriptor.getJavaClass())){ descriptor = (Descriptor) session.getDescriptor(objectValueClass); } } if(descriptor != null){ marshalRecord.beforeContainmentMarshal(objectValue); ObjectBuilder objectBuilder = (ObjectBuilder)descriptor.getObjectBuilder(); CoreAttributeGroup group = marshalRecord.getCurrentAttributeGroup(); CoreAttributeItem item = group.getItem(getMapping().getAttributeName()); CoreAttributeGroup nestedGroup = XMLRecord.DEFAULT_ATTRIBUTE_GROUP; if(item != null) { if(item.getGroups() != null) { nestedGroup = item.getGroup(descriptor.getJavaClass()); } if(nestedGroup == null) { nestedGroup = item.getGroup() == null?XMLRecord.DEFAULT_ATTRIBUTE_GROUP:item.getGroup(); } } marshalRecord.pushAttributeGroup(nestedGroup); if (!(isSelfFragment || xPathFragment.nameIsText)) { xPathNode.startElement(marshalRecord, xPathFragment, object, session, namespaceResolver, objectBuilder, objectValue); } List extraNamespaces = null; if (!marshalRecord.hasEqualNamespaceResolvers()) { extraNamespaces = objectBuilder.addExtraNamespacesToNamespaceResolver(descriptor, marshalRecord, session, true, false); writeExtraNamespaces(extraNamespaces, marshalRecord, session); } if(!isSelfFragment) { marshalRecord.addXsiTypeAndClassIndicatorIfRequired(descriptor, (Descriptor) xmlCompositeObjectMapping.getReferenceDescriptor(), (Field)xmlCompositeObjectMapping.getField(), false); } objectBuilder.buildRow(marshalRecord, objectValue, session, marshalRecord.getMarshaller(), xPathFragment); marshalRecord.afterContainmentMarshal(object, objectValue); marshalRecord.popAttributeGroup(); if (!(isSelfFragment || xPathFragment.nameIsText())) { marshalRecord.endElement(xPathFragment, namespaceResolver); } marshalRecord.removeExtraNamespacesFromNamespaceResolver(extraNamespaces, session); } else { if(Constants.UNKNOWN_OR_TRANSIENT_CLASS.equals(xmlCompositeObjectMapping.getReferenceClassName())){ throw XMLMarshalException.descriptorNotFoundInProject(objectValue.getClass().getName()); } if 
(!(isSelfFragment || xPathFragment.nameIsText())) { xPathNode.startElement(marshalRecord, xPathFragment, object, session, namespaceResolver, null, objectValue); } QName schemaType = ((Field) xmlCompositeObjectMapping.getField()).getSchemaTypeForValue(objectValue,session); updateNamespaces(schemaType, marshalRecord,((Field)xmlCompositeObjectMapping.getField())); marshalRecord.characters(schemaType, objectValue, null, false); if (!(isSelfFragment || xPathFragment.nameIsText())) { marshalRecord.endElement(xPathFragment, namespaceResolver); } } return true; } public boolean startElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Attributes atts) { try { unmarshalRecord.removeNullCapableValue(this); Descriptor xmlDescriptor = (Descriptor)xmlCompositeObjectMapping.getReferenceDescriptor(); if (null == xmlDescriptor) { xmlDescriptor = findReferenceDescriptor(xPathFragment, unmarshalRecord, atts, xmlCompositeObjectMapping,xmlCompositeObjectMapping.getKeepAsElementPolicy()); if(xmlDescriptor == null){ if(xmlCompositeObjectMapping.getField() != null){ //try leaf element type QName leafType = ((Field)xmlCompositeObjectMapping.getField()).getLastXPathFragment().getLeafElementType(); if (leafType != null) { XPathFragment frag = new XPathFragment(); frag.setNamespaceAware(unmarshalRecord.isNamespaceAware()); String xpath = leafType.getLocalPart(); String uri = leafType.getNamespaceURI(); if (uri != null && uri.length() > 0) { frag.setNamespaceURI(uri); String prefix = ((Descriptor)xmlCompositeObjectMapping.getDescriptor()).getNonNullNamespaceResolver().resolveNamespaceURI(uri); if (prefix != null && prefix.length() > 0) { xpath = prefix + Constants.COLON + xpath; } } frag.setXPath(xpath); Context xmlContext = unmarshalRecord.getUnmarshaller().getContext(); xmlDescriptor = xmlContext.getDescriptorByGlobalType(frag); } } } UnmarshalKeepAsElementPolicy policy = xmlCompositeObjectMapping.getKeepAsElementPolicy(); if (null != policy && ((xmlDescriptor == null && 
policy.isKeepUnknownAsElement()) || policy.isKeepAllAsElement())) { QName schemaType = unmarshalRecord.getTypeQName(); if(schemaType == null){ schemaType = ((Field)xmlCompositeObjectMapping.getField()).getSchemaType(); unmarshalRecord.setTypeQName(schemaType); } if(schemaType != null){ Class theClass = (Class)((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).getDefaultXMLTypes().get(schemaType); if(theClass == null){ setupHandlerForKeepAsElementPolicy(unmarshalRecord, xPathFragment, atts); return true; } }else{ setupHandlerForKeepAsElementPolicy(unmarshalRecord, xPathFragment, atts); return true; } } } // // Null Composite Objects are marshalled in 2 ways when the input XML node is empty. // (1) as null // - isNullRepresentedByEmptyNode = true // (2) as empty object // - isNullRepresentedByEmptyNode = false // A deferred contentHandler is used to queue events until we are able to determine // whether we are in one of empty/simple/complex state. // Control is returned to the UnmarshalHandler after creation of (1) or (2) above is started. 
// Object creation was deferred to the DeferredContentHandler // // Check if we need to create the DeferredContentHandler based on policy state AbstractNullPolicy nullPolicy = xmlCompositeObjectMapping.getNullPolicy(); if(nullPolicy.isNullRepresentedByEmptyNode()) { String qnameString = xPathFragment.getLocalName(); if(xPathFragment.getPrefix() != null) { qnameString = xPathFragment.getPrefix() + Constants.COLON + qnameString; } if(null != xmlDescriptor) { // Process null capable value CompositeObjectMappingContentHandler aHandler = new CompositeObjectMappingContentHandler(// unmarshalRecord, this, xmlCompositeObjectMapping, atts, xPathFragment, xmlDescriptor); // Send control to the handler aHandler.startElement(xPathFragment.getNamespaceURI(), xPathFragment.getLocalName(), qnameString, atts); XMLReader xmlReader = unmarshalRecord.getXMLReader(); xmlReader.setContentHandler(aHandler); xmlReader.setLexicalHandler(aHandler); } } else { if(unmarshalRecord.getXMLReader().isNullRepresentedByXsiNil(nullPolicy) && unmarshalRecord.isNil()){ xmlCompositeObjectMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), null); } else { Field xmlFld = (Field)this.xmlCompositeObjectMapping.getField(); if (xmlFld.hasLastXPathFragment()) { unmarshalRecord.setLeafElementType(xmlFld.getLastXPathFragment().getLeafElementType()); } processChild(xPathFragment, unmarshalRecord, atts, xmlDescriptor, xmlCompositeObjectMapping); } } } catch (SAXException e) { throw XMLMarshalException.unmarshalException(e); } return true; } public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) { if(unmarshalRecord.isNil() && xmlCompositeObjectMapping.getNullPolicy().isNullRepresentedByXsiNil()){ unmarshalRecord.resetStringBuffer(); return; } if (null == unmarshalRecord.getChildRecord()) { SAXFragmentBuilder builder = unmarshalRecord.getFragmentBuilder(); UnmarshalKeepAsElementPolicy keepAsElementPolicy = xmlCompositeObjectMapping.getKeepAsElementPolicy(); if (null 
!= keepAsElementPolicy && (keepAsElementPolicy.isKeepUnknownAsElement() || keepAsElementPolicy.isKeepAllAsElement()) && builder.getNodes().size() != 0) { if(unmarshalRecord.getTypeQName() != null){ Class theClass = (Class)((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).getDefaultXMLTypes().get(unmarshalRecord.getTypeQName()); if(theClass != null){ //handle simple text endElementProcessText(unmarshalRecord, xmlCompositeObjectMapping, xPathFragment, null); return; } } if (builder.getDocument() != null) { setOrAddAttributeValueForKeepAsElement(builder, xmlCompositeObjectMapping, xmlCompositeObjectMapping, unmarshalRecord, false, null); return; } }else{ //handle simple text endElementProcessText(unmarshalRecord, xmlCompositeObjectMapping, xPathFragment, null); return; } } else { Object object = unmarshalRecord.getChildRecord().getCurrentObject(); setAttributeValue(object, unmarshalRecord); unmarshalRecord.setChildRecord(null); } } private void setAttributeValue(Object object, UnmarshalRecord unmarshalRecord) { InverseReferenceMapping inverseReferenceMapping = xmlCompositeObjectMapping.getInverseReferenceMapping(); //If isInverseReference then this mapping is an inlineMapping of an InverseReference if(null != inverseReferenceMapping){ if(inverseReferenceMapping.getContainerPolicy() == null) { Object currentValue = inverseReferenceMapping.getAttributeAccessor().getAttributeValueFromObject(object); if( !isInverseReference || (currentValue == null && isInverseReference)) { inverseReferenceMapping.getAttributeAccessor().setAttributeValueInObject(object, unmarshalRecord.getCurrentObject()); } } else { Object backpointerContainer = inverseReferenceMapping.getAttributeAccessor().getAttributeValueFromObject(object); if(backpointerContainer == null) { backpointerContainer = inverseReferenceMapping.getContainerPolicy().containerInstance(); inverseReferenceMapping.getAttributeAccessor().setAttributeValueInObject(object, 
backpointerContainer); } inverseReferenceMapping.getContainerPolicy().addInto(unmarshalRecord.getCurrentObject(), backpointerContainer, unmarshalRecord.getSession()); } } object = xmlCompositeObjectMapping.convertDataValueToObjectValue(object, unmarshalRecord.getSession(), unmarshalRecord.getUnmarshaller()); // Set the child object on the parent unmarshalRecord.setAttributeValue(object, xmlCompositeObjectMapping); } public void endSelfNodeValue(UnmarshalRecord unmarshalRecord, UnmarshalRecord selfRecord, Attributes attributes) { if(xmlCompositeObjectMapping.getNullPolicy().valueIsNull(attributes)){ xmlCompositeObjectMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), null); return; } unmarshalRecord.removeNullCapableValue(this); if (unmarshalRecord.getFragmentBuilder().getDocument() != null) { UnmarshalKeepAsElementPolicy keepAsElementPolicy = xmlCompositeObjectMapping.getKeepAsElementPolicy(); SAXFragmentBuilder builder = unmarshalRecord.getFragmentBuilder(); if ((((keepAsElementPolicy.isKeepUnknownAsElement()) || (keepAsElementPolicy.isKeepAllAsElement())))&& (builder.getNodes().size() != 0) ) { if(unmarshalRecord.getTypeQName() != null){ Class theClass = (Class)((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).getDefaultXMLTypes().get(unmarshalRecord.getTypeQName()); if(theClass != null){ //handle simple text endElementProcessText(unmarshalRecord, xmlCompositeObjectMapping, null, null); return; } } Element element = (Element) builder.getNodes().remove(builder.getNodes().size() -1); String xsiType = null; if(null != element) { if(unmarshalRecord.isNamespaceAware()){ xsiType = element.getAttributeNS(javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, Constants.SCHEMA_TYPE_ATTRIBUTE); }else{ xsiType = element.getAttribute(Constants.SCHEMA_TYPE_ATTRIBUTE); } } if(null != xsiType) { xsiType = xsiType.trim(); Object value = element; String namespace = null; int colonIndex = 
xsiType.indexOf(unmarshalRecord.getNamespaceSeparator()); if (colonIndex > -1) { String prefix = xsiType.substring(0, colonIndex); namespace = unmarshalRecord.resolveNamespacePrefix(prefix); if(null == namespace) { namespace = XMLPlatformFactory.getInstance().getXMLPlatform().resolveNamespacePrefix(element, prefix); } String name = xsiType.substring(colonIndex + 1); QName qName = new QName(namespace, xsiType.substring(colonIndex + 1)); Class theClass = (Class) XMLConversionManager.getDefaultXMLTypes().get(qName); if (theClass != null) { value = ((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).convertObject(element.getTextContent(), theClass, qName); } }else{ if(!unmarshalRecord.isNamespaceAware()){ QName qName = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, xsiType); Class theClass = (Class) XMLConversionManager.getDefaultXMLTypes().get(qName); if (theClass != null) { value = ((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).convertObject(element.getTextContent(), theClass, qName); } } } xmlCompositeObjectMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), value); } else { xmlCompositeObjectMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), element); } } } else { Object valueToSet = selfRecord.getCurrentObject(); valueToSet = xmlCompositeObjectMapping.convertDataValueToObjectValue(valueToSet, unmarshalRecord.getSession(), unmarshalRecord.getUnmarshaller()); xmlCompositeObjectMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), valueToSet); InverseReferenceMapping inverseReferenceMapping = xmlCompositeObjectMapping.getInverseReferenceMapping(); if (null != inverseReferenceMapping) { inverseReferenceMapping.getAttributeAccessor().setAttributeValueInObject(valueToSet, unmarshalRecord.getCurrentObject()); } } } public UnmarshalRecord buildSelfRecord(UnmarshalRecord unmarshalRecord, Attributes atts) { try 
{ Descriptor xmlDescriptor = (Descriptor)xmlCompositeObjectMapping.getReferenceDescriptor(); if (null == xmlDescriptor) { xmlDescriptor = findReferenceDescriptor(null, unmarshalRecord, atts, xmlCompositeObjectMapping,xmlCompositeObjectMapping.getKeepAsElementPolicy()); } if(xmlDescriptor != null){ if (xmlDescriptor.hasInheritance()) { unmarshalRecord.setAttributes(atts); Class clazz = ((ObjectBuilder)xmlDescriptor.getObjectBuilder()).classFromRow(unmarshalRecord, unmarshalRecord.getSession()); if (clazz == null) { // no xsi:type attribute - look for type indicator on the default root element XPathQName leafElementType = unmarshalRecord.getLeafElementType(); // if we have a user-set type, try to get the class from the inheritance policy if (leafElementType != null) { Object indicator = xmlDescriptor.getInheritancePolicy().getClassIndicatorMapping().get(leafElementType); if(indicator != null) { clazz = (Class)indicator; } } } if (clazz != null) { xmlDescriptor = (Descriptor)unmarshalRecord.getSession().getDescriptor(clazz); } else { // since there is no xsi:type attribute, use the reference descriptor set // on the mapping - make sure it is non-abstract if (Modifier.isAbstract(xmlDescriptor.getJavaClass().getModifiers())) { // need to throw an exception here throw DescriptorException.missingClassIndicatorField(unmarshalRecord, (org.eclipse.persistence.oxm.XMLDescriptor)xmlDescriptor.getInheritancePolicy().getDescriptor()); } } } ObjectBuilder stob2 = (ObjectBuilder)xmlDescriptor.getObjectBuilder(); UnmarshalRecord childRecord = unmarshalRecord.getChildUnmarshalRecord(stob2); childRecord.setSelfRecord(true); unmarshalRecord.setChildRecord(childRecord); childRecord.startDocument(); childRecord.initializeRecord(this.xmlCompositeObjectMapping); return childRecord; } else{ return null; } } catch (SAXException e) { throw XMLMarshalException.unmarshalException(e); } } public void setNullValue(Object object, CoreSession session) { 
xmlCompositeObjectMapping.setAttributeValueInObject(object, null); } public boolean isNullCapableValue() { if(xmlCompositeObjectMapping.getAttributeAccessor().isInstanceVariableAttributeAccessor() && !xmlCompositeObjectMapping.hasConverter()) { return false; } Field xmlField = (Field)xmlCompositeObjectMapping.getField(); if (xmlField.getLastXPathFragment().isSelfFragment) { return false; } return xmlCompositeObjectMapping.getNullPolicy().getIsSetPerformedForAbsentNode(); } public CompositeObjectMapping getMapping() { return xmlCompositeObjectMapping; } protected void setOrAddAttributeValue(UnmarshalRecord unmarshalRecord, Object value, XPathFragment xPathFragment, Object collection){ unmarshalRecord.setAttributeValue(value, xmlCompositeObjectMapping); } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/TreeObjectBuilder.java0000664000000000000000000002333212216173126025232 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.eclipse.persistence.core.descriptors.CoreInheritancePolicy; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.helper.DatabaseField; import org.eclipse.persistence.internal.oxm.mappings.Descriptor; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.record.AbstractMarshalRecord; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.internal.sessions.AbstractRecord; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.DatabaseMapping.WriteType; import org.eclipse.persistence.oxm.XMLField; import org.eclipse.persistence.oxm.record.NodeRecord; import org.eclipse.persistence.oxm.record.UnmarshalRecord; import org.w3c.dom.Document; import org.w3c.dom.Node; /** * INTERNAL: *

Purpose: Perform the unmarshal and marshal operations based on the * object-to-XML mapping metadata.

*

Responsibilities:

    *
  • Convert mapping metadata to a tree of XPathNodes. This tree is then * traversed during unmarshal and marshal operations.
  • *
  • Create records appropriate to this implementation of ObjectBuilder.
  • *
*/ public class TreeObjectBuilder extends XMLObjectBuilder implements ObjectBuilder { private XPathObjectBuilder xPathObjectBuilder; public TreeObjectBuilder(ClassDescriptor descriptor) { super(descriptor); xPathObjectBuilder = new XPathObjectBuilder(descriptor); } @Override protected void initialize(ClassDescriptor descriptor) { int descriptorMappingsSize = descriptor.getMappings().size(); this.mappingsByField = new HashMap(descriptorMappingsSize); this.fieldsMap = new HashMap(descriptorMappingsSize); this.cloningMappings = new ArrayList(descriptorMappingsSize); } public XPathNode getRootXPathNode() { return xPathObjectBuilder.getRootXPathNode(); } @Override public List getPrimaryKeyMappings() { if(null == primaryKeyMappings) { primaryKeyMappings = new ArrayList(1); } return primaryKeyMappings; } public List getTransformationMappings() { return xPathObjectBuilder.getTransformationMappings(); } public List getContainerValues() { return xPathObjectBuilder.getContainerValues(); } public List getNullCapableValues() { return xPathObjectBuilder.getNullCapableValues(); } public List getDefaultEmptyContainerValues() { return xPathObjectBuilder.getDefaultEmptyContainerValues(); } public void initialize(org.eclipse.persistence.internal.sessions.AbstractSession session) { super.initialize(session); Descriptor xmlDescriptor = (Descriptor)getDescriptor(); // INHERITANCE if (xmlDescriptor.hasInheritance()) { CoreInheritancePolicy inheritancePolicy = xmlDescriptor.getInheritancePolicy(); if (!inheritancePolicy.hasClassExtractor()) { Field classIndicatorField = new XMLField(inheritancePolicy.getClassIndicatorFieldName()); classIndicatorField.setNamespaceResolver(xmlDescriptor.getNamespaceResolver()); } } if(!xmlDescriptor.isLazilyInitialized()) { xPathObjectBuilder.lazyInitialize(); } } @Override public AbstractRecord buildRow(AbstractRecord record, Object object, org.eclipse.persistence.internal.sessions.AbstractSession session, WriteType writeType) { return (AbstractRecord) 
buildRow((XMLRecord) record, object, session, null, null); } public org.eclipse.persistence.internal.oxm.record.XMLRecord buildRow(org.eclipse.persistence.internal.oxm.record.XMLRecord record, Object object, CoreAbstractSession session, XMLMarshaller marshaller, XPathFragment rootFragment) { return xPathObjectBuilder.buildRow(record, object, session, marshaller, rootFragment); } public boolean marshalAttributes(MarshalRecord marshalRecord, Object object, CoreAbstractSession session) { return xPathObjectBuilder.marshalAttributes(marshalRecord, object, session); } /** * Create a new row/record for the object builder. * This allows subclasses to define different record types. */ public AbstractRecord createRecord(AbstractSession session) { xPathObjectBuilder.lazyInitialize(); org.eclipse.persistence.internal.oxm.record.UnmarshalRecordImpl uRec = new org.eclipse.persistence.internal.oxm.record.UnmarshalRecordImpl(this); uRec.setSession(session); return new UnmarshalRecord(uRec); } /** * Create a new row/record for the object builder with the given name. * This allows subclasses to define different record types. */ public AbstractMarshalRecord createRecord(String rootName, AbstractSession session) { NodeRecord nRec = new NodeRecord(rootName, getNamespaceResolver()); nRec.setSession(session); return nRec; } /** * Create a new row/record for the object builder with the given name. * This allows subclasses to define different record types. */ public AbstractMarshalRecord createRecord(String rootName, Node parent, AbstractSession session) { NodeRecord nRec = new NodeRecord(rootName, getNamespaceResolver(), parent); nRec.setSession(session); return nRec; } /** * Create a new row/record for the object builder. * This allows subclasses to define different record types. 
*/ public AbstractRecord createRecord(int size, AbstractSession session) { return createRecord(session); } @Override public List addExtraNamespacesToNamespaceResolver(Descriptor desc, AbstractMarshalRecord marshalRecord, CoreAbstractSession session, boolean allowOverride, boolean ignoreEqualResolvers) { return xPathObjectBuilder.addExtraNamespacesToNamespaceResolver(desc, marshalRecord, session, allowOverride, ignoreEqualResolvers); } @Override public boolean addClassIndicatorFieldToRow(AbstractMarshalRecord abstractMarshalRecord) { if (descriptor.hasInheritance() && !xPathObjectBuilder.isXsiTypeIndicatorField()) { InheritanceRecord inheritanceRecord = new InheritanceRecord(abstractMarshalRecord); descriptor.getInheritancePolicy().addClassIndicatorFieldToRow(inheritanceRecord); return true; } return false; } @Override public Class classFromRow(org.eclipse.persistence.internal.oxm.record.UnmarshalRecord unmarshalRecord, AbstractSession session) { UnmarshalRecord inheritanceRecord = new UnmarshalRecord(unmarshalRecord); return descriptor.getInheritancePolicy().classFromRow(inheritanceRecord, session); } private static class InheritanceRecord extends org.eclipse.persistence.oxm.record.XMLRecord { private AbstractMarshalRecord abstractMarshalRecord; public InheritanceRecord(AbstractMarshalRecord abstractMarshalRecord) { this.abstractMarshalRecord = abstractMarshalRecord; } @Override public String getLocalName() { throw new UnsupportedOperationException(); } @Override public String getNamespaceURI() { throw new UnsupportedOperationException(); } @Override public void clear() { throw new UnsupportedOperationException(); } @Override public Document getDocument() { throw new UnsupportedOperationException(); } @Override public Node getDOM() { throw new UnsupportedOperationException(); } @Override public String transformToXML() { throw new UnsupportedOperationException(); } @Override public boolean isNamespaceAware() { return abstractMarshalRecord.isNamespaceAware(); } 
@Override
        public boolean hasCustomNamespaceMapper() {
            return abstractMarshalRecord.hasCustomNamespaceMapper();
        }

        @Override
        public char getNamespaceSeparator() {
            return abstractMarshalRecord.getNamespaceSeparator();
        }

        // The only mutating operation the inheritance policy uses: delegate
        // the class-indicator write to the wrapped marshal record.
        @Override
        public Object put(DatabaseField key, Object value) {
            return abstractMarshalRecord.put(key, value);
        }
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/accessor/0000775000000000000000000000000012216174372022635 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/accessor/OrmAttributeAccessor.java0000664000000000000000000001125112216173126027600 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     mmacivor - Feb 06/2009 - Initial implementation
 ******************************************************************************/
package org.eclipse.persistence.internal.oxm.accessor;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;

import org.eclipse.persistence.core.mappings.CoreAttributeAccessor;
import org.eclipse.persistence.descriptors.changetracking.ChangeTracker;
import org.eclipse.persistence.indirection.ValueHolder;
import org.eclipse.persistence.indirection.ValueHolderInterface;
import org.eclipse.persistence.indirection.WeavedAttributeValueHolderInterface;
import org.eclipse.persistence.mappings.AttributeAccessor;

/**
 * INTERNAL:
 * A custom AttributeAccessor to be used when the same object is mapped in both
 * OXM and ORM.
This will bridge the gap between the two for attributes that use
 * ValueHolders. Specifically for JPA weaving.
 * @author matt.macivor
 *
 */
public class OrmAttributeAccessor extends AttributeAccessor {

    // Accessor used by the ORM (JPA) side; may hand back a ValueHolder when weaved.
    private AttributeAccessor ormAccessor;
    // Accessor used by the OXM side; always reads/writes the unwrapped value.
    private CoreAttributeAccessor oxmAccessor;
    // True when the ORM attribute is wrapped in a ValueHolderInterface.
    private boolean isValueHolderProperty;
    // True when the owning descriptor uses attribute change tracking.
    private boolean isChangeTracking;

    public OrmAttributeAccessor(AttributeAccessor ormAccessor, CoreAttributeAccessor oxmAccessor) {
        this.ormAccessor = ormAccessor;
        this.oxmAccessor = oxmAccessor;
    }

    public void setValueHolderProperty(boolean isValueHolder) {
        isValueHolderProperty = isValueHolder;
    }

    public void setChangeTracking(boolean changeTracking) {
        this.isChangeTracking = changeTracking;
    }

    public boolean isValueHolderProperty() {
        return this.isValueHolderProperty;
    }

    public boolean isChangeTracking() {
        return this.isChangeTracking;
    }

    /**
     * Read the attribute through the OXM accessor. If the ORM side holds an
     * uninstantiated ValueHolder, trigger it first and copy the resolved
     * value onto the OXM property so both sides agree before returning.
     */
    public Object getAttributeValueFromObject(Object object) {
        if(isValueHolderProperty) {
            ValueHolderInterface vh = (ValueHolderInterface)ormAccessor.getAttributeValueFromObject(object);
            if(vh != null && !vh.isInstantiated()) {
                Object value = vh.getValue();
                oxmAccessor.setAttributeValueInObject(object, value);
                if(vh instanceof WeavedAttributeValueHolderInterface) {
                    // Mark the weaved holder as in sync with the property value.
                    ((WeavedAttributeValueHolderInterface)vh).setIsCoordinatedWithProperty(true);
                }
            }
        }
        return oxmAccessor.getAttributeValueFromObject(object);
    }

    /**
     * Write the attribute through both accessors. When change tracking is
     * enabled, the old value is read and a PropertyChangeEvent fired BEFORE
     * the write, so listeners see the correct transition. The ORM-side
     * ValueHolder is kept in sync (one is created if weaving has not yet
     * installed it).
     */
    public void setAttributeValueInObject(Object object, Object value) {
        if(isChangeTracking) {
            Object oldValue = getAttributeValueFromObject(object);
            PropertyChangeListener listener = ((ChangeTracker)object)._persistence_getPropertyChangeListener();
            if(listener != null) {
                listener.propertyChange(new PropertyChangeEvent(object, oxmAccessor.getAttributeName(), value, oldValue));
            }
        }
        if(isValueHolderProperty) {
            ValueHolderInterface vh = (ValueHolderInterface)ormAccessor.getAttributeValueFromObject(object);
            if(vh == null) {
                vh = new ValueHolder();
                ((ValueHolder)vh).setIsNewlyWeavedValueHolder(true);
            }
            vh.setValue(value);
ormAccessor.setAttributeValueInObject(object, vh); } oxmAccessor.setAttributeValueInObject(object, value); } public AttributeAccessor getOrmAccessor() { return this.ormAccessor; } public CoreAttributeAccessor getOxmAccessor() { return this.oxmAccessor; } public void setOrmAccessor(AttributeAccessor accessor) { this.ormAccessor = accessor; } public void setOxmAccessor(AttributeAccessor accessor) { this.oxmAccessor = accessor; } public Class getAttributeClass() { return oxmAccessor.getAttributeClass(); } public boolean isMethodAttributeAccessor() { return oxmAccessor.isMethodAttributeAccessor(); } public String getAttributeName() { return oxmAccessor.getAttributeName(); } } ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLCompositeCollectionMappingNodeValue.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLCompositeCollectionMappingNodeValue.j0000664000000000000000000005147012216173126030667 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.List; import javax.xml.namespace.QName; import org.eclipse.persistence.core.queries.CoreAttributeGroup; import org.eclipse.persistence.core.queries.CoreAttributeItem; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.mappings.CompositeCollectionMapping; import org.eclipse.persistence.internal.oxm.mappings.Descriptor; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.InverseReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.UnmarshalKeepAsElementPolicy; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLReader; import org.eclipse.persistence.internal.oxm.record.XMLRecord; import org.eclipse.persistence.internal.oxm.record.deferred.CompositeCollectionMappingContentHandler; import org.eclipse.persistence.oxm.mappings.nullpolicy.AbstractNullPolicy; import org.eclipse.persistence.oxm.mappings.nullpolicy.XMLNullRepresentationType; import org.xml.sax.Attributes; import org.xml.sax.SAXException; /** * INTERNAL: *

Purpose: This is how the XML Composite Collection Mapping is * handled when used with the TreeObjectBuilder.

 *
*/ public class XMLCompositeCollectionMappingNodeValue extends XMLRelationshipMappingNodeValue implements ContainerValue { private CompositeCollectionMapping xmlCompositeCollectionMapping; private int index = -1; private boolean isInverseReference; public XMLCompositeCollectionMappingNodeValue(CompositeCollectionMapping xmlCompositeCollectionMapping) { super(); this.xmlCompositeCollectionMapping = xmlCompositeCollectionMapping; } public XMLCompositeCollectionMappingNodeValue(CompositeCollectionMapping xmlCompositeCollectionMapping, boolean isInverse) { this(xmlCompositeCollectionMapping); this.isInverseReference = isInverse; } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { if (xmlCompositeCollectionMapping.isReadOnly()) { return false; } Object collection = xmlCompositeCollectionMapping.getAttributeAccessor().getAttributeValueFromObject(object); if (null == collection) { AbstractNullPolicy wrapperNP = xmlCompositeCollectionMapping.getWrapperNullPolicy(); if (wrapperNP != null && wrapperNP.getMarshalNullRepresentation() == XMLNullRepresentationType.XSI_NIL) { marshalRecord.nilSimple(namespaceResolver); return true; } else { return false; } } CoreContainerPolicy cp = getContainerPolicy(); Object iterator = cp.iteratorFor(collection); if (null != iterator && cp.hasNext(iterator)) { XPathFragment groupingFragment = marshalRecord.openStartGroupingElements(namespaceResolver); marshalRecord.closeStartGroupingElements(groupingFragment); } else { return marshalRecord.emptyCollection(xPathFragment, namespaceResolver, xmlCompositeCollectionMapping.getWrapperNullPolicy() != null); } marshalRecord.startCollection(); int size =marshalRecord.getCycleDetectionStack().size(); //when writing the collection need to see if any of the objects we are writing are in the parent collection inverse ref if((isInverseReference || 
xmlCompositeCollectionMapping.getInverseReferenceMapping() !=null)&& size >= 2){ Object owner = marshalRecord.getCycleDetectionStack().get(size - 2); try { if(cp.contains(owner, collection, session)){ return false; } } catch(ClassCastException e) { // For Bug #416875 } } iterator = cp.iteratorFor(collection); while (cp.hasNext(iterator)) { Object objectValue = cp.next(iterator, session); marshalSingleValue(xPathFragment, marshalRecord, object, objectValue, session, namespaceResolver, ObjectMarshalContext.getInstance()); } marshalRecord.endCollection(); return true; } public boolean startElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Attributes atts) { try { Descriptor xmlDescriptor = (Descriptor)xmlCompositeCollectionMapping.getReferenceDescriptor(); if (xmlDescriptor == null) { xmlDescriptor = findReferenceDescriptor(xPathFragment,unmarshalRecord, atts, xmlCompositeCollectionMapping, xmlCompositeCollectionMapping.getKeepAsElementPolicy()); if(xmlDescriptor == null){ if (unmarshalRecord.getXMLReader().isNullRepresentedByXsiNil(xmlCompositeCollectionMapping.getNullPolicy())){ if(unmarshalRecord.isNil()){ return true; } } else if(xmlCompositeCollectionMapping.getNullPolicy().valueIsNull(atts)){ getContainerPolicy().addInto(null, unmarshalRecord.getContainerInstance(this), unmarshalRecord.getSession()); return true; } if(xmlCompositeCollectionMapping.getField() != null){ //try leaf element type QName leafType = ((Field)xmlCompositeCollectionMapping.getField()).getLastXPathFragment().getLeafElementType(); if (leafType != null) { XPathFragment frag = new XPathFragment(); frag.setNamespaceAware(unmarshalRecord.isNamespaceAware()); String xpath = leafType.getLocalPart(); String uri = leafType.getNamespaceURI(); if (uri != null && uri.length() > 0) { frag.setNamespaceURI(uri); String prefix = ((Descriptor)xmlCompositeCollectionMapping.getDescriptor()).getNonNullNamespaceResolver().resolveNamespaceURI(uri); if (prefix != null && prefix.length() > 0) { 
xpath = prefix + Constants.COLON + xpath; } } frag.setXPath(xpath); Context xmlContext = unmarshalRecord.getUnmarshaller().getContext(); xmlDescriptor = xmlContext.getDescriptorByGlobalType(frag); } } } UnmarshalKeepAsElementPolicy policy = xmlCompositeCollectionMapping.getKeepAsElementPolicy(); if (policy != null && ((xmlDescriptor == null && policy.isKeepUnknownAsElement()) || policy.isKeepAllAsElement())) { if(unmarshalRecord.getTypeQName() != null){ Class theClass = (Class)((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).getDefaultXMLTypes().get(unmarshalRecord.getTypeQName()); if(theClass == null){ setupHandlerForKeepAsElementPolicy(unmarshalRecord, xPathFragment, atts); return true; } }else{ setupHandlerForKeepAsElementPolicy(unmarshalRecord, xPathFragment, atts); return true; } } } AbstractNullPolicy nullPolicy = xmlCompositeCollectionMapping.getNullPolicy(); if(nullPolicy.isNullRepresentedByEmptyNode()) { String qnameString = xPathFragment.getLocalName(); if(xPathFragment.getPrefix() != null) { qnameString = xPathFragment.getPrefix() + Constants.COLON + qnameString; } if(null != xmlDescriptor) { // Process null capable value CompositeCollectionMappingContentHandler aHandler = new CompositeCollectionMappingContentHandler(// unmarshalRecord, this, xmlCompositeCollectionMapping, atts, xPathFragment, xmlDescriptor); // Send control to the handler aHandler.startElement(xPathFragment.getNamespaceURI(), xPathFragment.getLocalName(), qnameString, atts); XMLReader xmlReader = unmarshalRecord.getXMLReader(); xmlReader.setContentHandler(aHandler); xmlReader.setLexicalHandler(aHandler); } } else if (!(unmarshalRecord.getXMLReader().isNullRepresentedByXsiNil(nullPolicy) && unmarshalRecord.isNil())) { Field xmlFld = (Field) this.xmlCompositeCollectionMapping.getField(); if (xmlFld.hasLastXPathFragment()) { unmarshalRecord.setLeafElementType(xmlFld.getLastXPathFragment().getLeafElementType()); } 
processChild(xPathFragment, unmarshalRecord, atts, xmlDescriptor, xmlCompositeCollectionMapping); } } catch (SAXException e) { throw XMLMarshalException.unmarshalException(e); } return true; } public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) { Object collection = unmarshalRecord.getContainerInstance(this); endElement(xPathFragment, unmarshalRecord, collection); } public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Object collection) { if(unmarshalRecord.isNil() && unmarshalRecord.getXMLReader().isNullRepresentedByXsiNil(xmlCompositeCollectionMapping.getNullPolicy())){ if(unmarshalRecord.getXMLReader().isInCollection()){ unmarshalRecord.addAttributeValue(this, null); }else{ unmarshalRecord.setAttributeValueNull(this); } unmarshalRecord.resetStringBuffer(); return; } if (null == unmarshalRecord.getChildRecord()) { SAXFragmentBuilder builder = unmarshalRecord.getFragmentBuilder(); UnmarshalKeepAsElementPolicy keepAsElementPolicy = xmlCompositeCollectionMapping.getKeepAsElementPolicy(); if (null != keepAsElementPolicy && (keepAsElementPolicy.isKeepUnknownAsElement() || keepAsElementPolicy.isKeepAllAsElement()) && builder.getNodes().size() > 1) { if(unmarshalRecord.getTypeQName() != null){ Class theClass = (Class)((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).getDefaultXMLTypes().get(unmarshalRecord.getTypeQName()); if(theClass != null){ //handle simple text endElementProcessText(unmarshalRecord, xmlCompositeCollectionMapping, xPathFragment, collection); return; } } if(builder.getNodes().size() > 1) { setOrAddAttributeValueForKeepAsElement(builder, xmlCompositeCollectionMapping, xmlCompositeCollectionMapping, unmarshalRecord, true, collection); return; } }else{ //handle simple text endElementProcessText(unmarshalRecord, xmlCompositeCollectionMapping, xPathFragment, collection); return; } return; } Object objectValue = 
unmarshalRecord.getChildRecord().getCurrentObject(); InverseReferenceMapping inverseReferenceMapping = xmlCompositeCollectionMapping.getInverseReferenceMapping(); if(null != inverseReferenceMapping) { if(inverseReferenceMapping.getContainerPolicy() == null) { Object currentValue = inverseReferenceMapping.getAttributeAccessor().getAttributeValueFromObject(objectValue); if( !isInverseReference || (currentValue == null && isInverseReference)) { inverseReferenceMapping.getAttributeAccessor().setAttributeValueInObject(objectValue, unmarshalRecord.getCurrentObject()); } } else { Object backpointerContainer = inverseReferenceMapping.getAttributeAccessor().getAttributeValueFromObject(objectValue); if(backpointerContainer == null) { backpointerContainer = inverseReferenceMapping.getContainerPolicy().containerInstance(); inverseReferenceMapping.getAttributeAccessor().setAttributeValueInObject(objectValue, backpointerContainer); } inverseReferenceMapping.getContainerPolicy().addInto(unmarshalRecord.getCurrentObject(), backpointerContainer, unmarshalRecord.getSession()); } } // convert the value - if necessary objectValue = xmlCompositeCollectionMapping.convertDataValueToObjectValue(objectValue, unmarshalRecord.getSession(), unmarshalRecord.getUnmarshaller()); unmarshalRecord.addAttributeValue(this, objectValue, collection); unmarshalRecord.setChildRecord(null); } public Object getContainerInstance() { return getContainerPolicy().containerInstance(); } public void setContainerInstance(Object object, Object containerInstance) { xmlCompositeCollectionMapping.setAttributeValueInObject(object, containerInstance); } public CoreContainerPolicy getContainerPolicy() { return xmlCompositeCollectionMapping.getContainerPolicy(); } public boolean isContainerValue() { return true; } public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object value, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext 
marshalContext) { Marshaller marshaller = marshalRecord.getMarshaller(); // convert the value - if necessary value = xmlCompositeCollectionMapping.convertObjectValueToDataValue(value, session, marshaller); if (null == value) { return xmlCompositeCollectionMapping.getNullPolicy().compositeObjectMarshal(xPathFragment, marshalRecord, object, session, namespaceResolver); } Descriptor descriptor = (Descriptor)xmlCompositeCollectionMapping.getReferenceDescriptor(); if(descriptor == null){ descriptor = (Descriptor) session.getDescriptor(value.getClass()); }else if(descriptor.hasInheritance()){ Class objectValueClass = value.getClass(); if(!(objectValueClass == descriptor.getJavaClass())){ descriptor = (Descriptor) session.getDescriptor(objectValueClass); } } UnmarshalKeepAsElementPolicy keepAsElementPolicy = xmlCompositeCollectionMapping.getKeepAsElementPolicy(); if (null != keepAsElementPolicy && (keepAsElementPolicy.isKeepUnknownAsElement() || keepAsElementPolicy.isKeepAllAsElement()) && value instanceof org.w3c.dom.Node) { marshalRecord.node((org.w3c.dom.Node) value, marshalRecord.getNamespaceResolver()); return true; } if(descriptor != null){ marshalRecord.beforeContainmentMarshal(value); ObjectBuilder objectBuilder = (ObjectBuilder)descriptor.getObjectBuilder(); CoreAttributeGroup group = marshalRecord.getCurrentAttributeGroup(); CoreAttributeGroup nestedGroup = XMLRecord.DEFAULT_ATTRIBUTE_GROUP; CoreAttributeItem item = group.getItem(getMapping().getAttributeName()); if(item != null) { if(item.getGroups() != null) { nestedGroup = item.getGroup(descriptor.getJavaClass()); } if(nestedGroup == null) { nestedGroup = item.getGroup() == null?XMLRecord.DEFAULT_ATTRIBUTE_GROUP:item.getGroup(); } } marshalRecord.pushAttributeGroup(nestedGroup); xPathNode.startElement(marshalRecord, xPathFragment, object, session, namespaceResolver, objectBuilder, value); List extraNamespaces = objectBuilder.addExtraNamespacesToNamespaceResolver(descriptor, marshalRecord, session,true, 
false); writeExtraNamespaces(extraNamespaces, marshalRecord, session); marshalRecord.addXsiTypeAndClassIndicatorIfRequired(descriptor, (Descriptor) xmlCompositeCollectionMapping.getReferenceDescriptor(), (Field)xmlCompositeCollectionMapping.getField(), false); objectBuilder.buildRow(marshalRecord, value, session, marshaller, xPathFragment); marshalRecord.afterContainmentMarshal(object, value); marshalRecord.popAttributeGroup(); marshalRecord.endElement(xPathFragment, namespaceResolver); marshalRecord.removeExtraNamespacesFromNamespaceResolver(extraNamespaces, session); } else { if(Constants.UNKNOWN_OR_TRANSIENT_CLASS.equals(xmlCompositeCollectionMapping.getReferenceClassName())){ throw XMLMarshalException.descriptorNotFoundInProject(value.getClass().getName()); } xPathNode.startElement(marshalRecord, xPathFragment, object, session, namespaceResolver, null, value); QName schemaType = ((Field) xmlCompositeCollectionMapping.getField()).getSchemaTypeForValue(value, session); updateNamespaces(schemaType, marshalRecord,((Field)xmlCompositeCollectionMapping.getField())); marshalRecord.characters(schemaType, value, null, false); marshalRecord.endElement(xPathFragment, namespaceResolver); } return true; } public CompositeCollectionMapping getMapping() { return xmlCompositeCollectionMapping; } protected void setOrAddAttributeValue(UnmarshalRecord unmarshalRecord, Object value, XPathFragment xPathFragment, Object collection){ unmarshalRecord.addAttributeValue(this, value, collection); } public boolean getReuseContainer() { return xmlCompositeCollectionMapping.getReuseContainer(); } /** * INTERNAL: * Used to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord */ public void setIndex(int index){ this.index = index; } /** * INTERNAL: * Set to track the index of the corresponding containerInstance in the containerInstances Object[] on UnmarshalRecord * Set during TreeObjectBuilder initialization */ public int getIndex(){ 
return index; } /** * INTERNAL * Return true if an empty container should be set on the object if there * is no presence of the collection in the XML document. * @since EclipseLink 2.3.3 */ public boolean isDefaultEmptyContainer() { return xmlCompositeCollectionMapping.isDefaultEmptyContainer(); } @Override public boolean isWrapperAllowedAsCollectionName() { return true; } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/CycleRecoverableContextProxy.java0000664000000000000000000000323612216173126027516 0ustar /******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * - rbarkhouse - 19 April 2012 - 2.4 - Initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; /** * Allows for the reflective creation of an implementation of Sun's * com.sun.xml.bind.CycleRecoverable$Context interface. 
*/
public class CycleRecoverableContextProxy implements InvocationHandler {

    // The Marshaller handed back from every method invoked on the proxy.
    private Object marshaller;

    private CycleRecoverableContextProxy(Object m) {
        this.marshaller = m;
    }

    /**
     * Every method of the proxied Context interface simply returns the
     * marshaller; CycleRecoverable.onCycleDetected only needs that handle.
     */
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        return this.marshaller;
    }

    /**
     * Create a dynamic proxy implementing the given Context interface.
     * NOTE(review): the generic type parameter was stripped from this
     * declaration during archive extraction (leaving an undefined raw "A");
     * restored here so the method compiles and the return cast is tied to
     * the supplied interface class. Erasure-identical to the original, so
     * callers are unaffected.
     *
     * @param cycleRecoverableContextClass Sun's CycleRecoverable$Context
     *        interface, loaded reflectively by the caller
     * @param marshaller the marshaller to return from every proxy call
     * @return a proxy instance implementing cycleRecoverableContextClass
     */
    @SuppressWarnings("unchecked")
    public static <A> A getProxy(Class<A> cycleRecoverableContextClass, Object marshaller) {
        return (A) Proxy.newProxyInstance(cycleRecoverableContextClass.getClassLoader(), new Class[] { cycleRecoverableContextClass }, new CycleRecoverableContextProxy(marshaller));
    }
}
eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLBinaryDataMappingNodeValue.java0000664000000000000000000004201412216173126027411 0ustar /*******************************************************************************
 * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import javax.activation.DataHandler; import javax.xml.namespace.QName; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.eclipse.persistence.core.sessions.CoreSession; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.helper.CoreClassConstants; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.XMLBinaryDataHelper; import org.eclipse.persistence.internal.oxm.mappings.BinaryDataMapping; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.record.BinaryDataUnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.ObjectMarshalContext; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.eclipse.persistence.internal.oxm.record.XMLReader; import org.eclipse.persistence.internal.oxm.record.deferred.BinaryMappingContentHandler; /** * INTERNAL: *

Purpose: This is how the XML Binary Data Mapping is handled when * used with the TreeObjectBuilder.

* @author mmacivor */ public class XMLBinaryDataMappingNodeValue extends NodeValue implements NullCapableValue { private BinaryDataMapping xmlBinaryDataMapping; protected String getValueToWrite(QName schemaType, Object value, CoreAbstractSession session) { return (String) ((XMLConversionManager) session.getDatasourcePlatform().getConversionManager()).convertObject(value, CoreClassConstants.STRING, schemaType); } public boolean isOwningNode(XPathFragment xPathFragment) { return (xPathFragment.getNextFragment() == null) || xPathFragment.isAttribute(); } public XMLBinaryDataMappingNodeValue(BinaryDataMapping mapping) { this.xmlBinaryDataMapping = mapping; } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { return marshal(xPathFragment, marshalRecord, object, session, namespaceResolver, ObjectMarshalContext.getInstance(), null); } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext, XPathFragment rootFragment) { if (xmlBinaryDataMapping.isReadOnly()) { return false; } Object objectValue = marshalContext.getAttributeValue(object, xmlBinaryDataMapping); return this.marshalSingleValue(xPathFragment, marshalRecord, object, objectValue, session, namespaceResolver, marshalContext, rootFragment); } public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object objectValue, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { return marshalSingleValue(xPathFragment, marshalRecord, object, objectValue, session, namespaceResolver, marshalContext, null); } public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object objectValue,CoreAbstractSession session, NamespaceResolver 
namespaceResolver, MarshalContext marshalContext, XPathFragment rootFragment) { XPathFragment xmlRootFrag = null; if (objectValue instanceof Root) { Root xmlRoot = (Root) objectValue; xmlRootFrag = new XPathFragment(); if (xmlRoot.getNamespaceURI() != null && !xmlRoot.getNamespaceURI().equals(namespaceResolver.getDefaultNamespaceURI())) { String prefix = namespaceResolver.resolveNamespaceURI(xmlRoot.getNamespaceURI()); xmlRootFrag.setXPath(prefix + Constants.COLON + xmlRoot.getLocalName()); xmlRootFrag.setNamespaceURI(xmlRoot.getNamespaceURI()); }else{ xmlRootFrag.setXPath(xmlRoot.getLocalName()); if(xmlRoot.getNamespaceURI() != null && xmlRoot.getNamespaceURI().length() > 0) { xmlRootFrag.setNamespaceURI(xmlRoot.getNamespaceURI()); } } } Marshaller marshaller = marshalRecord.getMarshaller(); objectValue = xmlBinaryDataMapping.convertObjectValueToDataValue(objectValue, session, marshaller); XPathFragment groupingFragment = marshalRecord.openStartGroupingElements(namespaceResolver); if(xPathFragment.isAttribute()){ if (objectValue == null) { marshalRecord.closeStartGroupingElements(groupingFragment); return true; } }else { marshalRecord.closeStartGroupingElements(groupingFragment); XPathFragment elementFragment = xPathFragment; if(xmlRootFrag != null) { elementFragment = xmlRootFrag; } if (objectValue == null) { XPathNode holderXPathNode = new XPathNode(); holderXPathNode.setXPathFragment(elementFragment); marshalRecord.addGroupingElement(holderXPathNode); boolean returnVal = xmlBinaryDataMapping.getNullPolicy().directMarshal(xPathFragment, marshalRecord, object, session, namespaceResolver); if(returnVal){ marshalRecord.endElement(elementFragment, namespaceResolver); } marshalRecord.removeGroupingElement(holderXPathNode); return returnVal; }else if(!xPathFragment.isSelfFragment){ marshalRecord.openStartElement(elementFragment, namespaceResolver); marshalRecord.closeStartElement(); } } // figure out CID or bytes String c_id = null; byte[] bytes = null; String 
mimeType = this.xmlBinaryDataMapping.getMimeType(object); String attachmentType = mimeType; if(mimeType == null) { mimeType = ""; attachmentType = "application/octet-stream"; } if (xmlBinaryDataMapping.isSwaRef() && (marshaller.getAttachmentMarshaller() != null)) { //object value should be a DataHandler if (xmlBinaryDataMapping.getAttributeClassification() == XMLBinaryDataHelper.getXMLBinaryDataHelper().DATA_HANDLER) { c_id = marshaller.getAttachmentMarshaller().addSwaRefAttachment((DataHandler) objectValue); } else { XMLBinaryDataHelper.EncodedData data = XMLBinaryDataHelper.getXMLBinaryDataHelper().getBytesForBinaryValue(// objectValue, marshaller, xmlBinaryDataMapping.getMimeType(object)); bytes = data.getData(); c_id = marshaller.getAttachmentMarshaller().addSwaRefAttachment(bytes, 0, bytes.length); } } else if (marshalRecord.isXOPPackage() && !xmlBinaryDataMapping.shouldInlineBinaryData()) { XPathFragment lastFrag = ((Field) xmlBinaryDataMapping.getField()).getLastXPathFragment(); if(xmlRootFrag != null) { lastFrag = xmlRootFrag; } String localName = null; String namespaceUri = null; if(rootFragment != null) { localName = rootFragment.getLocalName(); namespaceUri = rootFragment.getNamespaceURI(); } if(!lastFrag.isSelfFragment) { localName = lastFrag.getLocalName(); namespaceUri = lastFrag.getNamespaceURI(); } if (objectValue.getClass() == CoreClassConstants.APBYTE) { bytes = (byte[]) objectValue; c_id = marshaller.getAttachmentMarshaller().addMtomAttachment(bytes, 0, bytes.length, attachmentType, localName, namespaceUri); } else if (xmlBinaryDataMapping.getAttributeClassification() == XMLBinaryDataHelper.getXMLBinaryDataHelper().DATA_HANDLER) { c_id = marshaller.getAttachmentMarshaller().addMtomAttachment((DataHandler) objectValue, localName, namespaceUri); } else { XMLBinaryDataHelper.EncodedData data = XMLBinaryDataHelper.getXMLBinaryDataHelper().getBytesForBinaryValue(// objectValue, marshaller, xmlBinaryDataMapping.getMimeType(object)); bytes = 
data.getData(); c_id = marshaller.getAttachmentMarshaller().addMtomAttachment(bytes, 0, bytes.length, data.getMimeType(), localName, namespaceUri); } } // handle attributes if (xPathFragment.isAttribute()) { // if the CID is null there's nothing to write out if (c_id != null) { marshalRecord.attribute(xPathFragment, namespaceResolver, c_id); }else { String value = getValueToWrite(((Field) xmlBinaryDataMapping.getField()).getSchemaType(), objectValue, session); marshalRecord.attribute(xPathFragment, namespaceResolver, value); } marshalRecord.closeStartGroupingElements(groupingFragment); return true; } if (xmlBinaryDataMapping.isSwaRef() && (marshaller.getAttachmentMarshaller() != null)) { if(c_id != null) { marshalRecord.characters(c_id); } else { marshalRecord.characters(((Field) xmlBinaryDataMapping.getField()).getSchemaType(), objectValue, mimeType, false); } } else { if (marshalRecord.isXOPPackage() && !xmlBinaryDataMapping.shouldInlineBinaryData()) { if(c_id == null) { marshalRecord.characters(((Field) xmlBinaryDataMapping.getField()).getSchemaType(), objectValue, mimeType, false); } else { String xopPrefix = null; // If the field's resolver is non-null and has an entry for XOP, // use it - otherwise, create a new resolver, set the XOP entry, // on it, and use it instead. 
// We do this to avoid setting the XOP namespace declaration on // a given field or descriptor's resolver, as it is only required // on the current element // 20061023: handle NPE on null NSR if (namespaceResolver != null) { xopPrefix = namespaceResolver.resolveNamespaceURI(Constants.XOP_URL); } boolean addDeclaration = false; if (xopPrefix == null || namespaceResolver == null) { addDeclaration = true; xopPrefix = Constants.XOP_PREFIX; namespaceResolver = new org.eclipse.persistence.oxm.NamespaceResolver(); namespaceResolver.put(xopPrefix, Constants.XOP_URL); } XPathFragment xopInclude = new XPathFragment(xopPrefix + ":Include"); xopInclude.setNamespaceURI(Constants.XOP_URL); marshalRecord.openStartElement(xopInclude, namespaceResolver); marshalRecord.attribute(Constants.EMPTY_STRING, "href", "href", c_id); if (addDeclaration) { marshalRecord.namespaceDeclaration(xopPrefix, Constants.XOP_URL); } marshalRecord.closeStartElement(); marshalRecord.endElement(xopInclude, namespaceResolver); //marshal as an attachment } } else { marshalRecord.characters(((Field)xmlBinaryDataMapping.getField()).getSchemaType(), objectValue, mimeType, false); } } if(!xPathFragment.isSelfFragment()){ marshalRecord.endElement(xPathFragment, namespaceResolver); } return true; } public boolean startElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Attributes atts) { try { unmarshalRecord.removeNullCapableValue(this); Field xmlField = (Field) xmlBinaryDataMapping.getField(); XPathFragment lastFragment = xmlField.getLastXPathFragment(); BinaryMappingContentHandler handler = new BinaryMappingContentHandler(unmarshalRecord, this, this.xmlBinaryDataMapping); String qnameString = xPathFragment.getLocalName(); if (xPathFragment.getPrefix() != null) { qnameString = xPathFragment.getPrefix() + Constants.COLON + qnameString; } handler.startElement(xPathFragment.getNamespaceURI(), xPathFragment.getLocalName(), qnameString, atts); XMLReader xmlReader = unmarshalRecord.getXMLReader(); 
xmlReader.setContentHandler(handler); xmlReader.setLexicalHandler(handler); return true; } catch(SAXException ex) { throw XMLMarshalException.unmarshalException(ex); } } public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) { unmarshalRecord.resetStringBuffer(); } /** * Handle swaRef and inline attribute cases. */ public void attribute(UnmarshalRecord unmarshalRecord, String URI, String localName, String value) { unmarshalRecord.removeNullCapableValue(this); Field xmlField = (Field) xmlBinaryDataMapping.getField(); XPathFragment lastFragment = xmlField.getLastXPathFragment(); Object fieldValue = null; if (xmlBinaryDataMapping.isSwaRef()) { if (unmarshalRecord.getUnmarshaller().getAttachmentUnmarshaller() != null) { if (xmlBinaryDataMapping.getAttributeClassification() == XMLBinaryDataHelper.getXMLBinaryDataHelper().DATA_HANDLER) { fieldValue = unmarshalRecord.getUnmarshaller().getAttachmentUnmarshaller().getAttachmentAsDataHandler(value); } else { fieldValue = unmarshalRecord.getUnmarshaller().getAttachmentUnmarshaller().getAttachmentAsByteArray(value); } xmlBinaryDataMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), XMLBinaryDataHelper.getXMLBinaryDataHelper().convertObject(fieldValue, xmlBinaryDataMapping.getAttributeClassification(), unmarshalRecord.getSession(), null)); } } else { // value should be base64 binary string fieldValue = ((XMLConversionManager) unmarshalRecord.getSession().getDatasourcePlatform().getConversionManager()).convertSchemaBase64ToByteArray(value); xmlBinaryDataMapping.setAttributeValueInObject(unmarshalRecord.getCurrentObject(), XMLBinaryDataHelper.getXMLBinaryDataHelper().convertObject(fieldValue, xmlBinaryDataMapping.getAttributeClassification(), unmarshalRecord.getSession(), null)); } } public void setNullValue(Object object, CoreSession session) { Object value = xmlBinaryDataMapping.getObjectValue(null, session); xmlBinaryDataMapping.setAttributeValueInObject(object, value); } public 
boolean isNullCapableValue() { return xmlBinaryDataMapping.getNullPolicy().getIsSetPerformedForAbsentNode(); } public DataHandler getDataHandlerForObjectValue(Object obj, Class classification) { if (classification == DataHandler.class) { return (DataHandler) obj; } return null; } public BinaryDataMapping getMapping() { return this.xmlBinaryDataMapping; } public UnmarshalRecord buildSelfRecord(UnmarshalRecord unmarshalRecord, Attributes atts) { unmarshalRecord.removeNullCapableValue(this); BinaryDataUnmarshalRecord newRecord = new BinaryDataUnmarshalRecord(null, unmarshalRecord, this, xmlBinaryDataMapping); return newRecord; } public void endSelfNodeValue(UnmarshalRecord unmarshalRecord, UnmarshalRecord selfRecord, Attributes attributes) { unmarshalRecord.resetStringBuffer(); } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/Constants.java0000664000000000000000000002540712216173126023656 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Blaise Doughan - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.nio.charset.Charset; import javax.xml.namespace.QName; public class Constants { public static final String ANY_NAMESPACE_ANY = "##any"; public static final String ANY_NAMESPACE_OTHER = "##other"; public static final Character ATTRIBUTE = '@'; public static final String BOOLEAN_STRING_TRUE = "true"; public static final String CDATA = "CDATA"; public static final char COLON = ':'; public static final String DEFAULT_XML_ENCODING = "UTF-8"; public static final Charset DEFAULT_CHARSET = Charset.forName(DEFAULT_XML_ENCODING); public static final char DOT = '.'; public static final String EMPTY_STRING = ""; public static final String EXPECTED_CONTENT_TYPES = "expectedContentTypes"; public static final String JAXB_FRAGMENT = "jaxb.fragment"; public static final String JAXB_MARSHALLER = "jaxb.marshaller"; public static final String LEXICAL_HANDLER_PROPERTY = "http://xml.org/sax/properties/lexical-handler"; public static final Class LOCATOR_CLASS = org.xml.sax.Locator.class; public static final String LOCATOR_CLASS_NAME = "org.xml.sax.Locator"; public static final String NO_NS_SCHEMA_LOCATION = "noNamespaceSchemaLocation"; public static final Class QNAME_CLASS = QName.class; public static final String REF_PREFIX = "ref"; public static final String REF_URL = "http://ws-i.org/profiles/basic/1.1/xsd"; public static final String SCHEMA_LOCATION = "schemaLocation"; public static final String SCHEMA_PREFIX = "xsd"; public static final String SCHEMA_INSTANCE_PREFIX = "xsi"; public static final String SCHEMA_NIL_ATTRIBUTE = "nil"; public static final String SCHEMA_TYPE_ATTRIBUTE = "type"; public static final String SWAREF_XSD = "http://ws-i.org/profiles/basic/1.1/swaref.xsd"; public static final String TEXT = "text()"; public static final String UNKNOWN_OR_TRANSIENT_CLASS = 
"UNKNOWN_OR_TRANSIENT_CLASS"; public static final Class URI = java.net.URI.class; public static final Class UUID = java.util.UUID.class; public static final String VALUE_WRAPPER = "value"; public static final String XML_MIME_URL = "http://www.w3.org/2005/05/xmlmime"; public static final String XML_NAMESPACE_SCHEMA_LOCATION = "http://www.w3.org/XML/2001/xml.xsd"; public static final String XPATH_SEPARATOR = "/"; public static final String XPATH_INDEX_OPEN = "["; public static final String XPATH_INDEX_CLOSED = "]"; public static final String XOP_PREFIX = "xop"; public static final String XOP_URL = "http://www.w3.org/2004/08/xop/include"; // Schema Special values for Double and Float public static final String POSITIVE_INFINITY = "INF"; public static final String NEGATIVE_INFINITY = "-INF"; // Built-in Schema Types public static final String ANY = "any"; public static final String BASE_64_BINARY = "base64Binary"; public static final String BOOLEAN = "boolean"; public static final String BYTE = "byte"; public static final String DATE = "date"; public static final String DATE_TIME = "dateTime"; public static final String DECIMAL = "decimal"; public static final String DOUBLE = "double"; public static final String DURATION = "duration"; public static final String FLOAT = "float"; public static final String G_DAY = "gDay"; public static final String G_MONTH = "gMonth"; public static final String G_MONTH_DAY = "gMonthDay"; public static final String G_YEAR = "gYear"; public static final String G_YEAR_MONTH = "gYearMonth"; public static final String HEX_BINARY = "hexBinary"; public static final String INT = "int"; public static final String INTEGER = "integer"; public static final String LONG = "long"; public static final String NAME = "Name"; public static final String NCNAME = "NCName"; public static final String NEGATIVE_INTEGER = "negativeInteger"; public static final String NON_NEGATIVE_INTEGER = "nonNegativeInteger"; public static final String NON_POSITIVE_INTEGER = 
"nonPositiveInteger"; public static final String NOTATION = "NOTATION"; public static final String POSITIVE_INTEGER = "positiveInteger"; public static final String NORMALIZED_STRING = "normalizedString"; public static final String QNAME = "QName"; public static final String QUALIFIED = "qualified"; public static final String SHORT = "short"; public static final String STRING = "string"; public static final String TIME = "time"; public static final String UNQUALIFIED = "unqualified"; public static final String UNSIGNED_BYTE = "unsignedByte"; public static final String UNSIGNED_INT = "unsignedInt"; public static final String UNSIGNED_SHORT = "unsignedShort"; public static final String UNSIGNED_LONG = "unsignedLong"; public static final String ANY_SIMPLE_TYPE = "anySimpleType"; public static final String ANY_TYPE = "anyType"; public static final String ANY_URI = "anyURI"; public static final String SWA_REF = "swaRef"; // Schema Type QNames public static final QName ANY_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, ANY); public static final QName ANY_SIMPLE_TYPE_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, ANY_SIMPLE_TYPE); public static final QName ANY_TYPE_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, ANY_TYPE); public static final QName ANY_URI_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, ANY_URI); public static final QName BASE_64_BINARY_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, BASE_64_BINARY); public static final QName HEX_BINARY_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, HEX_BINARY); public static final QName DATE_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, DATE); public static final QName TIME_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, TIME); public static final QName DATE_TIME_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, DATE_TIME); public static final QName BOOLEAN_QNAME = new 
QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, BOOLEAN); public static final QName BYTE_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, BYTE); public static final QName DECIMAL_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, DECIMAL); public static final QName DOUBLE_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, DOUBLE); public static final QName DURATION_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, DURATION); public static final QName FLOAT_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, FLOAT); public static final QName G_DAY_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, G_DAY); public static final QName G_MONTH_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, G_MONTH); public static final QName G_MONTH_DAY_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, G_MONTH_DAY); public static final QName G_YEAR_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, G_YEAR); public static final QName G_YEAR_MONTH_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, G_YEAR_MONTH); public static final QName INT_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, INT); public static final QName INTEGER_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, INTEGER); public static final QName LONG_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, LONG); public static final QName NAME_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, NAME); public static final QName NCNAME_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, NCNAME); public static final QName NEGATIVE_INTEGER_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, NEGATIVE_INTEGER); public static final QName NON_NEGATIVE_INTEGER_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, NON_NEGATIVE_INTEGER); public static final QName NON_POSITIVE_INTEGER_QNAME = new 
QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, NON_POSITIVE_INTEGER); public static final QName NOTATION_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, NOTATION); public static final QName POSITIVE_INTEGER_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, POSITIVE_INTEGER); public static final QName NORMALIZEDSTRING_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, NORMALIZED_STRING); public static final QName QNAME_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, QNAME); public static final QName SHORT_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, SHORT); public static final QName STRING_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, STRING); public static final QName UNSIGNED_BYTE_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, UNSIGNED_BYTE); public static final QName UNSIGNED_INT_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, UNSIGNED_INT); public static final QName UNSIGNED_SHORT_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, UNSIGNED_SHORT); public static final QName UNSIGNED_LONG_QNAME = new QName(javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI, UNSIGNED_LONG); public static final QName SWA_REF_QNAME = new QName(REF_URL, SWA_REF); public static final QName EXPECTED_CONTENT_TYPES_QNAME = new QName(XML_MIME_URL, EXPECTED_CONTENT_TYPES); public static final MediaType APPLICATION_JSON = new MediaType() { @Override public boolean isApplicationJSON() { return true; } @Override public boolean isApplicationXML() { return false; } }; public static final MediaType APPLICATION_XML = new MediaType() { @Override public boolean isApplicationJSON() { return false; } @Override public boolean isApplicationXML() { return true; } }; }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/XMLChoiceFieldToClassAssociation.java0000664000000000000000000000366012216173126030104 0ustar package org.eclipse.persistence.internal.oxm; 
/******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * mmacivor - June 19/2008 - 1.0 - Initial implementation ******************************************************************************/ import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.mappings.converters.Converter; /** INTERNAL: *

Purpose: This class holds onto a class name and an XMLField in order to read and write * choice mappings from deployment.xml * @author mmacivor */ public class XMLChoiceFieldToClassAssociation < XML_FIELD extends Field > { protected String className; protected XML_FIELD xmlField; protected Converter converter; public XMLChoiceFieldToClassAssociation() { } public XMLChoiceFieldToClassAssociation(XML_FIELD xmlField, String className) { this.xmlField = xmlField; this.className = className; } public String getClassName() { return className; } public void setClassName(String name) { this.className = name; } public XML_FIELD getXmlField() { return xmlField; } public void setXmlField(XML_FIELD field) { this.xmlField = field; } public Converter getConverter() { return this.converter; } public void setConverter(Converter valueConverter) { this.converter = valueConverter; } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/Root.java0000664000000000000000000001547312216173126022627 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Denise Smith - 2.5 ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import javax.xml.namespace.QName; /** *

Root is used to hold an Object along with the corresponding QName and some other related information. * Typically this is used when the object is marshalled/unmarshalled to a QName other than * the defaultRootElement set on the Descriptor.

* *

Root objects can be returned from Unmarshaller unmarshal operations and * can be given to Marshaller.marshal operations. They may also be in values * returned by AnyCollectionMappings and AnyObjectMappings.

*/ public class Root { protected Object rootObject; protected String localName; protected String namespaceUri; protected String prefix; protected String encoding; protected String xmlVersion; protected String schemaLocation; protected String noNamespaceSchemaLocation; protected QName schemaType; protected Class declaredType; protected boolean nil; /** * Gets the object. This may be null. * * @return the object */ public Object getObject() { return rootObject; } /** * Gets the local name. This should not be null. * * @return the local name */ public String getLocalName() { return localName; } /** * Gets the namespace uri. This may be null. * * @return the namespace uri */ public String getNamespaceURI() { return namespaceUri; } /** * Sets the object associated with this XMLRoot. This may be null. * * @param rootObject The object to associate with this XMLRoot. */ public void setObject(Object rootObject) { this.rootObject = rootObject; } /** * Set the element name. This method will parse the qualified * name in an attempt to set the localName fields. ie: this could be * set to "someLocalName" or "somePrefix:someLocalName" * * @param name the new local name */ public void setLocalName(String name) { if(null == name) { this.localName = Constants.EMPTY_STRING; return; } int colonIdx = name.indexOf(Constants.COLON); if(colonIdx > -1){ this.localName = name.substring(colonIdx +1); }else{ this.localName = name; } } /** * Sets the namespace uri associated with the QName of this XMLRoot. * * @param rootElementUri the new namespace uri */ public void setNamespaceURI(String rootElementUri) { if(rootElementUri != null && rootElementUri.length() ==0){ this.namespaceUri = null; }else{ this.namespaceUri = rootElementUri; } } /** * Gets the encoding which will be set on the XMLRoot during unmarshal. * * @return the encoding */ public String getEncoding() { return encoding; } /** * Sets the encoding. 
* * @param encoding the new encoding */ public void setEncoding(String encoding) { this.encoding = encoding; } /** * Gets the XML version which will be set on the XMLRoot during unmarshal. * * @return the XML version */ public String getXMLVersion() { return xmlVersion; } /** * Sets the version. * * @param version the new version */ public void setVersion(String version) { this.xmlVersion = version; } /** * Gets the schema location which will be set on the XMLRoot during unmarshal. * * @return the schema location */ public String getSchemaLocation() { return schemaLocation; } /** * Sets the schema location. * * @param schemaLocation the new schema location */ public void setSchemaLocation(String schemaLocation) { this.schemaLocation = schemaLocation; } /** * Gets the no namespace schema location which will be set on the XMLRoot during unmarshal. * * @return the no namespace schema location */ public String getNoNamespaceSchemaLocation() { return noNamespaceSchemaLocation; } /** * Sets the no namespace schema location. * * @param noNamespaceSchemaLocation the new no namespace schema location */ public void setNoNamespaceSchemaLocation(String noNamespaceSchemaLocation) { this.noNamespaceSchemaLocation = noNamespaceSchemaLocation; } /** * Sets the schema type that should be associated with this XMLRoot object. * * @param schemaType the new schema type */ public void setSchemaType(QName schemaType) { this.schemaType = schemaType; } /** * Gets the schema type. This schema type will be considering when marshalling XMLRoot objects. * * @return the schema type associated with this XMLRoot object or null. */ public QName getSchemaType() { return schemaType; } /** * Sets the declared type. This may be different than the getObject().getClass(), for example, * in the case where inheritance is used the declared type may be the super class and the actual * object could be a subclass. * * @param type The declared type of this XMLRoot object. 
*/ public void setDeclaredType(Class type) { this.declaredType = type; } /** * Gets the declared type. This may be different than the getObject().getClass(), for example, * in the case where inheritance is used the declared type may be the super class and the actual * object could be a subclass. * * @return the declared type */ public Class getDeclaredType() { return this.declaredType; } /** * Checks if is nil. Returns true if this XMLRoot has been explicitly set to nil * or if xmlRoot.getObject() is null. * * @return true, if is nil */ public boolean isNil() { return nil || rootObject == null; } /** * Sets that this XMLRoot object is nil. If setNil(true) is explicitly called then isNil() will * return true even if rootObject is not null. * * @param nil */ public void setNil(boolean nil) { this.nil = nil; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/CollectionGroupingElementNodeValue.java0000664000000000000000000000524612216173126030624 0ustar /******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * - rbarkhouse - 27 January 2012 - 2.3.3 - Initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.oxm.record.MarshalContext; import org.eclipse.persistence.internal.oxm.record.MarshalRecord; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; /** * INTERNAL: *

* Purpose: Handles grouping elements for Collections when used with the TreeObjectBuilder. *

*/ public class CollectionGroupingElementNodeValue extends NodeValue { ContainerValue containerValue; public CollectionGroupingElementNodeValue(ContainerValue c) { this.containerValue = c; } public boolean isOwningNode(XPathFragment xPathFragment) { return true; } public boolean isMarshalNodeValue() { return false; } public boolean isWrapperNodeValue() { return true; } public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord) { Object collection = unmarshalRecord.getContainerInstance(this.containerValue, false); endElement(xPathFragment, unmarshalRecord, collection); } public void endElement(XPathFragment xPathFragment, UnmarshalRecord unmarshalRecord, Object collection) { if (collection == null && !unmarshalRecord.isNil()) { unmarshalRecord.setContainerInstance(this.containerValue.getIndex(), this.containerValue.getContainerInstance()); } } public boolean marshal(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, CoreAbstractSession session, NamespaceResolver namespaceResolver) { return false; } public boolean marshalSingleValue(XPathFragment xPathFragment, MarshalRecord marshalRecord, Object object, Object objectValue, CoreAbstractSession session, NamespaceResolver namespaceResolver, MarshalContext marshalContext) { return false; } } eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/ReferenceResolver.java0000664000000000000000000005235312216173126025322 0ustar /******************************************************************************* * Copyright (c) 1998, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. 
* * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.internal.oxm; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Vector; import java.util.concurrent.Callable; import org.eclipse.persistence.core.descriptors.CoreDescriptor; import org.eclipse.persistence.core.descriptors.CoreInheritancePolicy; import org.eclipse.persistence.core.mappings.CoreAttributeAccessor; import org.eclipse.persistence.core.mappings.CoreMapping; import org.eclipse.persistence.exceptions.ConversionException; import org.eclipse.persistence.exceptions.XMLMarshalException; import org.eclipse.persistence.internal.core.helper.CoreClassConstants; import org.eclipse.persistence.internal.core.queries.CoreContainerPolicy; import org.eclipse.persistence.internal.core.sessions.CoreAbstractSession; import org.eclipse.persistence.internal.identitymaps.CacheId; import org.eclipse.persistence.internal.oxm.mappings.CollectionReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.Descriptor; import org.eclipse.persistence.internal.oxm.mappings.Field; import org.eclipse.persistence.internal.oxm.mappings.InverseReferenceMapping; import org.eclipse.persistence.internal.oxm.mappings.Mapping; import org.eclipse.persistence.internal.oxm.mappings.ObjectReferenceMapping; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; /** * This class is leveraged by reference mappings. It plays 3 roles: *
    *
  • Stores objects with an ID
  • *
  • Stores key based relationships
  • *
  • Resolves key based relationships based on the objects stored by ID
  • *
*/ public class ReferenceResolver { private ArrayList references; private ReferenceKey lookupKey; private Map> cache; /** * The default constructor initializes the list of References. */ public ReferenceResolver() { references = new ArrayList(); lookupKey = new ReferenceKey(null, null); cache = new HashMap>(); } /** * Add a Reference object to the list - these References will * be resolved after unmarshalling is complete. * * @param ref */ public void addReference(Reference ref) { references.add(ref); } /** * INTERNAL: * Create primary key values to be used for cache lookup. The map * of primary keys on the reference is keyed on the reference descriptors primary * key field names. Each of these primary keys contains all of the values for a * particular key - in the order that they we read in from the document. For * example, if the key field names are A, B, and C, and there are three reference * object instances, then the hashmap would have the following: * (A=[1,2,3], B=[X,Y,Z], C=[Jim, Joe, Jane]). If the primary key field names on * the reference descriptor contained [B, C, A], then the result of this method call * would be reference.primaryKeys=([X, Jim, 1], [Y, Joe, 2], [Z, Jane, 3]). 
* * @param reference */ private void createPKVectorsFromMap(Reference reference, CollectionReferenceMapping mapping) { CoreDescriptor referenceDescriptor = mapping.getReferenceDescriptor(); Vector pks = new Vector(); if(null == referenceDescriptor) { CacheId pkVals = (CacheId) reference.getPrimaryKeyMap().get(null); if(null == pkVals) { return; } for(int x=0;x 1) { Map idWrapper = new HashMap(); for (int y = 0; y < primaryKey.getPrimaryKey().length; y++) { ObjectReferenceMapping refMapping = (ObjectReferenceMapping) reference.getMapping(); String idName = (String) refMapping.getReferenceDescriptor().getPrimaryKeyFieldNames().get(y); Object idValue = primaryKey.getPrimaryKey()[y]; idWrapper.put(idName, idValue); } c = userSpecifiedResolver.resolve(idWrapper, reference.getTargetClass()); } else { c = userSpecifiedResolver.resolve(primaryKey.getPrimaryKey()[0], reference.getTargetClass()); } if (c != null) { value = c.call(); } } catch (Exception e) { throw XMLMarshalException.unmarshalException(e); } } else { value = getValue(session, reference, primaryKey, handler); } if (value != null) { cPolicy.addInto(value, container, session); } } } // for each reference, get the source object and add it to the container policy // when finished, set the policy on the mapping mapping.setAttributeValueInObject(referenceSourceObject, container); InverseReferenceMapping inverseReferenceMapping = mapping.getInverseReferenceMapping(); if(inverseReferenceMapping != null && value != null) { CoreAttributeAccessor backpointerAccessor = inverseReferenceMapping.getAttributeAccessor(); CoreContainerPolicy backpointerContainerPolicy = inverseReferenceMapping.getContainerPolicy(); if(backpointerContainerPolicy == null) { backpointerAccessor.setAttributeValueInObject(value, referenceSourceObject); } else { Object backpointerContainer = backpointerAccessor.getAttributeValueFromObject(value); if(backpointerContainer == null) { backpointerContainer = 
backpointerContainerPolicy.containerInstance(); backpointerAccessor.setAttributeValueInObject(value, backpointerContainer); } backpointerContainerPolicy.addInto(referenceSourceObject, backpointerContainer, session); } } } else if (reference.getMapping() instanceof ObjectReferenceMapping) { CacheId primaryKey = (CacheId) reference.getPrimaryKey(); Object value = null; if (userSpecifiedResolver != null) { final Callable c; try { if (primaryKey.getPrimaryKey().length > 1) { Map idWrapper = new HashMap(); for (int y = 0; y < primaryKey.getPrimaryKey().length; y++) { ObjectReferenceMapping refMapping = (ObjectReferenceMapping) reference.getMapping(); String idName = (String) refMapping.getReferenceDescriptor().getPrimaryKeyFieldNames().get(y); Object idValue = primaryKey.getPrimaryKey()[y]; idWrapper.put(idName, idValue); } c = userSpecifiedResolver.resolve(idWrapper, reference.getTargetClass()); } else { c = userSpecifiedResolver.resolve(primaryKey.getPrimaryKey()[0], reference.getTargetClass()); } if (c != null) { value = c.call(); } } catch (Exception e) { throw XMLMarshalException.unmarshalException(e); } } else { value = getValue(session, reference, primaryKey, handler); } ObjectReferenceMapping mapping = (ObjectReferenceMapping)reference.getMapping(); if (value != null) { mapping.setAttributeValueInObject(reference.getSourceObject(), value); } if (null != reference.getSetting()) { reference.getSetting().setValue(value); } InverseReferenceMapping inverseReferenceMapping = mapping.getInverseReferenceMapping(); if(inverseReferenceMapping != null) { CoreAttributeAccessor backpointerAccessor = inverseReferenceMapping.getAttributeAccessor(); CoreContainerPolicy backpointerContainerPolicy = inverseReferenceMapping.getContainerPolicy(); if(backpointerContainerPolicy == null) { backpointerAccessor.setAttributeValueInObject(value, referenceSourceObject); } else { Object backpointerContainer = backpointerAccessor.getAttributeValueFromObject(value); if(backpointerContainer == 
null) { backpointerContainer = backpointerContainerPolicy.containerInstance(); backpointerAccessor.setAttributeValueInObject(value, backpointerContainer); } backpointerContainerPolicy.addInto(reference.getSourceObject(), backpointerContainer, session); } } } } // reset the references list references = new ArrayList(); cache.clear(); } private Object getValue(CoreAbstractSession session, Reference reference, CacheId primaryKey, ErrorHandler handler) { Class referenceTargetClass = reference.getTargetClass(); if(null == referenceTargetClass || referenceTargetClass == CoreClassConstants.OBJECT) { for(Object entry : session.getDescriptors().values()) { Object value = null; Descriptor targetDescriptor = (Descriptor) entry; List pkFields = targetDescriptor.getPrimaryKeyFields(); if(null != pkFields && 1 == pkFields.size()) { Field pkField = (Field) pkFields.get(0); pkField = (Field) targetDescriptor.getTypedField(pkField); Class targetType = pkField.getType(); if(targetType == CoreClassConstants.STRING || targetType == CoreClassConstants.OBJECT) { value = getValue(targetDescriptor.getJavaClass(), primaryKey); } else { try { Object[] pkValues = primaryKey.getPrimaryKey(); Object[] convertedPkValues = new Object[pkValues.length]; for(int x=0; x childDescriptors = inheritancePolicy.getAllChildDescriptors(); for(CoreDescriptor childDescriptor : childDescriptors) { value = getValue(childDescriptor.getJavaClass(), primaryKey); if(null != value) { return value; } } } } if(value == null && (primaryKey.getPrimaryKey()[0] != null) ){ XMLMarshalException e = XMLMarshalException.missingIDForIDRef(referenceTargetClass.getName(), primaryKey.getPrimaryKey()); if(handler != null){ SAXParseException saxParseException = new SAXParseException(e.getLocalizedMessage(), null, e); try{ handler.warning(saxParseException); }catch(SAXException saxException){ throw e; } } } return value; } } private Object getValue(Class clazz, CacheId primaryKey) { Map keyToObject = cache.get(clazz); if(null != 
keyToObject) { return keyToObject.get(primaryKey); } return null; } private class ReferenceKey { private Object sourceObject; private Mapping mapping; public ReferenceKey(Object sourceObject, Mapping mapping) { this.sourceObject = sourceObject; this.mapping = mapping; } public Object getSourceObject() { return sourceObject; } public Mapping getMapping() { return mapping; } public void setSourceObject(Object obj) { this.sourceObject = obj; } public void setMapping(Mapping mapping) { this.mapping = mapping; } @Override public int hashCode() { return this.mapping.hashCode() ^ this.sourceObject.hashCode(); } @Override public boolean equals(Object obj) { if(obj == null) { return false; } if(obj.getClass() != this.getClass()) { return false; } ReferenceKey key = (ReferenceKey)obj; return this.sourceObject == key.getSourceObject() && this.mapping == key.getMapping(); } } /** * Store an instance by key based on a mapped class. These values will be * used when it comes time to resolve the references. * @since EclipseLink 2.5.0 */ public void putValue(Class clazz, Object key, Object object) { Map keyToObject = cache.get(clazz); if(null == keyToObject) { keyToObject = new HashMap(); cache.put(clazz, keyToObject); } keyToObject.put(key, object); } }eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/unmapped/0000775000000000000000000000000012216174372022644 5ustar eclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/unmapped/UnmappedContentHandler.java0000664000000000000000000000255412216173126030113 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. 
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Denise Smith - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.unmapped; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.xml.sax.ContentHandler; /** *

Purpose:Provide an interface that can be implemented for handling * unmapped content during unmarshal operations with SAXPlatform. */ public interface UnmappedContentHandler extends ContentHandler { /** * Set the UnmarshalRecord which gives access to mechanisms used during the * unmarshal process such as an Unmarshaller and a Session. * @param unmarshalRecord */ void setUnmarshalRecord(UNMARSHAL_RECORD unmarshalRecord); } ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrooteclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/unmapped/DefaultUnmappedContentHandler.javaeclipselink-2.5.1.orig/org/eclipse/persistence/internal/oxm/unmapped/DefaultUnmappedContentHandler.j0000664000000000000000000000443712216173126030732 0ustar /******************************************************************************* * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Denise Smith - 2.5 - initial implementation ******************************************************************************/ package org.eclipse.persistence.internal.oxm.unmapped; import org.eclipse.persistence.internal.oxm.record.UnmarshalRecord; import org.xml.sax.Attributes; import org.xml.sax.Locator; import org.xml.sax.SAXException; /** *

Purpose:Provide a default implementation of the UnmappedContentHandler *

Responsibilities: