answer
stringlengths
17
10.2M
package com.facebook.litho.specmodels.processor;

import static com.facebook.litho.specmodels.processor.ProcessorUtils.getPackageName;
import static com.facebook.litho.specmodels.processor.ProcessorUtils.validate;

import com.facebook.litho.specmodels.model.DependencyInjectionHelperFactory;
import com.facebook.litho.specmodels.model.SpecModel;
import com.squareup.javapoet.JavaFile;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.tools.Diagnostic;

/**
 * Base annotation processor that drives spec-model extraction, validation and code
 * generation for Litho components. Concrete subclasses supply the {@link SpecModelFactory}
 * instances that know how to extract and build specific spec-model types.
 *
 * <p>Fix: removed a stray {@code :(} token that had been injected before the
 * {@code mPropNameInterStageStore} assignment and made the class uncompilable.
 */
@SupportedSourceVersion(SourceVersion.RELEASE_7)
public abstract class AbstractComponentsProcessor extends AbstractProcessor {

  @Nullable private final DependencyInjectionHelperFactory mDependencyInjectionHelperFactory;
  private final List<SpecModelFactory> mSpecModelFactories;

  // Recreated each round from processingEnv's Filer (see process()).
  private PropNameInterStageStore mPropNameInterStageStore;

  // Thin indirection so consumers always see the store for the current round.
  private final InterStageStore mInterStageStore =
      new InterStageStore() {
        @Override
        public PropNameInterStageStore getPropNameInterStageStore() {
          return mPropNameInterStageStore;
        }
      };

  /**
   * @param specModelFactories factories used to extract and build spec models each round
   * @param dependencyInjectionHelperFactory optional DI helper factory; may be null
   */
  protected AbstractComponentsProcessor(
      List<SpecModelFactory> specModelFactories,
      DependencyInjectionHelperFactory dependencyInjectionHelperFactory) {
    mSpecModelFactories = specModelFactories;
    mDependencyInjectionHelperFactory = dependencyInjectionHelperFactory;
  }

  @Override
  public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
    if (roundEnv.processingOver()) {
      return false;
    }
    // processingEnv is not available at construction time.
    mPropNameInterStageStore = new PropNameInterStageStore(processingEnv.getFiler());

    for (SpecModelFactory specModelFactory : mSpecModelFactories) {
      final Set<Element> elements = specModelFactory.extract(roundEnv);

      for (Element element : elements) {
        try {
          final SpecModel specModel =
              specModelFactory.create(
                  processingEnv.getElementUtils(),
                  (TypeElement) element,
                  mDependencyInjectionHelperFactory == null
                      ? null
                      : mDependencyInjectionHelperFactory.create((TypeElement) element),
                  mInterStageStore);
          validate(specModel);
          generate(specModel);
          afterGenerate(specModel);
        } catch (PrintableException e) {
          // Expected validation failures carry their own user-facing message.
          e.print(processingEnv.getMessager());
        } catch (Exception e) {
          // Unexpected failure: surface an ERROR on the offending element so the
          // build fails with context, and dump the stack for bug reports.
          processingEnv
              .getMessager()
              .printMessage(
                  Diagnostic.Kind.ERROR,
                  String.format(
                      "Unexpected error thrown when generating this component spec. "
                          + "Please report stack trace to the components team.\n%s",
                      e),
                  element);
          e.printStackTrace();
        }
      }
    }
    return false;
  }

  /**
   * Writes the generated component class for the given spec model via the Filer.
   *
   * @throws IOException if the Filer cannot write the generated source
   */
  protected void generate(SpecModel specModel) throws IOException {
    final String packageName = getPackageName(specModel.getComponentTypeName());
    JavaFile.builder(packageName, specModel.generate())
        .skipJavaLangImports(true)
        .build()
        .writeTo(processingEnv.getFiler());
  }

  /** Persists prop names for inter-stage (multi-round) consumption. */
  private void afterGenerate(SpecModel specModel) throws IOException {
    mInterStageStore.getPropNameInterStageStore().saveNames(specModel);
  }
}
package test;

import java.beans.*;
import java.io.*;
import java.util.*;

import org.ice4j.*;
import org.ice4j.ice.*;
import org.ice4j.ice.harvest.*;

/**
 * Simple ice4j testing scenarios.
 *
 * @author Emil Ivov
 */
public class Ice
{
    /**
     * Start time for debugging purposes.
     */
    static long startTime;

    /**
     * Runs a test application that allocates streams, generates an SDP, dumps
     * it on stdout, waits for a remote peer SDP on stdin, then feeds that
     * to our local agent and starts ICE processing.
     *
     * @param args none currently handled
     * @throws Throwable every now and then.
     */
    public static void main(String[] args) throws Throwable
    {
        startTime = System.currentTimeMillis();

        Agent localAgent = createAgent(9090);
        localAgent.setNominationStrategy(
                        NominationStrategy.NOMINATE_HIGHEST_PRIO);

        localAgent.addStateChangeListener(new IceProcessingListener());

        //let them fight ... fights forge character.
        localAgent.setControlling(true);

        String localSDP = SdpUtils.createSDPDescription(localAgent);

        // Dump our offer so the operator can paste it into the remote instance.
        System.out.println(localSDP);

        String sdp = readSDP();

        SdpUtils.parseSDP(localAgent, sdp);

        localAgent.startConnectivityEstablishment();

        //Give processing enough time to finish. We'll System.exit() anyway
        //as soon as localAgent enters a final state.
        Thread.sleep(60000);
    }

    /**
     * Runs a fully local test: both the "local" and the "remote" agent live in
     * this JVM and candidates are copied between them directly.
     *
     * @param args command line arguments
     *
     * @throws Throwable if bad stuff happens.
     */
    public static void main2(String[] args) throws Throwable
    {
        startTime = System.currentTimeMillis();

        Agent localAgent = createAgent(9090);
        localAgent.setNominationStrategy(
                        NominationStrategy.NOMINATE_HIGHEST_PRIO);
        Agent remotePeer = createAgent(6060);

        localAgent.addStateChangeListener(new IceProcessingListener());

        //let them fight ... fights forge character.
        localAgent.setControlling(true);
        remotePeer.setControlling(false);

        // Candidate harvesting happened inside createAgent(), so this endTime
        // intentionally excludes the candidate transfer below.
        long endTime = System.currentTimeMillis();

        transferRemoteCandidates(localAgent, remotePeer);

        // Exchange short-term credentials in both directions.
        localAgent.setRemoteUfrag(remotePeer.getLocalUfrag());
        localAgent.setRemotePassword(remotePeer.getLocalPassword());

        remotePeer.setRemoteUfrag(localAgent.getLocalUfrag());
        remotePeer.setRemotePassword(localAgent.getLocalPassword());

        System.out.println("Total candidate gathering time: "
                        + (endTime - startTime) + "ms");
        System.out.println("LocalAgent:\n" + localAgent);

        localAgent.startConnectivityEstablishment();

        System.out.println("Local audio clist:\n"
                        + localAgent.getStream("audio").getCheckList());

        IceMediaStream videoStream = localAgent.getStream("video");
        if(videoStream != null)
            System.out.println("Local video clist:\n"
                            + videoStream.getCheckList());

        //Give processing enough time to finish. We'll System.exit() anyway
        //as soon as localAgent enters a final state.
        Thread.sleep(60000);
    }

    /**
     * The listener that would end example execution once we enter the
     * completed state.
     */
    public static final class IceProcessingListener
        implements PropertyChangeListener
    {
        /**
         * System.exit()s as soon as ICE processing enters a final state.
         *
         * @param evt the {@link PropertyChangeEvent} containing the old and new
         * states of ICE processing.
         */
        public void propertyChange(PropertyChangeEvent evt)
        {
            long processingEndTime = System.currentTimeMillis();

            System.out.println("Agent entered the " + evt.getNewValue()
                            + " state.");

            // Reference equality is fine here: IceProcessingState values are
            // singletons shared by the event source.
            if(evt.getNewValue() == IceProcessingState.COMPLETED
                || evt.getNewValue() == IceProcessingState.FAILED)
            {
                System.out.println("Total ICE processing time: "
                                + (processingEndTime - startTime) + "ms");

                Agent agent = (Agent)evt.getSource();
                List<IceMediaStream> streams = agent.getStreams();

                // Report the pair selected for every component of every stream.
                for(IceMediaStream stream : streams)
                {
                    String streamName = stream.getName();
                    System.out.println("Pairs selected for stream: "
                                    + streamName);
                    List<Component> components = stream.getComponents();

                    for(Component cmp : components)
                    {
                        String cmpName = cmp.getName();
                        System.out.println(cmpName + ": "
                                        + cmp.getSelectedPair());
                    }
                }

                System.out.println("Printing the completed check lists:");
                for(IceMediaStream stream : streams)
                {
                    String streamName = stream.getName();
                    System.out.println("Check list for stream: " + streamName);
                    //uncomment for a more verbose output
                    System.out.println(stream.getCheckList());
                }
            }
            else if(evt.getNewValue() == IceProcessingState.TERMINATED)
            {
                System.exit(0);
            }
        }
    }

    /**
     * Installs remote candidates in <tt>localAgent</tt>.
     *
     * @param localAgent a reference to the agent that we will pretend to be the
     * local
     * @param remotePeer a reference to what we'll pretend to be a remote agent.
     */
    private static void transferRemoteCandidates(Agent localAgent,
                                                 Agent remotePeer)
    {
        List<IceMediaStream> streams = localAgent.getStreams();

        for(IceMediaStream localStream : streams)
        {
            String streamName = localStream.getName();

            //get a reference to the local stream
            IceMediaStream remoteStream = remotePeer.getStream(streamName);

            if(remoteStream != null)
                transferRemoteCandidates(localStream, remoteStream);
            else
            {
                // NOTE(review): removing a stream while iterating assumes
                // Agent.getStreams() returns a defensive copy — verify against
                // ice4j to rule out ConcurrentModificationException.
                localAgent.removeStream(localStream);
            }
        }
    }

    /**
     * Installs remote candidates in <tt>localStream</tt>.
     *
     * @param localStream the stream where we will be adding remote candidates
     * to.
     * @param remoteStream the stream that we should extract remote candidates
     * from.
     */
    private static void transferRemoteCandidates(IceMediaStream localStream,
                                                 IceMediaStream remoteStream)
    {
        List<Component> localComponents = localStream.getComponents();

        for(Component localComponent : localComponents)
        {
            int id = localComponent.getComponentID();

            Component remoteComponent = remoteStream.getComponent(id);

            if(remoteComponent != null)
                transferRemoteCandidates(localComponent, remoteComponent);
            else
            {
                // NOTE(review): same remove-during-iteration assumption as in
                // the Agent-level overload above — confirm getComponents()
                // returns a copy.
                localStream.removeComponent(localComponent);
            }
        }
    }

    /**
     * Adds to <tt>localComponent</tt> a list of remote candidates that are
     * actually the local candidates from <tt>remoteComponent</tt>.
     *
     * @param localComponent the <tt>Component</tt> where that we should be
     * adding <tt>remoteCandidate</tt>s to.
     * @param remoteComponent the source of remote candidates.
     */
    private static void transferRemoteCandidates(Component localComponent,
                                                 Component remoteComponent)
    {
        List<LocalCandidate> remoteCandidates
                                = remoteComponent.getLocalCandidates();

        localComponent.setDefaultRemoteCandidate(
                        remoteComponent.getDefaultCandidate());

        // Re-wrap each local candidate of the fake remote peer as a
        // RemoteCandidate of our local component.
        for(Candidate rCand : remoteCandidates)
        {
            localComponent.addRemoteCandidate(new RemoteCandidate(
                            rCand.getTransportAddress(),
                            localComponent,
                            rCand.getType(),
                            rCand.getFoundation(),
                            rCand.getPriority()));
        }
    }

    /**
     * Creates an ICE <tt>Agent</tt> and adds to it an audio and a video stream
     * with RTP and RTCP components.
     *
     * @param rtpPort the port that we should try to bind the RTP component on
     * (the RTCP one would automatically go to rtpPort + 1)
     * @return an ICE <tt>Agent</tt> with an audio stream with RTP and RTCP
     * components.
     *
     * @throws Throwable if anything goes wrong.
     */
    private static Agent createAgent(int rtpPort)
        throws Throwable
    {
        Agent agent = new Agent();

        // STUN harvesters over IPv4 and IPv6.
        StunCandidateHarvester stunHarv = new StunCandidateHarvester(
            new TransportAddress("sip-communicator.net", 3478, Transport.UDP));
        StunCandidateHarvester stun6Harv = new StunCandidateHarvester(
            new TransportAddress("ipv6.sip-communicator.net",
                                 3478, Transport.UDP));

        agent.addCandidateHarvester(stunHarv);
        agent.addCandidateHarvester(stun6Harv);

        // Video stream is offset by 2 so it does not collide with the audio
        // stream's RTCP port (rtpPort + 1).
        createStream(rtpPort, "audio", agent);
        createStream(rtpPort + 2, "video", agent);

        return agent;
    }

    /**
     * Creates an <tt>IceMediaStrean</tt> and adds to it an RTP and and RTCP
     * component.
     *
     * @param rtpPort the port that we should try to bind the RTP component on
     * (the RTCP one would automatically go to rtpPort + 1)
     * @param streamName the name of the stream to create
     * @param agent the <tt>Agent</tt> that should create the stream.
     *
     * @return the newly created <tt>IceMediaStream</tt>.
     * @throws Throwable if anything goes wrong.
     */
    private static IceMediaStream createStream(int rtpPort,
                                               String streamName,
                                               Agent agent)
        throws Throwable
    {
        IceMediaStream stream = agent.createMediaStream(streamName);

        // Shadows the class-level startTime on purpose: this one only times
        // per-component creation for the printouts below.
        long startTime = System.currentTimeMillis();

        //TODO: component creation should probably be part of the library. it
        //should also be started after we've defined all components to be
        //created so that we could run the harvesting for everyone of them
        //simultaneously with the others.

        //rtp
        agent.createComponent(
                stream, Transport.UDP, rtpPort, rtpPort, rtpPort + 100);

        long endTime = System.currentTimeMillis();
        System.out.println("RTP Component created in "
                        + (endTime - startTime) +" ms");
        startTime = endTime;

        //rtcpComp
        agent.createComponent(
                stream, Transport.UDP, rtpPort + 1, rtpPort + 1,
                rtpPort + 101);

        endTime = System.currentTimeMillis();
        System.out.println("RTCP Component created in "
                        + (endTime - startTime) +" ms");

        return stream;
    }

    /**
     * Reads an SDP description from the standard input. We expect descriptions
     * provided to this method to be originating from instances of this
     * application running on remote computers.
     *
     * @return whatever we got on stdin (hopefully an SDP description.
     *
     * @throws Throwable if something goes wrong with console reading.
     */
    private static String readSDP()
        throws Throwable
    {
        System.out.println("Paste SDP here. Enter an empty line to proceed:");
        System.out.println("(we don't mind the [java] prefix in SDP intput)");
        BufferedReader reader
            = new BufferedReader(new InputStreamReader(System.in));

        StringBuffer buff = new StringBuffer();
        String line = new String();

        // Accumulate pasted lines until the first blank line; strip any
        // "[java]" ant-exec prefix the operator may have copied along.
        while ( (line = reader.readLine()) != null)
        {
            line = line.replace("[java]", "");
            line = line.trim();
            if(line.length() == 0)
                break;
            buff.append(line);
            buff.append("\r\n");
        }

        return buff.toString();
    }
}
package peergos.server;

import java.util.*;
import java.util.logging.Logger;
import peergos.server.storage.*;
import peergos.server.messages.*;
import peergos.server.storage.admin.*;
import peergos.server.util.*;

import java.util.logging.Level;

import com.sun.net.httpserver.*;
import peergos.shared.*;
import peergos.shared.corenode.*;
import peergos.shared.io.ipfs.multihash.*;
import peergos.shared.mutable.*;
import peergos.shared.social.*;
import peergos.shared.storage.*;
import peergos.server.net.*;
import peergos.shared.storage.auth.*;
import peergos.shared.storage.controller.*;
import peergos.shared.user.*;
import peergos.shared.util.*;

import javax.net.ssl.*;
import java.io.*;
import java.net.*;
import java.nio.file.*;
import java.security.*;
import java.security.cert.*;
import java.util.concurrent.*;

/**
 * The Peergos user-facing HTTP(S) server: wires all API handlers onto a
 * localhost HttpServer and, optionally, a hardened public TLS server.
 */
public class UserService {
    private static final Logger LOG = Logging.LOG();

    public static final Version CURRENT_VERSION = Version.parse("0.8.0");

    // Root context path for the web UI handler.
    public static final String UI_URL = "/";

    /**
     * Hardens the JVM-wide TLS configuration by disabling weak key-exchange,
     * cipher and hash algorithms. Mutates global java.security properties, so
     * it affects every SSLContext created afterwards in this JVM.
     */
    private static void initTLS() {
        // disable weak algorithms
        LOG.info("\nInitial security properties:");
        printSecurityProperties();

        // The ECDH and RSA key exchange algorithms are disabled because they don't provide forward secrecy
        Security.setProperty("jdk.tls.disabledAlgorithms",
                "SSLv3, TLSv1.3, RC4, MD2, MD4, MD5, SHA1, DES, DSA, MD5withRSA, DH, RSA keySize < 2048, EC keySize < 224, 3DES_EDE_CBC, " +
                        "TLS_RSA_WITH_NULL_SHA256," +
                        "TLS_RSA_WITH_AES_128_GCM_SHA256," +
                        "TLS_RSA_WITH_AES_128_CBC_SHA256, " +
                        "TLS_RSA_WITH_AES_256_GCM_SHA384, " +
                        "TLS_RSA_WITH_AES_256_CBC_SHA256, " +
                        "TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256, " +
                        "TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256, " +
                        "TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256, " +
                        "TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384," +
                        "TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384," +
                        "TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256, " +
                        "TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384," +
                        "TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384," +
                        "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256," +
                        "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384," +
                        "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256," +
                        "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,"
        );
        Security.setProperty("jdk.certpath.disabledAlgorithms",
                "RC4, MD2, MD4, MD5, SHA1, DSA, RSA keySize < 2048, EC keySize < 224");
        Security.setProperty("jdk.tls.rejectClientInitializedRenegotiation", "true");

        LOG.info("\nUpdated security properties:");
        printSecurityProperties();

        Security.setProperty("jdk.tls.ephemeralDHKeySize", "2048");
    }

    /** Logs the current values of the TLS-related security properties. */
    static void printSecurityProperties() {
        LOG.info("jdk.tls.disabledAlgorithms: " + Security.getProperty("jdk.tls.disabledAlgorithms"));
        LOG.info("jdk.certpath.disabledAlgorithms: " + Security.getProperty("jdk.certpath.disabledAlgorithms"));
        LOG.info("jdk.tls.rejectClientInitializedRenegotiation: "+Security.getProperty("jdk.tls.rejectClientInitializedRenegotiation"));
    }

    public final ContentAddressedStorage storage;
    public final BatCave bats;
    public final Crypto crypto;
    public final CoreNode coreNode;
    public final Account account;
    public final SocialNetwork social;
    public final MutablePointers mutable;
    public final InstanceAdmin controller;
    public final SpaceUsage usage;
    public final ServerMessageStore serverMessages;
    public final GarbageCollector gc; // not exposed

    public UserService(ContentAddressedStorage storage,
                       BatCave bats,
                       Crypto crypto,
                       CoreNode coreNode,
                       Account account,
                       SocialNetwork social,
                       MutablePointers mutable,
                       InstanceAdmin controller,
                       SpaceUsage usage,
                       ServerMessageStore serverMessages,
                       GarbageCollector gc) {
        this.storage = storage;
        this.bats = bats;
        this.crypto = crypto;
        this.coreNode = coreNode;
        this.account = account;
        this.social = social;
        this.mutable = mutable;
        this.controller = controller;
        this.usage = usage;
        this.serverMessages = serverMessages;
        this.gc = gc;
    }

    /** Hostname + PKCS12 keystore password for the public TLS endpoint. */
    public static class TlsProperties {
        public final String hostname, keyfilePassword;

        public TlsProperties(String hostname, String keyfilePassword) {
            this.hostname = hostname;
            this.keyfilePassword = keyfilePassword;
        }
    }

    /**
     * Creates, configures and starts the localhost server and (optionally) the
     * public TLS server on port 443, registering every API handler on both.
     *
     * @param local address for the plain localhost server
     * @param nodeId this server's node id, allowed as a p2p request host
     * @param tlsProps if present, enables the public TLS server and an
     *                 http->https redirect on port 80
     * @param publicHostname required when isPublicServer and no TLS config
     * @param webroot if present, serve the UI from this directory instead of the jar
     * @return true on successful startup
     * @throws IOException if a server socket cannot be created/bound
     */
    public boolean initAndStart(InetSocketAddress local,
                                Multihash nodeId,
                                Optional<TlsProperties> tlsProps,
                                Optional<String> publicHostname,
                                List<String> blockstoreDomains,
                                List<String> frameDomains,
                                List<String> appSubdomains,
                                boolean includeCsp,
                                Optional<String> basicAuth,
                                Optional<Path> webroot,
                                boolean useWebCache,
                                boolean isPublicServer,
                                int connectionBacklog,
                                int handlerPoolSize) throws IOException {
        InetAddress allInterfaces = InetAddress.getByName("::");
        if (tlsProps.isPresent())
            try {
                // Best effort: redirect plain http (port 80) to https.
                HttpServer httpServer = HttpServer.create();
                httpServer.createContext("/", new RedirectHandler("https://" + tlsProps.get().hostname + ":443/"));
                httpServer.bind(new InetSocketAddress(allInterfaces, 80), connectionBacklog);
                httpServer.start();
                initTLS();
            } catch (Exception e) {
                LOG.log(Level.WARNING, e.getMessage(), e);
                LOG.info("Couldn't start http redirect to https for user server!");
            }

        LOG.info("Starting local Peergos server at: localhost:"+local.getPort());
        if (tlsProps.isPresent())
            LOG.info("Starting Peergos TLS server on all interfaces.");
        HttpServer localhostServer = HttpServer.create(local, connectionBacklog);
        HttpsServer tlsServer = ! tlsProps.isPresent() ?
                null :
                HttpsServer.create(new InetSocketAddress(allInterfaces, 443), connectionBacklog);

        if (tlsProps.isPresent()) {
            try {
                SSLContext sslContext = SSLContext.getInstance("TLS");

                char[] password = tlsProps.get().keyfilePassword.toCharArray();
                KeyStore ks = getKeyStore("storage.p12", password);

                KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
                kmf.init(ks, password);

//                TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
//                tmf.init(SSL.getTrustedKeyStore());

                // setup the HTTPS context and parameters
                sslContext.init(kmf.getKeyManagers(), null, null);

                // Honour the server's cipher-suite ordering rather than the client's.
                sslContext.getSupportedSSLParameters().setUseCipherSuitesOrder(true);
                // set up perfect forward secrecy
                sslContext.getSupportedSSLParameters().setCipherSuites(new String[]{
                        "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
                        "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
                        "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256",
                        "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384"
                });

                SSLContext.setDefault(sslContext);
                tlsServer.setHttpsConfigurator(new HttpsConfigurator(sslContext) {
                    public void configure(HttpsParameters params) {
                        try {
                            // initialise the SSL context
                            SSLContext c = SSLContext.getDefault();
                            SSLEngine engine = c.createSSLEngine();
                            params.setNeedClientAuth(false);
                            params.setCipherSuites(engine.getEnabledCipherSuites());
                            params.setProtocols(engine.getEnabledProtocols());

                            // get the default parameters
                            SSLParameters defaultSSLParameters = c.getDefaultSSLParameters();
                            params.setSSLParameters(defaultSSLParameters);
                        } catch (Exception ex) {
                            LOG.severe("Failed to create HTTPS port");
                            ex.printStackTrace(System.err);
                        }
                    }
                });
            } catch (NoSuchAlgorithmException | InvalidKeyException | KeyStoreException | CertificateException |
                    NoSuchProviderException | SignatureException | UnrecoverableKeyException | KeyManagementException ex) {
                LOG.severe("Failed to load TLS settings");
                throw new RuntimeException(ex);
            }
        }

        //define web-root static-handler
        if (webroot.isPresent())
            LOG.info("Using webroot from local file system: " + webroot);
        else
            LOG.info("Using webroot from jar");
        if (isPublicServer && publicHostname.isEmpty())
            throw new IllegalStateException("Missing arg public-domain");
        // The CSP host is the externally visible origin for content-security-policy headers.
        CspHost host = tlsProps.map(p -> new CspHost("https://", p.hostname))
                .orElse(isPublicServer ?
                        new CspHost("https://", publicHostname.get()) :
                        new CspHost("http://", local.getHostName(), local.getPort()));
        StaticHandler handler = webroot.map(p -> (StaticHandler) new FileHandler(host, blockstoreDomains, frameDomains,
                appSubdomains, p, includeCsp, true))
                .orElseGet(() -> new JarHandler(host, blockstoreDomains, frameDomains, appSubdomains, includeCsp,
                        true, PathUtil.get("/webroot")));

        if (useWebCache) {
            LOG.info("Caching web-resources");
            handler = handler.withCache();
        }

        // Register every API handler on both the localhost and (if any) TLS server.
        addHandler(localhostServer, tlsServer, Constants.DHT_URL,
                new DHTHandler(storage, crypto.hasher, (h, i) -> true, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.BATS_URL,
                new BatCaveHandler(this.bats, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.CORE_URL,
                new CoreNodeHandler(this.coreNode, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.SOCIAL_URL,
                new SocialHandler(this.social, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.MUTABLE_POINTERS_URL,
                new MutationHandler(this.mutable, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.LOGIN_URL,
                new AccountHandler(this.account, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.ADMIN_URL,
                new AdminHandler(this.controller, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.SPACE_USAGE_URL,
                new SpaceHandler(this.usage, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.SERVER_MESSAGE_URL,
                new ServerMessageHandler(this.serverMessages, coreNode, storage, isPublicServer),
                basicAuth, local, host, nodeId, false);
        addHandler(localhostServer, tlsServer, "/" + Constants.PUBLIC_FILES_URL,
                new PublicFileHandler(crypto.hasher, coreNode, mutable, storage),
                basicAuth, local, host, nodeId, false);
        // UI handler last on "/", allowed on subdomains.
        addHandler(localhostServer, tlsServer, UI_URL, handler,
                basicAuth, local, host, nodeId, true);

        localhostServer.setExecutor(Executors.newFixedThreadPool(handlerPoolSize));
        localhostServer.start();

        if (tlsServer != null) {
            tlsServer.setExecutor(Executors.newFixedThreadPool(handlerPoolSize));
            tlsServer.start();
        }

        return true;
    }

    /**
     * Registers a handler (optionally wrapped in basic-auth) on the localhost
     * server and, when present, on the TLS server with HSTS applied.
     */
    private static void addHandler(HttpServer localhostServer,
                                   HttpsServer tlsServer,
                                   String path,
                                   HttpHandler handler,
                                   Optional<String> basicAuth,
                                   InetSocketAddress local,
                                   CspHost host,
                                   Multihash nodeId,
                                   boolean allowSubdomains) {
        HttpHandler withAuth = basicAuth
                .map(ba -> (HttpHandler) new BasicAuthHandler(ba, handler))
                .orElse(handler);
        // Allow local requests, ones to the public host, and p2p reqs to our node
        List<String> allowedHosts = Arrays.asList("127.0.0.1:" + local.getPort(), host.host(), nodeId.toString());
        SubdomainHandler subdomainHandler = new SubdomainHandler(allowedHosts, withAuth, allowSubdomains);
        localhostServer.createContext(path, subdomainHandler);
        if (tlsServer != null) {
            tlsServer.createContext(path, new HSTSHandler(subdomainHandler));
        }
    }

    /**
     * Loads a PKCS12 keystore from the given file.
     *
     * @throws IllegalStateException if the keystore file does not exist
     */
    public static KeyStore getKeyStore(String filename, char[] password)
            throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException,
            InvalidKeyException, NoSuchProviderException, SignatureException {
        KeyStore ks = KeyStore.getInstance("PKCS12");
        if (new File(filename).exists()) {
            ks.load(new FileInputStream(filename), password);
            return ks;
        }

        throw new IllegalStateException("SSL keystore file doesn't exist: "+filename);
    }
}
package org.cytoscape.network.merge.internal.util;

import org.cytoscape.model.CyColumn;
import org.cytoscape.model.CyRow;
import org.cytoscape.model.CyTable;
import org.cytoscape.model.CyIdentifiable;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.network.merge.internal.conflict.AttributeConflictCollector;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Default strategy for merging attribute values from several source rows into
 * one target row. Scalar conflicts are recorded in the
 * {@link AttributeConflictCollector}; list columns are merged as a union.
 *
 * <p>Fixes in this revision:
 * <ul>
 *   <li>list-from-list merging compared the plain target type against the
 *       source <em>list</em> type (always unequal), force-casting every
 *       element; it now compares against the source's plain element type</li>
 *   <li>null source values no longer throw NullPointerException — they are
 *       skipped, since there is nothing to merge</li>
 *   <li>removed a redundant duplicate {@code cyRow.set} in the
 *       list-from-plain branch</li>
 * </ul>
 */
public class DefaultAttributeMerger implements AttributeMerger {

    protected final AttributeConflictCollector conflictCollector;

    public DefaultAttributeMerger(final AttributeConflictCollector conflictCollector) {
        this.conflictCollector = conflictCollector;
    }

    /**
     * Merges the attribute values of the source objects in {@code mapGOAttr}
     * into {@code toAttr} of {@code toGO}.
     *
     * @param mapGOAttr source object -> source column to read the value from
     * @param toGO the target graph object receiving the merged value
     * @param toAttr the target column
     * @param toNetwork network owning the target row
     * @throws IllegalArgumentException if any argument is null
     */
    //@Override
    public <T extends CyIdentifiable> void mergeAttribute(Map<T, CyColumn> mapGOAttr,
                                                          T toGO,
                                                          CyColumn toAttr,
                                                          CyNetwork toNetwork) {
        if ((mapGOAttr == null) || (toGO == null) || (toAttr == null)) {
            throw new java.lang.IllegalArgumentException("Null argument.");
        }

        CyRow cyRow = toNetwork.getRow(toGO);
        ColumnType colType = ColumnType.getType(toAttr);

        for (Map.Entry<T, CyColumn> entryGOAttr : mapGOAttr.entrySet()) {
            T from = entryGOAttr.getKey();
            CyColumn fromAttr = entryGOAttr.getValue();
            CyTable fromTable1 = fromAttr.getTable();
            CyRow fromCyRow = fromTable1.getRow(from.getSUID());
            ColumnType fromColType = ColumnType.getType(fromAttr);

            if (colType == ColumnType.STRING) {
                // the case of inconvertable attributes and simple attributes to String
                Object o1 = fromCyRow.getRaw(fromAttr.getName());
                if (o1 == null) {
                    // nothing to merge from this source row
                    // (previously NPE'd on o1.toString()/o1.equals(o2))
                    continue;
                }
                String o2 = cyRow.get(toAttr.getName(), String.class);
                if (o2 == null || o2.length() == 0) {
                    // null or empty target: adopt the source value,
                    // unless the target column is virtual (read-through)
                    if (!toAttr.getVirtualColumnInfo().isVirtual()) {
                        cyRow.set(toAttr.getName(), o1.toString());
                    }
                } else if (o1.equals(o2)) {
                    // the same, do nothing
                } else {
                    // attribute conflict: record it for later resolution
                    conflictCollector.addConflict(from, fromAttr, toGO, toAttr);
                }
            } else if (!colType.isList()) {
                // simple type (Integer, Long, Double, Boolean)
                Object o1 = fromCyRow.get(fromAttr.getName(), fromColType.getType());
                if (o1 == null) {
                    // nothing to merge; also avoids castService(null)/o1.equals NPEs
                    continue;
                }
                if (fromColType != colType) {
                    o1 = colType.castService(o1);
                }

                Object o2 = cyRow.get(toAttr.getName(), colType.getType());
                if (o2 == null) {
                    cyRow.set(toAttr.getName(), o1);
                } else if (o1.equals(o2)) {
                    // the same, do nothing
                } else {
                    // attribute conflict: record it for later resolution
                    conflictCollector.addConflict(from, fromAttr, toGO, toAttr);
                }
            } else {
                // target is a list column: merge as a union, preserving order
                //TODO: use a conflict handler to handle this part?
                ColumnType plainType = colType.toPlain();

                List l2 = cyRow.getList(toAttr.getName(), plainType.getType());
                if (l2 == null) {
                    l2 = new ArrayList();
                }

                if (!fromColType.isList()) {
                    // from a plain (scalar) source column
                    Object o1 = fromCyRow.get(fromAttr.getName(), fromColType.getType());
                    if (o1 != null) {
                        if (plainType != fromColType) {
                            o1 = plainType.castService(o1);
                        }
                        if (!l2.contains(o1)) {
                            l2.add(o1);
                        }
                    }
                } else {
                    // from a list source column
                    ColumnType fromPlain = fromColType.toPlain();
                    List l1 = fromCyRow.getList(fromAttr.getName(), fromPlain.getType());
                    if (l1 != null) {
                        int nl1 = l1.size();
                        for (int il1 = 0; il1 < nl1; il1++) {
                            Object o1 = l1.get(il1);
                            if (o1 == null) {
                                continue;
                            }
                            // FIX: compare against the source's *plain* element
                            // type; comparing to fromColType (a list type) was
                            // always unequal and cast every element needlessly.
                            if (plainType != fromPlain) {
                                o1 = plainType.castService(o1);
                            }
                            if (!l2.contains(o1)) {
                                l2.add(o1);
                            }
                        }
                    }
                }
                cyRow.set(toAttr.getName(), l2);
            }
        }
    }
}
package io.druid.query;

import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListenableFutureTask;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.metamx.common.concurrent.ExecutorServiceConfig;
import com.metamx.common.lifecycle.Lifecycle;

import javax.annotation.Nullable;
import java.util.List;
import java.util.concurrent.AbstractExecutorService;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.RunnableFuture;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * An {@link java.util.concurrent.ExecutorService} that runs higher-priority
 * tasks first. Submitted tasks are wrapped in
 * {@link PrioritizedListenableFutureTask}s which are ordered by a
 * {@link PriorityBlockingQueue} inside the backing {@link ThreadPoolExecutor}.
 */
public class PrioritizedExecutorService extends AbstractExecutorService implements ListeningExecutorService
{
  /**
   * Creates a daemon-threaded prioritized pool sized from {@code config} and
   * registers a lifecycle handler that shuts it down on stop.
   */
  public static PrioritizedExecutorService create(Lifecycle lifecycle, ExecutorServiceConfig config)
  {
    final PrioritizedExecutorService service = new PrioritizedExecutorService(
        new ThreadPoolExecutor(
            config.getNumThreads(),
            config.getNumThreads(),
            0L,
            TimeUnit.MILLISECONDS,
            // The priority queue is what makes higher-priority tasks run first;
            // entries must be mutually Comparable (see PrioritizedListenableFutureTask).
            new PriorityBlockingQueue<Runnable>(),
            new ThreadFactoryBuilder().setDaemon(true).setNameFormat(config.getFormatString()).build()
        )
    );

    lifecycle.addHandler(
        new Lifecycle.Handler()
        {
          @Override
          public void start() throws Exception
          {
          }

          @Override
          public void stop()
          {
            service.shutdownNow();
          }
        }
    );

    return service;
  }

  private final ListeningExecutorService delegate;
  // Direct reference to the executor's queue, used only for getQueueSize().
  private final BlockingQueue<Runnable> delegateQueue;
  // When false, submitting a task that is not Prioritized* is rejected.
  private final boolean allowRegularTasks;
  // Priority assigned to non-prioritized tasks when allowRegularTasks is true.
  private final int defaultPriority;

  public PrioritizedExecutorService(
      ThreadPoolExecutor threadPoolExecutor
  )
  {
    // Strict mode: only Prioritized{Runnable,Callable} tasks, default priority 0.
    this(threadPoolExecutor, false, 0);
  }

  public PrioritizedExecutorService(
      ThreadPoolExecutor threadPoolExecutor,
      boolean allowRegularTasks,
      int defaultPriority
  )
  {
    this.delegate = MoreExecutors.listeningDecorator(Preconditions.checkNotNull(threadPoolExecutor));
    this.delegateQueue = threadPoolExecutor.getQueue();
    this.allowRegularTasks = allowRegularTasks;
    this.defaultPriority = defaultPriority;
  }

  /**
   * Wraps runnables submitted through AbstractExecutorService.submit so the
   * queued element is comparable by priority.
   */
  @Override
  protected <T> PrioritizedListenableFutureTask<T> newTaskFor(Runnable runnable, T value)
  {
    Preconditions.checkArgument(
        allowRegularTasks || runnable instanceof PrioritizedRunnable,
        "task does not implement PrioritizedRunnable"
    );
    return PrioritizedListenableFutureTask.create(
        ListenableFutureTask.create(runnable, value),
        runnable instanceof PrioritizedRunnable
        ? ((PrioritizedRunnable) runnable).getPriority()
        : defaultPriority
    );
  }

  /** Callable counterpart of {@link #newTaskFor(Runnable, Object)}. */
  @Override
  protected <T> PrioritizedListenableFutureTask<T> newTaskFor(Callable<T> callable)
  {
    Preconditions.checkArgument(
        allowRegularTasks || callable instanceof PrioritizedCallable,
        "task does not implement PrioritizedCallable"
    );
    return PrioritizedListenableFutureTask.create(
        ListenableFutureTask.create(callable),
        callable instanceof PrioritizedCallable
        ? ((PrioritizedCallable) callable).getPriority()
        : defaultPriority
    );
  }

  // The casts below are safe: AbstractExecutorService.submit returns the task
  // produced by our newTaskFor overrides, which implement ListenableFuture.
  @Override
  public ListenableFuture<?> submit(Runnable task)
  {
    return (ListenableFuture<?>) super.submit(task);
  }

  @Override
  public <T> ListenableFuture<T> submit(Runnable task, @Nullable T result)
  {
    return (ListenableFuture<T>) super.submit(task, result);
  }

  @Override
  public <T> ListenableFuture<T> submit(Callable<T> task)
  {
    return (ListenableFuture<T>) super.submit(task);
  }

  @Override
  public void shutdown()
  {
    delegate.shutdown();
  }

  @Override
  public List<Runnable> shutdownNow()
  {
    return delegate.shutdownNow();
  }

  @Override
  public boolean isShutdown()
  {
    return delegate.isShutdown();
  }

  @Override
  public boolean isTerminated()
  {
    return delegate.isTerminated();
  }

  @Override
  public boolean awaitTermination(long l, TimeUnit timeUnit) throws InterruptedException
  {
    return delegate.awaitTermination(l, timeUnit);
  }

  /**
   * NOTE(review): tasks passed directly to execute() bypass newTaskFor(), so
   * they reach the priority queue unwrapped — presumably callers only use
   * submit(); confirm before relying on execute() with plain Runnables.
   */
  @Override
  public void execute(final Runnable runnable)
  {
    delegate.execute(runnable);
  }

  /** Number of tasks currently waiting in the executor's queue. */
  public int getQueueSize()
  {
    return delegateQueue.size();
  }

  /**
   * A {@link ListenableFutureTask} decorated with a priority. Ordered so that
   * HIGHER priority sorts FIRST in the priority queue (hence the negated
   * comparison in {@link #compareTo}).
   */
  public static class PrioritizedListenableFutureTask<V> implements RunnableFuture<V>,
      ListenableFuture<V>,
      PrioritizedRunnable,
      Comparable<PrioritizedListenableFutureTask>
  {
    public static <V> PrioritizedListenableFutureTask<V> create(PrioritizedRunnable task, @Nullable V result)
    {
      return new PrioritizedListenableFutureTask<>(ListenableFutureTask.create(task, result), task.getPriority());
    }

    public static <V> PrioritizedListenableFutureTask<?> create(PrioritizedCallable<V> callable)
    {
      return new PrioritizedListenableFutureTask<>(ListenableFutureTask.create(callable), callable.getPriority());
    }

    public static <V> PrioritizedListenableFutureTask<V> create(ListenableFutureTask<V> task, int priority)
    {
      return new PrioritizedListenableFutureTask<>(task, priority);
    }

    private final ListenableFutureTask<V> delegate;
    private final int priority;

    PrioritizedListenableFutureTask(ListenableFutureTask<V> delegate, int priority)
    {
      this.delegate = delegate;
      this.priority = priority;
    }

    @Override
    public void run()
    {
      delegate.run();
    }

    @Override
    public boolean cancel(boolean mayInterruptIfRunning)
    {
      return delegate.cancel(mayInterruptIfRunning);
    }

    @Override
    public boolean isCancelled()
    {
      return delegate.isCancelled();
    }

    @Override
    public boolean isDone()
    {
      return delegate.isDone();
    }

    @Override
    public V get() throws InterruptedException, ExecutionException
    {
      return delegate.get();
    }

    @Override
    public V get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException
    {
      return delegate.get(timeout, unit);
    }

    @Override
    public void addListener(Runnable listener, Executor executor)
    {
      delegate.addListener(listener, executor);
    }

    @Override
    public int getPriority()
    {
      return priority;
    }

    // Negated so that a larger priority value sorts ahead (runs earlier).
    // NOTE(review): compareTo is not consistent with equals — acceptable for a
    // PriorityBlockingQueue, which permits it.
    @Override
    public int compareTo(PrioritizedListenableFutureTask otherTask)
    {
      return -Ints.compare(getPriority(), otherTask.getPriority());
    }
  }
}
package org.openregistry.aspect; import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; import org.openregistry.core.domain.sor.ReconciliationCriteria; import org.openregistry.core.domain.sor.SoRSpecification; import org.openregistry.core.domain.sor.SorPerson; import org.openregistry.core.domain.sor.SystemOfRecordHolder; import org.openregistry.core.repository.SystemOfRecordRepository; /** * @version $Revision$ $ Date$ * @since 0.1 */ @Aspect public final class SoRSpecificationThreadLocalAspect { private SystemOfRecordRepository systemOfRecordRepository; @Around("(execution (public * org.openregistry.core.service.PersonService+.*(..))) && args(sorPerson, ..)") public Object populateThreadLocalForSoRSpecification(final ProceedingJoinPoint proceedingJoinPoint, final SorPerson sorPerson) throws Throwable { try { final SoRSpecification soRSpecification = sorPerson != null ? this.systemOfRecordRepository.findSoRSpecificationById(sorPerson.getSourceSor()) : null; SystemOfRecordHolder.setCurrentSystemOfRecord(soRSpecification); return proceedingJoinPoint.proceed(); } finally { SystemOfRecordHolder.clearCurrentSystemOfRecord(); } } @Around("(execution (public * org.openregistry.core.service.PersonService+.*(..))) && args(reconciliationCriteria, ..)") public Object populateThreadLocalForSoRSpecification(final ProceedingJoinPoint proceedingJoinPoint, final ReconciliationCriteria reconciliationCriteria) throws Throwable { try { final SoRSpecification soRSpecification = reconciliationCriteria != null ? 
this.systemOfRecordRepository.findSoRSpecificationById(reconciliationCriteria.getSorPerson().getSourceSor()) : null; SystemOfRecordHolder.setCurrentSystemOfRecord(soRSpecification); return proceedingJoinPoint.proceed(); } finally { SystemOfRecordHolder.clearCurrentSystemOfRecord(); } } public void setSystemOfRecordRepository(final SystemOfRecordRepository systemOfRecordRepository) { this.systemOfRecordRepository = systemOfRecordRepository; } }
import javax.jms.*; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import java.util.Properties; public class MBQueuePublisher { public static final String QPID_ICF = "org.wso2.andes.jndi.PropertiesFileInitialContextFactory"; private static final String CF_NAME_PREFIX = "connectionfactory."; private static final String QUEUE_NAME_PREFIX = "queue."; private static final String CF_NAME = "qpidConnectionfactory"; private static String CARBON_CLIENT_ID = "carbon"; private static String CARBON_VIRTUAL_HOST_NAME = "carbon"; //Hostname private static String CARBON_DEFAULT_HOSTNAME = "localhost"; //Port of MB private static String CARBON_DEFAULT_PORT = "5672"; //Queue Name String queueName = "TestQueue"; //For tenants' queue //String queueName = "dilshani.wso2.com/TestQueue"; //Number of messages going to publish int publishMsgCount=100; //For super tenant String userName = "admin"; String password = "admin"; //For tenants //String userName ="dilshani!dilshani.wso2.com"; //String password ="dilshani"; //Message going to publish private static final String param = "IBM"; private static final String MESSAGE_WITH_HEADER = "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\n" + "<soapenv:Header/>\n" + "<soapenv:Body>\n" + "<m:placeOrder xmlns:m=\"http://services.samples\">\n" + "<m:order>\n" + "<m:price>" + getRandom(100, 0.9, true) + "</m:price>\n" + "<m:quantity>" + (int) getRandom(10000, 1.0, true) + "</m:quantity>\n" + "<m:symbol>" + param + "</m:symbol>\n" + "</m:order>\n" + "</m:placeOrder>" + "</soapenv:Body>\n"+ "</soapenv:Envelope>"; public static void main(String[] args) throws NamingException, JMSException, InterruptedException { MBQueuePublisher queueSender = new MBQueuePublisher(); queueSender.sendMessages(); } public void sendMessages() throws NamingException, JMSException, InterruptedException { Properties properties = new Properties(); 
properties.put(Context.INITIAL_CONTEXT_FACTORY, QPID_ICF); properties.put(CF_NAME_PREFIX + CF_NAME, getTCPConnectionURL(userName, password)); properties.put(QUEUE_NAME_PREFIX + queueName, queueName); Properties properties2 = new Properties(); properties2.put(Context.INITIAL_CONTEXT_FACTORY, QPID_ICF); properties2.put(CF_NAME_PREFIX + CF_NAME, getTCPConnectionURL(userName, password)); System.out.println("getTCPConnectionURL(userName,password) = " + getTCPConnectionURL(userName, password)); InitialContext ctx = new InitialContext(properties); // Lookup connection factory QueueConnectionFactory connFactory = (QueueConnectionFactory) ctx.lookup(CF_NAME); QueueConnection queueConnection = connFactory.createQueueConnection(); queueConnection.start(); QueueSession queueSession = queueConnection.createQueueSession(false,QueueSession.AUTO_ACKNOWLEDGE); Queue queue = (Queue)ctx.lookup(queueName); javax.jms.QueueSender queueSender = queueSession.createSender(queue); for(Integer i=1;i<=publishMsgCount;i=i+1){ TextMessage textMessage = queueSession.createTextMessage(MESSAGE_WITH_HEADER); textMessage.setStringProperty("msgID", i.toString()); //Set this property to use for message selectors. 
textMessage.setLongProperty("releaseYear", 1990); queueSender.send(textMessage); System.out.println("Publishing Test Message "+i+"::Published From IP::"+CARBON_DEFAULT_HOSTNAME); //Configure interval between messages //Thread.sleep(5000); } queueSender.close(); queueSession.close(); queueConnection.close(); } public String getTCPConnectionURL(String username, String password) { return new StringBuffer() .append("amqp://").append(username).append(":").append(password) .append("@").append(CARBON_CLIENT_ID) .append("/").append(CARBON_VIRTUAL_HOST_NAME) .append("?brokerlist='tcp://").append(CARBON_DEFAULT_HOSTNAME).append(":").append(CARBON_DEFAULT_PORT).append("'") .toString(); } private static double getRandom(double base, double varience, boolean onlypositive) { double rand = Math.random(); return (base + (rand > 0.5 ? 1 : -1) * varience * base * rand) * (onlypositive ? 1 : rand > 0.5 ? 1 : -1); } }
package com.opengamma.financial.tool; import java.io.Closeable; import java.util.Map; import org.joda.beans.BeanBuilder; import org.joda.beans.BeanDefinition; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectBean; import org.joda.beans.impl.direct.DirectBeanBuilder; import org.joda.beans.impl.direct.DirectMetaBean; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import com.opengamma.core.config.ConfigSource; import com.opengamma.core.exchange.ExchangeSource; import com.opengamma.core.historicaltimeseries.HistoricalTimeSeriesSource; import com.opengamma.core.holiday.HolidaySource; import com.opengamma.core.marketdatasnapshot.MarketDataSnapshotSource; import com.opengamma.core.position.PositionSource; import com.opengamma.core.region.RegionSource; import com.opengamma.core.security.SecuritySource; import com.opengamma.financial.convention.ConventionBundleSource; import com.opengamma.master.config.ConfigMaster; import com.opengamma.master.exchange.ExchangeMaster; import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesLoader; import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster; import com.opengamma.master.holiday.HolidayMaster; import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster; import com.opengamma.master.portfolio.PortfolioMaster; import com.opengamma.master.position.PositionMaster; import com.opengamma.master.region.RegionMaster; import com.opengamma.master.security.SecurityLoader; import com.opengamma.master.security.SecurityMaster; import com.opengamma.util.ReflectionUtils; /** * A standard context that is used to provide components to tools. * <p> * This is populated and passed to tools that need component services. * Each component is optional, although typically all are provided. 
*/
// NOTE(review): ToolContext is a Joda-Bean (@BeanDefinition). The annotated fields
// below and all accessor/meta code further down are generated — regenerate with the
// Joda-Beans tool rather than editing by hand.
@BeanDefinition public class ToolContext extends DirectBean implements Closeable { /** * The manager that created this context. * This is used by the {@link #close()} method. */ @PropertyDefinition(set = "manual") private Object _contextManager; /** * The config master. */ @PropertyDefinition private ConfigMaster _configMaster; /** * The exchange master. */ @PropertyDefinition private ExchangeMaster _exchangeMaster; /** * The holiday master. */ @PropertyDefinition private HolidayMaster _holidayMaster; /** * The region master. */ @PropertyDefinition private RegionMaster _regionMaster; /** * The security master. */ @PropertyDefinition private SecurityMaster _securityMaster; /** * The position master. */ @PropertyDefinition private PositionMaster _positionMaster; /** * The portfolio master. */ @PropertyDefinition private PortfolioMaster _portfolioMaster; /** * The historical time-series master. */ @PropertyDefinition private HistoricalTimeSeriesMaster _historicalTimeSeriesMaster; /** * The snapshot master. */ @PropertyDefinition private MarketDataSnapshotMaster _marketDataSnapshotMaster; /** * The config source. */ @PropertyDefinition private ConfigSource _configSource; /** * The exchange source. */ @PropertyDefinition private ExchangeSource _exchangeSource; /** * The holiday source. */ @PropertyDefinition private HolidaySource _holidaySource; /** * The region source. */ @PropertyDefinition private RegionSource _regionSource; /** * The security source. */ @PropertyDefinition private SecuritySource _securitySource; /** * The position source. */ @PropertyDefinition private PositionSource _positionSource; /** * The historical time-series source. */ @PropertyDefinition private HistoricalTimeSeriesSource _historicalTimeSeriesSource; /** * The snapshot source. */ @PropertyDefinition private MarketDataSnapshotSource _marketDataSnapshotSource; /** * The convention bundle source. 
*/ @PropertyDefinition private ConventionBundleSource _conventionBundleSource; /** * The security loader. */ @PropertyDefinition private SecurityLoader _securityLoader; /** * The time-series loader. */ @PropertyDefinition private HistoricalTimeSeriesLoader _historicalTimeSeriesLoader; /** * Creates an instance. */ public ToolContext() { } /** * Closes the context, freeing any underlying resources. * This calls the manager instance if present. */ @Override public void close() { Object manager = getContextManager(); ReflectionUtils.close(manager); } /** * Sets the tool context, used to free any underlying resources. * <p> * The method {@link ReflectionUtils#isCloseable(Class)} must return true for the object. * Call {@link #close()} to close the manager. * * @param contextManager the context manager. */ public void setContextManager(Object contextManager) { if (ReflectionUtils.isCloseable(contextManager.getClass()) == false) { throw new IllegalArgumentException("Object is not closeable: " + contextManager); } _contextManager = contextManager; }
///CLOVER:OFF
/** * The meta-bean for {@code ToolContext}. 
* @return the meta-bean, not null */ public static ToolContext.Meta meta() { return ToolContext.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(ToolContext.Meta.INSTANCE); } @Override public ToolContext.Meta metaBean() { return ToolContext.Meta.INSTANCE; }
// Generated accessor dispatch: properties are looked up by the hashCode of their name.
@Override protected Object propertyGet(String propertyName, boolean quiet) { switch (propertyName.hashCode()) {
case 295862014:  // contextManager
return getContextManager();
case 10395716:  // configMaster
return getConfigMaster();
case -652001691:  // exchangeMaster
return getExchangeMaster();
case 246258906:  // holidayMaster
return getHolidayMaster();
case -1820969354:  // regionMaster
return getRegionMaster();
case -887218750:  // securityMaster
return getSecurityMaster();
case -1840419605:  // positionMaster
return getPositionMaster();
case -772274742:  // portfolioMaster
return getPortfolioMaster();
case 173967376:  // historicalTimeSeriesMaster
return getHistoricalTimeSeriesMaster();
case 2090650860:  // marketDataSnapshotMaster
return getMarketDataSnapshotMaster();
case 195157501:  // configSource
return getConfigSource();
case -467239906:  // exchangeSource
return getExchangeSource();
case 431020691:  // holidaySource
return getHolidaySource();
case -1636207569:  // regionSource
return getRegionSource();
case -702456965:  // securitySource
return getSecuritySource();
case -1655657820:  // positionSource
return getPositionSource();
case 358729161:  // historicalTimeSeriesSource
return getHistoricalTimeSeriesSource();
case -2019554651:  // marketDataSnapshotSource
return getMarketDataSnapshotSource();
case -1281578674:  // conventionBundleSource
return getConventionBundleSource();
case -903470221:  // securityLoader
return getSecurityLoader();
case 157715905:  // historicalTimeSeriesLoader
return getHistoricalTimeSeriesLoader();
} return super.propertyGet(propertyName, quiet); }
@Override protected void propertySet(String propertyName, Object newValue, boolean quiet) { switch (propertyName.hashCode()) {
case 295862014:  // contextManager
setContextManager((Object) newValue); return;
case 10395716:  // configMaster
setConfigMaster((ConfigMaster) newValue); return;
case -652001691:  // exchangeMaster
setExchangeMaster((ExchangeMaster) newValue); return;
case 246258906:  // holidayMaster
setHolidayMaster((HolidayMaster) newValue); return;
case -1820969354:  // regionMaster
setRegionMaster((RegionMaster) newValue); return;
case -887218750:  // securityMaster
setSecurityMaster((SecurityMaster) newValue); return;
case -1840419605:  // positionMaster
setPositionMaster((PositionMaster) newValue); return;
case -772274742:  // portfolioMaster
setPortfolioMaster((PortfolioMaster) newValue); return;
case 173967376:  // historicalTimeSeriesMaster
setHistoricalTimeSeriesMaster((HistoricalTimeSeriesMaster) newValue); return;
case 2090650860:  // marketDataSnapshotMaster
setMarketDataSnapshotMaster((MarketDataSnapshotMaster) newValue); return;
case 195157501:  // configSource
setConfigSource((ConfigSource) newValue); return;
case -467239906:  // exchangeSource
setExchangeSource((ExchangeSource) newValue); return;
case 431020691:  // holidaySource
setHolidaySource((HolidaySource) newValue); return;
case -1636207569:  // regionSource
setRegionSource((RegionSource) newValue); return;
case -702456965:  // securitySource
setSecuritySource((SecuritySource) newValue); return;
case -1655657820:  // positionSource
setPositionSource((PositionSource) newValue); return;
case 358729161:  // historicalTimeSeriesSource
setHistoricalTimeSeriesSource((HistoricalTimeSeriesSource) newValue); return;
case -2019554651:  // marketDataSnapshotSource
setMarketDataSnapshotSource((MarketDataSnapshotSource) newValue); return;
case -1281578674:  // conventionBundleSource
setConventionBundleSource((ConventionBundleSource) newValue); return;
case -903470221:  // securityLoader
setSecurityLoader((SecurityLoader) newValue); return;
case 157715905:  // historicalTimeSeriesLoader
setHistoricalTimeSeriesLoader((HistoricalTimeSeriesLoader) newValue); return;
} super.propertySet(propertyName, newValue, quiet); }
// Generated equals/hashCode: every bean property participates via JodaBeanUtils.
@Override public boolean equals(Object obj) {
if (obj == this) { return true; }
if (obj != null && obj.getClass() == this.getClass()) {
ToolContext other = (ToolContext) obj;
return JodaBeanUtils.equal(getContextManager(), other.getContextManager()) &&
JodaBeanUtils.equal(getConfigMaster(), other.getConfigMaster()) &&
JodaBeanUtils.equal(getExchangeMaster(), other.getExchangeMaster()) &&
JodaBeanUtils.equal(getHolidayMaster(), other.getHolidayMaster()) &&
JodaBeanUtils.equal(getRegionMaster(), other.getRegionMaster()) &&
JodaBeanUtils.equal(getSecurityMaster(), other.getSecurityMaster()) &&
JodaBeanUtils.equal(getPositionMaster(), other.getPositionMaster()) &&
JodaBeanUtils.equal(getPortfolioMaster(), other.getPortfolioMaster()) &&
JodaBeanUtils.equal(getHistoricalTimeSeriesMaster(), other.getHistoricalTimeSeriesMaster()) &&
JodaBeanUtils.equal(getMarketDataSnapshotMaster(), other.getMarketDataSnapshotMaster()) &&
JodaBeanUtils.equal(getConfigSource(), other.getConfigSource()) &&
JodaBeanUtils.equal(getExchangeSource(), other.getExchangeSource()) &&
JodaBeanUtils.equal(getHolidaySource(), other.getHolidaySource()) &&
JodaBeanUtils.equal(getRegionSource(), other.getRegionSource()) &&
JodaBeanUtils.equal(getSecuritySource(), other.getSecuritySource()) &&
JodaBeanUtils.equal(getPositionSource(), other.getPositionSource()) &&
JodaBeanUtils.equal(getHistoricalTimeSeriesSource(), other.getHistoricalTimeSeriesSource()) &&
JodaBeanUtils.equal(getMarketDataSnapshotSource(), other.getMarketDataSnapshotSource()) &&
JodaBeanUtils.equal(getConventionBundleSource(), other.getConventionBundleSource()) &&
JodaBeanUtils.equal(getSecurityLoader(), other.getSecurityLoader()) &&
JodaBeanUtils.equal(getHistoricalTimeSeriesLoader(), other.getHistoricalTimeSeriesLoader());
}
return false;
}
@Override public int hashCode() {
int hash = getClass().hashCode();
hash += hash * 31 + 
JodaBeanUtils.hashCode(getContextManager());
hash += hash * 31 + JodaBeanUtils.hashCode(getConfigMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getExchangeMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getHolidayMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getRegionMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getSecurityMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getPositionMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getPortfolioMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getHistoricalTimeSeriesMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getMarketDataSnapshotMaster());
hash += hash * 31 + JodaBeanUtils.hashCode(getConfigSource());
hash += hash * 31 + JodaBeanUtils.hashCode(getExchangeSource());
hash += hash * 31 + JodaBeanUtils.hashCode(getHolidaySource());
hash += hash * 31 + JodaBeanUtils.hashCode(getRegionSource());
hash += hash * 31 + JodaBeanUtils.hashCode(getSecuritySource());
hash += hash * 31 + JodaBeanUtils.hashCode(getPositionSource());
hash += hash * 31 + JodaBeanUtils.hashCode(getHistoricalTimeSeriesSource());
hash += hash * 31 + JodaBeanUtils.hashCode(getMarketDataSnapshotSource());
hash += hash * 31 + JodaBeanUtils.hashCode(getConventionBundleSource());
hash += hash * 31 + JodaBeanUtils.hashCode(getSecurityLoader());
hash += hash * 31 + JodaBeanUtils.hashCode(getHistoricalTimeSeriesLoader());
return hash;
}
// Generated property accessors below; change the @PropertyDefinition fields and
// regenerate rather than editing these.
/** * Gets the manager that created this context. * This is used by the {@link #close()} method. * @return the value of the property */ public Object getContextManager() { return _contextManager; }
/** * Gets the {@code contextManager} property. * This is used by the {@link #close()} method. * @return the property, not null */ public final Property<Object> contextManager() { return metaBean().contextManager().createProperty(this); }
/** * Gets the config master. * @return the value of the property */ public ConfigMaster getConfigMaster() { return _configMaster; }
/** * Sets the config master. * @param configMaster the new value of the property */ public void setConfigMaster(ConfigMaster configMaster) { this._configMaster = configMaster; }
/** * Gets the {@code configMaster} property. * @return the property, not null */ public final Property<ConfigMaster> configMaster() { return metaBean().configMaster().createProperty(this); }
/** * Gets the exchange master. * @return the value of the property */ public ExchangeMaster getExchangeMaster() { return _exchangeMaster; }
/** * Sets the exchange master. * @param exchangeMaster the new value of the property */ public void setExchangeMaster(ExchangeMaster exchangeMaster) { this._exchangeMaster = exchangeMaster; }
/** * Gets the {@code exchangeMaster} property. * @return the property, not null */ public final Property<ExchangeMaster> exchangeMaster() { return metaBean().exchangeMaster().createProperty(this); }
/** * Gets the holiday master. * @return the value of the property */ public HolidayMaster getHolidayMaster() { return _holidayMaster; }
/** * Sets the holiday master. * @param holidayMaster the new value of the property */ public void setHolidayMaster(HolidayMaster holidayMaster) { this._holidayMaster = holidayMaster; }
/** * Gets the {@code holidayMaster} property. * @return the property, not null */ public final Property<HolidayMaster> holidayMaster() { return metaBean().holidayMaster().createProperty(this); }
/** * Gets the region master. * @return the value of the property */ public RegionMaster getRegionMaster() { return _regionMaster; }
/** * Sets the region master. * @param regionMaster the new value of the property */ public void setRegionMaster(RegionMaster regionMaster) { this._regionMaster = regionMaster; }
/** * Gets the {@code regionMaster} property. * @return the property, not null */ public final Property<RegionMaster> regionMaster() { return metaBean().regionMaster().createProperty(this); }
/** * Gets the security master. * @return the value of the property */ public SecurityMaster getSecurityMaster() { return _securityMaster; }
/** * Sets the security master. * @param securityMaster the new value of the property */ public void setSecurityMaster(SecurityMaster securityMaster) { this._securityMaster = securityMaster; }
/** * Gets the {@code securityMaster} property. * @return the property, not null */ public final Property<SecurityMaster> securityMaster() { return metaBean().securityMaster().createProperty(this); }
/** * Gets the position master. * @return the value of the property */ public PositionMaster getPositionMaster() { return _positionMaster; }
/** * Sets the position master. * @param positionMaster the new value of the property */ public void setPositionMaster(PositionMaster positionMaster) { this._positionMaster = positionMaster; }
/** * Gets the {@code positionMaster} property. * @return the property, not null */ public final Property<PositionMaster> positionMaster() { return metaBean().positionMaster().createProperty(this); }
/** * Gets the portfolio master. * @return the value of the property */ public PortfolioMaster getPortfolioMaster() { return _portfolioMaster; }
/** * Sets the portfolio master. * @param portfolioMaster the new value of the property */ public void setPortfolioMaster(PortfolioMaster portfolioMaster) { this._portfolioMaster = portfolioMaster; }
/** * Gets the {@code portfolioMaster} property. * @return the property, not null */ public final Property<PortfolioMaster> portfolioMaster() { return metaBean().portfolioMaster().createProperty(this); }
/** * Gets the historical time-series master. * @return the value of the property */ public HistoricalTimeSeriesMaster getHistoricalTimeSeriesMaster() { return _historicalTimeSeriesMaster; }
/** * Sets the historical time-series master. * @param historicalTimeSeriesMaster the new value of the property */ public void setHistoricalTimeSeriesMaster(HistoricalTimeSeriesMaster historicalTimeSeriesMaster) { this._historicalTimeSeriesMaster = historicalTimeSeriesMaster; }
/** * Gets the {@code historicalTimeSeriesMaster} property. * @return the property, not null */ public final Property<HistoricalTimeSeriesMaster> historicalTimeSeriesMaster() { return metaBean().historicalTimeSeriesMaster().createProperty(this); }
/** * Gets the snapshot master. * @return the value of the property */ public MarketDataSnapshotMaster getMarketDataSnapshotMaster() { return _marketDataSnapshotMaster; }
/** * Sets the snapshot master. * @param marketDataSnapshotMaster the new value of the property */ public void setMarketDataSnapshotMaster(MarketDataSnapshotMaster marketDataSnapshotMaster) { this._marketDataSnapshotMaster = marketDataSnapshotMaster; }
/** * Gets the {@code marketDataSnapshotMaster} property. * @return the property, not null */ public final Property<MarketDataSnapshotMaster> marketDataSnapshotMaster() { return metaBean().marketDataSnapshotMaster().createProperty(this); }
/** * Gets the config source. * @return the value of the property */ public ConfigSource getConfigSource() { return _configSource; }
/** * Sets the config source. * @param configSource the new value of the property */ public void setConfigSource(ConfigSource configSource) { this._configSource = configSource; }
/** * Gets the {@code configSource} property. * @return the property, not null */ public final Property<ConfigSource> configSource() { return metaBean().configSource().createProperty(this); }
/** * Gets the exchange source. * @return the value of the property */ public ExchangeSource getExchangeSource() { return _exchangeSource; }
/** * Sets the exchange source. * @param exchangeSource the new value of the property */ public void setExchangeSource(ExchangeSource exchangeSource) { this._exchangeSource = exchangeSource; }
/** * Gets the {@code exchangeSource} property. * @return the property, not null */ public final Property<ExchangeSource> exchangeSource() { return metaBean().exchangeSource().createProperty(this); }
/** * Gets the holiday source. * @return the value of the property */ public HolidaySource getHolidaySource() { return _holidaySource; }
/** * Sets the holiday source. * @param holidaySource the new value of the property */ public void setHolidaySource(HolidaySource holidaySource) { this._holidaySource = holidaySource; }
/** * Gets the {@code holidaySource} property. * @return the property, not null */ public final Property<HolidaySource> holidaySource() { return metaBean().holidaySource().createProperty(this); }
/** * Gets the region source. * @return the value of the property */ public RegionSource getRegionSource() { return _regionSource; }
/** * Sets the region source. * @param regionSource the new value of the property */ public void setRegionSource(RegionSource regionSource) { this._regionSource = regionSource; }
/** * Gets the {@code regionSource} property. * @return the property, not null */ public final Property<RegionSource> regionSource() { return metaBean().regionSource().createProperty(this); }
/** * Gets the security source. * @return the value of the property */ public SecuritySource getSecuritySource() { return _securitySource; }
/** * Sets the security source. * @param securitySource the new value of the property */ public void setSecuritySource(SecuritySource securitySource) { this._securitySource = securitySource; }
/** * Gets the {@code securitySource} property. 
* @return the property, not null */ public final Property<SecuritySource> securitySource() { return metaBean().securitySource().createProperty(this); }
/** * Gets the position source. * @return the value of the property */ public PositionSource getPositionSource() { return _positionSource; }
/** * Sets the position source. * @param positionSource the new value of the property */ public void setPositionSource(PositionSource positionSource) { this._positionSource = positionSource; }
/** * Gets the {@code positionSource} property. * @return the property, not null */ public final Property<PositionSource> positionSource() { return metaBean().positionSource().createProperty(this); }
/** * Gets the historical time-series source. * @return the value of the property */ public HistoricalTimeSeriesSource getHistoricalTimeSeriesSource() { return _historicalTimeSeriesSource; }
/** * Sets the historical time-series source. * @param historicalTimeSeriesSource the new value of the property */ public void setHistoricalTimeSeriesSource(HistoricalTimeSeriesSource historicalTimeSeriesSource) { this._historicalTimeSeriesSource = historicalTimeSeriesSource; }
/** * Gets the {@code historicalTimeSeriesSource} property. * @return the property, not null */ public final Property<HistoricalTimeSeriesSource> historicalTimeSeriesSource() { return metaBean().historicalTimeSeriesSource().createProperty(this); }
/** * Gets the snapshot source. * @return the value of the property */ public MarketDataSnapshotSource getMarketDataSnapshotSource() { return _marketDataSnapshotSource; }
/** * Sets the snapshot source. * @param marketDataSnapshotSource the new value of the property */ public void setMarketDataSnapshotSource(MarketDataSnapshotSource marketDataSnapshotSource) { this._marketDataSnapshotSource = marketDataSnapshotSource; }
/** * Gets the {@code marketDataSnapshotSource} property. * @return the property, not null */ public final Property<MarketDataSnapshotSource> marketDataSnapshotSource() { return metaBean().marketDataSnapshotSource().createProperty(this); }
/** * Gets the convention bundle source. * @return the value of the property */ public ConventionBundleSource getConventionBundleSource() { return _conventionBundleSource; }
/** * Sets the convention bundle source. * @param conventionBundleSource the new value of the property */ public void setConventionBundleSource(ConventionBundleSource conventionBundleSource) { this._conventionBundleSource = conventionBundleSource; }
/** * Gets the {@code conventionBundleSource} property. * @return the property, not null */ public final Property<ConventionBundleSource> conventionBundleSource() { return metaBean().conventionBundleSource().createProperty(this); }
/** * Gets the security loader. * @return the value of the property */ public SecurityLoader getSecurityLoader() { return _securityLoader; }
/** * Sets the security loader. * @param securityLoader the new value of the property */ public void setSecurityLoader(SecurityLoader securityLoader) { this._securityLoader = securityLoader; }
/** * Gets the {@code securityLoader} property. * @return the property, not null */ public final Property<SecurityLoader> securityLoader() { return metaBean().securityLoader().createProperty(this); }
/** * Gets the time-series loader. * @return the value of the property */ public HistoricalTimeSeriesLoader getHistoricalTimeSeriesLoader() { return _historicalTimeSeriesLoader; }
/** * Sets the time-series loader. * @param historicalTimeSeriesLoader the new value of the property */ public void setHistoricalTimeSeriesLoader(HistoricalTimeSeriesLoader historicalTimeSeriesLoader) { this._historicalTimeSeriesLoader = historicalTimeSeriesLoader; }
/** * Gets the {@code historicalTimeSeriesLoader} property. * @return the property, not null */ public final Property<HistoricalTimeSeriesLoader> historicalTimeSeriesLoader() { return metaBean().historicalTimeSeriesLoader().createProperty(this); }
// Generated meta-bean: one MetaProperty per @PropertyDefinition field above.
/** * The meta-bean for {@code ToolContext}. */ public static class Meta extends DirectMetaBean {
/** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta();
/** * The meta-property for the {@code contextManager} property. */ private final MetaProperty<Object> _contextManager = DirectMetaProperty.ofReadWrite( this, "contextManager", ToolContext.class, Object.class);
/** * The meta-property for the {@code configMaster} property. */ private final MetaProperty<ConfigMaster> _configMaster = DirectMetaProperty.ofReadWrite( this, "configMaster", ToolContext.class, ConfigMaster.class);
/** * The meta-property for the {@code exchangeMaster} property. */ private final MetaProperty<ExchangeMaster> _exchangeMaster = DirectMetaProperty.ofReadWrite( this, "exchangeMaster", ToolContext.class, ExchangeMaster.class);
/** * The meta-property for the {@code holidayMaster} property. */ private final MetaProperty<HolidayMaster> _holidayMaster = DirectMetaProperty.ofReadWrite( this, "holidayMaster", ToolContext.class, HolidayMaster.class);
/** * The meta-property for the {@code regionMaster} property. */ private final MetaProperty<RegionMaster> _regionMaster = DirectMetaProperty.ofReadWrite( this, "regionMaster", ToolContext.class, RegionMaster.class);
/** * The meta-property for the {@code securityMaster} property. */ private final MetaProperty<SecurityMaster> _securityMaster = DirectMetaProperty.ofReadWrite( this, "securityMaster", ToolContext.class, SecurityMaster.class);
/** * The meta-property for the {@code positionMaster} property. */ private final MetaProperty<PositionMaster> _positionMaster = DirectMetaProperty.ofReadWrite( this, "positionMaster", ToolContext.class, PositionMaster.class);
/** * The meta-property for the {@code portfolioMaster} property. */ private final MetaProperty<PortfolioMaster> _portfolioMaster = DirectMetaProperty.ofReadWrite( this, "portfolioMaster", ToolContext.class, PortfolioMaster.class);
/** * The meta-property for the {@code historicalTimeSeriesMaster} property. */ private final MetaProperty<HistoricalTimeSeriesMaster> _historicalTimeSeriesMaster = DirectMetaProperty.ofReadWrite( this, "historicalTimeSeriesMaster", ToolContext.class, HistoricalTimeSeriesMaster.class);
/** * The meta-property for the {@code marketDataSnapshotMaster} property. */ private final MetaProperty<MarketDataSnapshotMaster> _marketDataSnapshotMaster = DirectMetaProperty.ofReadWrite( this, "marketDataSnapshotMaster", ToolContext.class, MarketDataSnapshotMaster.class);
/** * The meta-property for the {@code configSource} property. */ private final MetaProperty<ConfigSource> _configSource = DirectMetaProperty.ofReadWrite( this, "configSource", ToolContext.class, ConfigSource.class);
/** * The meta-property for the {@code exchangeSource} property. */ private final MetaProperty<ExchangeSource> _exchangeSource = DirectMetaProperty.ofReadWrite( this, "exchangeSource", ToolContext.class, ExchangeSource.class);
/** * The meta-property for the {@code holidaySource} property. */ private final MetaProperty<HolidaySource> _holidaySource = DirectMetaProperty.ofReadWrite( this, "holidaySource", ToolContext.class, HolidaySource.class);
/** * The meta-property for the {@code regionSource} property. */ private final MetaProperty<RegionSource> _regionSource = DirectMetaProperty.ofReadWrite( this, "regionSource", ToolContext.class, RegionSource.class);
/** * The meta-property for the {@code securitySource} property. */ private final MetaProperty<SecuritySource> _securitySource = DirectMetaProperty.ofReadWrite( this, "securitySource", ToolContext.class, SecuritySource.class);
/** * The meta-property for the {@code positionSource} property. */ private final MetaProperty<PositionSource> _positionSource = DirectMetaProperty.ofReadWrite( this, "positionSource", ToolContext.class, PositionSource.class);
/** * The meta-property for the {@code historicalTimeSeriesSource} property. */ private final MetaProperty<HistoricalTimeSeriesSource> _historicalTimeSeriesSource = DirectMetaProperty.ofReadWrite( this, "historicalTimeSeriesSource", ToolContext.class, HistoricalTimeSeriesSource.class);
/** * The meta-property for the {@code marketDataSnapshotSource} property. */ private final MetaProperty<MarketDataSnapshotSource> _marketDataSnapshotSource = DirectMetaProperty.ofReadWrite( this, "marketDataSnapshotSource", ToolContext.class, MarketDataSnapshotSource.class);
/** * The meta-property for the {@code conventionBundleSource} property. */ private final MetaProperty<ConventionBundleSource> _conventionBundleSource = DirectMetaProperty.ofReadWrite( this, "conventionBundleSource", ToolContext.class, ConventionBundleSource.class);
/** * The meta-property for the {@code securityLoader} property. */ private final MetaProperty<SecurityLoader> _securityLoader = DirectMetaProperty.ofReadWrite( this, "securityLoader", ToolContext.class, SecurityLoader.class);
/** * The meta-property for the {@code historicalTimeSeriesLoader} property. */ private final MetaProperty<HistoricalTimeSeriesLoader> _historicalTimeSeriesLoader = DirectMetaProperty.ofReadWrite( this, "historicalTimeSeriesLoader", ToolContext.class, HistoricalTimeSeriesLoader.class);
/** * The meta-properties. 
*/ private final Map<String, MetaProperty<Object>> _map = new DirectMetaPropertyMap( this, null, "contextManager", "configMaster", "exchangeMaster", "holidayMaster", "regionMaster", "securityMaster", "positionMaster", "portfolioMaster", "historicalTimeSeriesMaster", "marketDataSnapshotMaster", "configSource", "exchangeSource", "holidaySource", "regionSource", "securitySource", "positionSource", "historicalTimeSeriesSource", "marketDataSnapshotSource", "conventionBundleSource", "securityLoader", "historicalTimeSeriesLoader"); /** * Restricted constructor. */ protected Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case 295862014: // contextManager return _contextManager; case 10395716: // configMaster return _configMaster; case -652001691: // exchangeMaster return _exchangeMaster; case 246258906: // holidayMaster return _holidayMaster; case -1820969354: // regionMaster return _regionMaster; case -887218750: // securityMaster return _securityMaster; case -1840419605: // positionMaster return _positionMaster; case -772274742: // portfolioMaster return _portfolioMaster; case 173967376: // historicalTimeSeriesMaster return _historicalTimeSeriesMaster; case 2090650860: // marketDataSnapshotMaster return _marketDataSnapshotMaster; case 195157501: // configSource return _configSource; case -467239906: // exchangeSource return _exchangeSource; case 431020691: // holidaySource return _holidaySource; case -1636207569: // regionSource return _regionSource; case -702456965: // securitySource return _securitySource; case -1655657820: // positionSource return _positionSource; case 358729161: // historicalTimeSeriesSource return _historicalTimeSeriesSource; case -2019554651: // marketDataSnapshotSource return _marketDataSnapshotSource; case -1281578674: // conventionBundleSource return _conventionBundleSource; case -903470221: // securityLoader return _securityLoader; case 157715905: // 
historicalTimeSeriesLoader return _historicalTimeSeriesLoader; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends ToolContext> builder() { return new DirectBeanBuilder<ToolContext>(new ToolContext()); } @Override public Class<? extends ToolContext> beanType() { return ToolContext.class; } @Override public Map<String, MetaProperty<Object>> metaPropertyMap() { return _map; } /** * The meta-property for the {@code contextManager} property. * @return the meta-property, not null */ public final MetaProperty<Object> contextManager() { return _contextManager; } /** * The meta-property for the {@code configMaster} property. * @return the meta-property, not null */ public final MetaProperty<ConfigMaster> configMaster() { return _configMaster; } /** * The meta-property for the {@code exchangeMaster} property. * @return the meta-property, not null */ public final MetaProperty<ExchangeMaster> exchangeMaster() { return _exchangeMaster; } /** * The meta-property for the {@code holidayMaster} property. * @return the meta-property, not null */ public final MetaProperty<HolidayMaster> holidayMaster() { return _holidayMaster; } /** * The meta-property for the {@code regionMaster} property. * @return the meta-property, not null */ public final MetaProperty<RegionMaster> regionMaster() { return _regionMaster; } /** * The meta-property for the {@code securityMaster} property. * @return the meta-property, not null */ public final MetaProperty<SecurityMaster> securityMaster() { return _securityMaster; } /** * The meta-property for the {@code positionMaster} property. * @return the meta-property, not null */ public final MetaProperty<PositionMaster> positionMaster() { return _positionMaster; } /** * The meta-property for the {@code portfolioMaster} property. 
* @return the meta-property, not null */ public final MetaProperty<PortfolioMaster> portfolioMaster() { return _portfolioMaster; } /** * The meta-property for the {@code historicalTimeSeriesMaster} property. * @return the meta-property, not null */ public final MetaProperty<HistoricalTimeSeriesMaster> historicalTimeSeriesMaster() { return _historicalTimeSeriesMaster; } /** * The meta-property for the {@code marketDataSnapshotMaster} property. * @return the meta-property, not null */ public final MetaProperty<MarketDataSnapshotMaster> marketDataSnapshotMaster() { return _marketDataSnapshotMaster; } /** * The meta-property for the {@code configSource} property. * @return the meta-property, not null */ public final MetaProperty<ConfigSource> configSource() { return _configSource; } /** * The meta-property for the {@code exchangeSource} property. * @return the meta-property, not null */ public final MetaProperty<ExchangeSource> exchangeSource() { return _exchangeSource; } /** * The meta-property for the {@code holidaySource} property. * @return the meta-property, not null */ public final MetaProperty<HolidaySource> holidaySource() { return _holidaySource; } /** * The meta-property for the {@code regionSource} property. * @return the meta-property, not null */ public final MetaProperty<RegionSource> regionSource() { return _regionSource; } /** * The meta-property for the {@code securitySource} property. * @return the meta-property, not null */ public final MetaProperty<SecuritySource> securitySource() { return _securitySource; } /** * The meta-property for the {@code positionSource} property. * @return the meta-property, not null */ public final MetaProperty<PositionSource> positionSource() { return _positionSource; } /** * The meta-property for the {@code historicalTimeSeriesSource} property. 
* @return the meta-property, not null */ public final MetaProperty<HistoricalTimeSeriesSource> historicalTimeSeriesSource() { return _historicalTimeSeriesSource; } /** * The meta-property for the {@code marketDataSnapshotSource} property. * @return the meta-property, not null */ public final MetaProperty<MarketDataSnapshotSource> marketDataSnapshotSource() { return _marketDataSnapshotSource; } /** * The meta-property for the {@code conventionBundleSource} property. * @return the meta-property, not null */ public final MetaProperty<ConventionBundleSource> conventionBundleSource() { return _conventionBundleSource; } /** * The meta-property for the {@code securityLoader} property. * @return the meta-property, not null */ public final MetaProperty<SecurityLoader> securityLoader() { return _securityLoader; } /** * The meta-property for the {@code historicalTimeSeriesLoader} property. * @return the meta-property, not null */ public final MetaProperty<HistoricalTimeSeriesLoader> historicalTimeSeriesLoader() { return _historicalTimeSeriesLoader; } } ///CLOVER:ON }
package com.jetbrains.python.debugger;

import com.google.common.collect.Maps;
import com.intellij.ide.util.AbstractTreeClassChooserDialog;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.xdebugger.XDebuggerManager;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.intellij.xdebugger.breakpoints.XBreakpointType;
import com.intellij.xdebugger.breakpoints.ui.XBreakpointCustomPropertiesPanel;
import com.intellij.xdebugger.ui.DebuggerIcons;
import com.jetbrains.python.psi.PyClass;
import com.jetbrains.python.psi.PyUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.lang.ref.WeakReference;
import java.util.HashMap;

/**
 * Breakpoint type for Python exception breakpoints. A breakpoint's properties
 * carry the qualified name of the exception class to break on; the default
 * {@code BaseException} is displayed to the user as "All exceptions".
 */
public class PyExceptionBreakpointType
  extends XBreakpointType<XBreakpoint<PyExceptionBreakpointProperties>, PyExceptionBreakpointProperties> {

  /** Root of the Python exception hierarchy; used as the default exception to break on. */
  private static final String BASE_EXCEPTION = "BaseException";

  public PyExceptionBreakpointType() {
    super("python-exception", "Python Exception Breakpoint", false);
  }

  @NotNull
  @Override
  public Icon getEnabledIcon() {
    return DebuggerIcons.ENABLED_EXCEPTION_BREAKPOINT_ICON;
  }

  @NotNull
  @Override
  public Icon getDisabledIcon() {
    return DebuggerIcons.DISABLED_EXCEPTION_BREAKPOINT_ICON;
  }

  @Override
  public PyExceptionBreakpointProperties createProperties() {
    return new PyExceptionBreakpointProperties(BASE_EXCEPTION);
  }

  @Override
  public boolean isAddBreakpointButtonVisible() {
    return true;
  }

  /**
   * Shows a class-chooser dialog restricted to exception classes and, if the
   * user picks one, registers a new breakpoint for it inside a write action.
   * Returns null when the dialog was cancelled or nothing was selected.
   */
  @Override
  public XBreakpoint<PyExceptionBreakpointProperties> addBreakpoint(final Project project, JComponent parentComponent) {
    final PyClassTreeChooserDialog dialog = new PyClassTreeChooserDialog("Select Exception Class",
                                                                        project,
                                                                        GlobalSearchScope.allScope(project),
                                                                        new PyExceptionCachingFilter(),
                                                                        null);
    dialog.showDialog();

    // getSelected() yields the chosen class only when the dialog was accepted (OK)
    final PyClass pyClass = dialog.getSelected();
    if (pyClass != null) {
      final String qualifiedName = pyClass.getQualifiedName();
      assert qualifiedName != null : "Qualified name of the class shouldn't be null";
      // Breakpoint registration mutates debugger state, so it must run in a write action.
      return ApplicationManager.getApplication().runWriteAction(new Computable<XBreakpoint<PyExceptionBreakpointProperties>>() {
        public XBreakpoint<PyExceptionBreakpointProperties> compute() {
          XBreakpoint<PyExceptionBreakpointProperties> breakpoint =
            XDebuggerManager.getInstance(project).getBreakpointManager()
              .addBreakpoint(PyExceptionBreakpointType.this, new PyExceptionBreakpointProperties(qualifiedName));
          return breakpoint;
        }
      });
    }
    return null;
  }

  /**
   * Filter for the class chooser that accepts only exception classes, caching
   * the answer per PSI element because {@code PyUtil.isExceptionClass} can be
   * expensive when invoked for every tree node.
   */
  private static class PyExceptionCachingFilter implements AbstractTreeClassChooserDialog.Filter<PyClass> {
    // Keyed by the PyClass's hashCode; the WeakReference allows the PSI element to be
    // collected, and identity is re-checked on lookup so a stale entry is recomputed.
    private final HashMap<Integer, Pair<WeakReference<PyClass>, Boolean>> processedElements = Maps.newHashMap();

    public boolean isAccepted(@NotNull final PyClass pyClass) {
      final VirtualFile virtualFile = pyClass.getContainingFile().getVirtualFile();
      if (virtualFile == null) {
        return false;
      }

      final int key = pyClass.hashCode();
      final Pair<WeakReference<PyClass>, Boolean> pair = processedElements.get(key);
      boolean isException;
      if (pair == null || pair.first.get() != pyClass) {
        // Cache miss, or the cached element was collected/replaced: recompute and store.
        isException = PyUtil.isExceptionClass(pyClass);
        processedElements.put(key, Pair.create(new WeakReference<PyClass>(pyClass), isException));
      }
      else {
        isException = pair.second;
      }
      return isException;
    }
  }

  @Override
  public String getBreakpointsDialogHelpTopic() {
    return "reference.dialogs.breakpoints";
  }

  /**
   * Shown in the breakpoints dialog: the exception name, or "All exceptions"
   * for the default {@code BaseException} breakpoint.
   */
  @Override
  public String getDisplayText(XBreakpoint<PyExceptionBreakpointProperties> breakpoint) {
    PyExceptionBreakpointProperties properties = breakpoint.getProperties();
    if (properties != null) {
      String exception = properties.getException();
      if (BASE_EXCEPTION.equals(exception)) {
        return "All exceptions";
      }
      return exception;
    }
    return "";
  }

  @Override
  public XBreakpoint<PyExceptionBreakpointProperties> createDefaultBreakpoint(@NotNull XBreakpointCreator<PyExceptionBreakpointProperties> creator) {
    final XBreakpoint<PyExceptionBreakpointProperties> breakpoint = creator.createBreakpoint(createDefaultBreakpointProperties());
    breakpoint.setEnabled(true);
    return breakpoint;
  }

  /** Default properties: break on BaseException, notify on termination only. */
  private static PyExceptionBreakpointProperties createDefaultBreakpointProperties() {
    PyExceptionBreakpointProperties p = new PyExceptionBreakpointProperties(BASE_EXCEPTION);
    p.setNotifyOnTerminate(true);
    p.setNotifyAlways(false);
    return p;
  }

  @Override
  public XBreakpointCustomPropertiesPanel<XBreakpoint<PyExceptionBreakpointProperties>> createCustomPropertiesPanel() {
    return new PyExceptionBreakpointPropertiesPanel();
  }

  /**
   * "Activation policy" panel with two checkboxes: notify on termination and
   * notify on every raise. The listener guarantees at least one stays selected.
   */
  private static class PyExceptionBreakpointPropertiesPanel
    extends XBreakpointCustomPropertiesPanel<XBreakpoint<PyExceptionBreakpointProperties>> {
    private JCheckBox myNotifyOnTerminateCheckBox;
    private JCheckBox myNotifyAlwaysCheckBox;

    @NotNull
    @Override
    public JComponent getComponent() {
      myNotifyOnTerminateCheckBox = new JCheckBox("On termination");
      myNotifyAlwaysCheckBox = new JCheckBox("On raise at each level of call chain");

      // Stack the two checkboxes vertically, each wrapped in its own BorderLayout panel.
      Box notificationsBox = Box.createVerticalBox();
      JPanel panel = new JPanel(new BorderLayout());
      panel.add(myNotifyOnTerminateCheckBox, BorderLayout.NORTH);
      notificationsBox.add(panel);
      panel = new JPanel(new BorderLayout());
      panel.add(myNotifyAlwaysCheckBox, BorderLayout.NORTH);
      notificationsBox.add(panel);

      // Outer titled panel with small horizontal struts as left/right padding.
      panel = new JPanel(new BorderLayout());
      JPanel innerPanel = new JPanel(new BorderLayout());
      innerPanel.add(notificationsBox, BorderLayout.CENTER);
      innerPanel.add(Box.createHorizontalStrut(3), BorderLayout.WEST);
      innerPanel.add(Box.createHorizontalStrut(3), BorderLayout.EAST);
      panel.add(innerPanel, BorderLayout.NORTH);
      panel.setBorder(IdeBorderFactory.createTitledBorder("Activation policy", false, true, true));

      // Keep at least one policy selected: if the user unchecks the last checked
      // box, re-check the other one.
      ActionListener listener = new ActionListener() {
        public void actionPerformed(ActionEvent e) {
          if (!myNotifyOnTerminateCheckBox.isSelected() && !myNotifyAlwaysCheckBox.isSelected()) {
            Object source = e.getSource();
            JCheckBox toCheck = null;
            if (myNotifyOnTerminateCheckBox.equals(source)) {
              toCheck = myNotifyAlwaysCheckBox;
            }
            else if (myNotifyAlwaysCheckBox.equals(source)) {
              toCheck = myNotifyOnTerminateCheckBox;
            }
            if (toCheck != null) {
              toCheck.setSelected(true);
            }
          }
        }
      };
      myNotifyOnTerminateCheckBox.addActionListener(listener);
      myNotifyAlwaysCheckBox.addActionListener(listener);

      return panel;
    }

    @Override
    public void saveTo(@NotNull XBreakpoint<PyExceptionBreakpointProperties> breakpoint) {
      breakpoint.getProperties().setNotifyAlways(myNotifyAlwaysCheckBox.isSelected());
      breakpoint.getProperties().setNotifyOnTerminate(myNotifyOnTerminateCheckBox.isSelected());
    }

    @Override
    public void loadFrom(@NotNull XBreakpoint<PyExceptionBreakpointProperties> breakpoint) {
      myNotifyAlwaysCheckBox.setSelected(breakpoint.getProperties().isNotifyAlways());
      myNotifyOnTerminateCheckBox.setSelected(breakpoint.getProperties().isNotifyOnTerminate());
    }
  }
}
package com.jetbrains.python.psi.impl;

import com.google.common.collect.Lists;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.tree.TokenSet;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.SmartList;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.toolbox.FP;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * PSI implementation of a Python assignment statement ({@code a = b = c}),
 * providing access to the assignment targets and the assigned value, and a
 * best-effort mapping from each target to the value assigned to it.
 *
 * @author yole
 */
public class PyAssignmentStatementImpl extends PyElementImpl implements PyAssignmentStatement {
  // Cached result of calcTargets(false); invalidated in subtreeChanged().
  private PyExpression[] myTargets;

  public PyAssignmentStatementImpl(ASTNode astNode) {
    super(astNode);
  }

  @Override
  protected void acceptPyVisitor(PyElementVisitor pyVisitor) {
    pyVisitor.visitPyAssignmentStatement(this);
  }

  /**
   * Returns the flattened assignment targets (tuples, parens and stars unwrapped),
   * caching the result until the subtree changes.
   */
  public PyExpression[] getTargets() {
    if (myTargets == null) {
      myTargets = calcTargets(false);
    }
    return myTargets;
  }

  /** Returns the targets exactly as written, without unwrapping tuples/parens/stars. */
  @NotNull
  @Override
  public PyExpression[] getRawTargets() {
    return calcTargets(true);
  }

  /**
   * Collects the expressions on the left of the last '=' sign.
   *
   * @param raw if true, expressions are returned as written; otherwise they are
   *            recursively unwrapped (parens, sequences, star expressions) and
   *            filtered down to assignable expression kinds.
   */
  private PyExpression[] calcTargets(boolean raw) {
    final ASTNode[] eqSigns = getNode().getChildren(TokenSet.create(PyTokenTypes.EQ));
    if (eqSigns.length == 0) {
      return PyExpression.EMPTY_ARRAY;
    }
    // Everything before the last '=' is a target; the rest is the assigned value.
    final ASTNode lastEq = eqSigns[eqSigns.length - 1];
    List<PyExpression> candidates = new ArrayList<PyExpression>();
    ASTNode node = getNode().getFirstChildNode();
    while (node != null && node != lastEq) {
      final PsiElement psi = node.getPsi();
      if (psi instanceof PyExpression) {
        if (raw) {
          candidates.add((PyExpression)psi);
        }
        else {
          addCandidate(candidates, (PyExpression)psi);
        }
      }
      node = node.getTreeNext();
    }
    List<PyExpression> targets = new ArrayList<PyExpression>();
    for (PyExpression expr : candidates) { // only filter out targets
      if (raw ||
          expr instanceof PyTargetExpression ||
          expr instanceof PyReferenceExpression ||
          expr instanceof PySubscriptionExpression ||
          expr instanceof PySliceExpression) {
        targets.add(expr);
      }
    }
    return targets.toArray(new PyExpression[targets.size()]);
  }

  /** Recursively unwraps parens, sequences and star expressions into flat candidates. */
  private static void addCandidate(List<PyExpression> candidates, PyExpression psi) {
    if (psi instanceof PyParenthesizedExpression) {
      addCandidate(candidates, ((PyParenthesizedExpression)psi).getContainedExpression());
    }
    else if (psi instanceof PySequenceExpression) {
      final PyExpression[] pyExpressions = ((PySequenceExpression)psi).getElements();
      for (PyExpression pyExpression : pyExpressions) {
        addCandidate(candidates, pyExpression);
      }
    }
    else if (psi instanceof PyStarExpression) {
      final PyExpression expression = ((PyStarExpression)psi).getExpression();
      if (expression != null) {
        addCandidate(candidates, expression);
      }
    }
    else {
      candidates.add(psi);
    }
  }

  /**
   * @return rightmost expression in statement, which is supposedly the assigned value, or null.
   */
  @Nullable
  public PyExpression getAssignedValue() {
    PsiElement child = getLastChild();
    while (child != null && !(child instanceof PyExpression)) {
      if (child instanceof PsiErrorElement) return null; // incomplete assignment operator can't be analyzed properly, bail out.
      child = child.getPrevSibling();
    }
    return (PyExpression)child;
  }

  /**
   * Maps each assignment target to the value assigned to it, handling chained
   * assignment, tuple packing and unpacking. Empty if the statement has parse errors.
   */
  @NotNull
  public List<Pair<PyExpression, PyExpression>> getTargetsToValuesMapping() {
    List<Pair<PyExpression, PyExpression>> ret = new SmartList<Pair<PyExpression, PyExpression>>();
    if (!PsiTreeUtil.hasErrorElements(this)) { // no parse errors
      PyExpression[] constituents = PsiTreeUtil.getChildrenOfType(this, PyExpression.class); // "a = b = c" -> [a, b, c]
      if (constituents != null && constituents.length > 1) {
        PyExpression rhs = constituents[constituents.length - 1]; // last
        List<PyExpression> lhses = Lists.newArrayList(constituents);
        if (lhses.size() > 0) lhses.remove(lhses.size() - 1); // copy all but last; most often it's one element.
        for (PyExpression lhs : lhses) mapToValues(lhs, rhs, ret);
      }
    }
    return ret;
  }

  /**
   * @return leftmost expression in statement, i.e. the first assignment target,
   * or null if the statement is incomplete.
   */
  @Nullable
  public PyExpression getLeftHandSideExpression() {
    PsiElement child = getFirstChild();
    while (child != null && !(child instanceof PyExpression)) {
      if (child instanceof PsiErrorElement) return null; // incomplete assignment operator can't be analyzed properly, bail out.
      // BUG FIX: previously walked getPrevSibling() from the first child, which can
      // never advance forward; walk forward to find the first expression.
      child = child.getNextSibling();
    }
    return (PyExpression)child;
  }

  /**
   * Records (target, value) pairs for one LHS/RHS combination:
   * single LHS maps directly (or packs the RHS); tuple LHS with single RHS is
   * unpacked via synthesized {@code rhs[i]} expressions; tuple-to-tuple maps piecewise.
   */
  private static void mapToValues(PyExpression lhs, PyExpression rhs, List<Pair<PyExpression, PyExpression>> map) {
    // cast for convenience
    PySequenceExpression lhs_tuple = null;
    PyExpression lhs_one = null;
    if (lhs instanceof PySequenceExpression) lhs_tuple = (PySequenceExpression)lhs;
    else if (lhs != null) lhs_one = lhs;

    PySequenceExpression rhs_tuple = null;
    PyExpression rhs_one = null;
    if (rhs instanceof PyParenthesizedExpression) {
      PyExpression exp = ((PyParenthesizedExpression)rhs).getContainedExpression();
      if (exp instanceof PyTupleExpression) rhs_tuple = (PySequenceExpression)exp;
      else rhs_one = rhs;
    }
    else if (rhs instanceof PySequenceExpression) rhs_tuple = (PySequenceExpression)rhs;
    else if (rhs != null) rhs_one = rhs;

    if (lhs_one != null) { // single LHS, single RHS (direct mapping) or multiple RHS (packing)
      map.add(new Pair<PyExpression, PyExpression>(lhs_one, rhs));
    }
    else if (lhs_tuple != null && rhs_one != null) { // multiple LHS, single RHS: unpacking
      // PY-2648, PY-2649
      PyElementGenerator elementGenerator = PyElementGenerator.getInstance(rhs_one.getProject());
      int counter = 0;
      for (PyExpression tuple_elt : lhs_tuple.getElements()) {
        map.add(new Pair<PyExpression, PyExpression>(
          tuple_elt, elementGenerator.createExpressionFromText(rhs_one.getText() + "[" + counter + "]")));
        ++counter;
      }
    }
    else if (lhs_tuple != null && rhs_tuple != null) { // multiple both sides: piecewise mapping
      map.addAll(FP.zipList(Arrays.asList(lhs_tuple.getElements()), Arrays.asList(rhs_tuple.getElements()), null, null));
    }
  }

  /** Iterates over all names introduced by this statement (flattened targets). */
  @NotNull
  public Iterable<PyElement> iterateNames() {
    return new ArrayList<PyElement>(PyUtil.flattenedParensAndStars(getTargets()));
  }

  public PyElement getElementNamed(final String the_name) {
    // performance: check simple case first
    PyExpression[] targets = getTargets();
    if (targets.length == 1 && targets[0] instanceof PyTargetExpression) {
      PyTargetExpression target = (PyTargetExpression)targets[0];
      return target.getQualifier() == null && the_name.equals(target.getName()) ? target : null;
    }
    return IterHelper.findName(iterateNames(), the_name);
  }

  public boolean mustResolveOutside() {
    return true;  // a = a+1 resolves 'a' outside itself.
  }

  @Override
  public void subtreeChanged() {
    super.subtreeChanged();
    myTargets = null; // drop the cached targets; they will be recomputed on demand
  }
}
package com.jetbrains.python.run;

import com.intellij.execution.configuration.EnvironmentVariablesComponent;
import com.intellij.execution.configurations.*;
import com.intellij.execution.runners.ProgramRunner;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.JDOMExternalizable;
import com.intellij.openapi.util.JDOMExternalizerUtil;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.openapi.util.text.StringUtil;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.sdk.PythonSdkType;
import org.jdom.Element;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Base class for Python run configurations. Holds the common settings —
 * interpreter options, working directory, SDK selection (explicit home vs.
 * module SDK) and environment variables — and their XML (de)serialization.
 *
 * @author Leonid Shalupov
 */
public abstract class AbstractPythonRunConfiguration extends ModuleBasedConfiguration<RunConfigurationModule>
  implements LocatableConfiguration, AbstractPythonRunConfigurationParams {
  private String myInterpreterOptions = "";
  private String myWorkingDirectory = "";
  private String mySdkHome = "";
  private boolean myPassParentEnvs = true;
  private Map<String, String> myEnvs = new HashMap<String, String>();
  // When true, the interpreter is taken from the module's SDK instead of mySdkHome.
  private boolean myUseModuleSdk;

  public AbstractPythonRunConfiguration(final String name, final RunConfigurationModule module, final ConfigurationFactory factory) {
    super(name, module, factory);
  }

  /** Returns the project modules that have a Python SDK configured. */
  public List<Module> getValidModules() {
    final Module[] modules = ModuleManager.getInstance(getProject()).getModules();
    List<Module> result = new ArrayList<Module>();
    for (Module module : modules) {
      if (PythonSdkType.findPythonSdk(module) != null) {
        result.add(module);
      }
    }
    return result;
  }

  @Nullable
  public JDOMExternalizable createRunnerSettings(ConfigurationInfoProvider configurationInfoProvider) {
    return null;
  }

  public SettingsEditor<JDOMExternalizable> getRunnerSettingsEditor(final ProgramRunner runner) {
    return null;
  }

  /**
   * Validates that a usable Python SDK can be resolved for this configuration,
   * reporting a user-facing error otherwise.
   *
   * @throws RuntimeConfigurationException if no valid SDK is configured
   */
  @Override
  public void checkConfiguration() throws RuntimeConfigurationException {
    super.checkConfiguration();

    if (!myUseModuleSdk) {
      if (StringUtil.isEmptyOrSpaces(getSdkHome())) {
        final Sdk projectSdk = ProjectRootManager.getInstance(getProject()).getProjectJdk();
        if (projectSdk == null || !(projectSdk.getSdkType() instanceof PythonSdkType)) {
          throw new RuntimeConfigurationException(PyBundle.message("runcfg.unittest.no_sdk"));
        }
      }
      else if (!PythonSdkType.getInstance().isValidSdkHome(getSdkHome())) {
        throw new RuntimeConfigurationException(PyBundle.message("runcfg.unittest.no_valid_sdk"));
      }
    }
    else {
      Sdk sdk = PythonSdkType.findPythonSdk(getModule());
      if (sdk == null) {
        throw new RuntimeConfigurationException(PyBundle.message("runcfg.unittest.no_module_sdk"));
      }
    }
  }

  /**
   * Returns the configured SDK home, falling back to the project SDK's home
   * when no explicit home is set. May return null if neither is available.
   */
  public String getSdkHome() {
    String sdkHome = mySdkHome;
    if (StringUtil.isEmptyOrSpaces(mySdkHome)) {
      final Sdk projectJdk = ProjectRootManager.getInstance(getProject()).getProjectJdk();
      if (projectJdk != null) {
        sdkHome = projectJdk.getHomePath();
      }
    }
    return sdkHome;
  }

  /**
   * Resolves the interpreter path from either the module SDK or the SDK home.
   *
   * @return the interpreter path, or null when the module has no Python SDK
   *         (checkConfiguration() reports that condition to the user)
   */
  @Nullable
  public String getInterpreterPath() {
    String sdkHome;
    if (myUseModuleSdk) {
      Sdk sdk = PythonSdkType.findPythonSdk(getModule());
      if (sdk == null) {
        // BUG FIX: previously this dereferenced sdk unconditionally and threw an
        // NPE when the module had no Python SDK — the exact case that
        // checkConfiguration() guards against.
        return null;
      }
      sdkHome = sdk.getHomePath();
    }
    else {
      sdkHome = getSdkHome();
    }
    return PythonSdkType.getInterpreterPath(sdkHome);
  }

  public void readExternal(Element element) throws InvalidDataException {
    super.readExternal(element);
    myInterpreterOptions = JDOMExternalizerUtil.readField(element, "INTERPRETER_OPTIONS");
    // NOTE(review): parseBoolean(null) is false, so a missing PASS_PARENT_ENVS field
    // deserializes to false even though the field's default is true — presumably
    // intentional for legacy configurations; verify before changing.
    myPassParentEnvs = Boolean.parseBoolean(JDOMExternalizerUtil.readField(element, "PASS_PARENT_ENVS"));
    mySdkHome = JDOMExternalizerUtil.readField(element, "SDK_HOME");
    myWorkingDirectory = JDOMExternalizerUtil.readField(element, "WORKING_DIRECTORY");
    myUseModuleSdk = Boolean.parseBoolean(JDOMExternalizerUtil.readField(element, "IS_MODULE_SDK"));
    getConfigurationModule().readExternal(element);
    EnvironmentVariablesComponent.readExternal(element, getEnvs());
  }

  public void writeExternal(Element element) throws WriteExternalException {
    super.writeExternal(element);
    JDOMExternalizerUtil.writeField(element, "INTERPRETER_OPTIONS", myInterpreterOptions);
    JDOMExternalizerUtil.writeField(element, "PASS_PARENT_ENVS", Boolean.toString(myPassParentEnvs));
    JDOMExternalizerUtil.writeField(element, "SDK_HOME", mySdkHome);
    JDOMExternalizerUtil.writeField(element, "WORKING_DIRECTORY", myWorkingDirectory);
    JDOMExternalizerUtil.writeField(element, "IS_MODULE_SDK", Boolean.toString(myUseModuleSdk));
    EnvironmentVariablesComponent.writeExternal(element, getEnvs());
    getConfigurationModule().writeExternal(element);
  }

  public Map<String, String> getEnvs() {
    return myEnvs;
  }

  public void setEnvs(final Map<String, String> envs) {
    myEnvs = envs;
  }

  public String getInterpreterOptions() {
    return myInterpreterOptions;
  }

  public void setInterpreterOptions(String interpreterOptions) {
    myInterpreterOptions = interpreterOptions;
  }

  public String getWorkingDirectory() {
    return myWorkingDirectory;
  }

  public void setWorkingDirectory(String workingDirectory) {
    myWorkingDirectory = workingDirectory;
  }

  public void setSdkHome(String sdkHome) {
    mySdkHome = sdkHome;
  }

  public Module getModule() {
    return getConfigurationModule().getModule();
  }

  public boolean isUseModuleSdk() {
    return myUseModuleSdk;
  }

  public void setUseModuleSdk(boolean useModuleSdk) {
    myUseModuleSdk = useModuleSdk;
  }

  public boolean isPassParentEnvs() {
    return myPassParentEnvs;
  }

  public void setPassParentEnvs(boolean passParentEnvs) {
    myPassParentEnvs = passParentEnvs;
  }

  /** Copies all common parameters from one configuration params object to another. */
  public static void copyParams(AbstractPythonRunConfigurationParams source, AbstractPythonRunConfigurationParams target) {
    target.setEnvs(new HashMap<String, String>(source.getEnvs()));
    target.setInterpreterOptions(source.getInterpreterOptions());
    target.setPassParentEnvs(source.isPassParentEnvs());
    target.setSdkHome(source.getSdkHome());
    target.setWorkingDirectory(source.getWorkingDirectory());
    target.setModule(source.getModule());
    target.setUseModuleSdk(source.isUseModuleSdk());
  }
}
package org.eclipse.oomph.setup.internal.core; import org.eclipse.oomph.base.Annotation; import org.eclipse.oomph.base.BaseFactory; import org.eclipse.oomph.base.ModelElement; import org.eclipse.oomph.base.provider.BaseEditUtil; import org.eclipse.oomph.base.util.BaseUtil; import org.eclipse.oomph.internal.setup.SetupPrompter; import org.eclipse.oomph.internal.setup.SetupProperties; import org.eclipse.oomph.p2.P2Factory; import org.eclipse.oomph.p2.Repository; import org.eclipse.oomph.p2.Requirement; import org.eclipse.oomph.preferences.util.PreferencesUtil; import org.eclipse.oomph.setup.AnnotationConstants; import org.eclipse.oomph.setup.AttributeRule; import org.eclipse.oomph.setup.CompoundTask; import org.eclipse.oomph.setup.EAnnotationConstants; import org.eclipse.oomph.setup.EclipseIniTask; import org.eclipse.oomph.setup.Installation; import org.eclipse.oomph.setup.InstallationTask; import org.eclipse.oomph.setup.Product; import org.eclipse.oomph.setup.ProductCatalog; import org.eclipse.oomph.setup.ProductVersion; import org.eclipse.oomph.setup.Project; import org.eclipse.oomph.setup.ProjectCatalog; import org.eclipse.oomph.setup.RedirectionTask; import org.eclipse.oomph.setup.ResourceCopyTask; import org.eclipse.oomph.setup.Scope; import org.eclipse.oomph.setup.ScopeType; import org.eclipse.oomph.setup.SetupFactory; import org.eclipse.oomph.setup.SetupPackage; import org.eclipse.oomph.setup.SetupTask; import org.eclipse.oomph.setup.SetupTaskContainer; import org.eclipse.oomph.setup.SetupTaskContext; import org.eclipse.oomph.setup.Stream; import org.eclipse.oomph.setup.Trigger; import org.eclipse.oomph.setup.User; import org.eclipse.oomph.setup.VariableChoice; import org.eclipse.oomph.setup.VariableTask; import org.eclipse.oomph.setup.VariableType; import org.eclipse.oomph.setup.Workspace; import org.eclipse.oomph.setup.WorkspaceTask; import org.eclipse.oomph.setup.impl.InstallationTaskImpl; import org.eclipse.oomph.setup.internal.core.util.Authenticator; 
import org.eclipse.oomph.setup.internal.core.util.SetupCoreUtil; import org.eclipse.oomph.setup.log.ProgressLog; import org.eclipse.oomph.setup.log.ProgressLogFilter; import org.eclipse.oomph.setup.log.ProgressLogMonitor; import org.eclipse.oomph.setup.p2.P2Task; import org.eclipse.oomph.setup.p2.SetupP2Factory; import org.eclipse.oomph.setup.util.StringExpander; import org.eclipse.oomph.util.CollectionUtil; import org.eclipse.oomph.util.IOUtil; import org.eclipse.oomph.util.ObjectUtil; import org.eclipse.oomph.util.Pair; import org.eclipse.oomph.util.PropertiesUtil; import org.eclipse.oomph.util.ReflectUtil; import org.eclipse.oomph.util.StringUtil; import org.eclipse.oomph.util.UserCallback; import org.eclipse.emf.common.CommonPlugin; import org.eclipse.emf.common.EMFPlugin; import org.eclipse.emf.common.notify.Adapter; import org.eclipse.emf.common.notify.impl.AdapterImpl; import org.eclipse.emf.common.util.BasicEList; import org.eclipse.emf.common.util.ECollections; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.common.util.EMap; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.common.util.UniqueEList; import org.eclipse.emf.ecore.EAnnotation; import org.eclipse.emf.ecore.EAttribute; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EModelElement; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.ecore.EReference; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.ecore.EStructuralFeature.Setting; import org.eclipse.emf.ecore.EcoreFactory; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.plugin.EcorePlugin; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.ecore.resource.Resource.Factory.Registry; import org.eclipse.emf.ecore.resource.Resource.Internal; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.emf.ecore.resource.URIConverter; import org.eclipse.emf.ecore.util.EcoreUtil; 
import org.eclipse.emf.ecore.util.ExtendedMetaData; import org.eclipse.emf.ecore.util.InternalEList; import org.eclipse.emf.edit.provider.AdapterFactoryItemDelegator; import org.eclipse.emf.edit.provider.ComposedAdapterFactory; import org.eclipse.emf.edit.provider.IItemLabelProvider; import org.eclipse.core.resources.IWorkspace; import org.eclipse.core.resources.IWorkspaceDescription; import org.eclipse.core.resources.IWorkspaceRoot; import org.eclipse.core.resources.IWorkspaceRunnable; import org.eclipse.core.resources.IncrementalProjectBuilder; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.core.runtime.OperationCanceledException; import org.eclipse.core.runtime.Status; import org.eclipse.core.runtime.SubProgressMonitor; import org.eclipse.core.runtime.jobs.Job; import org.eclipse.equinox.p2.metadata.VersionRange; import org.osgi.framework.Bundle; import org.osgi.framework.FrameworkUtil; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.io.Reader; import java.io.Writer; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.URL; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; 
/**
 * Performs the setup tasks that were gathered for an installation/workspace/user context.
 *
 * @author Eike Stepper
 */
public class SetupTaskPerformer extends AbstractSetupTaskContext
{
  /**
   * Whether remote debugging of setup task execution is enabled, controlled solely by the
   * {@link SetupProperties#PROP_SETUP_REMOTE_DEBUG} system property.
   * <p>
   * The former {@code || tempTest()} term was removed: it was leftover debug code that declared an
   * unused local, swallowed {@link Throwable}, and force-enabled remote debugging on any machine
   * where the hard-coded developer path {@code C:\develop\cpp22} happened to exist.
   */
  public static final boolean REMOTE_DEBUG = PropertiesUtil.isProperty(SetupProperties.PROP_SETUP_REMOTE_DEBUG);

  /** Marker adapter attached to variables that were created from attribute rules. */
  public static final Adapter RULE_VARIABLE_ADAPTER = new AdapterImpl();

  // Converters for string-to-typed-value conversion, keyed by the value's fully qualified type name.
  private static final Map<String, ValueConverter> CONVERTERS = new HashMap<String, ValueConverter>();

  static
  {
    CONVERTERS.put("java.lang.String", new ValueConverter());
    CONVERTERS.put("org.eclipse.emf.common.util.URI", new URIValueConverter());
  }

  // NOTE(review): SimpleDateFormat is not thread-safe; confirm this shared static formatter is only
  // ever used from a single thread, or guard/replace it.
  private static final SimpleDateFormat DATE_TIME = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

  // Splits an installable-unit spec into its ID part and an optional trailing version range.
  private static final Pattern INSTALLABLE_UNIT_WITH_RANGE_PATTERN = Pattern.compile("([^\\[\\(]*)(.*)");

  // Matches attribute references of the form "@name"; see expandAttributeReferences().
  // NOTE(review): could be final unless it is reassigned elsewhere in the file.
  private static Pattern ATTRIBUTE_REFERENCE_PATTERN = Pattern.compile("@[\\p{Alpha}_][\\p{Alnum}_]*");

  private ProgressLog progress;

  private boolean canceled;

  // The tasks computed/consumed by initTriggeredSetupTasks().
  private EList<SetupTask> triggeredSetupTasks;

  private Map<EObject, EObject> copyMap;

  private EList<SetupTask> neededSetupTasks;

  // Bundles contributing the triggered task implementations; see getBundles().
  private Set<Bundle> bundles = new HashSet<Bundle>();

  /**
   * A list that contains instances of String and/or Pair<String, ProgressLog.Severity>.
   */
  private List<Object> logMessageBuffer;

  private PrintStream logStream;

  private ProgressLogFilter logFilter = new ProgressLogFilter();

  private IProgressMonitor progressMonitor;

  private List<EStructuralFeature.Setting> unresolvedSettings = new ArrayList<EStructuralFeature.Setting>();

  private List<VariableTask> passwordVariables = new ArrayList<VariableTask>();

  private Map<URI, String> passwords = new LinkedHashMap<URI, String>();

  private List<VariableTask> unresolvedVariables = new ArrayList<VariableTask>();

  private List<VariableTask> resolvedVariables = new ArrayList<VariableTask>();

  private List<VariableTask> appliedRuleVariables = new ArrayList<VariableTask>();

  private Map<String, VariableTask> allVariables = new LinkedHashMap<String, VariableTask>();

  private Set<String> undeclaredVariables = new LinkedHashSet<String>();

  // Rule variables created during phase 1, mapped to the attribute each rule governs.
  private Map<VariableTask, EAttribute> ruleAttributes = new LinkedHashMap<VariableTask, EAttribute>();

  // Implied variables whose (empty) value is to be filled in from an attribute rule.
  private Map<VariableTask, EAttribute> ruleBasedAttributes = new LinkedHashMap<VariableTask, EAttribute>();

  private List<AttributeRule> attributeRules = new ArrayList<AttributeRule>();

  private ComposedAdapterFactory adapterFactory = BaseEditUtil.createAdapterFactory();

  private String vmPath;

  private boolean hasSuccessfullyPerformed;

  /**
   * Creates a performer for the first phase: the triggered tasks are computed from the given stream.
   */
  public SetupTaskPerformer(URIConverter uriConverter, SetupPrompter prompter, Trigger trigger, SetupContext setupContext, Stream stream)
  {
    super(uriConverter, prompter, trigger, setupContext);
    initTriggeredSetupTasks(stream, true);
  }

  /**
   * Creates a performer for the second phase: the given, already-computed tasks are post-processed
   * (override merging, reordering, trigger filtering).
   */
  public SetupTaskPerformer(URIConverter uriConverter, SetupPrompter prompter, Trigger trigger, SetupContext setupContext,
      EList<SetupTask> triggeredSetupTasks)
  {
    super(uriConverter, prompter, trigger, setupContext);
    this.triggeredSetupTasks = triggeredSetupTasks;
    initTriggeredSetupTasks(null, false);
  }

  public String getVMPath()
  {
    return vmPath;
  }

  public void setVMPath(String vmPath)
  {
    this.vmPath = vmPath;
  }

  /** Returns whether {@link #hasSuccessfullyPerformed} was set, i.e. a perform run completed. */
  public boolean hasSuccessfullyPerformed()
  {
    return hasSuccessfullyPerformed;
  }
/**
 * Computes and post-processes {@link #triggeredSetupTasks}.
 * <p>
 * In the first phase ({@code firstPhase == true}) all possible tasks for the given stream are
 * gathered and flattened, enablement/implied variables are synthesized, and new attribute rules are
 * prompted and recorded. In both phases, task substitutions are resolved, tasks are reordered, and
 * finally tasks not matching the current trigger are filtered out.
 */
private void initTriggeredSetupTasks(Stream stream, boolean firstPhase) {
  Trigger trigger = getTrigger();
  User user = getUser();

  // Gather all possible tasks.
  // Later this will be filtered to only the triggered tasks.
  // This approach ensures that implicit variables for all tasks (even for untriggered tasks) are created with the right values.
  if (firstPhase) {
    triggeredSetupTasks = new BasicEList<SetupTask>(getSetupTasks(stream));
    bundles.add(SetupCorePlugin.INSTANCE.getBundle());

    // 1. Collect and flatten all tasks
    Set<EClass> eClasses = new LinkedHashSet<EClass>();
    Map<EClass, Set<SetupTask>> instances = new LinkedHashMap<EClass, Set<SetupTask>>();
    Set<String> keys = new LinkedHashSet<String>();
    for (SetupTask setupTask : triggeredSetupTasks) {
      try {
        // Remember the contributing bundle so enablement info can be derived from it later.
        Bundle bundle = FrameworkUtil.getBundle(setupTask.getClass());
        if (bundle != null) {
          bundles.add(bundle);
        }
      } catch (Throwable ex) {
        //$FALL-THROUGH$
      }

      // Index the task under its EClass and under every SetupTask super type.
      EClass eClass = setupTask.eClass();
      CollectionUtil.add(instances, eClass, setupTask);
      eClasses.add(eClass);
      for (EClass eSuperType : eClass.getEAllSuperTypes()) {
        if (SetupPackage.Literals.SETUP_TASK.isSuperTypeOf(eSuperType)) {
          eClasses.add(eSuperType);
          CollectionUtil.add(instances, eSuperType, setupTask);
        }
      }

      if (setupTask instanceof InstallationTask) {
        Resource resource = getInstallation().eResource();
        if (resource != null) {
          URI uri = resource.getURI();
          if (!SetupContext.INSTALLATION_SETUP_FILE_NAME_URI.equals(uri)) {
            InstallationTask installationTask = (InstallationTask)setupTask;
            installationTask.setLocation(uri.trimSegments(4).toFileString());
          }
        }
      } else if (setupTask instanceof WorkspaceTask) {
        Resource resource = getWorkspace().eResource();
        if (resource != null) {
          URI uri = resource.getURI();
          if (!SetupContext.WORKSPACE_SETUP_FILE_NAME_URI.equals(uri)) {
            WorkspaceTask workspaceTask = (WorkspaceTask)setupTask;
            workspaceTask.setLocation(uri.trimSegments(4).toFileString());
          }
        }
      } else if (setupTask instanceof VariableTask) {
        VariableTask variable = (VariableTask)setupTask;
        keys.add(variable.getName());
      }
    }

    for (EClass eClass : eClasses) {
      // 1.1. Collect enablement info to synthesize P2Tasks that are placed at the head of the task list
      EList<SetupTask> enablementTasks = createEnablementTasks(eClass, true);
      if (enablementTasks != null) {
        triggeredSetupTasks.addAll(0, enablementTasks);
      }

      for (EAnnotation eAnnotation : eClass.getEAnnotations()) {
        String source = eAnnotation.getSource();
        if (EAnnotationConstants.ANNOTATION_VARIABLE.equals(source)) {
          triggeredSetupTasks.add(0, createImpliedVariable(eAnnotation));
        }
      }

      if (user.eResource() != null) {
        // 1.2. Determine whether new rules need to be created
        for (EAttribute eAttribute : eClass.getEAttributes()) {
          if (eAttribute.getEType().getInstanceClass() == String.class) {
            EAnnotation eAnnotation = eAttribute.getEAnnotation(EAnnotationConstants.ANNOTATION_VARIABLE);
            if (eAnnotation != null && eAttribute.getEAnnotation(EAnnotationConstants.ANNOTATION_RULE_VARIABLE) != null) {
              AttributeRule attributeRule = getAttributeRule(eAttribute, true);
              if (attributeRule == null) {
                // Determine if there exists an actual instance that really needs the rule.
                String attributeName = ExtendedMetaData.INSTANCE.getName(eAttribute);
                for (SetupTask setupTask : instances.get(eAttribute.getEContainingClass())) {
                  // If there is an instance with an empty value.
                  Object value = setupTask.eGet(eAttribute);
                  if (value == null || "".equals(value)) {
                    // If that instance has an ID and hence will create an implied variable and that variable name isn't already defined by an existing
                    // context variable.
                    String id = setupTask.getID();
                    if (!StringUtil.isEmpty(id) && !keys.contains(id + "." + attributeName)) {
                      EMap<String, String> details = eAnnotation.getDetails();
                      // TODO class name/attribute name pairs might not be unique.
                      String variableName = getAttributeRuleVariableName(eAttribute);
                      VariableTask variable = SetupFactory.eINSTANCE.createVariableTask();
                      annotateAttributeRuleVariable(variable, eAttribute);
                      variable.setName(variableName);
                      variable.setType(VariableType.get(details.get("type")));
                      variable.setLabel(details.get("label"));
                      variable.setDescription(details.get("description"));
                      variable.eAdapters().add(RULE_VARIABLE_ADAPTER);
                      for (EAnnotation subAnnotation : eAnnotation.getEAnnotations()) {
                        if ("Choice".equals(subAnnotation.getSource())) {
                          EMap<String, String> subDetails = subAnnotation.getDetails();
                          VariableChoice choice = SetupFactory.eINSTANCE.createVariableChoice();
                          choice.setValue(subDetails.get("value"));
                          choice.setLabel(subDetails.get("label"));
                          variable.getChoices().add(choice);
                        }
                      }
                      unresolvedVariables.add(variable);
                      ruleAttributes.put(variable, eAttribute);
                    }
                  }
                  // NOTE(review): unconditional break -- only the first indexed instance of this
                  // EClass is ever inspected; confirm that is intended.
                  break;
                }
              }
            }
          }
        }
      }
      // NOTE(review): the brace structure below follows the source exactly as received; a closing
      // brace appears to have been lost in transit, as these prompt statements read as if they
      // belong after the enclosing loop. Left untouched rather than guessed at.

      // 1.2.1. Prompt new rules and store them in User scope
      SetupPrompter prompter = getPrompter();
      prompter.promptVariables(Collections.singletonList(this));
      recordRules(attributeRules, false);
    }

    if (!triggeredSetupTasks.isEmpty()) {
      Map<SetupTask, SetupTask> substitutions = getSubstitutions(triggeredSetupTasks);

      // Shorten the paths through the substitutions map
      Map<SetupTask, SetupTask> directSubstitutions = new HashMap<SetupTask, SetupTask>(substitutions);
      for (Map.Entry<SetupTask, SetupTask> entry : directSubstitutions.entrySet()) {
        SetupTask task = entry.getValue();
        // Follow the override chain to its end so each entry maps directly to its final override.
        for (;;) {
          SetupTask overridingTask = directSubstitutions.get(task);
          if (overridingTask == null) {
            break;
          }
          entry.setValue(overridingTask);
          task = overridingTask;
        }
      }

      if (!firstPhase) {
        // Perform override merging.
        Map<SetupTask, SetupTask> overrides = new HashMap<SetupTask, SetupTask>();
        for (Map.Entry<SetupTask, SetupTask> entry : substitutions.entrySet()) {
          SetupTask overriddenSetupTask = entry.getKey();
          SetupTask overridingSetupTask = entry.getValue();
          overrides.put(overriddenSetupTask, overridingSetupTask);
          overridingSetupTask.overrideFor(overriddenSetupTask);
        }

        // Merge structurally equal tasks that the substitution map did not already cover.
        for (int i = triggeredSetupTasks.size(); --i >= 0;) {
          SetupTask setupTask = triggeredSetupTasks.get(i);
          if (!directSubstitutions.containsKey(setupTask)) {
            for (int j = i; --j >= 0;) {
              SetupTask otherSetupTask = triggeredSetupTasks.get(j);
              // We must ignore specific references that are bound to be different, but don't affect what the task actually does.
              EcoreUtil.EqualityHelper equalityHelper = new EcoreUtil.EqualityHelper() {
                private static final long serialVersionUID = 1L;

                @Override
                protected boolean haveEqualReference(EObject eObject1, EObject eObject2, EReference reference) {
                  if (reference == SetupPackage.Literals.SETUP_TASK__PREDECESSORS || reference == SetupPackage.Literals.SETUP_TASK__SUCCESSORS
                      || reference == SetupPackage.Literals.SETUP_TASK__RESTRICTIONS) {
                    return true;
                  }

                  return super.haveEqualReference(eObject1, eObject2, reference);
                }
              };

              if (equalityHelper.equals(setupTask, otherSetupTask)) {
                directSubstitutions.put(otherSetupTask, setupTask);
                overrides.put(setupTask, otherSetupTask);
                setupTask.overrideFor(otherSetupTask);
              }
            }
          }
        }

        EList<SetupTask> remainingSetupTasks = new UniqueEList.FastCompare<SetupTask>();
        for (SetupTask setupTask : triggeredSetupTasks) {
          SetupTask overridingSetupTask = directSubstitutions.get(setupTask);
          if (overridingSetupTask != null) {
            remainingSetupTasks.add(overridingSetupTask);
          } else {
            remainingSetupTasks.add(setupTask);
          }
        }

        // Replace predecessor references to refer to the direct substitution.
        for (SetupTask setupTask : remainingSetupTasks) {
          EList<SetupTask> predecessors = setupTask.getPredecessors();
          for (ListIterator<SetupTask> it = predecessors.listIterator(); it.hasNext();) {
            SetupTask predecessor = it.next();
            SetupTask overridingSetupTask = directSubstitutions.get(predecessor);
            if (overridingSetupTask != null) {
              if (predecessors.contains(overridingSetupTask) || overridingSetupTask.requires(predecessor)) {
                it.remove();
              } else {
                it.set(overridingSetupTask);
              }
            }
          }
        }

        ECollections.setEList(triggeredSetupTasks, remainingSetupTasks);
      } else {
        // 2.2. Create copy based on overrides
        copySetup(stream, triggeredSetupTasks, substitutions, directSubstitutions);

        // 2.4. Build variable map in the context
        Map<String, VariableTask> explicitKeys = new HashMap<String, VariableTask>();
        for (SetupTask setupTask : triggeredSetupTasks) {
          if (setupTask instanceof VariableTask) {
            VariableTask variableTask = (VariableTask)setupTask;
            String name = variableTask.getName();
            explicitKeys.put(name, variableTask);
          }
        }

        // 2.3. Create implied variables for annotated task attributes
        for (ListIterator<SetupTask> it = triggeredSetupTasks.listIterator(); it.hasNext();) {
          SetupTask setupTask = it.next();
          String id = setupTask.getID();
          if (!StringUtil.isEmpty(id)) {
            EClass eClass = setupTask.eClass();
            for (EAttribute eAttribute : eClass.getEAllAttributes()) {
              // Every single-valued string attribute (other than the ID itself) of a task with an
              // ID gets an implied variable named "<id>.<attributeName>".
              if (eAttribute != SetupPackage.Literals.SETUP_TASK__ID && !eAttribute.isMany()
                  && eAttribute.getEType().getInstanceClass() == String.class) {
                String variableName = id + "." + ExtendedMetaData.INSTANCE.getName(eAttribute);
                String value = (String)setupTask.eGet(eAttribute);
                String variableReference = getVariableReference(variableName);
                if (explicitKeys.containsKey(variableName)) {
                  if (StringUtil.isEmpty(value) || setupTask instanceof WorkspaceTask) {
                    EAnnotation variableAnnotation = eAttribute.getEAnnotation(EAnnotationConstants.ANNOTATION_VARIABLE);
                    if (variableAnnotation != null) {
                      setupTask.eSet(eAttribute, variableReference);
                    }
                  }
                } else {
                  VariableTask variable = SetupFactory.eINSTANCE.createVariableTask();
                  annotateImpliedVariable(variable, setupTask, eAttribute);
                  variable.setName(variableName);

                  // Add the new variable to the same containment list that holds the task.
                  EObject eContainer = setupTask.eContainer();
                  EReference eContainmentFeature = setupTask.eContainmentFeature();

                  @SuppressWarnings("unchecked")
                  EList<SetupTask> list = (EList<SetupTask>)eContainer.eGet(eContainmentFeature);
                  list.add(variable);

                  if (StringUtil.isEmpty(value)) {
                    EAnnotation variableAnnotation = eAttribute.getEAnnotation(EAnnotationConstants.ANNOTATION_VARIABLE);
                    if (variableAnnotation != null) {
                      ruleBasedAttributes.put(variable, eAttribute);
                      populateImpliedVariable(setupTask, eAttribute, variableAnnotation, variable);
                      setupTask.eSet(eAttribute, variableReference);
                    }
                  } else {
                    EAnnotation variableAnnotation = eAttribute.getEAnnotation(EAnnotationConstants.ANNOTATION_VARIABLE);
                    if (variableAnnotation != null) {
                      populateImpliedVariable(setupTask, null, variableAnnotation, variable);
                      setupTask.eSet(eAttribute, variableReference);
                    }
                    variable.setValue(value);
                  }

                  it.add(variable);
                  explicitKeys.put(variableName, variable);

                  for (EAnnotation ruleVariableAnnotation : eAttribute.getEAnnotations()) {
                    if (EAnnotationConstants.ANNOTATION_RULE_VARIABLE.equals(ruleVariableAnnotation.getSource())) {
                      EMap<String, String> details = ruleVariableAnnotation.getDetails();
                      VariableTask ruleVariable = SetupFactory.eINSTANCE.createVariableTask();
                      annotateRuleVariable(ruleVariable, variable);
                      String ruleVariableName = details.get(EAnnotationConstants.KEY_NAME);
                      ruleVariable.setName(ruleVariableName);
                      ruleVariable.setStorageURI(BaseFactory.eINSTANCE.createURI(details.get(EAnnotationConstants.KEY_STORAGE_URI)));
                      populateImpliedVariable(setupTask, null, ruleVariableAnnotation, ruleVariable);
                      it.add(ruleVariable);
                      explicitKeys.put(ruleVariableName, ruleVariable);
                    }
                  }

                  // If the variable is a self reference.
                  EAnnotation variableAnnotation = eAttribute.getEAnnotation(EAnnotationConstants.ANNOTATION_VARIABLE);
                  if (variableAnnotation != null && variableReference.equals(variable.getValue())) {
                    EMap<String, String> details = variableAnnotation.getDetails();
                    VariableTask explicitVariable = SetupFactory.eINSTANCE.createVariableTask();
                    String explicitVariableName = variableName + ".explicit";
                    explicitVariable.setName(explicitVariableName);
                    explicitVariable.setStorageURI(null);
                    explicitVariable.setType(VariableType.get(details.get(EAnnotationConstants.KEY_EXPLICIT_TYPE)));
                    explicitVariable.setLabel(expandAttributeReferences(setupTask, details.get(EAnnotationConstants.KEY_EXPLICIT_LABEL)));
                    explicitVariable.setDescription(expandAttributeReferences(setupTask, details.get(EAnnotationConstants.KEY_EXPLICIT_DESCRIPTION)));
                    it.add(explicitVariable);
                    explicitKeys.put(explicitVariableName, explicitVariable);
                    annotateRuleVariable(explicitVariable, variable);
                    variable.setValue(getVariableReference(explicitVariableName));
                  }
                }
              }
            }
          }
        // NOTE(review): the source as received appears to be missing one closing brace in this
        // region; the braces below reproduce it exactly rather than guessing a repair.

        for (SetupTask setupTask : triggeredSetupTasks) {
          handleActiveAnnotations(setupTask, explicitKeys);
        }

        // 2.4. Build variable map in the context
        Set<String> keys = new LinkedHashSet<String>();
        boolean fullPromptUser = isFullPromptUser(user);
        VariableAdapter variableAdapter = new VariableAdapter(this);
        for (SetupTask setupTask : triggeredSetupTasks) {
          if (setupTask instanceof VariableTask) {
            VariableTask variable = (VariableTask)setupTask;
            variable.eAdapters().add(variableAdapter);

            String name = variable.getName();
            keys.add(name);

            String value = variable.getValue();

            if (variable.getType() == VariableType.PASSWORD) {
              passwordVariables.add(variable);
              if (StringUtil.isEmpty(value) && !fullPromptUser) {
                // Try to recover a previously stored password from the preference storage.
                URI storageURI = getEffectiveStorage(variable);
                if (storageURI != null && PreferencesUtil.PREFERENCE_SCHEME.equals(storageURI.scheme())) {
                  URIConverter uriConverter = getURIConverter();
                  if (uriConverter.exists(storageURI, null)) {
                    try {
                      Reader reader = ((URIConverter.ReadableInputStream)uriConverter.createInputStream(storageURI)).asReader();
                      StringBuilder result = new StringBuilder();
                      for (int character = reader.read(); character != -1; character = reader.read()) {
                        result.append((char)character);
                      }
                      reader.close();
                      value = PreferencesUtil.encrypt(result.toString());
                    } catch (IOException ex) {
                      SetupCorePlugin.INSTANCE.log(ex);
                    }
                  }
                }
              }
            }

            put(name, value);
            allVariables.put(name, variable);
          }
        }

        expandVariableKeys(keys);

        // 2.8. Expand task attributes in situ
        expandStrings(triggeredSetupTasks);

        flattenPredecessorsAndSuccessors(triggeredSetupTasks);
        propagateRestrictionsPredecessorsAndSuccessors(triggeredSetupTasks);
      }

      reorderSetupTasks(triggeredSetupTasks);

      // Filter out the tasks that aren't triggered.
      if (trigger != null) {
        for (Iterator<SetupTask> it = triggeredSetupTasks.iterator(); it.hasNext();) {
          if (!it.next().getTriggers().contains(trigger)) {
            it.remove();
          }
        }
      }

      // Variables are consumed here: resolved ones are recorded and all are removed from the task list.
      for (Iterator<SetupTask> it = triggeredSetupTasks.iterator(); it.hasNext();) {
        SetupTask setupTask = it.next();
        setupTask.consolidate();
        if (setupTask instanceof VariableTask) {
          VariableTask contextVariableTask = (VariableTask)setupTask;
          if (!unresolvedVariables.contains(contextVariableTask)) {
            resolvedVariables.add(contextVariableTask);
          }

          it.remove();
        }
      }
    }
  }

/**
 * Returns the name used for the rule variable of the given attribute,
 * of the form {@code @<id>.<containingClass>.<attributeName>}.
 * <p>
 * NOTE(review): this concatenates {@code eAttribute.getEContainingClass()} (the EClass object,
 * via toString()) rather than its name -- confirm {@code getName()} wasn't intended.
 */
private String getAttributeRuleVariableName(EAttribute eAttribute) {
  String attributeName = ExtendedMetaData.INSTANCE.getName(eAttribute);
  String variableName = "@<id>." + eAttribute.getEContainingClass() + "." + attributeName;
  return variableName;
}

/**
 * Processes the "InheritedChoices" and "InducedChoices" annotations of the given task, copying
 * choices from the referenced variables (with {@code @{key}} placeholder substitution) and, for
 * induced choices, redirecting the annotated target feature to a variable reference.
 */
private void handleActiveAnnotations(SetupTask setupTask, Map<String, VariableTask> explicitKeys) {
  for (Annotation annotation : setupTask.getAnnotations()) {
    String source = annotation.getSource();
    if (AnnotationConstants.ANNOTATION_INHERITED_CHOICES.equals(source) && setupTask instanceof VariableTask) {
      VariableTask variableTask = (VariableTask)setupTask;
      EList<VariableChoice> choices = variableTask.getChoices();

      EMap<String, String> details = annotation.getDetails();
      String inherit = details.get(AnnotationConstants.KEY_INHERIT);
      if (inherit != null) {
        for (String variableName : inherit.trim().split("\\s")) {
          VariableTask referencedVariableTask = explicitKeys.get(variableName);
          if (referencedVariableTask != null) {
            for (VariableChoice variableChoice : referencedVariableTask.getChoices()) {
              String value = variableChoice.getValue();
              String label = variableChoice.getLabel();
              // Substitute each "@{detailKey}" placeholder with the annotation's detail value.
              for (Map.Entry<String, String> detail : annotation.getDetails().entrySet()) {
                String detailKey = detail.getKey();
                String detailValue = detail.getValue();
                if (detailKey != null && !AnnotationConstants.KEY_INHERIT.equals(detailKey) && detailValue != null) {
                  String target = "@{" + detailKey + "}";
                  if (value != null) {
                    value = value.replace(target, detailValue);
                  }

                  if (label != null) {
                    label = label.replace(target, detailValue);
                  }
                }
              }

              VariableChoice choice = SetupFactory.eINSTANCE.createVariableChoice();
              choice.setValue(value);
              choice.setLabel(label);
              choices.add(choice);
            }
          }
        }
      }
    } else if (AnnotationConstants.ANNOTATION_INDUCED_CHOICES.equals(source)) {
      String id = setupTask.getID();
      if (id != null) {
        EMap<String, String> details = annotation.getDetails();
        String inherit = details.get(AnnotationConstants.KEY_INHERIT);
        String target = details.get(AnnotationConstants.KEY_TARGET);
        if (target != null && inherit != null) {
          EStructuralFeature eStructuralFeature = BaseUtil.getFeature(setupTask.eClass(), target);
          if (eStructuralFeature.getEType().getInstanceClass() == String.class) {
            VariableTask variableTask = explicitKeys.get(id + "." + target);
            if (variableTask != null) {
              EList<VariableChoice> targetChoices = variableTask.getChoices();
              if (!targetChoices.isEmpty()) {
                if (!StringUtil.isEmpty(variableTask.getValue())) {
                  setupTask.eSet(eStructuralFeature, getVariableReference(variableTask.getName()));
                }
              } else {
                EList<VariableChoice> choices = targetChoices;

                // Build the placeholder substitution map from the annotation details...
                Map<String, String> substitutions = new LinkedHashMap<String, String>();
                for (Map.Entry<String, String> detail : annotation.getDetails().entrySet()) {
                  String detailKey = detail.getKey();
                  String detailValue = detail.getValue();
                  if (detailKey != null && !AnnotationConstants.KEY_INHERIT.equals(detailKey) && !AnnotationConstants.KEY_TARGET.equals(detailKey)
                      && !AnnotationConstants.KEY_LABEL.equals(detailKey) && !AnnotationConstants.KEY_DESCRIPTION.equals(detailKey) && detailValue != null) {
                    // A detail value starting with '@' refers to another string feature of the task.
                    if (detailValue.startsWith("@")) {
                      String featureName = detailValue.substring(1);
                      EStructuralFeature referencedEStructuralFeature = BaseUtil.getFeature(setupTask.eClass(), featureName);
                      if (referencedEStructuralFeature != null && referencedEStructuralFeature.getEType().getInstanceClass() == String.class
                          && !referencedEStructuralFeature.isMany()) {
                        Object value = setupTask.eGet(referencedEStructuralFeature);
                        if (value != null) {
                          detailValue = value.toString();
                        }
                      }
                    }

                    substitutions.put("@{" + detailKey + "}", detailValue);
                  }
                }

                // ...and from every non-empty single-valued string attribute of the task.
                for (EAttribute eAttribute : setupTask.eClass().getEAllAttributes()) {
                  if (eAttribute.getEType().getInstanceClass() == String.class && !eAttribute.isMany()) {
                    String value = (String)setupTask.eGet(eAttribute);
                    if (!StringUtil.isEmpty(value)) {
                      substitutions.put("@{" + ExtendedMetaData.INSTANCE.getName(eAttribute) + "}", value);
                    }
                  }
                }

                String inheritedLabel = null;
                String inheritedDescription = null;

                for (String variableName : inherit.trim().split("\\s")) {
                  VariableTask referencedVariableTask = explicitKeys.get(variableName);
                  if (referencedVariableTask != null) {
                    if (inheritedLabel == null) {
                      inheritedLabel = referencedVariableTask.getLabel();
                    }

                    if (inheritedDescription == null) {
                      inheritedDescription = referencedVariableTask.getDescription();
                    }

                    for (VariableChoice variableChoice : referencedVariableTask.getChoices()) {
                      String value = variableChoice.getValue();
                      String label = variableChoice.getLabel();
                      for (Map.Entry<String, String> detail : substitutions.entrySet()) {
                        String detailKey = detail.getKey();
                        String detailValue = detail.getValue();
                        if (value != null) {
                          value = value.replace(detailKey, detailValue);
                        }

                        if (label != null) {
                          label = label.replace(detailKey, detailValue);
                        }
                      }

                      VariableChoice choice = SetupFactory.eINSTANCE.createVariableChoice();
                      choice.setValue(value);
                      choice.setLabel(label);
                      choices.add(choice);
                    }
                  }
                }

                if (ObjectUtil.equals(setupTask.eGet(eStructuralFeature), variableTask.getValue())) {
                  String explicitLabel = details.get(AnnotationConstants.KEY_LABEL);
                  if (explicitLabel == null) {
                    explicitLabel = inheritedLabel;
                  }

                  String explicitDescription = details.get(AnnotationConstants.KEY_DESCRIPTION);
                  if (explicitDescription == null) {
                    explicitDescription = inheritedDescription;
                  }

                  variableTask.setValue(null);
                  variableTask.setLabel(explicitLabel);
                  variableTask.setDescription(explicitDescription);
                }

                setupTask.eSet(eStructuralFeature, getVariableReference(variableTask.getName()));
              }
            }
          }
        }
      }
    }
  }
}

/**
 * For each unresolved variable that now has a value and corresponds to a rule attribute, records
 * (or updates) an {@link AttributeRule} in the given list; optionally removes the variable from
 * {@link #unresolvedVariables}.
 * <p>
 * NOTE(review): when an existing rule is found it appears to be re-added to the list as well --
 * confirm the duplicate add is intended.
 */
private void recordRules(List<AttributeRule> attributeRules, boolean remove) {
  for (Iterator<VariableTask> it = unresolvedVariables.iterator(); it.hasNext();) {
    VariableTask variable = it.next();
    String value = variable.getValue();
    if (value != null) {
      String variableName = variable.getName();
      for (Map.Entry<VariableTask, EAttribute> entry : ruleAttributes.entrySet()) {
        if (variableName.equals(entry.getKey().getName())) {
          URI uri = getAttributeURI(entry.getValue());

          AttributeRule attributeRule = null;
          for (AttributeRule existingAttributeRule : attributeRules) {
            if (uri.equals(existingAttributeRule.getAttributeURI())) {
              attributeRule = existingAttributeRule;
              break;
            }
          }

          if (attributeRule == null) {
            attributeRule = SetupFactory.eINSTANCE.createAttributeRule();
            attributeRule.setAttributeURI(uri);
          }

          attributeRule.setValue(value);
          attributeRules.add(attributeRule);

          if (remove) {
            it.remove();
          }

          break;
        }
      }
    }
  }
}

/** Tags the variable as having been created for the rule of the given attribute. */
private void annotateAttributeRuleVariable(VariableTask variable, EAttribute eAttribute) {
  annotate(variable, "AttributeRuleVariable", eAttribute);
}

/** Returns the attribute recorded by {@link #annotateAttributeRuleVariable}, or null. */
public EAttribute getAttributeRuleVariableData(VariableTask variable) {
  Annotation annotation = variable.getAnnotation("AttributeRuleVariable");
  if (annotation != null) {
    return (EAttribute)annotation.getReferences().get(0);
  }

  return null;
}

/** Tags the variable as implied by the given task attribute. */
private void annotateImpliedVariable(VariableTask variable, SetupTask setupTask, EAttribute eAttribute) {
  annotate(variable, "ImpliedVariable", setupTask, eAttribute);
}

/** Returns the task/attribute setting recorded by {@link #annotateImpliedVariable}, or null. */
public EStructuralFeature.Setting getImpliedVariableData(VariableTask variable) {
  Annotation annotation = variable.getAnnotation("ImpliedVariable");
  if (annotation != null) {
    EList<EObject> references = annotation.getReferences();
    InternalEObject setupTask = (InternalEObject)references.get(0);
    return setupTask.eSetting((EStructuralFeature)references.get(1));
  }

  return null;
}

/** Tags the variable as a rule variable of the given dependent variable. */
private void annotateRuleVariable(VariableTask variable, VariableTask dependentVariable) {
  annotate(variable, "RuleVariable", dependentVariable);
}

/** Returns the dependent variable recorded by {@link #annotateRuleVariable}, or null. */
public VariableTask getRuleVariableData(VariableTask variable) {
  Annotation annotation = variable.getAnnotation("RuleVariable");
  if (annotation != null) {
    return (VariableTask)annotation.getReferences().get(0);
  }

  return null;
}

/** Attaches an Annotation with the given source and references to the model element. */
private void annotate(ModelElement modelElement, String source, EObject... references) {
  Annotation annotation = BaseFactory.eINSTANCE.createAnnotation();
  annotation.setSource(source);
  annotation.getReferences().addAll(Arrays.asList(references));
  modelElement.getAnnotations().add(annotation);
}

/** Creates a variable from a class-level "Variable" EAnnotation. */
private VariableTask createImpliedVariable(EAnnotation eAnnotation) {
  EMap<String, String> details = eAnnotation.getDetails();

  VariableTask variable = SetupFactory.eINSTANCE.createVariableTask();
  variable.setName(details.get(EAnnotationConstants.KEY_NAME));
  populateImpliedVariable(null, null, eAnnotation, variable);

  return variable;
}

/**
 * Fills type, label, description, storage URI, value (from an applicable attribute rule, possibly
 * overridden by the prompter) and choices of the given variable from the annotation's details.
 * {@code setupTask} may be null; attribute references in detail values are expanded against it.
 */
private void populateImpliedVariable(SetupTask setupTask, EAttribute eAttribute, EAnnotation eAnnotation, VariableTask variable) {
  EMap<String, String> details = eAnnotation.getDetails();

  variable.setType(VariableType.get(details.get(EAnnotationConstants.KEY_TYPE)));
  variable.setLabel(expandAttributeReferences(setupTask, details.get(EAnnotationConstants.KEY_LABEL)));
  variable.setDescription(expandAttributeReferences(setupTask, details.get(EAnnotationConstants.KEY_DESCRIPTION)));

  // The storageURI remains the default unless there is an explicit key to specify it be null or whatever else is specified.
  if (details.containsKey(EAnnotationConstants.KEY_STORAGE_URI)) {
    String storageURIValue = expandAttributeReferences(setupTask, details.get(EAnnotationConstants.KEY_STORAGE_URI));
    variable.setStorageURI(StringUtil.isEmpty(storageURIValue) ? null : URI.createURI(storageURIValue));
  }

  if (eAttribute != null) {
    AttributeRule attributeRule = getAttributeRule(eAttribute, false);
    if (attributeRule != null) {
      String value = attributeRule.getValue();

      VariableTask ruleVariable = getRuleVariable(variable);
      if (ruleVariable == null) {
        ruleVariable = SetupFactory.eINSTANCE.createVariableTask();
        ruleVariable.setName(getAttributeRuleVariableName(eAttribute));
      }

      VariableType explicitVariableType = VariableType.get(details.get(EAnnotationConstants.KEY_EXPLICIT_TYPE));
      if (explicitVariableType != null) {
        variable.setType(explicitVariableType);
      }

      String explicitLabel = details.get(EAnnotationConstants.KEY_EXPLICIT_LABEL);
      variable.setLabel(expandAttributeReferences(setupTask, explicitLabel));

      String explicitDescription = details.get(EAnnotationConstants.KEY_EXPLICIT_DESCRIPTION);
      variable.setDescription(expandAttributeReferences(setupTask, explicitDescription));

      // The prompter's value, if present, wins over the recorded rule value.
      String promptedValue = getPrompter().getValue(ruleVariable);
      if (promptedValue != null) {
        value = promptedValue;
      }

      String attributeExpandedValue = expandAttributeReferences(setupTask, value);
      variable.setValue(attributeExpandedValue);

      // We must remember this applied rule in the preferences restricted to this workspace.
      appliedRuleVariables.add(variable);

      return;
    }
  }

  // Handle variable choices
  for (EAnnotation subAnnotation : eAnnotation.getEAnnotations()) {
    if (EAnnotationConstants.NESTED_ANNOTATION_CHOICE.equals(subAnnotation.getSource())) {
      EMap<String, String> subDetails = subAnnotation.getDetails();

      VariableChoice choice = SetupFactory.eINSTANCE.createVariableChoice();
      String subValue = expandAttributeReferences(setupTask, subDetails.get(EAnnotationConstants.KEY_VALUE));
      choice.setValue(subValue);
      choice.setLabel(subDetails.get(EAnnotationConstants.KEY_LABEL));

      variable.getChoices().add(choice);
    }
  }
}

/**
 * Replaces each "@name" reference in the value with the string value of the task's feature of that
 * name; unknown references are kept literally (without re-checking derived features).
 */
private String expandAttributeReferences(SetupTask setupTask, String value) {
  if (setupTask == null || value == null) {
    return value;
  }

  EClass eClass = setupTask.eClass();
  Matcher matcher = ATTRIBUTE_REFERENCE_PATTERN.matcher(value);

  StringBuilder builder = new StringBuilder();
  int index = 0;
  for (; matcher.find(); index = matcher.end()) {
    builder.append(value, index, matcher.start());
    String key = matcher.group().substring(1);
    EStructuralFeature feature = eClass.getEStructuralFeature(key);
    if (feature == null) {
      feature = BaseUtil.getFeature(eClass, key);
      if (feature == null) {
        // Unknown reference: emit it unchanged.
        builder.append('@');
        builder.append(key);
        continue;
      }
    }

    Object featureValue = setupTask.eGet(feature);
    builder.append(featureValue);
  }

  builder.append(value, index, value.length());

  return builder.toString();
}

/**
 * Looks up the rule for the given attribute, first in this performer's rules (unless
 * {@code userOnly}), then in the user's stored rules.
 */
private AttributeRule getAttributeRule(EAttribute eAttribute, boolean userOnly) {
  URI attributeURI = getAttributeURI(eAttribute);
  User user = getUser();
  AttributeRule attributeRule = userOnly ? null : getAttributeRule(attributeURI, attributeRules);
  if (attributeRule == null) {
    attributeRule = getAttributeRule(attributeURI, user.getAttributeRules());
  }

  return attributeRule;
}

/** Returns the rule with the given attribute URI from the list, or null. */
private AttributeRule getAttributeRule(URI attributeURI, List<AttributeRule> attributeRules) {
  for (AttributeRule attributeRule : attributeRules) {
    if (attributeURI.equals(attributeRule.getAttributeURI())) {
      return attributeRule;
    }
  }

  return null;
}

/** Returns a stable URI identifying the attribute: nsURI#//EClassName/attributeName. */
public static URI getAttributeURI(EAttribute eAttribute) {
  EClass eClass = eAttribute.getEContainingClass();
  EPackage ePackage = eClass.getEPackage();
  URI uri = URI.createURI(ePackage.getNsURI()).appendFragment("//" + eClass.getName() + "/" + eAttribute.getName());
  return uri;
}

public Set<Bundle> getBundles() {
  return bundles;
}

public EList<SetupTask> getTriggeredSetupTasks() {
  return triggeredSetupTasks;
}

/** Returns the location of the first InstallationTask among the triggered tasks, or null. */
public File getInstallationLocation() {
  for (SetupTask setupTask : triggeredSetupTasks) {
    if (setupTask instanceof InstallationTask) {
      return new File(((InstallationTask)setupTask).getLocation());
    }
  }

  return null;
}

/**
 * Returns the location of the first WorkspaceTask among the triggered tasks; outside bootstrap,
 * falls back to the running Eclipse workspace root if the resources bundle is available. May
 * return null.
 */
public File getWorkspaceLocation() {
  for (SetupTask setupTask : triggeredSetupTasks) {
    if (setupTask instanceof WorkspaceTask) {
      return new File(((WorkspaceTask)setupTask).getLocation());
    }
  }

  if (getTrigger() != Trigger.BOOTSTRAP && EMFPlugin.IS_RESOURCES_BUNDLE_AVAILABLE) {
    IWorkspace workspace = ResourcesPlugin.getWorkspace();
    if (workspace != null) {
      IWorkspaceRoot root = workspace.getRoot();
      if (root != null) {
        IPath location = root.getLocation();
        if (location != null) {
          return location.toFile();
        }
      }
    }
  }

  return null;
}

/**
 * Collects the setup tasks of all scopes (product catalog/product/version, project catalog/
 * projects/stream, installation, workspace, user), generating scope variables along the way.
 * (Method continues beyond this chunk.)
 */
public EList<SetupTask> getSetupTasks(Stream stream) {
  User user = getUser();
  Installation installation = getInstallation();
  Workspace workspace = getWorkspace();
  ProductVersion productVersion = installation.getProductVersion();

  EList<SetupTask> result = new BasicEList<SetupTask>();
  if (productVersion != null && !productVersion.eIsProxy()) {
    List<Scope> configurableItems = new ArrayList<Scope>();
    // 'scopes' is ordered for variable generation (catalog before product before version,
    // project chain spliced in at index 3); 'configurableItems' drives restriction checks.
    List<Scope> scopes = new ArrayList<Scope>();

    Product product = productVersion.getProduct();
    configurableItems.add(product);
    scopes.add(product);

    ProductCatalog productCatalog = product.getProductCatalog();
    configurableItems.add(productCatalog);
    scopes.add(0, productCatalog);

    configurableItems.add(productVersion);
    scopes.add(productVersion);

    if (stream != null)
    {
      Project project = stream.getProject();
      ProjectCatalog projectCatalog = project.getProjectCatalog();

      // Walk up the project hierarchy; each parent is inserted at index 3 so that the
      // resulting order is root project first, leaf project last.
      for (; project != null; project = project.getParentProject())
      {
        configurableItems.add(project);
        scopes.add(3, project);
      }

      if (projectCatalog != null)
      {
        configurableItems.add(projectCatalog);
        scopes.add(3, projectCatalog);
      }

      configurableItems.add(stream);
      scopes.add(stream);
    }

    configurableItems.add(installation);
    scopes.add(installation);

    if (workspace != null)
    {
      configurableItems.add(workspace);
      scopes.add(workspace);
    }

    scopes.add(user);

    // 'qualifier' accumulates the dotted name prefix within a catalog; it is reset when a
    // version/stream terminates the chain.
    String qualifier = null;
    for (Scope scope : scopes)
    {
      ScopeType type = scope.getType();
      String name = scope.getName();
      String label = scope.getLabel();
      if (label == null)
      {
        label = name;
      }

      String description = scope.getDescription();
      if (description == null)
      {
        description = label;
      }

      switch (type)
      {
        case PRODUCT_CATALOG:
        {
          generateScopeVariables(result, "product.catalog", qualifier, name, label, description);
          qualifier = name;
          break;
        }
        case PRODUCT:
        {
          generateScopeVariables(result, "product", qualifier, name, label, description);
          qualifier += "." + name;
          break;
        }
        case PRODUCT_VERSION:
        {
          generateScopeVariables(result, "product.version", qualifier, name, label, description);
          qualifier = null;
          break;
        }
        case PROJECT_CATALOG:
        {
          generateScopeVariables(result, "project.catalog", qualifier, name, label, description);
          qualifier = name;
          break;
        }
        case PROJECT:
        {
          generateScopeVariables(result, "project", qualifier, name, label, description);
          qualifier += "." + name;
          break;
        }
        case STREAM:
        {
          generateScopeVariables(result, "project.stream", qualifier, name, label, description);
          qualifier = null;
          break;
        }
        case INSTALLATION:
        {
          generateScopeVariables(result, "installation", qualifier, name, label, description);
          break;
        }
        case WORKSPACE:
        {
          generateScopeVariables(result, "workspace", qualifier, name, label, description);
          break;
        }
        case USER:
        {
          generateScopeVariables(result, "user", qualifier, name, label, description);
          break;
        }
      }

      // Collect the scope's own tasks, filtered by restriction satisfiability.
      getSetupTasks(result, configurableItems, scope);
    }
  }

  return result;
}

/**
 * Emits the scope.&lt;type&gt;.name / .name.qualified / .label / .description variables for a
 * single scope. The qualified name is only emitted when a qualifier prefix exists.
 */
private void generateScopeVariables(EList<SetupTask> setupTasks, String type, String qualifier, String name, String label, String description)
{
  setupTasks.add(createVariable(setupTasks, "scope." + type + ".name", name, null));

  if (qualifier != null)
  {
    setupTasks.add(createVariable(setupTasks, "scope." + type + ".name.qualified", qualifier + "." + name, null));
  }

  setupTasks.add(createVariable(setupTasks, "scope." + type + ".label", label, null));
  setupTasks.add(createVariable(setupTasks, "scope." + type + ".description", description, null));
}

// Creates a plain VariableTask; the setupTasks parameter is currently unused here.
private VariableTask createVariable(EList<SetupTask> setupTasks, String name, String value, String description)
{
  VariableTask variable = SetupFactory.eINSTANCE.createVariableTask();
  variable.setName(name);
  variable.setValue(value);
  variable.setDescription(description);
  return variable;
}

/**
 * Recursively flattens the container's tasks into setupTasks, skipping disabled tasks and
 * tasks whose restrictions are not all present among the configurable items.
 */
private void getSetupTasks(EList<SetupTask> setupTasks, List<Scope> configurableItems, SetupTaskContainer setupTaskContainer)
{
  for (SetupTask setupTask : setupTaskContainer.getSetupTasks())
  {
    if (setupTask.isDisabled())
    {
      continue;
    }

    EList<Scope> restrictions = setupTask.getRestrictions();
    if (!configurableItems.containsAll(restrictions))
    {
      continue;
    }

    if (setupTask instanceof SetupTaskContainer)
    {
      SetupTaskContainer container = (SetupTaskContainer)setupTask;
      getSetupTasks(setupTasks, configurableItems, container);
    }
    else
    {
      setupTasks.add(setupTask);
    }
  }
}

/**
 * Lazily computes which triggered tasks are actually needed by asking each task. Fails fast
 * when undeclared or unresolved variables remain. Tasks whose enabling dependencies cannot be
 * loaded (NoClassDefFoundError) are logged and skipped rather than aborting the whole run.
 */
public EList<SetupTask> initNeededSetupTasks(IProgressMonitor monitor) throws Exception
{
  if (neededSetupTasks == null)
  {
    neededSetupTasks = new BasicEList<SetupTask>();

    if (!undeclaredVariables.isEmpty())
    {
      throw new RuntimeException("Missing variables for " + undeclaredVariables);
    }

    if (!unresolvedVariables.isEmpty())
    {
      throw new RuntimeException("Unresolved variables for " + unresolvedVariables);
    }

    if (triggeredSetupTasks != null)
    {
      monitor.beginTask("", triggeredSetupTasks.size());

      try
      {
        for (Iterator<SetupTask> it = triggeredSetupTasks.iterator(); it.hasNext();)
        {
          SetupTask setupTask = it.next();
          checkCancelation();

          progressMonitor = new SubProgressMonitor(monitor, 1);

          try
          {
            if (setupTask.isNeeded(this))
            {
              neededSetupTasks.add(setupTask);
            }
          }
          catch (NoClassDefFoundError ex)
          {
            // Don't perform tasks that can't load their enabling dependencies
            SetupCorePlugin.INSTANCE.log(ex);
          }
          finally
          {
            progressMonitor.done();
            progressMonitor = null;
          }
        }
      }
      finally
      {
        monitor.done();
      }
    }
  }

  return neededSetupTasks;
}

public EList<SetupTask> getNeededTasks()
{
  return neededSetupTasks;
}

// Declaration continues on the next chunk boundary.
public
Map<EObject, EObject> getCopyMap()
{
  return copyMap;
}

// Returns the active sub-monitor while a task is performing; otherwise a log-backed monitor.
public IProgressMonitor getProgressMonitor(boolean working)
{
  if (!working || progressMonitor == null)
  {
    return new ProgressLogMonitor(this);
  }

  return progressMonitor;
}

// Canceled either explicitly via setCanceled or through the progress UI.
public boolean isCanceled()
{
  if (canceled)
  {
    return true;
  }

  if (progress != null)
  {
    return progress.isCanceled();
  }

  return false;
}

public void setCanceled(boolean canceled)
{
  this.canceled = canceled;
}

public void setTerminating()
{
  if (progress != null)
  {
    progress.setTerminating();
  }
}

// Announces the task in the progress UI and the log.
public void task(SetupTask setupTask)
{
  progress.task(setupTask);
  log("Performing " + getLabel(setupTask), false, Severity.INFO);
}

public void log(Throwable t)
{
  log(SetupCorePlugin.toString(t), false, Severity.ERROR);
}

public void log(IStatus status)
{
  log(SetupCorePlugin.toString(status), false, Severity.fromStatus(status));
}

public void log(String line)
{
  log(line, true, Severity.OK);
}

public void log(String line, Severity severity)
{
  log(line, true, severity);
}

public void log(String line, boolean filter)
{
  log(line, filter, Severity.OK);
}

/**
 * Logs a line. Before a progress sink is attached, lines are buffered (plain String for OK
 * severity, a Pair otherwise); once progress exists, any buffered lines are flushed first.
 * Note that buffered lines are flushed with the CURRENT call's filter flag, not the flag they
 * were originally logged with.
 */
public void log(String line, boolean filter, Severity severity)
{
  if (progress != null)
  {
    if (logMessageBuffer != null)
    {
      for (Object value : logMessageBuffer)
      {
        String bufferedLine;
        Severity bufferedSeverity;
        if (value instanceof String)
        {
          bufferedLine = (String)value;
          bufferedSeverity = Severity.OK;
        }
        else
        {
          @SuppressWarnings("unchecked")
          Pair<String, Severity> pair = (Pair<String, Severity>)value;
          bufferedLine = pair.getElement1();
          bufferedSeverity = pair.getElement2();
        }

        doLog(bufferedLine, filter, bufferedSeverity);
      }

      logMessageBuffer = null;
    }

    doLog(line, filter, severity);
  }
  else
  {
    if (logMessageBuffer == null)
    {
      logMessageBuffer = new ArrayList<Object>();
    }

    if (severity == Severity.OK)
    {
      logMessageBuffer.add(line);
    }
    else
    {
      logMessageBuffer.add(Pair.create(line, severity));
    }
  }
}

/**
 * Writes one line to the timestamped log file (best effort; file problems are logged, not
 * thrown) and forwards it to the progress sink. A filter may suppress the line entirely by
 * returning null.
 */
private void doLog(String line, boolean filter, Severity severity)
{
  if (filter)
  {
    line = logFilter.filter(line);
  }

  if (line == null)
  {
    return;
  }

  try
  {
    PrintStream logStream = getLogStream(true);
    logStream.println("[" + DATE_TIME.format(new Date()) + "] " + line);
    logStream.flush();
  }
  catch (Exception ex)
  {
    SetupCorePlugin.INSTANCE.log(ex);
  }

  progress.log(line, filter, severity);
}

public PrintStream getLogStream()
{
  return logStream;
}

/**
 * Lazily opens the append-mode log file under the product's configuration folder.
 * NOTE(review): the FileOutputStream is not closed if PrintStream construction failed, and
 * the PrintStream uses the platform default charset — confirm whether that is intended.
 */
private PrintStream getLogStream(boolean demandCreate)
{
  if (logStream == null && demandCreate)
  {
    try
    {
      File productLocation = getProductLocation();
      String path = InstallationTaskImpl.CONFIGURATION_FOLDER_NAME + "/" + SetupContext.OOMPH_NODE + "/" + SetupContext.LOG_FILE_NAME;

      File logFile = new File(productLocation, path);
      logFile.getParentFile().mkdirs();

      FileOutputStream out = new FileOutputStream(logFile, true);
      logStream = new PrintStream(out);
    }
    catch (FileNotFoundException ex)
    {
      throw new RuntimeException(ex);
    }
  }

  return logStream;
}

/**
 * Maps a rule-based variable back to the rule variable that induced it, by matching the
 * attribute recorded for both (identity comparison on the EAttribute).
 */
public VariableTask getRuleVariable(VariableTask variable)
{
  EAttribute eAttribute = ruleBasedAttributes.get(variable);
  if (eAttribute != null)
  {
    for (Map.Entry<VariableTask, EAttribute> entry : ruleAttributes.entrySet())
    {
      if (entry.getValue() == eAttribute)
      {
        return entry.getKey();
      }
    }
  }

  return null;
}

public boolean isRuleBased(VariableTask variable)
{
  return ruleBasedAttributes.containsKey(variable);
}

public List<VariableTask> getUnresolvedVariables()
{
  return unresolvedVariables;
}

public List<VariableTask> getPasswordVariables()
{
  return passwordVariables;
}

public Map<VariableTask, EAttribute> getRuleAttributes()
{
  return ruleAttributes;
}

public List<VariableTask> getAppliedRuleVariables()
{
  return appliedRuleVariables;
}

public List<VariableTask> getResolvedVariables()
{
  return resolvedVariables;
}

public Set<String> getUndeclaredVariables()
{
  return undeclaredVariables;
}

/**
 * Consumes RedirectionTasks: each one is removed from the triggered tasks and its
 * source-to-target mapping installed into the URI converter's map; then all remaining tasks
 * (and their contents) get their redirectable string attributes rewritten.
 */
public void redirectTriggeredSetupTasks()
{
  Map<URI, URI> uriMap = getURIConverter().getURIMap();
  for (Iterator<SetupTask> it = triggeredSetupTasks.iterator(); it.hasNext();)
  {
    SetupTask setupTask = it.next();
    if (setupTask instanceof
    RedirectionTask)
    {
      RedirectionTask redirectionTask = (RedirectionTask)setupTask;
      URI sourceURI = URI.createURI(redirectionTask.getSourceURL());
      URI targetURI = URI.createURI(redirectionTask.getTargetURL());
      uriMap.put(sourceURI, targetURI);
      it.remove();
    }
  }

  for (SetupTask setupTask : triggeredSetupTasks)
  {
    redirectStrings(setupTask);
    for (Iterator<EObject> it = EcoreUtil.getAllContents(setupTask, false); it.hasNext();)
    {
      redirectStrings(it.next());
    }
  }
}

/**
 * Applies URI redirection to every changeable attribute of the object that is marked with the
 * Redirect annotation and has a registered string converter. Values are round-tripped through
 * their string form; the object is only modified when a redirection actually changed a value.
 */
private void redirectStrings(EObject eObject)
{
  EClass eClass = eObject.eClass();
  for (EAttribute attribute : eClass.getEAllAttributes())
  {
    if (attribute.isChangeable() && attribute.getEAnnotation(EAnnotationConstants.ANNOTATION_REDIRECT) != null)
    {
      String instanceClassName = attribute.getEAttributeType().getInstanceClassName();
      ValueConverter valueConverter = CONVERTERS.get(instanceClassName);
      if (valueConverter != null)
      {
        if (attribute.isMany())
        {
          List<?> values = (List<?>)eObject.eGet(attribute);
          List<Object> newValues = new ArrayList<Object>();
          boolean changed = false;
          for (Object value : values)
          {
            String convertedValue = valueConverter.convertToString(value);
            String redirectedValue = redirect(convertedValue);
            if (!ObjectUtil.equals(convertedValue, redirectedValue))
            {
              changed = true;
            }

            newValues.add(valueConverter.createFromString(redirectedValue));
          }

          // Only touch the list when at least one element was redirected.
          if (changed)
          {
            eObject.eSet(attribute, newValues);
          }
        }
        else
        {
          Object value = eObject.eGet(attribute);
          if (value != null)
          {
            String convertedValue = valueConverter.convertToString(value);
            String redirectedValue = redirect(convertedValue);
            if (!ObjectUtil.equals(convertedValue, redirectedValue))
            {
              eObject.eSet(attribute, valueConverter.createFromString(redirectedValue));
            }
          }
        }
      }
    }
  }
}

/**
 * Expands variable references throughout the tasks. Variable values are expanded first so
 * that 'keys' knows every referenced name; afterwards any name still unresolved is classified
 * either as an unresolved variable (a VariableTask exists for it) or as undeclared.
 */
private void expandStrings(EList<SetupTask> setupTasks)
{
  Set<String> keys = new LinkedHashSet<String>();
  for (SetupTask setupTask : setupTasks)
  {
    expandVariableTaskValue(keys, setupTask);
  }

  for (Iterator<EObject> it = EcoreUtil.getAllContents(setupTasks); it.hasNext();)
  {
    expand(keys, it.next());
  }

  handleFeatureSubstitutions(setupTasks);

  if (!unresolvedSettings.isEmpty())
  {
    for (String key : keys)
    {
      boolean found = false;
      for (SetupTask setupTask : setupTasks)
      {
        if (setupTask instanceof VariableTask)
        {
          VariableTask contextVariableTask = (VariableTask)setupTask;
          if (key.equals(contextVariableTask.getName()))
          {
            unresolvedVariables.add(contextVariableTask);
            found = true;
            break;
          }
        }
      }

      if (!found)
      {
        undeclaredVariables.add(key);
      }
    }
  }
}

@Override
protected String resolve(String key)
{
  return lookup(key);
}

// A key is "unexpanded" when its variable's value setting is still in the unresolved set.
@Override
protected boolean isUnexpanded(String key)
{
  VariableTask variableTask = allVariables.get(key);
  if (variableTask != null)
  {
    for (Setting setting : unresolvedSettings)
    {
      if (setting.getEObject() == variableTask && setting.getEStructuralFeature() == SetupPackage.Literals.VARIABLE_TASK__VALUE)
      {
        return true;
      }
    }
  }

  return false;
}

/**
 * Extracts the set of variable names referenced by the string, in order of first occurrence.
 * Returns null for a null input. Group 1 matches the escape form; otherwise group 2 holds
 * the variable name.
 */
public Set<String> getVariables(String string)
{
  if (string == null)
  {
    return null;
  }

  Set<String> result = new LinkedHashSet<String>();
  for (Matcher matcher = StringExpander.STRING_EXPANSION_PATTERN.matcher(string); matcher.find();)
  {
    String key = matcher.group(1);
    if (!"$".equals(key))
    {
      key = matcher.group(2);
    }

    result.add(key);
  }

  return result;
}

/**
 * Pushes restrictions, predecessors, and successors declared on enclosing task containers
 * down onto each contained task, so the later flattening can drop the containers.
 */
private void propagateRestrictionsPredecessorsAndSuccessors(EList<SetupTask> setupTasks)
{
  for (SetupTask setupTask : setupTasks)
  {
    EList<Scope> restrictions = setupTask.getRestrictions();
    for (EObject eContainer = setupTask.eContainer(); eContainer instanceof SetupTask; eContainer = eContainer.eContainer())
    {
      restrictions.addAll(((SetupTask)eContainer).getRestrictions());
    }

    EList<SetupTask> predecessors = setupTask.getPredecessors();
    for (EObject eContainer = setupTask.eContainer(); eContainer instanceof SetupTask; eContainer = eContainer.eContainer())
    {
      predecessors.addAll(((SetupTask)eContainer).getPredecessors());
    }

    EList<SetupTask> successors = setupTask.getSuccessors();
    for (EObject eContainer = setupTask.eContainer(); eContainer instanceof SetupTask; eContainer =
    eContainer.eContainer())
    {
      successors.addAll(((SetupTask)eContainer).getSuccessors());
    }
  }
}

/**
 * Replaces each predecessor/successor reference that points at a task container with the
 * container's contained tasks, splicing them in place via the list iterator.
 */
private void flattenPredecessorsAndSuccessors(EList<SetupTask> setupTasks)
{
  for (SetupTask setupTask : setupTasks)
  {
    for (ListIterator<SetupTask> it = setupTask.getPredecessors().listIterator(); it.hasNext();)
    {
      SetupTask predecessor = it.next();
      if (predecessor instanceof SetupTaskContainer)
      {
        it.remove();
        for (SetupTask expandedPrecessor : ((SetupTaskContainer)predecessor).getSetupTasks())
        {
          it.add(expandedPrecessor);
          it.previous();
        }
      }
    }

    for (ListIterator<SetupTask> it = setupTask.getSuccessors().listIterator(); it.hasNext();)
    {
      SetupTask successor = it.next();
      if (successor instanceof SetupTaskContainer)
      {
        it.remove();
        for (SetupTask expandedSuccessor : ((SetupTaskContainer)successor).getSetupTasks())
        {
          it.add(expandedSuccessor);
          it.previous();
        }
      }
    }
  }
}

/**
 * Finds (or creates) the CompoundTask restricted to the given scope, nesting it under the
 * compound task of the scope's parent scope. A newly created compound is labeled via the
 * item delegator and appended to the (possibly nested) task list.
 */
private CompoundTask findOrCreate(AdapterFactoryItemDelegator itemDelegator, Scope configurableItem, EList<SetupTask> setupTasks)
{
  EObject eContainer = configurableItem.eContainer();
  if (eContainer instanceof Scope)
  {
    CompoundTask compoundSetupTask = findOrCreate(itemDelegator, (Scope)eContainer, setupTasks);
    setupTasks = compoundSetupTask.getSetupTasks();
  }

  CompoundTask compoundSetupTask = find(configurableItem, setupTasks);
  if (compoundSetupTask == null)
  {
    compoundSetupTask = SetupFactory.eINSTANCE.createCompoundTask();
    compoundSetupTask.setName(itemDelegator.getText(configurableItem));
    compoundSetupTask.getRestrictions().add(configurableItem);
    setupTasks.add(compoundSetupTask);
  }

  return compoundSetupTask;
}

/**
 * Depth-first search for a CompoundTask whose every restriction URI equals the scope's URI.
 * A compound with any non-matching restriction is skipped entirely (continue LOOP); one with
 * no restrictions is searched recursively.
 */
private CompoundTask find(Scope configurableItem, EList<SetupTask> setupTasks)
{
  LOOP: for (SetupTask setupTask : setupTasks)
  {
    if (setupTask instanceof CompoundTask)
    {
      CompoundTask compoundSetupTask = (CompoundTask)setupTask;
      List<Scope> restrictions = ((InternalEList<Scope>)compoundSetupTask.getRestrictions()).basicList();
      URI uri = EcoreUtil.getURI(configurableItem);
      boolean found = false;
      for (Scope restriction : restrictions)
      {
        URI otherURI = EcoreUtil.getURI(restriction);
        if (!otherURI.equals(uri))
        {
          continue LOOP;
        }

        found = true;
      }

      if (found)
      {
        return compoundSetupTask;
      }

      compoundSetupTask = find(configurableItem, compoundSetupTask.getSetupTasks());
      if (compoundSetupTask != null)
      {
        return compoundSetupTask;
      }
    }
  }

  return null;
}

/**
 * Resolves the previously deferred settings: publishes the current values of unresolved
 * variables into the expansion map, re-expands variable keys, then expands and writes back
 * every deferred setting (including multi-valued attributes).
 */
public void resolveSettings()
{
  // Do this before expanding any more strings.
  List<Setting> unresolvedSettings = new ArrayList<EStructuralFeature.Setting>(this.unresolvedSettings);
  this.unresolvedSettings.clear();
  Set<String> keys = new LinkedHashSet<String>();
  for (VariableTask unspecifiedVariable : unresolvedVariables)
  {
    String name = unspecifiedVariable.getName();
    keys.add(name);
    String value = unspecifiedVariable.getValue();
    put(name, value);
  }

  for (EStructuralFeature.Setting setting : unresolvedSettings)
  {
    if (setting.getEStructuralFeature() == SetupPackage.Literals.VARIABLE_TASK__VALUE)
    {
      VariableTask variable = (VariableTask)setting.getEObject();
      String name = variable.getName();
      keys.add(name);
      String value = variable.getValue();
      put(name, value);
    }
  }

  expandVariableKeys(keys);

  for (EStructuralFeature.Setting setting : unresolvedSettings)
  {
    EStructuralFeature eStructuralFeature = setting.getEStructuralFeature();
    ValueConverter valueConverter = CONVERTERS.get(eStructuralFeature.getEType().getInstanceClassName());
    if (eStructuralFeature.isMany())
    {
      @SuppressWarnings("unchecked")
      List<Object> values = (List<Object>)setting.get(false);
      for (ListIterator<Object> it = values.listIterator(); it.hasNext();)
      {
        it.set(valueConverter.createFromString(expandString(valueConverter.convertToString(it.next()))));
      }
    }
    else
    {
      Object value = setting.get(false);
      String expandedString = expandString(valueConverter.convertToString(value));
      setting.set(valueConverter.createFromString(expandedString));

      // Keep the expansion map in sync with the variable's newly expanded value.
      if (eStructuralFeature == SetupPackage.Literals.VARIABLE_TASK__VALUE)
      {
        put(((VariableTask)setting.getEObject()).getName(), expandedString);
      }
    }
  }

  handleFeatureSubstitutions(triggeredSetupTasks);
}

/**
 * Applies FeatureSubstitution annotations: each annotation detail names an attribute of the
 * annotated model element; the detail's value is converted to the attribute's type and set.
 */
private void handleFeatureSubstitutions(Collection<? extends EObject> eObjects)
{
  // Find all the feature substitution annotations.
  for (Iterator<EObject> it = EcoreUtil.getAllContents(eObjects); it.hasNext();)
  {
    InternalEObject eObject = (InternalEObject)it.next();
    if (eObject instanceof Annotation)
    {
      Annotation annotation = (Annotation)eObject;
      if (AnnotationConstants.ANNOTATION_FEATURE_SUBSTITUTION.equals(annotation.getSource()))
      {
        ModelElement modelElement = annotation.getModelElement();
        EClass eClass = modelElement.eClass();
        for (Map.Entry<String, String> detail : annotation.getDetails())
        {
          // Look for an attribute with the name of the detail's key.
          EStructuralFeature eStructuralFeature = eClass.getEStructuralFeature(detail.getKey());
          if (eStructuralFeature instanceof EAttribute)
          {
            try
            {
              // Convert the detail's value to a value of that attribute's type and replace it.
              modelElement.eSet(eStructuralFeature, EcoreUtil.createFromString(((EAttribute)eStructuralFeature).getEAttributeType(), detail.getValue()));
            }
            catch (RuntimeException ex)
            {
              // Ignore.
            }
          }
        }
      }
    }
  }
// NOTE(review): relative to the nesting visible on the previous lines, one more closing
// brace would be expected here — verify against the original file.

/**
 * For each key in the expansion map: when its value is still null, prompt for the variable's
 * value and record it as unresolved; otherwise collect the variables the value references.
 * The collected dependency graph is then reordered and each key's value expanded in order.
 */
private void expandVariableKeys(Set<String> keys)
{
  Map<String, Set<String>> variables = new LinkedHashMap<String, Set<String>>();
  for (Map.Entry<Object, Object> entry : getMap().entrySet())
  {
    Object entryKey = entry.getKey();
    if (keys.contains(entryKey))
    {
      String key = (String)entryKey;
      Object entryValue = entry.getValue();
      if (entryValue == null)
      {
        VariableTask variable = allVariables.get(key);
        if (variable != null)
        {
          SetupPrompter prompter = getPrompter();
          String value = prompter.getValue(variable);
          if (value != null)
          {
            variable.setValue(value);
            Set<String> valueVariables = getVariables(value);
            variables.put(key, valueVariables);
            unresolvedVariables.add(variable);

            // A value that itself references variables must be expanded later.
            if (!valueVariables.isEmpty())
            {
              unresolvedSettings.add(((InternalEObject)variable).eSetting(SetupPackage.Literals.VARIABLE_TASK__VALUE));
            }
          }
        }
      }
      else if (entryKey instanceof String)
      {
        String value = entryValue.toString();
        variables.put(key, getVariables(value));
      }
    }
  }

  // Expand in dependency order so referenced variables are resolved before their referers.
  EList<Map.Entry<String, Set<String>>> orderedVariables = reorderVariables(variables);
  for (Map.Entry<String, Set<String>> entry : orderedVariables)
  {
    String key = entry.getKey();
    Object object = get(key);
    if (object != null)
    {
      String value = expandString(object.toString());
      put(key, value);
    }
  }
}

/**
 * Persists resolved and rule-based variables into the appropriate scope documents
 * (installation, workspace, or user). Rule variables are routed per the scope type of their
 * container; USER-scoped ones are recorded for both installation and workspace.
 */
public void recordVariables(Installation installation, Workspace workspace, User user)
{
  recordRules(user.getAttributeRules(), true);

  // Delegator that decorates catalog labels with "Projects"/"Products" suffixes.
  AdapterFactoryItemDelegator itemDelegator = new AdapterFactoryItemDelegator(adapterFactory)
  {
    @Override
    public String getText(Object object)
    {
      String result = super.getText(object);
      if (object instanceof ProjectCatalog)
      {
        if (!result.endsWith("Projects"))
        {
          result += " Projects";
        }
      }
      else if (object instanceof ProductCatalog)
      {
        if (!result.endsWith("Products"))
        {
          result += " Products";
        }
      }

      return result;
    }
  };

  EList<SetupTask> userSetupTasks = user.getSetupTasks();
  if (!unresolvedVariables.isEmpty())
  {
    applyUnresolvedVariables(installation, workspace, user, unresolvedVariables, userSetupTasks, itemDelegator);
  }

  if (!appliedRuleVariables.isEmpty())
  {
    List<VariableTask> productCatalogScopedVariables = new ArrayList<VariableTask>();
    List<VariableTask> projectCatalogScopedVariables = new ArrayList<VariableTask>();
    EList<SetupTask> workspaceScopeTasks = null;
    EList<SetupTask> installationScopeTasks = null;
    for (VariableTask unspecifiedVariable : appliedRuleVariables)
    {
      // Classify by the nearest enclosing Scope of the variable.
      for (EObject container = unspecifiedVariable.eContainer(); container != null; container = container.eContainer())
      {
        if (container instanceof Scope)
        {
          Scope scope = (Scope)container;
          switch (scope.getType())
          {
            case STREAM:
            case PROJECT:
            case PROJECT_CATALOG:
            case WORKSPACE:
            {
              if (workspaceScopeTasks == null)
              {
                workspaceScopeTasks = workspace.getSetupTasks();
              }

              projectCatalogScopedVariables.add(unspecifiedVariable);
              break;
            }
            case PRODUCT_VERSION:
            case PRODUCT:
            case PRODUCT_CATALOG:
            case INSTALLATION:
            {
              if (installationScopeTasks == null)
              {
                installationScopeTasks = installation.getSetupTasks();
              }

              productCatalogScopedVariables.add(unspecifiedVariable);
              break;
            }
            case USER:
            {
              // User-scoped rule variables are recorded in both target documents,
              // nested under per-scope compound tasks in the user's task list.
              if (workspace != null)
              {
                if (workspaceScopeTasks == null)
                {
                  workspaceScopeTasks = findOrCreate(itemDelegator, workspace, userSetupTasks).getSetupTasks();
                }

                projectCatalogScopedVariables.add(unspecifiedVariable);
              }

              if (installationScopeTasks == null)
              {
                installationScopeTasks = findOrCreate(itemDelegator, installation, userSetupTasks).getSetupTasks();
              }

              productCatalogScopedVariables.add(unspecifiedVariable);
              break;
            }
          }

          break;
        }
      }
    }

    applyUnresolvedVariables(installation, workspace, user, productCatalogScopedVariables, installationScopeTasks, itemDelegator);
    applyUnresolvedVariables(installation, workspace, user, projectCatalogScopedVariables, workspaceScopeTasks, itemDelegator);
  }
}

/**
 * Computes where a variable's value should be stored. Passwords with the default storage URI
 * are redirected into the secure preference node; preference-scheme password URIs get a
 * trailing empty segment appended.
 */
private URI getEffectiveStorage(VariableTask variable)
{
  URI storageURI = variable.getStorageURI();
  if (variable.getType() == VariableType.PASSWORD)
  {
    if (VariableTask.DEFAULT_STORAGE_URI.equals(storageURI))
    {
      storageURI = PreferencesUtil.ROOT_PREFERENCE_NODE_URI.appendSegments(new String[] { PreferencesUtil.SECURE_NODE, SetupContext.OOMPH_NODE, variable.getName(), "" });
    }
    else if (storageURI != null && PreferencesUtil.PREFERENCE_SCHEME.equals(storageURI.scheme()) && !storageURI.hasTrailingPathSeparator())
    {
      storageURI = storageURI.appendSegment("");
    }
  }

  return storageURI;
}

/**
 * Writes each collected password (decrypted, skipping empty/placeholder values) to its
 * storage URI. I/O failures are logged and do not abort the remaining entries.
 * NOTE(review): the writer is not closed in a finally block — a failure in write() leaks the
 * underlying stream; confirm whether that is acceptable here.
 */
public void savePasswords()
{
  for (Map.Entry<URI, String> entry : passwords.entrySet())
  {
    String value = PreferencesUtil.decrypt(entry.getValue());
    if (!StringUtil.isEmpty(value) && !" ".equals(value))
    {
      URI storageURI = entry.getKey();
      URIConverter uriConverter = getURIConverter();
      try
      {
        Writer writer = ((URIConverter.WriteableOutputStream)uriConverter.createOutputStream(storageURI)).asWriter();
        writer.write(value);
        writer.close();
      }
      catch (IOException ex)
      {
        SetupCorePlugin.INSTANCE.log(ex);
      }
    }
  }
}

/**
 * Routes each variable's value to its destination: passwords into the password map, scoped
 * variables directly into the matching scope document, and everything else into the given
 * root task list (deduplicated by name; see the LOOP below).
 */
private void applyUnresolvedVariables(Installation installation, Workspace workspace, User user, Collection<VariableTask> variables, EList<SetupTask> rootTasks, AdapterFactoryItemDelegator itemDelegator)
{
  Resource installationResource = installation.eResource();
  Resource workspaceResource = workspace == null ?
  null : workspace.eResource();
  Resource userResource = user.eResource();
  List<VariableTask> unspecifiedVariables = new ArrayList<VariableTask>();
  for (VariableTask variable : variables)
  {
    URI storageURI = getEffectiveStorage(variable);
    if (storageURI != null)
    {
      String value = variable.getValue();
      if (value != null)
      {
        if (variable.getType() == VariableType.PASSWORD)
        {
          passwords.put(storageURI, value);
        }
        else
        {
          Scope scope = variable.getScope();
          if (scope != null && storageURI.equals(VariableTask.DEFAULT_STORAGE_URI))
          {
            // Write the value straight into the corresponding scope document.
            switch (scope.getType())
            {
              case INSTALLATION:
              {
                apply(installationResource, scope, variable, value);
                break;
              }
              case WORKSPACE:
              {
                apply(workspaceResource, scope, variable, value);
                break;
              }
              case USER:
              {
                apply(userResource, scope, variable, value);
                break;
              }
              default:
              {
                unspecifiedVariables.add(variable);
                break;
              }
            }
          }
          else
          {
            unspecifiedVariables.add(variable);
          }
        }
      }
    }
  }

  LOOP: for (VariableTask unspecifiedVariable : unspecifiedVariables)
  {
    String name = unspecifiedVariable.getName();
    String value = unspecifiedVariable.getValue();
    // NOTE(review): raw storage URI here (not getEffectiveStorage) and dereferenced via
    // equals below — confirm it can never be null at this point.
    URI storageURI = unspecifiedVariable.getStorageURI();

    EList<SetupTask> targetSetupTasks = rootTasks;
    Scope scope = unspecifiedVariable.getScope();
    if (scope != null)
    {
      if (storageURI.equals(VariableTask.WORKSPACE_STORAGE_URI))
      {
        if (workspace != null)
        {
          targetSetupTasks = workspace.getSetupTasks();
        }
      }
      else if (storageURI.equals(VariableTask.INSTALLATION_STORAGE_URI))
      {
        targetSetupTasks = installation.getSetupTasks();
      }

      // Non-global variables are nested under a per-scope compound task.
      if (unspecifiedVariable.getAnnotation(AnnotationConstants.ANNOTATION_GLOBAL_VARIABLE) == null)
      {
        targetSetupTasks = findOrCreate(itemDelegator, scope, targetSetupTasks).getSetupTasks();
      }
    }

    // This happens in the multi-stream case where each perform wants to add setup-restricted tasks for the same variable.
    for (SetupTask setupTask : targetSetupTasks)
    {
      if (setupTask instanceof VariableTask)
      {
        VariableTask variable = (VariableTask)setupTask;
        if (name.equals(variable.getName()))
        {
          variable.setValue(value);
          continue LOOP;
        }
      }
    }

    // Record a stripped copy (no annotations/choices, default storage) as a user preference.
    VariableTask userPreference = EcoreUtil.copy(unspecifiedVariable);
    userPreference.getAnnotations().clear();
    userPreference.getChoices().clear();
    userPreference.setStorageURI(VariableTask.DEFAULT_STORAGE_URI);
    targetSetupTasks.add(userPreference);
  }
}

/**
 * Copies a variable's value onto the equivalent VariableTask in another resource, located by
 * the URI fragment the variable has in its own scope's resource.
 */
private void apply(Resource resource, Scope scope, VariableTask variable, String value)
{
  String uriFragment = scope.eResource().getURIFragment(variable);
  EObject eObject = resource.getEObject(uriFragment);
  if (eObject instanceof VariableTask)
  {
    VariableTask targetVariable = (VariableTask)eObject;
    if (variable.getName().equals(targetVariable.getName()))
    {
      targetVariable.setValue(value);
    }
  }
}

/**
 * Expands a VariableTask's value in place. When expansion fails (null result) the value
 * setting is deferred into unresolvedSettings for a later resolution pass.
 */
private void expandVariableTaskValue(Set<String> keys, EObject eObject)
{
  if (eObject instanceof VariableTask)
  {
    VariableTask variableTask = (VariableTask)eObject;

    String value = variableTask.getValue();
    if (value != null)
    {
      String newValue = expandString(value, keys);
      if (newValue == null)
      {
        unresolvedSettings.add(((InternalEObject)eObject).eSetting(SetupPackage.Literals.VARIABLE_TASK__VALUE));
      }
      else if (!value.equals(newValue))
      {
        variableTask.setValue(newValue);
      }
    }
  }
}

/**
 * Expands variable references in every changeable, expandable string-convertible attribute of
 * the object. Attributes that cannot be fully expanded are deferred into unresolvedSettings.
 * VariableTask values are handled elsewhere and skipped here; default values are expanded
 * eagerly against an empty key set.
 */
private void expand(Set<String> keys, EObject eObject)
{
  EClass eClass = eObject.eClass();
  for (EAttribute attribute : eClass.getEAllAttributes())
  {
    if (attribute.isChangeable() && attribute.getEAnnotation(EAnnotationConstants.ANNOTATION_NO_EXPAND) == null)
    {
      String instanceClassName = attribute.getEAttributeType().getInstanceClassName();
      ValueConverter valueConverter = CONVERTERS.get(instanceClassName);
      if (valueConverter != null)
      {
        if (attribute.isMany())
        {
          List<?> values = (List<?>)eObject.eGet(attribute);
          List<Object> newValues = new ArrayList<Object>();
          boolean failed = false;
          for (Object value : values)
          {
            String newValue = expandString(valueConverter.convertToString(value), keys);
            if (newValue == null)
            {
              // Defer the whole attribute once; further failures add nothing new.
              if (!failed)
              {
                unresolvedSettings.add(((InternalEObject)eObject).eSetting(attribute));
                failed = true;
              }
            }
            else
            {
              newValues.add(valueConverter.createFromString(newValue));
            }
          }

          if (!failed)
          {
            eObject.eSet(attribute, newValues);
          }
        }
        else
        {
          Object value = eObject.eGet(attribute);
          if (value != null)
          {
            String newValue;
            if (attribute == SetupPackage.Literals.VARIABLE_TASK__DEFAULT_VALUE)
            {
              // With second parameter not null, if value is not resolved, expandString returns newValue as null
              newValue = expandString(valueConverter.convertToString(value), new LinkedHashSet<String>());
              eObject.eSet(attribute, valueConverter.createFromString(newValue));
            }
            else if (attribute != SetupPackage.Literals.VARIABLE_TASK__VALUE)
            {
              newValue = expandString(valueConverter.convertToString(value), keys);
              if (newValue == null)
              {
                unresolvedSettings.add(((InternalEObject)eObject).eSetting(attribute));
              }
              else
              {
                Object object = valueConverter.createFromString(newValue);
                if (!value.equals(object))
                {
                  eObject.eSet(attribute, object);
                }
              }
            }
          }
        }
      }
    }
  }
}

// Convenience: builds and immediately performs a single eclipse.ini modification task.
private void performEclipseIniTask(boolean vm, String option, String value, IProgressMonitor monitor) throws Exception
{
  EclipseIniTask task = SetupFactory.eINSTANCE.createEclipseIniTask();
  task.setVm(vm);
  task.setOption(option);
  task.setValue(value);
  performTask(task, monitor);
}

/**
 * Records index redirection system properties in eclipse.ini when the index URI is redirected.
 * Prefers the widest applicable base-URI redirection (grandparent, then parent, then the
 * index URI itself).
 */
private void performIndexRediction(URI indexURI, String name, IProgressMonitor monitor) throws Exception
{
  {
    URI redirectedURI = redirect(indexURI);
    if (!redirectedURI.equals(indexURI))
    {
      URI baseURI = indexURI.trimSegments(1).appendSegment("");
      URI redirectedBaseURI = redirect(baseURI);
      if (!redirectedBaseURI.equals(baseURI))
      {
        URI baseBaseURI = baseURI.trimSegments(1).appendSegment("");
        URI redirectedBaseBaseURI = redirect(baseBaseURI);
        if (!redirectedBaseBaseURI.equals(baseBaseURI))
        {
          performEclipseIniTask(true, "-D" + SetupProperties.PROP_REDIRECTION_BASE + "index" + name + ".redirection", "="
              + baseBaseURI + "->" + redirectedBaseBaseURI, monitor);
        }
        else
        {
          performEclipseIniTask(true, "-D" + SetupProperties.PROP_REDIRECTION_BASE + "index" + name + ".redirection", "=" + baseURI + "->" + redirectedBaseURI, monitor);
        }
      }
      else
      {
        performEclipseIniTask(true, "-D" + SetupProperties.PROP_REDIRECTION_BASE + "index" + name + ".redirection", "=" + indexURI + "->" + redirectedURI, monitor);
      }
    }
  }
}

/**
 * Performs a single task: 1 tick for the isNeeded check, 100 for the actual perform (or
 * worked(100) when the task is not needed). The shared progressMonitor field is always
 * cleared in the finally block.
 */
private void performTask(SetupTask task, IProgressMonitor monitor) throws Exception
{
  monitor.beginTask("", 101);

  try
  {
    progressMonitor = new SubProgressMonitor(monitor, 1);
    if (task.isNeeded(this))
    {
      progressMonitor.done();
      progressMonitor = new SubProgressMonitor(monitor, 100);
      task.perform(this);
    }
    else
    {
      progressMonitor.done();
      monitor.worked(100);
    }
  }
  finally
  {
    progressMonitor.done();
    progressMonitor = null;
    monitor.done();
  }
}

/**
 * Top-level entry point: performs the triggered tasks, then (bootstrap only) records
 * launcher/VM arguments, update-site and index redirections, optional remote-debug flags,
 * and copies network preferences into the new product's configuration.
 */
public void perform(IProgressMonitor monitor) throws Exception
{
  boolean bootstrap = getTrigger() == Trigger.BOOTSTRAP;
  // 100 ticks for the tasks, +5 bootstrap extras, +1 when a -vm entry must be written.
  monitor.beginTask("", (bootstrap ? 105 : 100) + (vmPath == null ? 0 : 1));

  try
  {
    performTriggeredSetupTasks(new SubProgressMonitor(monitor, 100));

    if (bootstrap)
    {
      log("Performing post bootstrap tasks", false, Severity.INFO);

      performEclipseIniTask(false, "--launcher.appendVmargs", null, new SubProgressMonitor(monitor, 1));
      if (vmPath != null)
      {
        performEclipseIniTask(false, "-vm", vmPath, new SubProgressMonitor(monitor, 1));
      }

      performEclipseIniTask(true, "-D" + SetupProperties.PROP_UPDATE_URL, "=" + redirect(URI.createURI((String)get(SetupProperties.PROP_UPDATE_URL))), new SubProgressMonitor(monitor, 1));

      performIndexRediction(SetupContext.INDEX_SETUP_URI, "", new SubProgressMonitor(monitor, 1));
      performIndexRediction(SetupContext.INDEX_SETUP_LOCATION_URI, ".location", new SubProgressMonitor(monitor, 1));

      if (REMOTE_DEBUG)
      {
        performEclipseIniTask(true, "-D" + SetupProperties.PROP_SETUP_REMOTE_DEBUG, "=true", new NullProgressMonitor());
        performEclipseIniTask(true, "-Xdebug", "", new NullProgressMonitor());
        performEclipseIniTask(true, "-Xrunjdwp", ":transport=dt_socket,server=y,suspend=n,address=8123", new NullProgressMonitor());
      }

      // Carry the current network preferences over into the new installation.
      String[] networkPreferences = new String[] { ".settings", "org.eclipse.core.net.prefs" };
      URI sourceLocation = SetupContext.CONFIGURATION_LOCATION_URI.appendSegments(networkPreferences);
      if (getURIConverter().exists(sourceLocation, null))
      {
        URI targetURI = URI.createFileURI(getProductConfigurationLocation().toString()).appendSegments(networkPreferences);

        ResourceCopyTask resourceCopyTask = SetupFactory.eINSTANCE.createResourceCopyTask();
        resourceCopyTask.setSourceURL(sourceLocation.toString());
        resourceCopyTask.setTargetURL(targetURI.toString());
        performTask(resourceCopyTask, new SubProgressMonitor(monitor, 1));
      }
      else
      {
        monitor.worked(1);
      }
    }
  }
  finally
  {
    monitor.done();
    log("", false);
  }

  hasSuccessfullyPerformed = true;
}

// Determines the needed tasks (1 tick) and performs them (100 ticks) when any exist.
private void performTriggeredSetupTasks(IProgressMonitor monitor) throws Exception
{
  monitor.beginTask("", 101);

  try
  {
    initNeededSetupTasks(new SubProgressMonitor(monitor, 1));

    if
    (!neededSetupTasks.isEmpty())
    {
      performNeededSetupTasks(new SubProgressMonitor(monitor, 100));
    }
    else
    {
      monitor.worked(100);
    }
  }
  finally
  {
    monitor.done();
  }
}

/**
 * Runs the needed tasks, delegating to the workspace-aware runner when the resources bundle
 * is available and this is not a bootstrap run.
 */
private void performNeededSetupTasks(IProgressMonitor monitor) throws Exception
{
  setPerforming(true);

  if (getTrigger() == Trigger.BOOTSTRAP)
  {
    doPerformNeededSetupTasks(monitor);
  }
  else
  {
    if (CommonPlugin.IS_RESOURCES_BUNDLE_AVAILABLE)
    {
      WorkspaceUtil.performNeededSetupTasks(this, monitor);
    }
    else
    {
      doPerformNeededSetupTasks(monitor);
    }
  }
}

/**
 * Performs each needed task in order, sizing progress by each task's declared monitor work.
 * Outside bootstrap, auto-building is suspended for the duration and a workspace build is
 * scheduled afterwards (with the PDE API analysis builder temporarily disabled). Performing
 * stops early once configuration-priority tasks are reached and restart reasons exist.
 */
private void doPerformNeededSetupTasks(IProgressMonitor monitor) throws Exception
{
  Boolean autoBuilding = null;

  try
  {
    Trigger trigger = getTrigger();
    if (trigger != Trigger.BOOTSTRAP)
    {
      autoBuilding = disableAutoBuilding();
    }

    logBundleInfos();

    int totalWork = 0;
    for (SetupTask neededTask : neededSetupTasks)
    {
      int work = Math.max(0, neededTask.getProgressMonitorWork());
      totalWork += work;
    }

    monitor.beginTask("", totalWork);

    for (SetupTask neededTask : neededSetupTasks)
    {
      checkCancelation();

      // Once we're past all the installation priority tasks that might cause restart reasons and there are restart reasons, stop performing.
      if (trigger != Trigger.BOOTSTRAP && neededTask.getPriority() >= SetupTask.PRIORITY_CONFIGURATION && !getRestartReasons().isEmpty())
      {
        break;
      }

      task(neededTask);

      int work = Math.max(0, neededTask.getProgressMonitorWork());
      progressMonitor = new SubProgressMonitor(monitor, work);

      try
      {
        neededTask.perform(this);
        neededTask.dispose();
      }
      catch (NoClassDefFoundError ex)
      {
        // A task whose implementation can't be loaded is logged and skipped.
        log(ex);
      }
      finally
      {
        progressMonitor.done();
        progressMonitor = null;
      }
    }
  }
  catch (OperationCanceledException ex)
  {
    throw ex;
  }
  catch (InterruptedException ex)
  {
    throw ex;
  }
  catch (Exception ex)
  {
    log(ex);
    throw ex;
  }
  finally
  {
    monitor.done();

    if (Boolean.TRUE.equals(autoBuilding))
    {
      // Disable the PDE's API analysis builder, if it's installed, and remember its previously current state.
      // It's considered disabled if it's not installed at all.
      final boolean disabled = PDEAPIUtil.setDisableAPIAnalysisBuilder(true);

      Job buildJob = new Job("Build")
      {
        @Override
        protected IStatus run(IProgressMonitor monitor)
        {
          try
          {
            EcorePlugin.getWorkspaceRoot().getWorkspace().build(IncrementalProjectBuilder.INCREMENTAL_BUILD, monitor);
            return Status.OK_STATUS;
          }
          catch (CoreException ex)
          {
            return SetupCorePlugin.INSTANCE.getStatus(ex);
          }
          finally
          {
            try
            {
              restoreAutoBuilding(true);
            }
            catch (CoreException ex)
            {
              SetupCorePlugin.INSTANCE.log(ex);
            }

            if (!disabled)
            {
              // Restore it to false if it was true before we set it to false;
              PDEAPIUtil.setDisableAPIAnalysisBuilder(false);
            }
          }
        }

        @Override
        public boolean belongsTo(Object family)
        {
          return ResourcesPlugin.FAMILY_MANUAL_BUILD == family;
        }
      };

      buildJob.setRule(EcorePlugin.getWorkspaceRoot());
      buildJob.schedule();
    }
  }
}

/**
 * Logs one sorted line per bundle, enriched (best effort) with build/branch/commit details
 * from the bundle's about.mappings; unresolved "$..." placeholders are skipped.
 */
private void logBundleInfos()
{
  List<String> bundleInfos = new ArrayList<String>();
  for (Bundle bundle : bundles)
  {
    StringBuilder builder = new StringBuilder("Bundle ");
    builder.append(bundle.getSymbolicName());
    builder.append(" ");
    builder.append(bundle.getVersion());

    InputStream source = null;

    try
    {
      URL url = bundle.getResource("about.mappings");
      if (url != null)
      {
        source = url.openStream();

        Properties properties = new Properties();
        properties.load(source);

        String buildID = (String)properties.get("0");
        if (buildID != null && !buildID.startsWith("$"))
        {
          builder.append(", build=");
          builder.append(buildID);
        }

        String gitBranch = (String)properties.get("1");
        if (gitBranch != null && !gitBranch.startsWith("$"))
        {
          builder.append(", branch=");
          builder.append(gitBranch);
        }

        String gitCommit = (String)properties.get("2");
        if (gitCommit != null && !gitCommit.startsWith("$"))
        {
          builder.append(", commit=");
          builder.append(gitCommit);
        }
      }
    }
    catch (IOException ex)
    {
      //$FALL-THROUGH$
    }
    finally
    {
      IOUtil.closeSilent(source);
    }

    bundleInfos.add(builder.toString());
  }

  Collections.sort(bundleInfos);
  for (String bundleInfo : bundleInfos)
  {
    log(bundleInfo);
  }
}

/**
 * Maps each task that is overridden (a later task shares its override token) to the task
 * that overrides it. Last writer per token wins in the 'overrides' map.
 */
private Map<SetupTask, SetupTask> getSubstitutions(EList<SetupTask> setupTasks)
{
  Map<Object, SetupTask> overrides = new LinkedHashMap<Object, SetupTask>();
  Map<SetupTask, SetupTask> substitutions = new LinkedHashMap<SetupTask, SetupTask>();

  for (SetupTask setupTask : setupTasks)
  {
    Object overrideToken = setupTask.getOverrideToken();
    SetupTask overriddenTask = overrides.put(overrideToken, setupTask);
    if (overriddenTask != null)
    {
      substitutions.put(overriddenTask, setupTask);
    }
  }

  return substitutions;
}

/**
 * Records every Scope on the containment path of eObject into scopesToCopy and adds the
 * topmost container to roots. Stops early when a scope was already recorded (its ancestors
 * were handled before).
 */
private void gather(Set<EObject> roots, Set<Scope> scopesToCopy, EObject eObject)
{
  EObject result = eObject;
  for (EObject parent = eObject; parent != null; parent = parent.eContainer())
  {
    if (parent instanceof Scope)
    {
      if (!scopesToCopy.add((Scope)parent))
      {
        return;
      }
    }

    result = parent;
  }

  roots.add(result);
}

/**
 * Builds a self-contained copy of the setup model (workspace, user, installation, stream and
 * all referenced scopes plus the given tasks) using a customized EcoreUtil.Copier.
 * (Definition continues past this chunk.)
 */
private void copySetup(Stream stream, EList<SetupTask> setupTasks, Map<SetupTask, SetupTask> substitutions, Map<SetupTask, SetupTask> directSubstitutions)
{
  Set<EObject> roots = new LinkedHashSet<EObject>();

  final Set<Scope> scopesToCopy = new LinkedHashSet<Scope>();

  Workspace originalWorkspace = getWorkspace();
  if (originalWorkspace != null)
  {
    scopesToCopy.add(originalWorkspace);
    roots.add(originalWorkspace);

    if (stream != null)
    {
      gather(roots, scopesToCopy, stream);
    }
  }

  User originalPreferences = getUser();
  if (originalPreferences != null)
  {
    scopesToCopy.add(originalPreferences);
    roots.add(originalPreferences);
  }

  Installation originalInstallation = getInstallation();
  if (originalInstallation != null)
  {
    scopesToCopy.add(originalInstallation);
    roots.add(originalInstallation);

    for (EObject eObject : originalInstallation.eCrossReferences())
    {
      gather(roots, scopesToCopy, eObject);
    }
  }

  for (SetupTask setupTask : setupTasks)
  {
    gather(roots, scopesToCopy, setupTask);
  }

  EcoreUtil.Copier copier = new EcoreUtil.Copier(true, stream == null)
  {
    private static final long serialVersionUID = 1L;

    @Override
    public <T> Collection<T> copyAll(Collection<?
extends T> eObjects) { Collection<T> result = new ArrayList<T>(eObjects.size()); for (Object object : eObjects) { @SuppressWarnings("unchecked") T t = (T)copy((EObject)object); if (t != null) { result.add(t); } } return result; } @Override protected EObject createCopy(EObject eObject) { if (eObject instanceof Scope && !scopesToCopy.contains(eObject)) { return null; } return super.createCopy(eObject); } }; copier.copyAll(roots); // Determine all the copied objects for which the original object is directly contained in a resource. // For each such resource, create a copy of that resource. Map<Resource, Resource> resourceCopies = new LinkedHashMap<Resource, Resource>(); @SuppressWarnings("unchecked") Set<InternalEObject> originals = (Set<InternalEObject>)(Set<?>)copier.keySet(); for (InternalEObject original : originals) { Internal resource = original.eDirectResource(); if (resource != null) { Resource newResource = resourceCopies.get(resource); if (newResource == null) { URI uri = resource.getURI(); ResourceSet resourceSet = resource.getResourceSet(); Registry resourceFactoryRegistry = resourceSet == null ? Resource.Factory.Registry.INSTANCE : resourceSet.getResourceFactoryRegistry(); newResource = resourceFactoryRegistry.getFactory(uri).createResource(uri); resourceCopies.put(resource, newResource); } } } // For each original resource, ensure that the copied resource contains either the corresponding copies or // a placeholder object. 
for (Map.Entry<Resource, Resource> entry : resourceCopies.entrySet()) { Resource originalResource = entry.getKey(); Resource copyResource = entry.getValue(); EList<EObject> copyResourceContents = copyResource.getContents(); for (EObject eObject : originalResource.getContents()) { EObject copy = copier.get(eObject); if (copy == null) { copy = EcoreFactory.eINSTANCE.createEObject(); } copyResourceContents.add(copy); } } // Must determine mapping from original setup's references (ProductVersion and Streams) to their copies currently in the copier. Map<URI, EObject> originalCrossReferences = new LinkedHashMap<URI, EObject>(); if (originalWorkspace != null) { for (EObject eObject : originalWorkspace.eCrossReferences()) { originalCrossReferences.put(EcoreUtil.getURI(eObject), eObject); } } for (EObject copiedObject : new ArrayList<EObject>(copier.values())) { URI uri = EcoreUtil.getURI(copiedObject); EObject originalObject = originalCrossReferences.get(uri); if (originalObject != null) { copier.put(originalObject, copiedObject); } } Map<EObject, EObject> originalCopier = new LinkedHashMap<EObject, EObject>(copier); for (Map.Entry<SetupTask, SetupTask> entry : directSubstitutions.entrySet()) { SetupTask overriddenTask = entry.getKey(); SetupTask overridingTask = entry.getValue(); EObject copy = copier.get(overridingTask); copier.put(overriddenTask, copy == null ? overridingTask : copy); } copyMap = copier; copier.copyReferences(); // Perform override merging. for (Map.Entry<SetupTask, SetupTask> entry : substitutions.entrySet()) { SetupTask originalOverriddenSetupTask = entry.getKey(); SetupTask overriddenSetupTask = (SetupTask)originalCopier.get(originalOverriddenSetupTask); // For synthesized tasks, there is no copy, only the original. 
if (overriddenSetupTask == null) { overriddenSetupTask = originalOverriddenSetupTask; } SetupTask originalOverridingSetupTask = entry.getValue(); SetupTask overridingSetupTask = (SetupTask)originalCopier.get(originalOverridingSetupTask); // For synthesized tasks, there is no copy, only the original. if (overridingSetupTask == null) { overridingSetupTask = originalOverridingSetupTask; } overridingSetupTask.overrideFor(overriddenSetupTask); } for (ListIterator<SetupTask> it = setupTasks.listIterator(); it.hasNext();) { SetupTask setupTask = it.next(); if (directSubstitutions.containsKey(setupTask)) { it.remove(); } else { SetupTask copy = (SetupTask)copier.get(setupTask); it.set(copy); } } setSetupContext(SetupContext.create((Installation)copier.get(originalInstallation), (Workspace)copier.get(originalWorkspace), (User)copier.get(originalPreferences))); } private EList<Map.Entry<String, Set<String>>> reorderVariables(final Map<String, Set<String>> variables) { EList<Map.Entry<String, Set<String>>> list = new BasicEList<Map.Entry<String, Set<String>>>(variables.entrySet()); SetupCoreUtil.reorder(list, new SetupCoreUtil.DependencyProvider<Map.Entry<String, Set<String>>>() { public Collection<Map.Entry<String, Set<String>>> getDependencies(Map.Entry<String, Set<String>> variable) { Collection<Map.Entry<String, Set<String>>> result = new ArrayList<Map.Entry<String, Set<String>>>(); for (String key : variable.getValue()) { for (Map.Entry<String, Set<String>> entry : variables.entrySet()) { if (entry.getKey().equals(key)) { result.add(entry); } } } return result; } }); return list; } private void reorderSetupTasks(EList<SetupTask> setupTasks) { ECollections.sort(setupTasks, new Comparator<SetupTask>() { public int compare(SetupTask setupTask1, SetupTask setupTask2) { return setupTask1.getPriority() - setupTask2.getPriority(); } }); final Map<SetupTask, Set<SetupTask>> dependencies = new LinkedHashMap<SetupTask, Set<SetupTask>>(); for (SetupTask setupTask : setupTasks) { 
CollectionUtil.addAll(dependencies, setupTask, setupTask.getPredecessors()); for (SetupTask successor : setupTask.getSuccessors()) { CollectionUtil.add(dependencies, successor, setupTask); } } SetupCoreUtil.reorder(setupTasks, new SetupCoreUtil.DependencyProvider<SetupTask>() { public Collection<SetupTask> getDependencies(SetupTask setupTask) { return dependencies.get(setupTask); } }); // Set up the predecessor dependencies so these tasks will not be reordered relative to each other when they are merged with tasks from other streams. SetupTask previousSetupTask = null; for (SetupTask setupTask : setupTasks) { setupTask.getSuccessors().clear(); EList<SetupTask> predecessors = setupTask.getPredecessors(); predecessors.clear(); if (!(setupTask instanceof VariableTask)) { if (previousSetupTask != null) { predecessors.add(previousSetupTask); } previousSetupTask = setupTask; } } } private String getLabel(SetupTask setupTask) { IItemLabelProvider labelProvider = (IItemLabelProvider)adapterFactory.adapt(setupTask, IItemLabelProvider.class); String type; try { Method getTypeTextMethod = ReflectUtil.getMethod(labelProvider.getClass(), "getTypeText", Object.class); getTypeTextMethod.setAccessible(true); type = getTypeTextMethod.invoke(labelProvider, setupTask).toString(); } catch (Exception ex) { type = setupTask.eClass().getName(); } String label = labelProvider.getText(setupTask); if (!label.startsWith(type)) { label = type + " " + label; } int eol = Math.min(label.indexOf('\r'), label.indexOf('\n')); if (eol != -1) { label = label.substring(0, eol) + "..."; } return label.startsWith(type) ? label : type + " " + label; } /** * Used in IDE. */ public static SetupTaskPerformer createForIDE(ResourceSet resourceSet, SetupPrompter prompter, Trigger trigger) throws Exception { return create(resourceSet.getURIConverter(), prompter, trigger, SetupContext.create(resourceSet), false); } /** * Used in installer and IDE. 
*/ public static SetupTaskPerformer create(URIConverter uriConverter, final SetupPrompter prompter, Trigger trigger, SetupContext setupContext, boolean fullPrompt) throws Exception { List<SetupTaskPerformer> performers = new ArrayList<SetupTaskPerformer>(); boolean needsPrompt = false; Map<Object, Set<Object>> composedMap = new HashMap<Object, Set<Object>>(); List<VariableTask> allAppliedRuleVariables = new ArrayList<VariableTask>(); List<VariableTask> allUnresolvedVariables = new ArrayList<VariableTask>(); List<VariableTask> allPasswordVariables = new ArrayList<VariableTask>(); Map<VariableTask, EAttribute> allRuleAttributes = new LinkedHashMap<VariableTask, EAttribute>(); Workspace workspace = setupContext.getWorkspace(); List<Stream> streams = workspace == null ? null : workspace.getStreams(); if (streams == null || streams.isEmpty()) { streams = Collections.singletonList(null); } for (Stream stream : streams) { if (stream == null || !stream.eIsProxy()) { SetupTaskPerformer performer = new SetupTaskPerformer(uriConverter, prompter, null, setupContext, stream); Set<String> undeclaredVariables = performer.getUndeclaredVariables(); final Set<VariableTask> demandCreatedUnresolvedVariables = new LinkedHashSet<VariableTask>(); if (!undeclaredVariables.isEmpty()) { List<VariableTask> unresolvedVariables = performer.getUnresolvedVariables(); for (String variableName : undeclaredVariables) { VariableTask variable = SetupFactory.eINSTANCE.createVariableTask(); variable.setName(variableName); variable.setLabel(variableName + " (undeclared)"); variable.setStorageURI(null); unresolvedVariables.add(variable); demandCreatedUnresolvedVariables.add(variable); } undeclaredVariables.clear(); } CollectionUtil.putAll(composedMap, performer.getMap()); if (fullPrompt) { SetupContext fullPromptContext = SetupContext.createCopy(setupContext.getInstallation(), setupContext.getWorkspace(), setupContext.getUser()); Set<VariableTask> variables = new LinkedHashSet<VariableTask>(); final 
SetupTaskPerformer partialPromptPerformer = performer; prepareFullPrompt(variables, fullPromptContext.getInstallation()); prepareFullPrompt(variables, fullPromptContext.getWorkspace()); User user = fullPromptContext.getUser(); prepareFullPrompt(variables, user); user.getAttributeRules().clear(); SetupPrompter fullPrompter = new SetupPrompter() { private boolean first = true; public UserCallback getUserCallback() { return prompter.getUserCallback(); } public String getValue(VariableTask variable) { if (!first) { return prompter.getValue(variable); } return null; } public boolean promptVariables(List<? extends SetupTaskContext> performers) { for (SetupTaskContext context : performers) { SetupTaskPerformer promptedPerformer = (SetupTaskPerformer)context; Map<VariableTask, EAttribute> ruleAttributes = promptedPerformer.getRuleAttributes(); for (VariableTask variable : promptedPerformer.getUnresolvedVariables()) { EAttribute eAttribute = ruleAttributes.get(variable); if (ruleAttributes.keySet().contains(variable)) { AttributeRule attributeRule = partialPromptPerformer.getAttributeRule(eAttribute, false); if (attributeRule != null) { String value = prompter.getValue(variable); variable.setValue(value == null ? 
attributeRule.getValue() : value); } } else { Object value = partialPromptPerformer.get(variable.getName()); if (value instanceof String) { variable.setValue(value.toString()); } } } promptedPerformer.getUnresolvedVariables().addAll(demandCreatedUnresolvedVariables); } first = false; return true; } }; SetupTaskPerformer fullPromptPerformer = new SetupTaskPerformer(uriConverter, fullPrompter, null, fullPromptContext, stream); fullPrompter.promptVariables(Collections.singletonList(fullPromptPerformer)); CollectionUtil.putAll(composedMap, performer.getMap()); performer = fullPromptPerformer; } allAppliedRuleVariables.addAll(performer.getAppliedRuleVariables()); allUnresolvedVariables.addAll(performer.getUnresolvedVariables()); allPasswordVariables.addAll(performer.getPasswordVariables()); allRuleAttributes.putAll(performer.getRuleAttributes()); performers.add(performer); if (!performer.getUnresolvedVariables().isEmpty()) { needsPrompt = true; } } } if (needsPrompt) { if (!prompter.promptVariables(performers)) { return null; } for (SetupTaskPerformer setupTaskPerformer : performers) { setupTaskPerformer.resolveSettings(); } } // All variables have been expanded, no unresolved variables remain. // We need a single performer for all streams. // The per-stream performers from above have triggered task lists that must be composed into a single setup for multiple streams. 
EList<SetupTask> setupTasks = new BasicEList<SetupTask>(); Set<Bundle> bundles = new HashSet<Bundle>(); for (SetupTaskPerformer performer : performers) { setupTasks.addAll(performer.getTriggeredSetupTasks()); bundles.addAll(performer.getBundles()); } SetupTaskPerformer composedPerformer = new SetupTaskPerformer(uriConverter, prompter, trigger, setupContext, setupTasks); composedPerformer.getBundles().addAll(bundles); composedPerformer.getAppliedRuleVariables().addAll(allAppliedRuleVariables); composedPerformer.getUnresolvedVariables().addAll(allUnresolvedVariables); composedPerformer.getPasswordVariables().addAll(allPasswordVariables); composedPerformer.getRuleAttributes().putAll(allRuleAttributes); composedPerformer.redirectTriggeredSetupTasks(); File workspaceLocation = composedPerformer.getWorkspaceLocation(); if (workspaceLocation != null) { File workspaceSetupLocation = new File(workspaceLocation, ".metadata/.plugins/org.eclipse.oomph.setup/workspace.setup"); URI workspaceURI = URI.createFileURI(workspaceSetupLocation.toString()); for (SetupTaskPerformer performer : performers) { performer.getWorkspace().eResource().setURI(workspaceURI); } } File configurationLocation = composedPerformer.getProductConfigurationLocation(); if (configurationLocation != null) { File installationLocation = new File(configurationLocation, "org.eclipse.oomph.setup/installation.setup"); URI installationURI = URI.createFileURI(installationLocation.toString()); for (SetupTaskPerformer performer : performers) { performer.getInstallation().eResource().setURI(installationURI); } } Map<Object, Object> finalComposedMap = composedPerformer.getMap(); for (Map.Entry<Object, Set<Object>> entry : composedMap.entrySet()) { Object key = entry.getKey(); if (!finalComposedMap.containsKey(key)) { Set<Object> value = entry.getValue(); value.remove(null); value.remove(""); if (value.size() == 1) { finalComposedMap.put(key, value.iterator().next()); } } } return composedPerformer; } public static Set<? 
extends Authenticator> getAuthenticators(VariableTask variable) { VariableAdapter variableAdapter = (VariableAdapter)EcoreUtil.getExistingAdapter(variable, VariableAdapter.class); if (variableAdapter != null) { return variableAdapter.getAuthenticators(variable); } return null; } private static class VariableAdapter extends AdapterImpl { private SetupTaskPerformer performer; public VariableAdapter(SetupTaskPerformer perform) { performer = perform; } @Override public boolean isAdapterForType(Object type) { return type == VariableAdapter.class; } protected Set<? extends Authenticator> getAuthenticators(final VariableTask variable) { StringExpander stringExpander = new StringExpander() { @Override protected String resolve(String key) { String value = getValue(key); return StringUtil.isEmpty(value) ? performer.resolve(key) : value; } @Override protected boolean isUnexpanded(String key) { return performer.isUnexpanded(key) && StringUtil.isEmpty(getValue(key)); } private String getValue(String key) { VariableTask variable = performer.allVariables.get(key); if (variable != null) { return performer.getPrompter().getValue(variable); } return null; } @Override protected String filter(String value, String filterName) { return performer.filter(value, filterName); } }; return Authenticator.create(variable, stringExpander); } } private static class FullPromptMarker extends AdapterImpl { @Override public boolean isAdapterForType(Object type) { return type == FullPromptMarker.class; } } private static boolean isFullPromptUser(User user) { return EcoreUtil.getExistingAdapter(user, FullPromptMarker.class) != null; } private static void prepareFullPrompt(Set<VariableTask> variables, ModelElement modelElement) { if (modelElement != null) { for (Iterator<EObject> it = modelElement.eAllContents(); it.hasNext();) { EObject eObject = it.next(); if (eObject instanceof VariableTask) { VariableTask variableTask = (VariableTask)eObject; variables.add(variableTask); variableTask.setValue(null); 
} } setFullPromptMarker(modelElement); } } private static void setFullPromptMarker(ModelElement modelElement) { modelElement.eAdapters().add(new FullPromptMarker()); } public void setProgress(ProgressLog progress) { this.progress = progress; } public static boolean disableAutoBuilding() throws CoreException { return CommonPlugin.IS_RESOURCES_BUNDLE_AVAILABLE && WorkspaceUtil.disableAutoBuilding(); } public static void restoreAutoBuilding(boolean autoBuilding) throws CoreException { if (CommonPlugin.IS_RESOURCES_BUNDLE_AVAILABLE) { WorkspaceUtil.restoreAutoBuilding(autoBuilding); } } public static EList<SetupTask> createEnablementTasks(EModelElement eModelElement, boolean withVariables) { EList<SetupTask> enablementTasks = null; for (EAnnotation eAnnotation : eModelElement.getEAnnotations()) { String source = eAnnotation.getSource(); if (EAnnotationConstants.ANNOTATION_ENABLEMENT.equals(source)) { if (enablementTasks == null) { enablementTasks = new BasicEList<SetupTask>(); } String repositoryLocation = eAnnotation.getDetails().get(EAnnotationConstants.KEY_REPOSITORY); if (!StringUtil.isEmpty(repositoryLocation)) { if (withVariables) { String variableName = eAnnotation.getDetails().get(EAnnotationConstants.KEY_VARIABLE_NAME); if (!StringUtil.isEmpty(variableName)) { VariableTask variable = SetupFactory.eINSTANCE.createVariableTask(); variable.setName(variableName); variable.setValue(repositoryLocation); enablementTasks.add(0, variable); repositoryLocation = getVariableReference(variableName); } } } P2Task p2Task = SetupP2Factory.eINSTANCE.createP2Task(); EList<Requirement> requirements = p2Task.getRequirements(); String ius = eAnnotation.getDetails().get(EAnnotationConstants.KEY_INSTALLABLE_UNITS); if (!StringUtil.isEmpty(ius)) { for (String requirementSpecification : ius.split("\\s")) { Matcher matcher = INSTALLABLE_UNIT_WITH_RANGE_PATTERN.matcher(requirementSpecification); if (matcher.matches()) { Requirement requirement = 
P2Factory.eINSTANCE.createRequirement(matcher.group(1)); String versionRange = matcher.group(2); if (!StringUtil.isEmpty(versionRange)) { requirement.setVersionRange(new VersionRange(versionRange)); } requirements.add(requirement); } } } if (!StringUtil.isEmpty(repositoryLocation)) { Repository repository = P2Factory.eINSTANCE.createRepository(repositoryLocation); p2Task.getRepositories().add(repository); } // Ensure that these are first so that these are the targets for merging rather than the sources. // The latter causes problems in the copier. enablementTasks.add(0, p2Task); } } EObject eContainer = eModelElement.eContainer(); if (eContainer instanceof EModelElement) { EList<SetupTask> containerEnablementTasks = createEnablementTasks((EModelElement)eContainer, withVariables); if (containerEnablementTasks != null) { if (enablementTasks == null) { enablementTasks = containerEnablementTasks; } else { enablementTasks.addAll(containerEnablementTasks); } } } return enablementTasks; } private static String getVariableReference(String variableName) { return "${" + variableName + "}"; } protected static class ValueConverter { public Object createFromString(String literal) { return literal; } public String convertToString(Object value) { return value == null ? null : value.toString(); } } protected static class URIValueConverter extends ValueConverter { @Override public Object createFromString(String literal) { return BaseFactory.eINSTANCE.createURI(literal); } } private static class PDEAPIUtil { private static final Field BUILD_DISABLED_FIELD; static { Field buildDisabledField = null; // Disable API analysis building for the initial build. 
try { Class<?> apiAnalysisBuilder = CommonPlugin.loadClass("org.eclipse.pde.api.tools", "org.eclipse.pde.api.tools.internal.builder.ApiAnalysisBuilder"); buildDisabledField = apiAnalysisBuilder.getDeclaredField("buildDisabled"); buildDisabledField.setAccessible(true); } catch (Exception ex) { // Ignore } BUILD_DISABLED_FIELD = buildDisabledField; } private static boolean setDisableAPIAnalysisBuilder(boolean disabled) { if (BUILD_DISABLED_FIELD != null) { try { boolean result = (Boolean)BUILD_DISABLED_FIELD.get(null); if (result != disabled) { BUILD_DISABLED_FIELD.set(null, disabled); } return result; } catch (Exception ex) { // Ignore. } } return true; } } /** * @author Eike Stepper */ private static class WorkspaceUtil { private static boolean disableAutoBuilding() throws CoreException { boolean autoBuilding = ResourcesPlugin.getWorkspace().isAutoBuilding(); if (autoBuilding) { restoreAutoBuilding(false); } return autoBuilding; } private static void restoreAutoBuilding(boolean autoBuilding) throws CoreException { if (autoBuilding != ResourcesPlugin.getWorkspace().isAutoBuilding()) { IWorkspaceDescription description = ResourcesPlugin.getWorkspace().getDescription(); description.setAutoBuilding(autoBuilding); ResourcesPlugin.getWorkspace().setDescription(description); } } private static void performNeededSetupTasks(final SetupTaskPerformer performer, IProgressMonitor monitor) throws Exception { ResourcesPlugin.getWorkspace().run(new IWorkspaceRunnable() { public void run(IProgressMonitor monitor) throws CoreException { try { performer.doPerformNeededSetupTasks(monitor); } catch (Throwable t) { SetupCorePlugin.INSTANCE.coreException(t); } } }, null, IWorkspace.AVOID_UPDATE, monitor); } } }
package org.eclipse.oomph.setup.ui.recorder; import org.eclipse.oomph.preferences.PreferencesFactory; import org.eclipse.oomph.setup.ui.AbstractSetupDialog; import org.eclipse.oomph.setup.ui.SetupUIPlugin; import org.eclipse.oomph.util.StringUtil; import org.eclipse.emf.common.util.URI; import org.eclipse.jface.dialogs.Dialog; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import java.util.Map; /** * @author Eike Stepper */ public class RecorderPoliciesDialog extends AbstractSetupDialog { private final RecorderTransaction transaction; private final Map<URI, String> preferences; private boolean enablePreferenceRecorder = true; private RecorderPoliciesComposite recorderPoliciesComposite; private Text valueText; public RecorderPoliciesDialog(Shell parentShell, RecorderTransaction transaction, Map<URI, String> preferences) { super(parentShell, "Preference Recorder", 600, 400, SetupUIPlugin.INSTANCE, false); this.transaction = transaction; this.preferences = preferences; } public boolean isEnablePreferenceRecorder() { return enablePreferenceRecorder; } public void setEnablePreferenceRecorder(boolean enablePreferenceRecorder) { this.enablePreferenceRecorder = enablePreferenceRecorder; } @Override protected String getShellText() { return "Oomph Preference Recorder"; } @Override protected String getDefaultMessage() { return "Define whether to 
record preference tasks for the listed preferences from now on."; } @Override protected void createUI(Composite parent) { initializeDialogUnits(parent); SashForm sashForm = new SashForm(parent, SWT.VERTICAL); sashForm.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true)); recorderPoliciesComposite = new RecorderPoliciesComposite(sashForm, SWT.NONE, transaction, false); recorderPoliciesComposite.setFocus(); recorderPoliciesComposite.addSelectionChangedListener(new ISelectionChangedListener() { public void selectionChanged(SelectionChangedEvent event) { updateValue((IStructuredSelection)event.getSelection()); } }); valueText = new Text(sashForm, SWT.READ_ONLY | SWT.V_SCROLL | SWT.H_SCROLL); valueText.setBackground(getShell().getDisplay().getSystemColor(SWT.COLOR_WHITE)); Listener scrollBarListener = new Listener() { protected boolean changing; public void handleEvent(Event event) { if (!changing) { changing = true; Rectangle clientArea = valueText.getClientArea(); Rectangle trimArea = valueText.computeTrim(clientArea.x, clientArea.y, clientArea.width, clientArea.height); Point size = valueText.computeSize(SWT.DEFAULT, SWT.DEFAULT, true); valueText.getHorizontalBar().setVisible(trimArea.width <= size.x); valueText.getVerticalBar().setVisible(trimArea.height <= size.y); changing = false; } } }; valueText.addListener(SWT.Resize, scrollBarListener); valueText.addListener(SWT.Modify, scrollBarListener); sashForm.setWeights(new int[] { 4, 1 }); Dialog.applyDialogFont(sashForm); updateValue(recorderPoliciesComposite.getSelection()); } @Override protected void createButtonsForButtonBar(Composite parent) { final Button enableButton = createCheckbox(parent, "Recorder enabled"); enableButton.setToolTipText("The enablement can be changed later on the preference page Oomph | Setup | Preference Recorder"); enableButton.setSelection(enablePreferenceRecorder); enableButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { 
enablePreferenceRecorder = enableButton.getSelection(); recorderPoliciesComposite.setEnabled(enablePreferenceRecorder); valueText.setVisible(enablePreferenceRecorder); } }); super.createButtonsForButtonBar(parent); } private void updateValue(IStructuredSelection selection) { String path = (String)selection.getFirstElement(); URI uri = PreferencesFactory.eINSTANCE.createURI(path); String value = StringUtil.safe(preferences.get(uri)); valueText.setText(value); } }
package org.jkiss.dbeaver.ui.dialogs.connection;

import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.dialogs.IDialogPage;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.TabFolder;
import org.eclipse.swt.widgets.TabItem;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.core.CoreMessages;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.runtime.DBRRunnableContext;
import org.jkiss.dbeaver.registry.DataSourceDescriptor;
import org.jkiss.dbeaver.registry.DataSourceRegistry;
import org.jkiss.dbeaver.registry.DataSourceViewDescriptor;
import org.jkiss.dbeaver.registry.DriverDescriptor;
import org.jkiss.dbeaver.runtime.RunnableContextDelegate;
import org.jkiss.dbeaver.ui.*;
import org.jkiss.dbeaver.ui.dialogs.ActiveWizardPage;
import org.jkiss.dbeaver.ui.dialogs.driver.DriverEditDialog;
import org.jkiss.utils.ArrayUtils;
import org.jkiss.utils.CommonUtils;

import java.util.*;

/**
 * Settings connection page. Hosts particular drivers' connection pages.
 * <p>
 * The actual editor UI is provided by the driver's {@link IDataSourceConnectionEditor}
 * view; when the editor exposes sub-pages (and the wizard creates a new connection)
 * they are laid out in a tab folder.
 */
class ConnectionPageSettings extends ActiveWizardPage<ConnectionWizard> implements IDataSourceConnectionEditorSite, ICompositeDialogPage {
    // FIX: logger was registered under DriverDescriptor.class (copy-paste); use this class.
    static final Log log = Log.getLog(ConnectionPageSettings.class);

    @NotNull
    private final ConnectionWizard wizard;
    @NotNull
    private DataSourceViewDescriptor viewDescriptor;
    @Nullable
    private IDataSourceConnectionEditor connectionEditor;
    @Nullable
    private DataSourceDescriptor dataSource;
    // Data sources whose settings have already been loaded into the editor(s).
    private final Set<DataSourceDescriptor> activated = new HashSet<DataSourceDescriptor>();
    // subPages: pages contributed by the connection editor (lazily resolved);
    // extraPages: pages added externally via addSubPage().
    private IDialogPage[] subPages, extraPages;

    /**
     * Constructor for ConnectionPageSettings.
     *
     * @param wizard         owning connection wizard
     * @param viewDescriptor descriptor of the driver-specific editor view
     */
    ConnectionPageSettings(
        @NotNull ConnectionWizard wizard,
        @NotNull DataSourceViewDescriptor viewDescriptor)
    {
        super("newConnectionSettings");
        this.wizard = wizard;
        this.viewDescriptor = viewDescriptor;

        setTitle(wizard.isNew() ? viewDescriptor.getLabel() : "Connection settings");
        setDescription(CoreMessages.dialog_connection_description);
    }

    /**
     * Constructor for ConnectionPageSettings bound to an existing data source.
     */
    ConnectionPageSettings(
        @NotNull ConnectionWizard wizard,
        @NotNull DataSourceViewDescriptor viewDescriptor,
        @Nullable DataSourceDescriptor dataSource)
    {
        this(wizard, viewDescriptor);
        this.dataSource = dataSource;
    }

    /**
     * Lazily creates the provider page and loads settings into the editor(s).
     * Settings for a given data source are fully loaded only once; afterwards
     * only the main editor is refreshed.
     */
    @Override
    public void activatePage()
    {
        if (connectionEditor == null) {
            createProviderPage(getControl().getParent());
        }

        setMessage(NLS.bind(CoreMessages.dialog_connection_message, getDriver().getFullName()));

        DataSourceDescriptor connectionInfo = getActiveDataSource();
        if (!activated.contains(connectionInfo)) {
            if (this.connectionEditor != null) {
                this.connectionEditor.loadSettings();
            }
            if (subPages != null) {
                for (IDialogPage page : subPages) {
                    if (page instanceof IDataSourceConnectionEditor) {
                        ((IDataSourceConnectionEditor) page).loadSettings();
                    }
                }
            }
            activated.add(connectionInfo);
        } else if (connectionEditor != null) {
            connectionEditor.loadSettings();
        }
        getContainer().updateTitleBar();
    }

    /** Returns the editor's image when it provides one, else the default. */
    @Override
    public Image getImage() {
        if (this.connectionEditor != null) {
            Image image = this.connectionEditor.getImage();
            if (image != null) {
                return image;
            }
        }
        return super.getImage();
    }

    /**
     * Saves editor state into the given data source. Clears the active data
     * source's connection properties first so stale entries do not survive.
     */
    void saveSettings(DataSourceDescriptor dataSource)
    {
        getActiveDataSource().getConnectionConfiguration().getProperties().clear();

        if (connectionEditor != null) {
            connectionEditor.saveSettings(dataSource);
        }
        if (subPages != null) {
            for (IDialogPage page : subPages) {
                if (page instanceof IDataSourceConnectionEditor) {
                    ((IDataSourceConnectionEditor) page).saveSettings(dataSource);
                }
            }
        }
    }

    @Override
    public void createControl(Composite parent)
    {
        if (wizard.isNew()) {
            // Defer real editor creation until the page is activated.
            setControl(new Composite(parent, SWT.BORDER));
        } else {
            createProviderPage(parent);
        }
    }

    /**
     * Instantiates the driver-specific connection editor. For new connections
     * with sub-pages, all pages go into a tab folder; otherwise the editor's
     * control is used directly.
     */
    private void createProviderPage(Composite parent) {
        if (this.connectionEditor != null) {
            return;
        }
        if (getControl() != null) {
            getControl().dispose();
        }

        try {
            this.connectionEditor = viewDescriptor.createView(IDataSourceConnectionEditor.class);
            this.connectionEditor.setSite(this);
            // init sub pages (if any)
            getSubPages();

            if (wizard.isNew() && !ArrayUtils.isEmpty(subPages)) {
                // Create tab folder
                List<IDialogPage> allPages = new ArrayList<IDialogPage>();
                allPages.add(connectionEditor);
                Collections.addAll(allPages, subPages);

                TabFolder tabFolder = new TabFolder(parent, SWT.TOP);
                tabFolder.setLayoutData(new GridData(GridData.FILL_BOTH));

                for (IDialogPage page : allPages) {
                    TabItem item = new TabItem(tabFolder, SWT.NONE);
                    page.createControl(tabFolder);
                    Control pageControl = page.getControl();
                    item.setControl(pageControl);
                    item.setText(CommonUtils.isEmpty(page.getTitle()) ? "General" : page.getTitle());
                    item.setToolTipText(page.getDescription());
                }
                tabFolder.setSelection(0);
                setControl(tabFolder);
            } else {
                // Create single editor control
                this.connectionEditor.createControl(parent);
                setControl(this.connectionEditor.getControl());
            }

            UIUtils.setHelp(getControl(), IHelpContextIds.CTX_CON_WIZARD_SETTINGS);
        } catch (Exception ex) {
            log.warn(ex);
            setErrorMessage("Can't create settings dialog: " + ex.getMessage());
        }
        parent.layout();
    }

    @Override
    public boolean canFlipToNextPage()
    {
        return true;
    }

    /** Complete when another page is active, or the editor reports completeness. */
    @Override
    public boolean isPageComplete()
    {
        return wizard.getPageSettings() != this ||
            this.connectionEditor != null && this.connectionEditor.isComplete();
    }

    @Override
    public DBRRunnableContext getRunnableContext()
    {
        return new RunnableContextDelegate(wizard.getContainer());
    }

    @Override
    public DataSourceRegistry getDataSourceRegistry() {
        return wizard.getDataSourceRegistry();
    }

    @Override
    public boolean isNew() {
        return wizard.isNew();
    }

    @Override
    public DriverDescriptor getDriver()
    {
        return wizard.getSelectedDriver();
    }

    /** Returns the bound data source if set, otherwise the wizard's active one. */
    @NotNull
    @Override
    public DataSourceDescriptor getActiveDataSource()
    {
        if (dataSource != null) {
            return dataSource;
        }
        return wizard.getActiveDataSource();
    }

    @Override
    public void updateButtons()
    {
        getWizard().getContainer().updateButtons();
    }

    @Override
    public boolean openDriverEditor()
    {
        DriverEditDialog dialog = new DriverEditDialog(wizard.getShell(), this.getDriver());
        return dialog.open() == IDialogConstants.OK_ID;
    }

    @Override
    public void dispose()
    {
        if (connectionEditor != null) {
            connectionEditor.dispose();
            connectionEditor = null;
        }
        super.dispose();
    }

    /**
     * Lazily resolves the editor's sub-pages, binds this page as their site and
     * appends any externally added pages. Cached after the first call.
     * NOTE(review): when the editor is not an ICompositeDialogPage this returns
     * null and extraPages are never exposed — confirm that is intended.
     */
    @Nullable
    @Override
    public IDialogPage[] getSubPages()
    {
        if (subPages != null) {
            return subPages;
        }

        if (connectionEditor instanceof ICompositeDialogPage) {
            subPages = ((ICompositeDialogPage) connectionEditor).getSubPages();
            if (!ArrayUtils.isEmpty(subPages)) {
                for (IDialogPage page : subPages) {
                    if (page instanceof IDataSourceConnectionEditor) {
                        ((IDataSourceConnectionEditor) page).setSite(this);
                    }
                }
            }
            if (extraPages != null) {
                subPages = ArrayUtils.concatArrays(subPages, extraPages);
            }
            return subPages;
        } else {
            return null;
        }
    }

    /** Registers an additional page to be appended after the editor's sub-pages. */
    public void addSubPage(IDialogPage page) {
        if (extraPages == null) {
            extraPages = new IDialogPage[] { page };
        } else {
            extraPages = ArrayUtils.concatArrays(extraPages, new IDialogPage[] { page });
        }
    }
}
package jltools.frontend; import jltools.lex.Lexer; import jltools.ast.Node; import jltools.parse.Grm; import jltools.types.*; import jltools.util.*; import jltools.visit.*; import java.io.*; import java.util.*; public class Compiler { private static TypeSystem ts; private static CompoundClassResolver systemResolver; private static TableClassResolver parsedResolver; private static Map options; static { Compiler.systemResolver = new CompoundClassResolver(); Compiler.parsedResolver = new TableClassResolver(); systemResolver.addClassResolver( parsedResolver); //systemResolver.addResolver( new FileClassResolver()); Compiler.ts = new StandardTypeSystem( systemResolver); Compiler.options = new HashMap(); } public static void setOptions( Map options) { Compiler.options = options; } public Compiler() { } public Node parse( Reader source) throws IOException { Lexer lexer; Grm grm; java_cup.runtime.Symbol sym; lexer = new Lexer( source); grm = new Grm(lexer, null); try { sym = grm.parse(); } catch( Exception e) { throw new IOException( e.getMessage()); } return (Node)sym.value; } public void readSymbols( Node ast) { SymbolReader sr = new SymbolReader( ts, parsedResolver); ast.visit( sr); } public Node typeCheck( Node ast) { TypeChecker tc = new TypeChecker( null); return ast.visit( tc); } public void translate( Node ast, Writer output) throws IOException { CodeWriter cw = new CodeWriter( output, 72); ast.translate( null, cw); cw.flush(); } // A hack, but only here to get type checking working... public static void enqueueError(int line, ErrorInfo e) { System.err.println( "Error on line " + line + ": " + e.getMessage()); } }
package polyglot.visit;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import polyglot.ast.ClassBody;
import polyglot.ast.ClassMember;
import polyglot.ast.CodeDecl;
import polyglot.ast.ConstructorCall;
import polyglot.ast.ConstructorDecl;
import polyglot.ast.Expr;
import polyglot.ast.Field;
import polyglot.ast.FieldAssign;
import polyglot.ast.FieldDecl;
import polyglot.ast.Formal;
import polyglot.ast.Initializer;
import polyglot.ast.Local;
import polyglot.ast.LocalAssign;
import polyglot.ast.LocalDecl;
import polyglot.ast.Node;
import polyglot.ast.NodeFactory;
import polyglot.ast.Special;
import polyglot.ast.Term;
import polyglot.frontend.Job;
import polyglot.types.ClassType;
import polyglot.types.ConstructorInstance;
import polyglot.types.FieldInstance;
import polyglot.types.LocalInstance;
import polyglot.types.SemanticException;
import polyglot.types.TypeSystem;
import polyglot.types.VarInstance;

/**
 * Visitor which checks that all local variables must be defined before use,
 * and that final variables and fields are initialized correctly.
 *
 * The checking of the rules is implemented in the methods leaveCall(Node)
 * and check(FlowGraph, Term, Item, Item).
 */
public class InitChecker extends DataFlow
{
    public InitChecker(Job job, TypeSystem ts, NodeFactory nf) {
        super(job, ts, nf,
              true /* forward analysis */);
    }

    /** Info for the class body currently being processed; top of the stack. */
    protected ClassBodyInfo currCBI = null;

    /**
     * This class is just a data structure containing relevant information
     * needed for performing initialization checking of a class declaration.
     *
     * These objects form a stack, since class declarations can be nested.
     */
    protected static class ClassBodyInfo {
        /**
         * The info for the outer ClassBody. The <code>ClassBodyInfo</code>s
         * form a stack.
         */
        ClassBodyInfo outer = null;

        /** The current CodeDecl being processed by the dataflow equations */
        CodeDecl currCodeDecl = null;

        /**
         * A Map of all the final fields in the class currently being processed
         * to MinMaxInitCounts. This Map is used as the basis for the Maps returned
         * in createInitialItem().
         */
        Map currClassFinalFieldInitCounts = new HashMap();

        /**
         * List of all the constructors. These will be checked once all the
         * initializer blocks have been processed.
         */
        List allConstructors = new ArrayList();

        /**
         * Map from ConstructorInstances to ConstructorInstances detailing
         * which constructors call which constructors.
         * This is used in checking the initialization of final fields.
         */
        Map constructorCalls = new HashMap();

        /**
         * Map from ConstructorInstances to Sets of FieldInstances, detailing
         * which final non-static fields each constructor initializes.
         * This is used in checking the initialization of final fields.
         */
        Map fieldsConstructorInitializes = new HashMap();

        /**
         * Set of LocalInstances from the outer class body that were used
         * during the declaration of this class. We need to track this
         * in order to correctly populate <code>localsUsedInClassBodies</code>
         */
        Set outerLocalsUsed = new HashSet();

        // Map from inner-class ClassBody nodes to the Set of outer
        // LocalInstances those bodies use.
        Map localsUsedInClassBodies = new HashMap();
    }

    /**
     * Class representing the initialization counts of variables. The
     * different values of the counts that we are interested in are ZERO,
     * ONE and MANY.
     */
    protected static class InitCount {
        static InitCount ZERO = new InitCount(0);
        static InitCount ONE = new InitCount(1);
        static InitCount MANY = new InitCount(2);

        private int count;

        private InitCount(int i) {
            count = i;
        }

        public boolean equals(Object o) {
            if (o instanceof InitCount) {
                return this.count == ((InitCount)o).count;
            }
            return false;
        }

        public String toString() {
            if (count == 0) {
                return "0";
            }
            else if (count == 1) {
                return "1";
            }
            else if (count == 2) {
                return "many";
            }
            throw new RuntimeException("Unexpected value for count");
        }

        /** ZERO -> ONE; ONE and MANY saturate at MANY. */
        public InitCount increment() {
            if (count == 0) {
                return ONE;
            }
            return MANY;
        }

        public static InitCount min(InitCount a, InitCount b) {
            if (ZERO.equals(a) || ZERO.equals(b)) {
                return ZERO;
            }
            if (ONE.equals(a) || ONE.equals(b)) {
                return ONE;
            }
            return MANY;
        }

        public static InitCount max(InitCount a, InitCount b) {
            if (MANY.equals(a) || MANY.equals(b)) {
                return MANY;
            }
            if (ONE.equals(a) || ONE.equals(b)) {
                return ONE;
            }
            return ZERO;
        }
    }

    /**
     * Class to record counts of the minimum and maximum number of times
     * a variable or field has been initialized or assigned to.
     */
    protected static class MinMaxInitCount {
        private InitCount min, max;

        MinMaxInitCount(InitCount min, InitCount max) {
            MinMaxInitCount.this.min = min;
            MinMaxInitCount.this.max = max;
        }

        InitCount getMin() {
            return min;
        }

        InitCount getMax() {
            return max;
        }

        public String toString() {
            return "[ min: " + min + "; max: " + max + " ]";
        }

        public boolean equals(Object o) {
            if (o instanceof MinMaxInitCount) {
                return this.min.equals(((MinMaxInitCount)o).min) &&
                       this.max.equals(((MinMaxInitCount)o).max);
            }
            return false;
        }

        /**
         * Joins two counts by taking the minimum of the mins and the maximum
         * of the maxs; tolerates null operands.
         */
        static MinMaxInitCount join(MinMaxInitCount initCount1,
                                    MinMaxInitCount initCount2) {
            if (initCount1 == null) {
                return initCount2;
            }
            if (initCount2 == null) {
                return initCount1;
            }
            MinMaxInitCount t =
                new MinMaxInitCount(InitCount.min(initCount1.getMin(),
                                                  initCount2.getMin()),
                                    InitCount.max(initCount1.getMax(),
                                                  initCount2.getMax()));
            return t;
        }
    }

    /**
     * Dataflow items for this dataflow are maps of VarInstances to counts
     * of the min and max number of times those variables/fields have
     * been initialized. These min and max counts are then used to determine
     * if variables have been initialized before use, and that final variables
     * are not initialized too many times.
     *
     * This class is immutable.
     */
    static class DataFlowItem extends Item {
        Map initStatus; // map of VarInstances to MinMaxInitCount

        DataFlowItem(Map m) {
            this.initStatus = Collections.unmodifiableMap(m);
        }

        public String toString() {
            return initStatus.toString();
        }

        public boolean equals(Object o) {
            if (o instanceof DataFlowItem) {
                return this.initStatus.equals(((DataFlowItem)o).initStatus);
            }
            return false;
        }

        public int hashCode() {
            return (initStatus.hashCode());
        }
    }

    /**
     * Initialise the FlowGraph to be used in the dataflow analysis.
     * @return null if no dataflow analysis should be performed for this
     *         code declaration; otherwise, an apropriately initialized
     *         FlowGraph.
     */
    protected FlowGraph initGraph(CodeDecl code, Term root) {
        currCBI.currCodeDecl = code;
        return new FlowGraph(root, forward);
    }

    /**
     * Overridden superclass method.
     *
     * Set up the state that must be tracked during a Class Declaration.
     */
    protected NodeVisitor enterCall(Node n) throws SemanticException {
        if (n instanceof ClassBody) {
            // we are starting to process a class declaration, but have yet
            // to do any of the dataflow analysis.

            // set up the new ClassBodyInfo, and make sure that it forms
            // a stack.
            ClassBodyInfo newCDI = new ClassBodyInfo();
            newCDI.outer = currCBI;
            currCBI = newCDI;

            // set up currClassFinalFieldInitCounts to contain mappings
            // for all the final fields of the class.
            Iterator classMembers = ((ClassBody)n).members().iterator();
            while (classMembers.hasNext()) {
                ClassMember cm = (ClassMember)classMembers.next();
                if (cm instanceof FieldDecl) {
                    FieldDecl fd = (FieldDecl)cm;
                    if (fd.flags().isFinal()) {
                        MinMaxInitCount initCount;
                        if (fd.init() != null) {
                            // the field has an initializer
                            initCount = new MinMaxInitCount(InitCount.ONE,InitCount.ONE);

                            // do dataflow over the initialization expression
                            // to pick up any uses of outer local variables.
                            if (currCBI.outer != null)
                                dataflow(fd.init());
                        }
                        else {
                            // the field does not have an initializer
                            initCount = new MinMaxInitCount(InitCount.ZERO,InitCount.ZERO);
                        }
                        newCDI.currClassFinalFieldInitCounts.put(fd.fieldInstance(),
                                                                 initCount);
                    }
                }
            }
        }
        return super.enterCall(n);
    }

    /**
     * Postpone the checking of constructors until the end of the class
     * declaration is encountered, to ensure that all initializers are
     * processed first.
     *
     * Also, at the end of the class declaration, check that all static final
     * fields have been initialized at least once, and that for each constructor
     * all non-static final fields must have been initialized at least once,
     * taking into account the constructor calls.
     */
    public Node leaveCall(Node n) throws SemanticException {
        if (n instanceof ConstructorDecl) {
            // postpone the checking of the constructors until all the
            // initializer blocks have been processed.
            currCBI.allConstructors.add(n);
            return n;
        }

        if (n instanceof ClassBody) {
            // Now that we are at the end of the class declaration, and can
            // be sure that all of the initializer blocks have been processed,
            // we can now process the constructors.
            for (Iterator iter = currCBI.allConstructors.iterator();
                 iter.hasNext(); ) {
                ConstructorDecl cd = (ConstructorDecl)iter.next();

                // rely on the fact that our dataflow does not change the AST,
                // so we can discard the result of this call.
                dataflow(cd);
            }

            // check that all static fields have been initialized exactly once
            checkStaticFinalFieldsInit((ClassBody)n);

            // check that each non-static final field is initialized exactly
            // once across every constructor chain.
            checkNonStaticFinalFieldsInit((ClassBody)n);

            // copy the locals used to the outer scope
            if (currCBI.outer != null) {
                currCBI.outer.localsUsedInClassBodies.put(n,
                                                          currCBI.outerLocalsUsed);
            }

            // pop the stack
            currCBI = currCBI.outer;
        }
        return super.leaveCall(n);
    }

    /**
     * Check that each static final field is initialized exactly once.
     *
     * @param cb The ClassBody of the class declaring the fields to check.
     * @throws SemanticException
     */
    protected void checkStaticFinalFieldsInit(ClassBody cb) throws SemanticException {
        // check that all static fields have been initialized exactly once.
        for (Iterator iter = currCBI.currClassFinalFieldInitCounts.entrySet().iterator();
             iter.hasNext(); ) {
            Map.Entry e = (Map.Entry)iter.next();
            if (e.getKey() instanceof FieldInstance) {
                FieldInstance fi = (FieldInstance)e.getKey();
                if (fi.flags().isStatic() && fi.flags().isFinal()) {
                    MinMaxInitCount initCount = (MinMaxInitCount)e.getValue();
                    if (InitCount.ZERO.equals(initCount.getMin())) {
                        throw new SemanticException("field \"" + fi.name() +
                            "\" might not have been initialized",
                            cb.position());
                    }
                }
            }
        }
    }

    /**
     * Check that each non static final field has been initialized exactly once,
     * taking into account the fact that constructors may call other
     * constructors.
     *
     * @param cb The ClassBody of the class declaring the fields to check.
     * @throws SemanticException
     */
    protected void checkNonStaticFinalFieldsInit(ClassBody cb) throws SemanticException {
        // for each non-static final field instance, check that all
        // constructors intialize it exactly once, taking into account constructor calls.
        for (Iterator iter = currCBI.currClassFinalFieldInitCounts.keySet().iterator();
             iter.hasNext(); ) {
            FieldInstance fi = (FieldInstance)iter.next();
            if (fi.flags().isFinal() && !fi.flags().isStatic()) {
                // the field is final and not static
                // it must be initialized exactly once.
                // navigate up through all of the constructors
                // that this constructor calls.

                boolean fieldInitializedBeforeConstructors = false;
                MinMaxInitCount ic = (MinMaxInitCount)
                    currCBI.currClassFinalFieldInitCounts.get(fi);
                if (ic != null && !InitCount.ZERO.equals(ic.getMin())) {
                    fieldInitializedBeforeConstructors = true;
                }

                for (Iterator iter2 = currCBI.allConstructors.iterator();
                     iter2.hasNext(); ) {
                    ConstructorDecl cd = (ConstructorDecl)iter2.next();
                    ConstructorInstance ciStart = cd.constructorInstance();
                    ConstructorInstance ci = ciStart;

                    boolean isInitialized = fieldInitializedBeforeConstructors;

                    // walk the this(...)-call chain recorded in
                    // constructorCalls; a field may be assigned at most once
                    // along the chain.
                    while (ci != null) {
                        Set s = (Set)currCBI.fieldsConstructorInitializes.get(ci);
                        if (s != null && s.contains(fi)) {
                            if (isInitialized) {
                                throw new SemanticException("field \"" + fi.name() +
                                    "\" might have already been initialized",
                                    cd.position());
                            }
                            isInitialized = true;
                        }
                        ci = (ConstructorInstance)currCBI.constructorCalls.get(ci);
                    }

                    if (!isInitialized) {
                        throw new SemanticException("field \"" + fi.name() +
                            "\" might not have been initialized",
                            ciStart.position());
                    }
                }
            }
        }
    }

    /**
     * Construct a flow graph for the <code>Expr</code> provided, and call
     * <code>dataflow(FlowGraph)</code>. Is also responsible for calling
     * <code>post(FlowGraph, Term)</code> after
     * <code>dataflow(FlowGraph)</code> has been called.
     */
    protected Expr dataflow(Expr root) throws SemanticException {
        // Build the control flow graph.
        FlowGraph g = new FlowGraph(root, forward);
        CFGBuilder v = new CFGBuilder(ts, g, this);
        v.visitGraph();
        dataflow(g);
        return (Expr)post(g, root);
    }

    /**
     * The initial item to be given to the entry point of the dataflow contains
     * the init counts for the final fields.
     */
    public Item createInitialItem(FlowGraph graph) {
        return new DataFlowItem(new HashMap(currCBI.currClassFinalFieldInitCounts));
    }

    /**
     * The confluence operator is essentially the union of all of the
     * inItems. However, if two or more of the initCount maps from
     * the inItems each have a MinMaxInitCounts entry for the same
     * VarInstance, the conflict must be resolved, by using the
     * minimum of all mins and the maximum of all maxs.
     */
    public Item confluence(List inItems, Term node) {
        // Resolve any conflicts pairwise.
        Iterator iter = inItems.iterator();
        Map m = new HashMap(((DataFlowItem)iter.next()).initStatus);
        while (iter.hasNext()) {
            Map n = ((DataFlowItem)iter.next()).initStatus;
            for (Iterator iter2 = n.entrySet().iterator(); iter2.hasNext(); ) {
                Map.Entry entry = (Map.Entry)iter2.next();
                VarInstance v = (VarInstance)entry.getKey();
                MinMaxInitCount initCount1 = (MinMaxInitCount)m.get(v);
                MinMaxInitCount initCount2 = (MinMaxInitCount)entry.getValue();
                m.put(v, MinMaxInitCount.join(initCount1, initCount2));
            }
        }
        return new DataFlowItem(m);
    }

    /**
     * Perform the appropriate flow operations for the Terms. This method
     * delegates to other appropriate methods in this class, for modularity.
     *
     * To summarize:
     * - Formals: declaration of a Formal param, just insert a new
     *            MinMaxInitCount for the LocalInstance.
     * - LocalDecl: a declaration of a local variable, just insert a new
     *              MinMaxInitCount for the LocalInstance as appropriate
     *              based on whether the declaration has an initializer or not.
     * - Assign: if the LHS of the assign is a local var or a field that we
     *           are interested in, then increment the min and max counts
     *           for that local var or field.
     */
    public Map flow(Item inItem, FlowGraph graph, Term n, Set succEdgeKeys) {
        DataFlowItem inDFItem = ((DataFlowItem)inItem);
        Map ret = null;
        if (n instanceof Formal) {
            // formal argument declaration.
            ret = flowFormal(inDFItem, graph, (Formal)n, succEdgeKeys);
        }
        else if (n instanceof LocalDecl) {
            // local variable declaration.
            ret = flowLocalDecl(inDFItem, graph, (LocalDecl)n, succEdgeKeys);
        }
        else if (n instanceof LocalAssign) {
            // assignment to a local variable
            ret = flowLocalAssign(inDFItem, graph, (LocalAssign)n, succEdgeKeys);
        }
        else if (n instanceof FieldAssign) {
            // assignment to a field
            ret = flowFieldAssign(inDFItem, graph, (FieldAssign)n, succEdgeKeys);
        }
        else if (n instanceof ConstructorCall) {
            // call to another constructor.
            ret = flowConstructorCall(inDFItem, graph, (ConstructorCall)n, succEdgeKeys);
        }
        if (ret != null) {
            return ret;
        }
        // no handler changed the item; propagate it unchanged.
        return itemToMap(inItem, succEdgeKeys);
    }

    /**
     * Perform the appropriate flow operations for declaration of a formal
     * parameter
     */
    protected Map flowFormal(DataFlowItem inItem, FlowGraph graph, Formal f, Set succEdgeKeys) {
        Map m = new HashMap(inItem.initStatus);
        // a formal argument is always defined.
        m.put(f.localInstance(), new MinMaxInitCount(InitCount.ONE,InitCount.ONE));
        return itemToMap(new DataFlowItem(m), succEdgeKeys);
    }

    /**
     * Perform the appropriate flow operations for declaration of a local
     * variable
     */
    protected Map flowLocalDecl(DataFlowItem inItem, FlowGraph graph, LocalDecl ld, Set succEdgeKeys) {
        Map m = new HashMap(inItem.initStatus);
        if (ld.init() == null) {
            // declaration of local var with no initialization
            m.put(ld.localInstance(), new MinMaxInitCount(InitCount.ZERO,InitCount.ZERO));
        }
        else {
            // declaration of local var with initialization.
            m.put(ld.localInstance(), new MinMaxInitCount(InitCount.ONE,InitCount.ONE));
        }
        return itemToMap(new DataFlowItem(m), succEdgeKeys);
    }

    /**
     * Perform the appropriate flow operations for assignment to a local
     * variable
     */
    protected Map flowLocalAssign(DataFlowItem inItem, FlowGraph graph, LocalAssign a, Set succEdgeKeys) {
        Local l = (Local) a.left();
        Map m = new HashMap(inItem.initStatus);
        MinMaxInitCount initCount = (MinMaxInitCount)m.get(l.localInstance());

        // initcount could be null if the local is defined in the outer
        // class.
        if (initCount != null ) {
            initCount = new MinMaxInitCount(initCount.getMin().increment(),
                                            initCount.getMax().increment());
            m.put(l.localInstance(), initCount);
            return itemToMap(new DataFlowItem(m), succEdgeKeys);
        }
        return null;
    }

    /**
     * Perform the appropriate flow operations for assignment to a field
     */
    protected Map flowFieldAssign(DataFlowItem inItem, FlowGraph graph, FieldAssign a, Set succEdgeKeys) {
        Field f = (Field)a.left();
        FieldInstance fi = f.fieldInstance();

        if (fi.flags().isFinal() && isFieldsTargetAppropriate(f)) {
            // this field is final and the target for this field is
            // appropriate for what we are interested in.
            Map m = new HashMap(inItem.initStatus);
            MinMaxInitCount initCount = (MinMaxInitCount)m.get(fi);
            // initCount may be null if the field is defined in an
            // outer class.
            if (initCount != null) {
                initCount = new MinMaxInitCount(initCount.getMin().increment(),
                                                initCount.getMax().increment());
                m.put(fi, initCount);
                return itemToMap(new DataFlowItem(m), succEdgeKeys);
            }
        }
        return null;
    }

    /**
     * Perform the appropriate flow operations for a constructor call
     */
    protected Map flowConstructorCall(DataFlowItem inItem, FlowGraph graph, ConstructorCall cc, Set succEdgeKeys) {
        if (ConstructorCall.THIS.equals(cc.kind())) {
            // currCodeDecl must be a ConstructorDecl, as that
            // is the only place constructor calls are allowed

            // record the fact that the current constructor calls the other
            // constructor
            currCBI.constructorCalls.put(((ConstructorDecl)currCBI.currCodeDecl).constructorInstance(),
                                         cc.constructorInstance());
        }
        return null;
    }

    /**
     * Determine if we are interested in this field on the basis of the
     * target of the field. To wit, if the field
     * is static, then the target of the field must be the current class; if
     * the field is not static then the target must be "this".
     */
    protected boolean isFieldsTargetAppropriate(Field f) {
        if (f.fieldInstance().flags().isStatic()) {
            ClassType containingClass = (ClassType)currCBI.currCodeDecl.codeInstance().container();
            return containingClass.equals(f.fieldInstance().container());
        }
        else {
            return (f.target() instanceof Special &&
                    Special.THIS.equals(((Special)f.target()).kind()));
        }
    }

    /**
     * Check that the conditions of initialization are not broken.
     *
     * To summarize the conditions:
     * - Local variables must be initialized before use, (i.e. min count > 0)
     * - Final local variables (including Formals) cannot be assigned to more
     *   than once (i.e. max count <= 1)
     * - Final non-static fields whose target is this cannot be assigned to
     *   more than once
     * - Final static fields whose target is the current class cannot be
     *   assigned to more than once
     *
     * This method is also responsible for maintaining state between the
     * dataflows over Initializers, by copying back the appropriate
     * MinMaxInitCounts to the map currClassFinalFieldInitCounts.
     */
    public void check(FlowGraph graph, Term n, Item inItem, Map outItems) throws SemanticException {
        DataFlowItem dfIn = (DataFlowItem)inItem;
        if (dfIn == null) {
            // There is no input data flow item. This can happen if we are
            // checking an unreachable term, and so no Items have flowed
            // through the term. For example, in the code fragment:
            //     a: do { break a; } while (++i < 10);
            // the expression "++i < 10" is unreachable, but the as there is
            // no unreachable statement, the Java Language Spec permits it.

            // Set inItem to a default Item
            dfIn = (DataFlowItem)createInitialItem(graph);
        }

        DataFlowItem dfOut = null;
        if (outItems != null && !outItems.isEmpty()) {
            // due to the flow equations, all DataFlowItems in the outItems map
            // are the same, so just take the first one.
            dfOut = (DataFlowItem)outItems.values().iterator().next();
        }

        if (n instanceof Local) {
            checkLocal(graph, (Local)n, dfIn, dfOut);
        }
        else if (n instanceof LocalAssign) {
            checkLocalAssign(graph, (LocalAssign)n, dfIn, dfOut);
        }
        else if (n instanceof FieldAssign) {
            checkFieldAssign(graph, (FieldAssign)n, dfIn, dfOut);
        }
        else if (n instanceof ClassBody) {
            // we need to check that the locals used inside this class body
            // have all been defined at this point.
            Set localsUsed = (Set)currCBI.localsUsedInClassBodies.get(n);
            if (localsUsed != null) {
                checkLocalsUsedByInnerClass(graph,
                                            (ClassBody)n,
                                            localsUsed,
                                            dfIn,
                                            dfOut);
            }
        }

        if (n == graph.finishNode()) {
            if (currCBI.currCodeDecl instanceof Initializer) {
                // We are finishing the checking of an intializer.
                // We need to copy back the init counts of any fields back into
                // currClassFinalFieldInitCounts, so that the counts are
                // correct for the next initializer or constructor.
                Iterator iter = dfOut.initStatus.entrySet().iterator();
                while (iter.hasNext()) {
                    Map.Entry e = (Map.Entry)iter.next();
                    if (e.getKey() instanceof FieldInstance) {
                        FieldInstance fi = (FieldInstance)e.getKey();
                        if (fi.flags().isFinal()) {
                            // we don't need to join the init counts, as all
                            // dataflows will go through all of the
                            // initializers
                            currCBI.currClassFinalFieldInitCounts.put(fi,
                                                                      e.getValue());
                        }
                    }
                }
            }
        }
    }

    /**
     * Check that the local variable <code>l</code> is used correctly.
     */
    protected void checkLocal(FlowGraph graph, Local l, DataFlowItem dfIn, DataFlowItem dfOut) throws SemanticException {
        MinMaxInitCount initCount = (MinMaxInitCount)
            dfIn.initStatus.get(l.localInstance());
        if (initCount == null) {
            // it's a local variable that has not been declared within
            // this scope. The only way this can arise is from an
            // inner class that is not a member of a class (typically
            // a local class, or an anonymous class declared in a method,
            // constructor or initializer).
            // We need to check that it is a final local, and also
            // keep track of it, to ensure that it has been definitely
            // assigned at this point.
            currCBI.outerLocalsUsed.add(l.localInstance());
        }
        else {
            if (InitCount.ZERO.equals(initCount.getMin())) {
                throw new SemanticException("Local variable \"" + l.name() +
                    "\" may not have been initialized",
                    l.position());
            }
        }
    }

    /**
     * Check that the assignment to a local variable is correct.
     */
    protected void checkLocalAssign(FlowGraph graph, LocalAssign a, DataFlowItem dfIn, DataFlowItem dfOut) throws SemanticException {
        LocalInstance li = ((Local)a.left()).localInstance();
        MinMaxInitCount initCount = (MinMaxInitCount)
            dfOut.initStatus.get(li);

        if (initCount == null) {
            throw new SemanticException("Final local variable \"" + li.name() +
                "\" cannot be assigned to in an inner class.",
                a.position());
        }

        if (li.flags().isFinal() && InitCount.MANY.equals(initCount.getMax())) {
            throw new SemanticException("variable \"" + li.name() +
                "\" might already have been assigned to",
                a.position());
        }
    }

    /**
     * Check that the assignment to a field is correct.
     */
    protected void checkFieldAssign(FlowGraph graph, FieldAssign a, DataFlowItem dfIn, DataFlowItem dfOut) throws SemanticException {
        Field f = (Field)a.left();
        FieldInstance fi = f.fieldInstance();
        if (fi.flags().isFinal()) {
            if ((currCBI.currCodeDecl instanceof ConstructorDecl ||
                 currCBI.currCodeDecl instanceof Initializer) &&
                isFieldsTargetAppropriate(f)) {
                // we are in a constructor or initializer block and
                // if the field is static then the target is the class
                // at hand, and if it is not static then the
                // target of the field is this.
                // So a final field in this situation can be
                // assigned to at most once.
                // NOTE(review): dfOut (and the looked-up initCount) are
                // assumed non-null on this path — confirm the flow equations
                // guarantee that for reachable assignments.
                MinMaxInitCount initCount = (MinMaxInitCount)
                    dfOut.initStatus.get(fi);
                if (InitCount.MANY.equals(initCount.getMax())) {
                    throw new SemanticException("field \"" + fi.name() +
                        "\" might already have been assigned to",
                        a.position());
                }

                // if the field is non-static and final, and we are in
                // a constructor, record the fact that this constructor
                // initializes the field
                if (!fi.flags().isStatic() && currCBI.currCodeDecl instanceof ConstructorDecl) {
                    ConstructorInstance ci = ((ConstructorDecl)currCBI.currCodeDecl).constructorInstance();
                    Set s = (Set)currCBI.fieldsConstructorInitializes.get(ci);
                    if (s == null) {
                        s = new HashSet();
                        currCBI.fieldsConstructorInitializes.put(ci, s);
                    }
                    s.add(fi);
                }
            }
            else {
                // not in a constructor or intializer, or the target is
                // not appropriate. So we cannot assign
                // to a final field at all.
                throw new SemanticException("Cannot assign a value " +
                           "to final field \"" + fi.name() + "\"",
                           a.position());
            }
        }
    }

    /**
     * Check that the set of <code>LocalInstance</code>s
     * <code>localsUsed</code>, which is the set of locals used in the inner
     * class declared by <code>cb</code>
     * are initialized before the class declaration.
     */
    protected void checkLocalsUsedByInnerClass(FlowGraph graph,
                                               ClassBody cb,
                                               Set localsUsed,
                                               DataFlowItem dfIn,
                                               DataFlowItem dfOut) throws SemanticException {
        for (Iterator iter = localsUsed.iterator(); iter.hasNext(); ) {
            LocalInstance li = (LocalInstance)iter.next();
            MinMaxInitCount initCount = (MinMaxInitCount)
                dfOut.initStatus.get(li);
            if (initCount == null) {
                // the local wasn't defined in this scope.
                currCBI.outerLocalsUsed.add(li);
            }
            else if (InitCount.ZERO.equals(initCount.getMin())) {
                throw new SemanticException("Local variable \"" + li.name() +
                        "\" must be initialized before the class " +
                        "declaration.",
                        cb.position());
            }
        }
    }
}
package com.opengamma.engine.marketdata; import com.opengamma.DataNotFoundException; import com.opengamma.core.security.SecuritySource; import com.opengamma.engine.ComputationTargetSpecification; import com.opengamma.engine.target.ComputationTargetReference; import com.opengamma.engine.target.ComputationTargetReferenceVisitor; import com.opengamma.engine.target.ComputationTargetType; import com.opengamma.engine.target.ComputationTargetTypeMap; import com.opengamma.id.ExternalId; import com.opengamma.id.ExternalIdBundle; import com.opengamma.id.UniqueId; import com.opengamma.util.functional.Function1; /** * Look up an external identifier or identifier bundle from a computation target. */ /* package */abstract class AbstractExternalIdentifierLookup<T> implements ComputationTargetReferenceVisitor<T> { private final ComputationTargetTypeMap<Function1<UniqueId, T>> _lookup = new ComputationTargetTypeMap<Function1<UniqueId, T>>(); @SuppressWarnings("unchecked") protected void registerBundleLookup(final ComputationTargetType type, final Function1<UniqueId, ExternalIdBundle> operation) { _lookup.put(type, (Function1<UniqueId, T>) operation); } @SuppressWarnings("unchecked") protected void registerIdentifierLookup(final ComputationTargetType type, final Function1<UniqueId, ExternalId> operation) { _lookup.put(type, (Function1<UniqueId, T>) operation); } public AbstractExternalIdentifierLookup(final SecuritySource securitySource) { if (securitySource != null) { registerBundleLookup(ComputationTargetType.SECURITY, new Function1<UniqueId, ExternalIdBundle>() { @Override public ExternalIdBundle execute(final UniqueId uid) { try { return securitySource.get(uid).getExternalIdBundle(); } catch (DataNotFoundException e) { return null; } } }); } } protected T lookup(final ComputationTargetReference target) { return target.accept(this); } protected abstract T fromUniqueId(final UniqueId uid); @Override public T visitComputationTargetSpecification(final ComputationTargetSpecification 
specification) { final Function1<UniqueId, T> operation = _lookup.get(specification.getType()); T result = null; if (operation != null) { result = operation.execute(specification.getUniqueId()); } if (result == null && specification.getUniqueId() != null) { // Some code may still exist that is forcing an identifier into a unique id when constructing value requirements. We handle // that case here temporarily. This code should be removed when all of the functions have been fixed and there are no remaining // view definitions in the configuration database that have unique id forms for primitive requirements. There are also the // ValueSpecification keys used in the dependency graph for market data. Going from those to the original requirements is not // good but code may still be doing it - it should be querying the dependency graph for the ValueRequirement data. return fromUniqueId(specification.getUniqueId()); } else { return result; } } }
package be.howest.twentytwo.parametergame.model.component; import com.badlogic.ashley.core.Component; import com.badlogic.ashley.core.ComponentMapper; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.utils.Pool.Poolable; /** * Sprite / Texture data */ public class SpriteComponent implements Component, Poolable { // TODO: Should I rename this TextureRegionComponent? This could create confusion with actual Sprite class public static final ComponentMapper<SpriteComponent> MAPPER = ComponentMapper.getFor(SpriteComponent.class); private TextureRegion region; // TextureRegion for sprite sheet reasons public TextureRegion getRegion() { return region; } public void setRegion(TextureRegion region) { this.region = region; } @Override public void reset() { // Need to reset? Should be set by factory anyway. } }
package engine;

import java.util.*;

import engine.gui.AbilityListArea;
import engine.gui.UnitListArea;
import engine.gui.UnitStatusArea;
import model.tile.Tile;
import model.unit.Unit;
import jgame.JGObject;
import jgame.platform.JGEngine;

/**
 * JGame sprite wrapper around a model {@link Tile}. Positions itself from the
 * tile's grid coordinates and handles mouse clicks on the tile: either
 * triggering an ability on a highlighted tile, or populating the GUI panels
 * with the units standing on it.
 */
public class GameTileObject extends JGObject implements EngineConstants{

	private Tile myTile;
	// Collision id shared by all tile objects (used by the engine's hit tests).
	private static final int myCollisionID = 8;
	// Tile size in pixels. NOTE(review): getImageBBox() is used for actual
	// positioning in move(), so confirm this constant matches the artwork.
	private static final int mySize = 79;
	private boolean isHighlighted = false;
	private GameEngine myEngine;

	public GameTileObject(Tile tile, GameEngine engine) {
		// Sprite name is looked up from the tile's "Terrain" stat collection id.
		super("tile", true, 0, 0, myCollisionID, tile.getStatCollection("Terrain").getID());
		myEngine = engine;
		myTile = tile;
	}

	/** Snap the sprite to the tile's grid position every frame. */
	@Override
	public void move(){
		this.setPos(myTile.getX()*this.getImageBBox().width, myTile.getY()*this.getImageBBox().height);
	}

	public static int getCollisionID() {
		return myCollisionID;
	}

	public static int getSize() {
		return mySize;
	}

	public Tile getTile() {
		return myTile;
	}

	public void setHighlighted(boolean highlighted) {
		isHighlighted = highlighted;
	}

	public boolean isHighlighted() {
		return isHighlighted;
	}

	/**
	 * Mouse-click handling (collision with the mouse cursor object).
	 * A click on a highlighted tile executes the pending ability and clears
	 * highlights; otherwise the click selects the tile and refreshes the
	 * ability/unit GUI areas with the current player's units on this tile.
	 */
	@Override
	public void hit(JGObject other){
		//System.out.println("Tile hit");
		if (other.colid == MOUSE_COL_ID && myEngine.getMouseButton(1)) {
			// Consume the click so it is not processed again next frame.
			myEngine.clearMouseButton(1);
			System.out.println("tile hit");
			System.out.println(isHighlighted());
			if (this.isHighlighted()) {
				myEngine.getModel().useAbility(myTile);
				myEngine.removeHighlights();
				return;
			}
			// Only units owned by the current player are selectable.
			List<Unit> unitList = myTile.getUnits();
			List<Unit> selectableUnitList = new ArrayList<Unit>();
			for (Unit unit : unitList) {
				if (myEngine.getGameManager().getCurrentPlayer().equals(unit.getPlayer())) {
					selectableUnitList.add(unit);
				}
			}
			for (Unit unit : unitList) {
				System.out.println(unit.getID());
			}
			// Reset the GUI panels, then show the selectable units.
			AbilityListArea abilityListArea = (AbilityListArea) GameViewer.getActionPanel().getAbilityListArea();
			abilityListArea.clear();
			UnitStatusArea unitStatusArea = (UnitStatusArea) GameViewer.getFeedbackPanel().getUnitStatusArea();
			unitStatusArea.setStatusText("");
			UnitListArea unitListArea = (UnitListArea) GameViewer.getActionPanel().getUnitListArea();
			unitListArea.loadUnitList(selectableUnitList);
		}
	}
}
package orc.runtime.sites; import java.util.TreeMap; import java.util.Map; import orc.error.MessageNotUnderstoodException; import orc.error.TokenException; import orc.runtime.Args; import orc.runtime.values.Value; /** * @author dkitchin * * Dot-accessible sites should extend this class and declare their Orc-available * methods using addMethods. The code is forward-compatible with many possible * optimizations on the field lookup strategy. */ public abstract class DotSite extends EvalSite { Map<String,Value> methodMap; public DotSite() { methodMap = new TreeMap<String,Value>(); this.addMethods(); } /* (non-Javadoc) * @see orc.runtime.sites.Site#callSite(java.lang.Object[], orc.runtime.Token, orc.runtime.values.GroupCell, orc.runtime.OrcEngine) */ @Override public Value evaluate(Args args) throws TokenException { String f = args.fieldName(); Value m = getMethod(f); if (m != null) { return m; } else { throw new MessageNotUnderstoodException(f); } } Value getMethod(String f) { return methodMap.get(f); } // Subclasses implement this method with a sequence of addMethod calls. protected abstract void addMethods(); protected void addMethod(String f, Value s) { methodMap.put(f, s); } }
package net.runelite.client.plugins.barbarianassault;

import com.google.inject.Provides;
import java.awt.Font;
import java.awt.Image;
import javax.inject.Inject;
import net.runelite.api.ChatMessageType;
import net.runelite.api.Client;
import net.runelite.api.Varbits;
import net.runelite.api.events.ChatMessage;
import net.runelite.api.events.VarbitChanged;
import net.runelite.api.events.WidgetLoaded;
import net.runelite.api.widgets.Widget;
import net.runelite.api.widgets.WidgetID;
import net.runelite.api.widgets.WidgetInfo;
import net.runelite.client.chat.ChatColorType;
import net.runelite.client.chat.ChatMessageBuilder;
import net.runelite.client.chat.ChatMessageManager;
import net.runelite.client.chat.QueuedMessage;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.eventbus.Subscribe;
import net.runelite.client.plugins.Plugin;
import net.runelite.client.plugins.PluginDescriptor;
import net.runelite.client.ui.FontManager;
import net.runelite.client.ui.overlay.OverlayManager;
import net.runelite.client.util.ImageUtil;

@PluginDescriptor(
	name = "Barbarian Assault",
	description = "Show a timer to the next call change and game/wave duration in chat.",
	tags = {"minigame", "overlay", "timer"}
)
public class BarbarianAssaultPlugin extends Plugin
{
	// Index of the wave number in the "---- Wave: N ----" chat message split on spaces.
	private static final int BA_WAVE_NUM_INDEX = 2;
	private static final String START_WAVE = "1";
	// Fragment that only appears on the end-of-game reward screen, distinguishing
	// it from the per-wave reward screen.
	private static final String ENDGAME_REWARD_NEEDLE_TEXT = "<br>5";

	private Font font;
	private Image clockImage;
	// Last observed value of the IN_GAME_BA varbit (1 while in a game, 0 otherwise).
	private int inGameBit = 0;
	private String currentWave = START_WAVE;
	// Started when wave 1 begins; null until a game start has been observed.
	private GameTimer gameTime;

	@Inject
	private Client client;

	@Inject
	private ChatMessageManager chatMessageManager;

	@Inject
	private OverlayManager overlayManager;

	@Inject
	private BarbarianAssaultConfig config;

	@Inject
	private BarbarianAssaultOverlay overlay;

	@Provides
	BarbarianAssaultConfig provideConfig(ConfigManager configManager)
	{
		return configManager.getConfig(BarbarianAssaultConfig.class);
	}

	@Override
	protected void startUp() throws Exception
	{
		overlayManager.add(overlay);
		font = FontManager.getRunescapeFont()
			.deriveFont(Font.BOLD, 24);
		clockImage = ImageUtil.getResourceStreamFromClass(getClass(), "clock.png");
	}

	@Override
	protected void shutDown() throws Exception
	{
		// Reset all per-game state so a later startUp begins cleanly.
		overlayManager.remove(overlay);
		gameTime = null;
		currentWave = START_WAVE;
		inGameBit = 0;
	}

	/**
	 * Reward screens announce the total game duration; the four role widgets
	 * establish which role the player has this round.
	 */
	@Subscribe
	public void onWidgetLoaded(WidgetLoaded event)
	{
		switch (event.getGroupId())
		{
			case WidgetID.BA_REWARD_GROUP_ID:
			{
				Widget rewardWidget = client.getWidget(WidgetInfo.BA_REWARD_TEXT);
				// Only announce on the final (end-of-game) reward screen.
				if (config.waveTimes() && rewardWidget != null && rewardWidget.getText().contains(ENDGAME_REWARD_NEEDLE_TEXT) && gameTime != null)
				{
					announceTime("Game finished, duration: ", gameTime.getTime(false));
				}
				break;
			}
			case WidgetID.BA_ATTACKER_GROUP_ID:
			{
				setOverlayRound(Role.ATTACKER);
				break;
			}
			case WidgetID.BA_DEFENDER_GROUP_ID:
			{
				setOverlayRound(Role.DEFENDER);
				break;
			}
			case WidgetID.BA_HEALER_GROUP_ID:
			{
				setOverlayRound(Role.HEALER);
				break;
			}
			case WidgetID.BA_COLLECTOR_GROUP_ID:
			{
				setOverlayRound(Role.COLLECTOR);
				break;
			}
		}
	}

	/**
	 * Track wave transitions from the "---- Wave: N ----" game message:
	 * wave 1 starts a new game timer, later waves just mark the wave start.
	 */
	@Subscribe
	public void onChatMessage(ChatMessage event)
	{
		if (event.getType() == ChatMessageType.GAMEMESSAGE
			&& event.getMessage().startsWith("---- Wave:"))
		{
			String[] message = event.getMessage().split(" ");
			currentWave = message[BA_WAVE_NUM_INDEX];

			if (currentWave.equals(START_WAVE))
			{
				gameTime = new GameTimer();
			}
			else if (gameTime != null)
			{
				gameTime.setWaveStartTime();
			}
		}
	}

	/**
	 * The IN_GAME_BA varbit flips to 0 when a wave ends; that transition is
	 * when the wave duration is announced and the overlay round is cleared.
	 */
	@Subscribe
	public void onVarbitChanged(VarbitChanged event)
	{
		int inGame = client.getVar(Varbits.IN_GAME_BA);

		if (inGameBit != inGame)
		{
			if (inGameBit == 1)
			{
				// Just left a wave.
				overlay.setCurrentRound(null);

				if (config.waveTimes() && gameTime != null)
				{
					announceTime("Wave " + currentWave + " duration: ", gameTime.getTime(true));
				}
			}
		}

		inGameBit = inGame;
	}

	private void setOverlayRound(Role role)
	{
		// Prevent changing roles when a role is already set, as widgets can be
		// loaded multiple times in game from eg. opening and closing the horn
		// of glory.
		if (overlay.getCurrentRound() != null)
		{
			return;
		}
		overlay.setCurrentRound(new Round(role));
	}

	/** Queue a console chat line of the form "<preText><highlighted time>". */
	private void announceTime(String preText, String time)
	{
		final String chatMessage = new ChatMessageBuilder()
			.append(ChatColorType.NORMAL)
			.append(preText)
			.append(ChatColorType.HIGHLIGHT)
			.append(time)
			.build();

		chatMessageManager.queue(QueuedMessage.builder()
			.type(ChatMessageType.CONSOLE)
			.runeLiteFormattedMessage(chatMessage)
			.build());
	}

	public Font getFont()
	{
		return font;
	}

	public Image getClockImage()
	{
		return clockImage;
	}
}
package com.polidea.rxandroidble.sample.example7_long_write;

import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.util.Pair;
import com.polidea.rxandroidble.RxBleClient;
import com.polidea.rxandroidble.RxBleConnection;
import com.polidea.rxandroidble.sample.SampleApplication;
import com.trello.rxlifecycle.components.support.RxAppCompatActivity;
import java.util.UUID;
import rx.Observable;
import rx.Subscription;

/**
 * For the sake of this example lets assume that we have a Bluetooth Device that is retrieved by:
 * <p>
 * rxBleClient.getBleDevice(DUMMY_DEVICE_ADDRESS) // (it can be retrieved by scanning as well)
 * <p>
 * This device has two notification characteristics:
 * DEVICE_CALLBACK_0 (DC0) notifies when the previously sent batch was received
 * DEVICE_CALLBACK_1 (DC1) notifies when the device is ready to receive the next packet
 * <p>
 * Lets assume that we do not know if the DC0 or DC1 will notify first.
 * It may also happen that Android OS will inform that the batch was transmitted after both DC0 and DC1 notify.
 * <p>
 * We need to write 1024 bytes of data to the device
 */
public class LongWriteExampleActivity extends RxAppCompatActivity {

    public static final String DUMMY_DEVICE_ADDRESS = "AA:AA:AA:AA:AA:AA";
    // Placeholder UUIDs for the example - a real device declares fixed characteristic UUIDs.
    private static final UUID DEVICE_CALLBACK_0 = UUID.randomUUID();
    private static final UUID DEVICE_CALLBACK_1 = UUID.randomUUID();
    private static final UUID WRITE_CHARACTERISTIC = UUID.randomUUID();
    private byte[] bytesToWrite = new byte[1024]; // a kilobyte array
    private Subscription subscription;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        final RxBleClient rxBleClient = SampleApplication.getRxBleClient(this);
        subscription = rxBleClient.getBleDevice(DUMMY_DEVICE_ADDRESS) // get our assumed device
                .establishConnection(this, false) // establish the connection
                .flatMap(rxBleConnection -> Observable.combineLatest( // after establishing the connection lets setup the notifications
                        rxBleConnection.setupNotification(DEVICE_CALLBACK_0),
                        rxBleConnection.setupNotification(DEVICE_CALLBACK_1),
                        Pair::new
                        ),
                        (rxBleConnection, callbackObservablePair) -> { // after the setup lets start the long write
                            Observable<byte[]> deviceCallback0 = callbackObservablePair.first;
                            Observable<byte[]> deviceCallback1 = callbackObservablePair.second;
                            return rxBleConnection.createNewLongWriteBuilder() // create a new long write builder
                                    .setBytes(bytesToWrite) // REQUIRED - set the bytes to write
                                    /**
                                     * REQUIRED - To perform a write you need to specify to which characteristic you want to write. You can do it
                                     * either by calling {@link LongWriteOperationBuilder#setCharacteristicUuid(UUID)} or
                                     * {@link LongWriteOperationBuilder#setCharacteristic(BluetoothGattCharacteristic)}
                                     */
                                    .setCharacteristicUuid(WRITE_CHARACTERISTIC) // set the UUID of the characteristic to write
                                    // .setCharacteristic( /* some BluetoothGattCharacteristic */ ) // alternative to setCharacteristicUuid()
                                    /**
                                     * If you want to send batches with length other than default.
                                     * Default value is 20 bytes if MTU was not negotiated. If the MTU was negotiated prior to the Long Write
                                     * Operation execution then the batch size default is the new MTU.
                                     */
                                    // .setMaxBatchSize( /* your batch size */ )
                                    /**
                                     * Inform the Long Write when we want to send the next batch of data. If not set the operation will try to write
                                     * the next batch of data as soon as the Android will call `BluetoothGattCallback.onCharacteristicWrite()` but
                                     * we want to postpone it until also DC0 and DC1 will emit.
                                     */
                                    .setWriteOperationAckStrategy(new RxBleConnection.WriteOperationAckStrategy() {
                                        @Override
                                        public Observable<Boolean> call(Observable<Boolean> booleanObservable) {
                                            return Observable.zip( // so we zip three observables
                                                    deviceCallback0, // DEVICE_CALLBACK_0
                                                    deviceCallback1, // DEVICE_CALLBACK_1
                                                    booleanObservable, /* previous batch of data was sent - we do not care if value emitted from
                                                     the booleanObservable is TRUE or FALSE. But the value will be TRUE unless the previously sent
                                                     data batch was the final one */
                                                    (callback0, callback1, aBoolean) -> aBoolean // value of the returned Boolean is not important
                                            );
                                        }
                                    })
                                    .build();
                        })
                .flatMap(observable -> observable) // unwrap the Observable produced by the long write builder
                .take(1) // after the successful write we are no longer interested in the connection so it will be released
                .subscribe(
                        bytes -> {
                            // react
                        },
                        throwable -> {
                            // handle error
                        }
                );
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Tear down the connection/write pipeline with the activity.
        subscription.unsubscribe();
        subscription = null;
    }
}
package gov.nih.nci.codegen.util;

import gov.nih.nci.codegen.GenerationException;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAttribute;
import gov.nih.nci.ncicb.xmiinout.domain.UMLClass;
import gov.nih.nci.ncicb.xmiinout.domain.UMLModel;
import gov.nih.nci.ncicb.xmiinout.domain.UMLPackage;
import gov.nih.nci.ncicb.xmiinout.domain.UMLTaggedValue;
import gov.nih.nci.ncicb.xmiinout.util.ModelUtil;

import java.util.ArrayList;
import java.util.List;

/**
 * Builds a node graph for an ISO 21090 datatype attribute and renders it as
 * Hibernate XML mapping fragments (class, component, join, set, etc.).
 */
public class IsoDatatypeTransformationHelper
{
	private UMLModel model;
	private TransformerUtils utils;

	public UMLModel getModel() {
		return model;
	}

	public void setModel(UMLModel model) {
		this.model = model;
	}

	public TransformerUtils getUtils() {
		return utils;
	}

	public void setUtils(TransformerUtils utils) {
		this.utils = utils;
	}

	/**
	 * Determines if the node requires Join element in the Hibernate layer
	 *
	 * @param rootNode
	 * @return
	 */
	public boolean requiresJoin(RootNode rootNode)
	{
		if(rootNode.getTargetTableName() != null && !"".equals(rootNode.getTargetTableName()))
		{
			// Non-DSET datatype mapped to another table without its own primary key -> <join>.
			if((rootNode.getTargetTablePrimaryKey() == null || "".equals(rootNode.getTargetTablePrimaryKey())) && !rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET"))
			{
				return true;
			}
			// Non-DSET datatype reached via a correlation (join) table -> <join>.
			else if(rootNode.getJoinTableName() != null && !"".equals(rootNode.getJoinTableName()) && !rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET"))
			{
				return true;
			}
		}
		return false;
	}

	/**
	 * Determines if the node requires separate entity class mapping in the Hibernate layer
	 *
	 * @param rootNode
	 * @return
	 */
	public boolean requiresSeperateClassMapping(RootNode rootNode)
	{
		// A separate <class> mapping is needed when the datatype lives in its
		// own table that has its own primary key.
		if(rootNode.getTargetTableName()!= null && !"".equals(rootNode.getTargetTableName())
				&& rootNode.getTargetTablePrimaryKey() != null && !"".equals(rootNode.getTargetTablePrimaryKey()))
		{
			return true;
		}
		return false;
	}

	/**
	 * Higher level class which constructs the Graph by reading the mapped-attributes tag values.
	 * Subsequently it looks into the UML Model for the ISO type structure and sets the datatype for all the attributes
	 *
	 * @param klass
	 * @param attr
	 * @param table
	 * @throws GenerationException
	 */
	public RootNode getDatatypeNode(UMLClass klass, UMLAttribute attr, UMLClass table) throws GenerationException
	{
		RootNode rootNode = createDatatypeNode(klass, attr, table);
		rootNode.setIsoClassName(utils.ISO_ROOT_PACKAGE_NAME+"."+attr.getDatatype().getName());
		rootNode.setParentClassName(utils.getFQCN(klass));
		traverseNodeAndAttachDataType(rootNode, klass, attr);
		attachDataModelInformation(rootNode,klass, attr, table);
		//printNode(rootNode, "");
		return rootNode;
	}

	/**
	 * Converts the rootNode into corresponding Hibernate entity class mapping
	 *
	 * @param rootNode
	 * @param prefix
	 * @return
	 * @throws GenerationException
	 */
	public StringBuffer convertToHibernateClass(RootNode rootNode, String prefix) throws GenerationException
	{
		StringBuffer buffer = new StringBuffer();

		ComplexNode processingNode = rootNode;
		// For DSET collections the mapped entity is the first complex inner
		// node (the element type), not the DSET wrapper itself.
		if(rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET"))
		{
			for(Node innerNode: rootNode.getInnerNodes())
			{
				if(innerNode instanceof ComplexNode)
				{
					processingNode = (ComplexNode)innerNode;
					break;
				}
			}
		}

		String entityName = converteIsoPropertyToEntityName(rootNode.getParentClassName(),rootNode.getName());
		String className = converteIsoClassNameToJavaClassName(processingNode.getIsoClassName());

		buffer.append(prefix+"<class name=\""+className+"\" table=\""+rootNode.getTargetTableName()+"\" entity-name=\""+entityName+"\">");
		buffer.append(prefix+"\t<id column=\""+rootNode.getTargetTablePrimaryKey()+"\" type=\"int\"/>");

		// Simple properties first, then nested components (mirrors Hibernate's
		// expected element ordering).
		for(Node innerNode: processingNode.getInnerNodes())
		{
			if(innerNode instanceof SimpleNode)
			{
				convertSimpleNodeToHibernateProperty((SimpleNode)innerNode, buffer, prefix+"\t");
			}
		}

		for(Node innerNode: processingNode.getInnerNodes())
		{
			if(innerNode instanceof ComplexNode)
			{
				convertComplexNodeToHibernateComponent((ComplexNode)innerNode,buffer,prefix+"\t");
			}
		}
		buffer.append(prefix+"</class>");
		return buffer;
	}

	/**
	 * Converts the rootNode into corresponding Hibernate component mapping.
	 * The mapping style is chosen from the combination of target table,
	 * target table primary key, join table, and whether the type is a DSET
	 * collection.
	 *
	 * @param rootNode
	 * @param prefix
	 * @return
	 * @throws GenerationException
	 */
	public StringBuffer convertToHibernateComponent(RootNode rootNode, String prefix) throws GenerationException
	{
		StringBuffer buffer = new StringBuffer();
		String mappingStyle;
		if(rootNode.getTargetTableName() == null || "".equals(rootNode.getTargetTableName()))
		{
			//Same table
			mappingStyle = "component";
			convertComplexNodeToHibernateComponent(rootNode,buffer,prefix);
		}
		else
		{
			if(rootNode.getTargetTablePrimaryKey() == null || "".equals(rootNode.getTargetTablePrimaryKey()))
			{
				if(!rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET"))
				{
					//Other table with no primary key
					mappingStyle = "joined component";
					convertComplexNodeToHibernateJoinedComponent(rootNode,buffer,prefix, rootNode.getTargetTableName(), rootNode.getTargetTableForeignKey());
				}
				else
				{
					//Other table collection with no primary key
					mappingStyle = "component with set of composite-elements";
					convertComplexNodeToHibernateComponentWithSet(rootNode,buffer,prefix,rootNode.getTargetTableName(), rootNode.getTargetTableForeignKey(),null,0);
				}
			}
			else
			{
				if(rootNode.getJoinTableName() == null || "".equals(rootNode.getJoinTableName()))
				{
					if(!rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET"))
					{
						//Other table with primary key
						mappingStyle = "many-to-one";
						convertComplexNodeToHibernateManyToOne(rootNode,buffer,prefix,rootNode.getSelfTableForeignKey());
					}
					else
					{
						//Other table collection with primary key
						mappingStyle = "component with set of one-to-many";
						convertComplexNodeToHibernateComponentWithSet(rootNode,buffer,prefix,rootNode.getTargetTableName(), rootNode.getTargetTableForeignKey(),null,1);
					}
				}
				else
				{
					if(!rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET"))
					{
						//Other table with primary key and join table
						mappingStyle = "joined many-to-one";
						convertComplexNodeToHibernateJoinedManyToOne(rootNode,buffer,prefix,rootNode.getJoinTableName(), rootNode.getJoinTableForeignKey(), rootNode.getJoinTableInverseKey());
					}
					else
					{
						//Other table collection with primary key and join table
						mappingStyle = "component with set of many-to-many";
						convertComplexNodeToHibernateComponentWithSet(rootNode,buffer,prefix,rootNode.getJoinTableName(), rootNode.getJoinTableInverseKey(),rootNode.getJoinTableForeignKey(),2);
					}
				}
			}
		}
		return buffer;
	}

	// Wraps a component mapping in a <join> to another table keyed by keyColumnName.
	private void convertComplexNodeToHibernateJoinedComponent(ComplexNode node, StringBuffer buffer, String prefix, String tableName, String keyColumnName) throws GenerationException
	{
		buffer.append(prefix+"<join table=\""+tableName+"\" optional=\"true\">");
		buffer.append(prefix+"\t<key column=\""+keyColumnName+"\"/>");
		convertComplexNodeToHibernateComponent(node, buffer, prefix+"\t", "component", true);
		buffer.append(prefix+"</join>");
	}

	// Wraps a many-to-one mapping in a <join> via the correlation table.
	private void convertComplexNodeToHibernateJoinedManyToOne(RootNode rootNode, StringBuffer buffer, String prefix, String joinTableName, String keyColumnName, String inverseColumnName)
	{
		buffer.append(prefix+"<join table=\""+joinTableName+"\" optional=\"true\">");
		buffer.append(prefix+"\t<key column=\""+inverseColumnName+"\"/>");
		convertComplexNodeToHibernateManyToOne(rootNode, buffer, prefix+"\t",keyColumnName );
		buffer.append(prefix+"</join>");
	}

	// Emits a <many-to-one> referencing the separately mapped entity.
	private void convertComplexNodeToHibernateManyToOne(RootNode rootNode, StringBuffer buffer, String prefix, String keyColumnName)
	{
		String entityName = converteIsoPropertyToEntityName(rootNode.getParentClassName(),rootNode.getName());
		buffer.append(prefix+"<many-to-one name=\""+rootNode.getName()+"\" entity-name=\""+entityName+"\" column=\""+keyColumnName+"\" lazy=\"false\" cascade=\"all\"/>");
	}

	// Emits a <one-to-many> referencing the separately mapped entity.
	private void convertComplexNodeToHibernateOneToMany(RootNode rootNode, StringBuffer buffer, String prefix)
	{
		String entityName = converteIsoPropertyToEntityName(rootNode.getParentClassName(),rootNode.getName());
		buffer.append(prefix+"<one-to-many entity-name=\""+entityName+"\"/>");
	}

	// Emits a <many-to-many> referencing the separately mapped entity through keyColumnName.
	private void convertComplexNodeToHibernateManyToMany(RootNode rootNode, StringBuffer buffer, String prefix, String keyColumnName)
	{
		String entityName = converteIsoPropertyToEntityName(rootNode.getParentClassName(),rootNode.getName());
		buffer.append(prefix+"<many-to-many entity-name=\""+entityName+"\" column=\""+keyColumnName+"\" lazy=\"false\"/>");
	}

	/**
	 * Emits a <component> containing a <set> for DSET collections.
	 * collectionNodeProcessingInstruction selects the set's element mapping:
	 * 0 = composite-element, 1 = one-to-many, 2 = many-to-many.
	 */
	private void convertComplexNodeToHibernateComponentWithSet(RootNode rootNode, StringBuffer buffer, String prefix, String tableName, String inverseColumnName, String keyColumnName, int collectionNodeProcessingInstruction) throws GenerationException
	{
		String componentClassName = converteIsoClassNameToJavaClassName(rootNode.getIsoClassName());
		buffer.append(prefix+"<component name=\""+rootNode.getName()+"\" class=\""+componentClassName+"\">");
		buffer.append(prefix+"\t<tuplizer class=\"gov.nih.nci.iso21090.hibernate.tuple.ConstantAndNullFlavorTuplizer\"/>");

		for(Node innerNode: rootNode.getInnerNodes())
		{
			if(innerNode instanceof SimpleNode)
			{
				convertSimpleNodeToHibernateProperty((SimpleNode)innerNode, buffer, prefix+"\t");
			}
		}

		for(Node innerNode: rootNode.getInnerNodes())
		{
			if(innerNode instanceof ComplexNode)
			{
				ComplexNode collectionNode = (ComplexNode) innerNode;
				String tableNameString = "table=\""+tableName+"\"";
				buffer.append(prefix+"\t<set name=\""+collectionNode.getName()+"\" lazy=\"false\" "+tableNameString+" cascade=\"all\">");
				buffer.append(prefix+"\t\t<key column=\""+inverseColumnName+"\" not-null=\"false\"/>");
				switch(collectionNodeProcessingInstruction)
				{
					case 1:
						convertComplexNodeToHibernateOneToMany(rootNode, buffer, prefix+"\t\t");
						break;
					case 2:
						convertComplexNodeToHibernateManyToMany(rootNode, buffer, prefix+"\t\t", keyColumnName);
						break;
					default:
						convertComplexNodeToHibernateComponent(collectionNode, buffer,prefix+"\t\t", "composite-element",false);
				}
				buffer.append(prefix+"\t</set>");
				break; //Can process only one collection element
			}
		}
		buffer.append(prefix+"</component>");
	}

	private void convertComplexNodeToHibernateComponent(ComplexNode node, StringBuffer buffer, String prefix) throws GenerationException
	{
		convertComplexNodeToHibernateComponent(node, buffer, prefix, "component", true);
	}

	/**
	 * Emits a <component>/<composite-element> for the node, recursing into
	 * inner nodes. Indexed names like "part[0]" are rewritten to "part_0" and
	 * accessed through the collection property accessor.
	 */
	private void convertComplexNodeToHibernateComponent(ComplexNode node, StringBuffer buffer, String prefix, String elementType, Boolean useElementName) throws GenerationException
	{
		String componentClassName = converteIsoClassNameToJavaClassName(node.getIsoClassName());

		if(node.getName().indexOf('[')>0)
		{
			String name = node.getName();
			name = name.replace('[','_');
			name = name.substring(0, name.length()-1);
			String componentName = useElementName?" name=\""+name+"\"":"";
			String propertyAccessor = "gov.nih.nci.iso21090.hibernate.property.CollectionPropertyAccessor";
			buffer.append(prefix+"<"+elementType+componentName+" class=\""+componentClassName+"\" access=\""+propertyAccessor+"\">");
		}
		else
		{
			String componentName = useElementName?" name=\""+node.getName()+"\"":"";
			buffer.append(prefix+"<"+elementType+componentName+" class=\""+componentClassName+"\">");
		}
		buffer.append(prefix+"\t<tuplizer class=\"gov.nih.nci.iso21090.hibernate.tuple.ConstantAndNullFlavorTuplizer\"/>");

		for(Node innerNode: node.getInnerNodes())
		{
			if(innerNode instanceof SimpleNode)
			{
				convertSimpleNodeToHibernateProperty((SimpleNode)innerNode, buffer, prefix+"\t");
			}
		}

		for(Node innerNode: node.getInnerNodes())
		{
			if(innerNode instanceof ComplexNode)
			{
				// Hibernate requires nested composite elements to use the
				// nested-composite-element tag.
				String convertedElementType = "composite-element".equals(elementType)?"nested-composite-element":elementType;
				convertComplexNodeToHibernateComponent((ComplexNode)innerNode, buffer, prefix+"\t", convertedElementType, true);
			}
		}
		buffer.append(prefix+"</"+elementType+">");
	}

	// Emits a <property>; enum-typed properties go through the EnumUserType.
	private void convertSimpleNodeToHibernateProperty(SimpleNode node, StringBuffer buffer, String prefix) throws GenerationException
	{
		String type = node.getIsoClassName();
		if(isEnum(type))
		{
			buffer.append(prefix+"<property name=\""+node.getName()+"\" column=\""+node.getColumnName()+"\">");
			buffer.append(prefix+"\t<type name=\"gov.nih.nci.iso21090.hibernate.usertype.EnumUserType\">");
			buffer.append(prefix+"\t\t<param name=\"enumClassName\">"+type+"</param>");
			buffer.append(prefix+"\t</type>");
			buffer.append(prefix+"</property>");
		}
		else
		{
			String convertedType = converteIsoAttributeTypeToHibernateType(type);
			buffer.append(prefix+"<property name=\""+node.getName()+"\" column=\""+node.getColumnName()+"\" type=\""+convertedType+"\"/>");
		}
	}

	// True when the ISO type resolves to a UML class stereotyped "enumeration".
	private boolean isEnum(String type) throws GenerationException
	{
		if(!type.startsWith(utils.ISO_ROOT_PACKAGE_NAME+"."))
			return false;
		UMLClass klass = findISOClass(type);
		if (klass == null)
			return false;
		if("enumeration".equals(klass.getStereotype()))
			return true;
		return false;
	}

	// Maps an ISO simple type name to its Hibernate type name.
	// NOTE(review): returns null for unrecognized types, which would render as
	// type="null" in the mapping -- confirm all simple types are covered.
	private String converteIsoAttributeTypeToHibernateType(String isoType)
	{
		if(isoType.startsWith(utils.ISO_ROOT_PACKAGE_NAME+"."))
			isoType = isoType.substring((utils.ISO_ROOT_PACKAGE_NAME+".").length());

		String propertyType = null;
		if("Uri".equals(isoType))
			propertyType = "gov.nih.nci.iso21090.hibernate.usertype.UriUserType";
		if("Uid".equals(isoType))
			propertyType = "string";
		if("Code".equals(isoType))
			propertyType = "string";
		if("String".equals(isoType))
			propertyType = "string";
		if("Binary".equals(isoType))
			propertyType = "org.springframework.orm.hibernate3.support.BlobByteArrayType";
		if("Integer".equals(isoType))
			propertyType = "integer";
		if("Real".equals(isoType))
			propertyType = "big_decimal";
		if("Boolean".equals(isoType))
			propertyType = "boolean";
		if("date".equals(isoType))
			propertyType = "java.util.Date";
		return propertyType;
	}

	// Strips generic parameters (e.g. "DSET<CD>" -> "DSET") and resolves the
	// concrete implementation class via the datatype map.
	private String converteIsoClassNameToJavaClassName(String isoClassName)
	{
		String isoName = isoClassName.substring((utils.ISO_ROOT_PACKAGE_NAME+".").length());
		if(isoName.indexOf('<')>0)
			isoName = isoName.substring(0,isoName.indexOf('<'));
		return utils.ISO_ROOT_PACKAGE_NAME+"."+utils.isoDatatypeCompleteMap.get(isoName);
	}

	// Synthesizes a unique Hibernate entity-name for the attribute's mapping.
	private String converteIsoPropertyToEntityName(String className, String attributeName)
	{
		return "_xxEntityxx_"+className.replace('.', '_')+"_"+attributeName;
	}

	/**
	 * Determines the table that the datatype is suppose to be persisted.
* Determines the foreign key column, inverse column, primary key column, join table * * @param rootNode * @throws GenerationException */ private void attachDataModelInformation(RootNode rootNode, UMLClass klass, UMLAttribute attr, UMLClass table) throws GenerationException { String selfTableName = null; String selfTableForeignKey = null; String targetTableName = null; String targetTablePrimaryKey = null; String targetTableForeignKey = null; String joinTableName = null; String joinTableForeignKey = null; String joinTableInverseKey = null; selfTableName = table.getName(); targetTableName = utils.getTagValue(attr,utils.TV_MAPPED_COLLECTION_TABLE); if(targetTableName != null && !"".equals(targetTableName)) { UMLClass assocTable = ModelUtil.findClass(model,utils.getBasePkgDataModel()+"."+targetTableName); if(assocTable == null) throw new GenerationException("No associated table found named : \""+targetTableName+"\""); String fqcn = utils.getFQCN(klass); String attrFQCN = fqcn + "." + attr.getName(); joinTableName = utils.getTagValue(attr,utils.TV_CORRELATION_TABLE); if(joinTableName != null && !"".equals(joinTableName)) { UMLClass joinTable = ModelUtil.findClass(model,utils.getBasePkgDataModel()+"."+joinTableName); if(joinTable == null) throw new GenerationException("No associated table found named : \""+targetTableName+"\""); joinTableForeignKey = utils.getColumnName(joinTable, utils.TV_ASSOC_COLUMN, attrFQCN, false, 1, 1); joinTableInverseKey = utils.getColumnName(joinTable, utils.TV_INVERSE_ASSOC_COLUMN, attrFQCN, false, 1, 1); targetTablePrimaryKey = utils.getColumnName(assocTable, utils.TV_MAPPED_ATTR_COLUMN, attrFQCN + ".id", false, 1, 1); } else { targetTablePrimaryKey = utils.getColumnName(assocTable, utils.TV_MAPPED_ATTR_COLUMN, attrFQCN + ".id", false, 0, 1); if(rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET")) { targetTableForeignKey = utils.getColumnName(assocTable, utils.TV_ASSOC_COLUMN, attrFQCN, false, 1, 1); } else { 
if(targetTablePrimaryKey!=null && !"".equals(targetTablePrimaryKey)) { selfTableForeignKey = utils.getColumnName(table, utils.TV_ASSOC_COLUMN, attrFQCN, false, 1, 1); } else { targetTableForeignKey = utils.getColumnName(assocTable, utils.TV_ASSOC_COLUMN, attrFQCN, false, 1, 1); } } } } rootNode.setSelfTableName(selfTableName); rootNode.setSelfTableForeignKey(selfTableForeignKey); rootNode.setTargetTableName(targetTableName); rootNode.setTargetTablePrimaryKey(targetTablePrimaryKey); rootNode.setTargetTableForeignKey(targetTableForeignKey); rootNode.setJoinTableName(joinTableName); rootNode.setJoinTableForeignKey(joinTableForeignKey); rootNode.setJoinTableInverseKey(joinTableInverseKey); } /** * Main method to create the data structure for the graph by reading the mapped-attributes tag value * * @param klass * @param attr * @param table * @return * @throws GenerationException */ public RootNode createDatatypeNode(UMLClass klass, UMLAttribute attr, UMLClass table) throws GenerationException { String prefix = utils.getFQCN(klass) + "." + attr.getName() + "."; String assocTableName = utils.getTagValue(attr,utils.TV_MAPPED_COLLECTION_TABLE); UMLClass datatypeTable = assocTableName == null? 
table : utils.findCollectionTable(attr, model); RootNode rootNode = new RootNode(attr.getName()); for(UMLAttribute column: datatypeTable.getAttributes()) { for(UMLTaggedValue tv: column.getTaggedValues()) { if (utils.TV_MAPPED_ATTR_COLUMN.equals(tv.getName())) { String tvValue = tv.getValue(); String[] tvValues = tvValue.split(","); for(String val:tvValues) { if(val.startsWith(prefix) && !val.equals(prefix+"id")) { parseAndAddNode(rootNode,val.substring(prefix.length()),column.getName()); } } } } } return rootNode; } /** * Method responsible for parsing the tag value and converting in the graph node * * @param rootNode * @param value * @param columnName * @throws GenerationException */ private void parseAndAddNode(ComplexNode rootNode, String value, String columnName) throws GenerationException { String[] nodePath = value.split("\\."); ComplexNode currentNode = rootNode; for(int i=0;i<nodePath.length -1; i++) { boolean createNewNode = true; for(Node node: currentNode.getInnerNodes()) { if(node.getName().equals(nodePath[i])) { if(node instanceof SimpleNode) throw new GenerationException("Can not add "+rootNode.getName()+"."+value+". It is defined as simple"); currentNode = (ComplexNode)node; createNewNode = false; } } if(createNewNode) { ComplexNode newNode = new ComplexNode(nodePath[i]); currentNode.addInnerNode(newNode); currentNode = newNode; } } for(Node node: currentNode.getInnerNodes()) { if(node.getName().equals(nodePath[nodePath.length-1])) { throw new GenerationException("Can not map "+rootNode.getName()+"."+value+" twice. 
It is already defined"); } } SimpleNode newNode = new SimpleNode(nodePath[nodePath.length-1]); newNode.setColumnName(columnName); currentNode.addInnerNode(newNode); } /** * Traverses the graph and attaches the property type to the graph nodes * * @param parentNode * @throws GenerationException */ private void traverseNodeAndAttachDataType(Node parentNode, UMLClass klass, UMLAttribute attribute) throws GenerationException { if(parentNode instanceof ComplexNode) { ComplexNode currentNode = (ComplexNode)parentNode; for(Node node: currentNode.getInnerNodes()) { if(node.getIsoClassName() == null) { String datatype = findIsoDatatypeInNode(currentNode.getIsoClassName(), node.getName(), klass, attribute); node.setIsoClassName(datatype); } if(node instanceof ComplexNode) traverseNodeAndAttachDataType(node, klass, attribute); } } } /** * Looks up the type of the attributeName in he isoClassName. Certain specific rules on the DSET, EN, and AD are followed * * @param isoClassName * @param attributeName * @param attribute * @param klass * @return * @throws GenerationException */ private String findIsoDatatypeInNode(String isoClassName, String attributeName, UMLClass klass, UMLAttribute attribute) throws GenerationException { if(!isoClassName.startsWith(utils.ISO_ROOT_PACKAGE_NAME+".")) throw new GenerationException("Can not process Non-ISO datatype "+isoClassName+". 
Discovered in "+utils.getFQCN(klass)+"."+attribute.getName()); String returnVal = null; if(isoClassName.startsWith(utils.ISO_ROOT_PACKAGE_NAME+".EN") && !isoClassName.startsWith(utils.ISO_ROOT_PACKAGE_NAME+".ENXP")) { returnVal = utils.ISO_ROOT_PACKAGE_NAME+".ENXP"; } else if(isoClassName.equals(utils.ISO_ROOT_PACKAGE_NAME+".AD")) { String key = utils.TV_MAPPED_COLLECTION_ELEMENT_TYPE+":"+attributeName; String collectionElementTypeValue = utils.getTagValue(klass, attribute, key, null , false, 0, 1); if(collectionElementTypeValue == null) throw new GenerationException("Can not find type of AD collection item for "+attributeName +"in "+utils.getFQCN(klass)+"."+attribute.getName()); UMLClass isoKlass = findISOClass(utils.ISO_ROOT_PACKAGE_NAME+"."+collectionElementTypeValue); if(isoKlass == null) throw new GenerationException("Can not find class "+collectionElementTypeValue); //TODO returnVal = utils.ISO_ROOT_PACKAGE_NAME+"."+collectionElementTypeValue; } else if(isoClassName.equals(utils.ISO_ROOT_PACKAGE_NAME+".TS") && "value".equals(attributeName)) { returnVal = "date"; } else { UMLClass isoKlass = findISOClass(isoClassName); //For DSet elements if(isoKlass == null && isoClassName.indexOf('<')>0 && isoClassName.startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET")) { String typeClassName = isoClassName.substring(isoClassName.indexOf('<')+1,isoClassName.indexOf('>')); returnVal = utils.ISO_ROOT_PACKAGE_NAME+"."+typeClassName; } else { UMLClass currentKlass = isoKlass; UMLAttribute attr = null; while(currentKlass != null) { attr = currentKlass.getAttribute(attributeName); if(attr!=null) break; currentKlass = utils.getSuperClass(currentKlass); } if(attr == null) throw new GenerationException("Can not find attribute "+attributeName+ " in "+ isoClassName); UMLClass attrKlass = findISOClass(utils.ISO_ROOT_PACKAGE_NAME+"."+attr.getDatatype().getName()); if(attrKlass != null) { returnVal = utils.ISO_ROOT_PACKAGE_NAME+"."+attr.getDatatype().getName(); } else { returnVal = 
attr.getDatatype().getName(); } } } return returnVal; } /** * Finds the ISO datatype class in the UML Model * * @param isoType * @return * @throws GenerationException */ private UMLClass findISOClass(String isoType) throws GenerationException { UMLPackage pkg = ModelUtil.findPackage(model, utils.getBasePkgLogicalModel()+"."+utils.ISO_ROOT_PACKAGE_NAME); UMLClass klass = pkg.getClass(isoType.substring((utils.ISO_ROOT_PACKAGE_NAME+".").length())); return klass; } /** * Prints the Node on the console * * @param node * @param prefix */ private void printNode(Node node, String prefix) { Node currentNode = node; if (currentNode != null) { System.out.println(prefix+node.getName()+":"+node.getIsoClassName()); if(node instanceof RootNode) { RootNode rootNode = (RootNode)node; System.out.println(prefix+"{"); System.out.println(prefix+"\tSelf Table:"+rootNode.getSelfTableName()); System.out.println(prefix+"\tSelf Table FK:"+rootNode.getSelfTableForeignKey()); if(((RootNode)node).getTargetTableName()!=null) { System.out.println(prefix+"\tTarget Table:"+rootNode.getTargetTableName()); System.out.println(prefix+"\tTarget Table PK:"+rootNode.getTargetTablePrimaryKey()); System.out.println(prefix+"\tTarget Table FK:"+rootNode.getTargetTableForeignKey()); } if(((RootNode)node).getJoinTableName()!=null) { System.out.println(prefix+"\tJoin Table Name:"+rootNode.getJoinTableName()); System.out.println(prefix+"\tJoin Table FK:"+rootNode.getJoinTableForeignKey()); System.out.println(prefix+"\tJoin Table IK:"+rootNode.getJoinTableInverseKey()); } String mappingStyle; if(rootNode.getTargetTableName() == null || "".equals(rootNode.getTargetTableName())) { mappingStyle = "component"; } else { if(rootNode.getTargetTablePrimaryKey() == null || "".equals(rootNode.getTargetTablePrimaryKey())) { if(!rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET")) mappingStyle = "joined component"; else mappingStyle = "component with set of composite-elements"; } else { 
if(rootNode.getJoinTableName() == null || "".equals(rootNode.getJoinTableName())) { if(!rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET")) mappingStyle = "one-to-one"; else mappingStyle = "component with set of one-to-many"; } else { if(!rootNode.getIsoClassName().startsWith(utils.ISO_ROOT_PACKAGE_NAME+".DSET")) mappingStyle = "joined many-to-one"; else mappingStyle = "component with set of many-to-many"; } } } System.out.println(prefix+"\tMapping Style:"+mappingStyle); System.out.println(prefix+"}"); } if(node instanceof ComplexNode) { System.out.println(prefix+"["); for(Node innerNode: ((ComplexNode)currentNode).getInnerNodes()) printNode(innerNode, prefix+"\t"); System.out.println(prefix+"]"); } } } /** * Temporary method for testing purpose * * @throws GenerationException */ private void executeTest() throws GenerationException { String fileName = "CodegenConfig.xml"; ObjectFactory.initialize(fileName); utils = (TransformerUtils)ObjectFactory.getObject("TransformerUtils"); model = (UMLModel)ObjectFactory.getObject("UMLModel"); UMLPackage pkg = ModelUtil.findPackage(model, utils.getBasePkgLogicalModel()+".gov.nih.nci.cacoresdk.iso21090.datatype"); for(UMLClass klass:pkg.getClasses()) { //UMLClass klass = ModelUtil.findClass(model, utils.getBasePkgLogicalModel()+".gov.nih.nci.cacoresdk.iso21090.datatype.EnDataType"); try { UMLClass table = utils.getTable(klass); List<UMLAttribute> attributes = sortAttributesByJoin(klass, table); for(UMLAttribute attr: attributes) { RootNode rootNode = getDatatypeNode(klass, attr, table); printNode(rootNode, ""); StringBuffer buffer = convertToHibernateComponent(rootNode,"\n"); System.out.print(buffer); } for(UMLAttribute attr: attributes) { RootNode rootNode = getDatatypeNode(klass, attr, table); if(requiresSeperateClassMapping(rootNode)) { StringBuffer buffer = convertToHibernateClass(rootNode,"\n"); System.out.print(buffer); } } } catch (Exception e) { System.out.println("Error causing 
class:"+utils.getFQCN(klass)); e.printStackTrace(); } } } /** * Temporary method used for testing purpose only * @param klass * @param table * @return * @throws GenerationException */ private List<UMLAttribute> sortAttributesByJoin(UMLClass klass, UMLClass table) throws GenerationException { List<UMLAttribute> noJoinCollection = new ArrayList<UMLAttribute>(); List<UMLAttribute> joinCollection = new ArrayList<UMLAttribute>(); for(UMLAttribute attr: klass.getAttributes()) { if(!attr.getName().equals("id")) { RootNode rootNode = getDatatypeNode(klass, attr, table); if(requiresJoin(rootNode)) joinCollection.add(attr); else noJoinCollection.add(attr); } } noJoinCollection.addAll(joinCollection); return noJoinCollection; } public static void main(String args[]) throws GenerationException { IsoDatatypeTransformationHelper t = new IsoDatatypeTransformationHelper(); t.executeTest(); } }
package com.java110.api.listener.owner; import com.alibaba.fastjson.JSONObject; import com.java110.api.bmo.owner.IOwnerBMO; import com.java110.api.listener.AbstractServiceApiPlusListener; import com.java110.core.annotation.Java110Listener; import com.java110.core.context.DataFlowContext; import com.java110.core.event.service.api.ServiceDataFlowEvent; import com.java110.core.smo.user.IOwnerAppUserInnerServiceSMO; import com.java110.dto.owner.OwnerAppUserDto; import com.java110.po.owner.OwnerAppUserPo; import com.java110.utils.constant.BusinessTypeConstant; import com.java110.utils.constant.ServiceCodeConstant; import com.java110.utils.util.Assert; import com.java110.utils.util.BeanConvertUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpMethod; import java.util.List; /** * token * add by wuxw 2019-06-30 */ @Java110Listener("refreshAppUserBindingOwnerTokenListener") public class RefreshAppUserBindingOwnerOpenIdListener extends AbstractServiceApiPlusListener { @Autowired private IOwnerAppUserInnerServiceSMO ownerAppUserInnerServiceSMOImpl; @Autowired private IOwnerBMO ownerBMOImpl; @Override protected void validate(ServiceDataFlowEvent event, JSONObject reqJson) { Assert.hasKeyAndValue(reqJson, "appUserId", "ID"); Assert.hasKeyAndValue(reqJson, "openId", ""); Assert.hasKeyAndValue(reqJson, "communityId", "ID"); if (reqJson.getString("appUserId").startsWith("-")) { Assert.hasKeyAndValue(reqJson, "oldAppUserId", "ID"); Assert.hasKeyAndValue(reqJson, "appType", "appType"); } } @Override protected void doSoService(ServiceDataFlowEvent event, DataFlowContext context, JSONObject reqJson) { //ownerBMOImpl.updateAuditAppUserBindingOwner(reqJson, context); String appUserId = reqJson.getString("appUserId"); if (appUserId.startsWith("-")) { OwnerAppUserDto ownerAppUserDto = new OwnerAppUserDto(); ownerAppUserDto.setAppUserId(reqJson.getString("oldAppUserId")); ownerAppUserDto.setCommunityId(reqJson.getString("communityId")); 
List<OwnerAppUserDto> ownerAppUserDtos = ownerAppUserInnerServiceSMOImpl.queryOwnerAppUsers(ownerAppUserDto); Assert.listOnlyOne(ownerAppUserDtos, "oldAppUserId"); OwnerAppUserPo ownerAppUserPo = BeanConvertUtil.covertBean(ownerAppUserDtos.get(0), OwnerAppUserPo.class); ownerAppUserPo.setAppUserId("-1"); ownerAppUserPo.setAppType(reqJson.getString("appType")); ownerAppUserPo.setOpenId(reqJson.getString("openId")); super.insert(context, ownerAppUserPo, BusinessTypeConstant.BUSINESS_TYPE_SAVE_OWNER_APP_USER); return; } OwnerAppUserPo ownerAppUserPo = new OwnerAppUserPo(); ownerAppUserPo.setAppUserId(appUserId); ownerAppUserPo.setCommunityId(reqJson.getString("communityId")); ownerAppUserPo.setOpenId(reqJson.getString("openId")); super.update(context, ownerAppUserPo, BusinessTypeConstant.BUSINESS_TYPE_UPDATE_OWNER_APP_USER); } @Override public String getServiceCode() { return ServiceCodeConstant.REFRESH_APP_USER_BINDING_OWNER_OPEN_ID; } @Override public HttpMethod getHttpMethod() { return HttpMethod.POST; } @Override public int getOrder() { return DEFAULT_ORDER; } public IOwnerAppUserInnerServiceSMO getOwnerAppUserInnerServiceSMOImpl() { return ownerAppUserInnerServiceSMOImpl; } public void setOwnerAppUserInnerServiceSMOImpl(IOwnerAppUserInnerServiceSMO ownerAppUserInnerServiceSMOImpl) { this.ownerAppUserInnerServiceSMOImpl = ownerAppUserInnerServiceSMOImpl; } }
package com.salesmanager.shop.store.controller.order; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TreeMap; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.validation.Valid; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Controller; import org.springframework.validation.BeanPropertyBindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import com.salesmanager.core.business.modules.integration.payment.impl.PayPalExpressCheckoutPayment; import com.salesmanager.core.business.modules.integration.payment.impl.Stripe3Payment; import com.salesmanager.core.business.services.payments.PaymentService; import com.salesmanager.core.business.services.payments.TransactionService; import com.salesmanager.core.business.utils.CoreConfiguration; import com.salesmanager.core.business.utils.ajax.AjaxResponse; import com.salesmanager.core.model.merchant.MerchantStore; import com.salesmanager.core.model.order.OrderTotalSummary; import com.salesmanager.core.model.payments.PaypalPayment; import com.salesmanager.core.model.payments.Transaction; import com.salesmanager.core.model.reference.language.Language; import com.salesmanager.core.model.shipping.ShippingSummary; import com.salesmanager.core.model.shoppingcart.ShoppingCartItem; import com.salesmanager.core.model.system.IntegrationConfiguration; import com.salesmanager.core.model.system.IntegrationModule; import 
com.salesmanager.core.modules.integration.payment.model.PaymentModule; import com.salesmanager.shop.constants.Constants; import com.salesmanager.shop.model.order.ShopOrder; import com.salesmanager.shop.store.controller.AbstractController; import com.salesmanager.shop.store.controller.order.facade.OrderFacade; import com.salesmanager.shop.store.controller.shoppingCart.facade.ShoppingCartFacade; /** * Initialization of different payment services * @author carlsamson * */ @Controller @RequestMapping(Constants.SHOP_URI) public class ShoppingOrderPaymentController extends AbstractController { private static final Logger LOGGER = LoggerFactory.getLogger(ShoppingOrderPaymentController.class); private final static String INIT_ACTION = "init"; @Inject private ShoppingCartFacade shoppingCartFacade; @Inject private PaymentService paymentService; @Inject private OrderFacade orderFacade; @Inject private TransactionService transactionService; @Inject private CoreConfiguration coreConfiguration; /** * Recalculates shipping and tax following a change in country or province * * @param order * @param request * @param response * @param locale * @return * @throws Exception */ @RequestMapping(value = { "/order/payment/{action}/{paymentmethod}.html" }, method = RequestMethod.POST) public @ResponseBody String paymentAction(@Valid @ModelAttribute(value = "order") ShopOrder order, @PathVariable String action, @PathVariable String paymentmethod, HttpServletRequest request, HttpServletResponse response, Locale locale) throws Exception { Language language = (Language) request.getAttribute("LANGUAGE"); MerchantStore store = (MerchantStore) request.getAttribute(Constants.MERCHANT_STORE); String shoppingCartCode = getSessionAttribute(Constants.SHOPPING_CART, request); Validate.notNull(shoppingCartCode, "shoppingCartCode does not exist in the session"); AjaxResponse ajaxResponse = new AjaxResponse(); try { com.salesmanager.core.model.shoppingcart.ShoppingCart cart = shoppingCartFacade 
.getShoppingCartModel(shoppingCartCode, store); Set<ShoppingCartItem> items = cart.getLineItems(); List<ShoppingCartItem> cartItems = new ArrayList<ShoppingCartItem>(items); order.setShoppingCartItems(cartItems); // validate order first Map<String, String> messages = new TreeMap<String, String>(); orderFacade.validateOrder(order, new BeanPropertyBindingResult(order, "order"), messages, store, locale); if (CollectionUtils.isNotEmpty(messages.values())) { for (String key : messages.keySet()) { String value = messages.get(key); ajaxResponse.addValidationMessage(key, value); } ajaxResponse.setStatus(AjaxResponse.RESPONSE_STATUS_VALIDATION_FAILED); return ajaxResponse.toJSONString(); } IntegrationConfiguration config = paymentService.getPaymentConfiguration(order.getPaymentModule(), store); IntegrationModule integrationModule = paymentService.getPaymentMethodByCode(store, order.getPaymentModule()); // OrderTotalSummary orderTotalSummary = // orderFacade.calculateOrderTotal(store, order, language); OrderTotalSummary orderTotalSummary = super.getSessionAttribute(Constants.ORDER_SUMMARY, request); if (orderTotalSummary == null) { orderTotalSummary = orderFacade.calculateOrderTotal(store, order, language); super.setSessionAttribute(Constants.ORDER_SUMMARY, orderTotalSummary, request); } ShippingSummary summary = (ShippingSummary) request.getSession().getAttribute("SHIPPING_SUMMARY"); if (summary != null) { order.setShippingSummary(summary); } if (action.equals(INIT_ACTION)) { if (paymentmethod.equals("PAYPAL")) { try { PaymentModule module = paymentService.getPaymentModule("paypal-express-checkout"); PayPalExpressCheckoutPayment p = (PayPalExpressCheckoutPayment) module; PaypalPayment payment = new PaypalPayment(); payment.setCurrency(store.getCurrency()); Transaction transaction = p.initPaypalTransaction(store, cartItems, orderTotalSummary, payment, config, integrationModule); transactionService.create(transaction); super.setSessionAttribute(Constants.INIT_TRANSACTION_KEY, 
transaction, request); StringBuilder urlAppender = new StringBuilder(); urlAppender.append(coreConfiguration.getProperty("PAYPAL_EXPRESSCHECKOUT_REGULAR")); urlAppender.append(transaction.getTransactionDetails().get("TOKEN")); if (config.getEnvironment() .equals(com.salesmanager.core.business.constants.Constants.PRODUCTION_ENVIRONMENT)) { StringBuilder url = new StringBuilder() .append(coreConfiguration.getProperty("PAYPAL_EXPRESSCHECKOUT_PRODUCTION")) .append(urlAppender.toString()); ajaxResponse.addEntry("url", url.toString()); } else { StringBuilder url = new StringBuilder() .append(coreConfiguration.getProperty("PAYPAL_EXPRESSCHECKOUT_SANDBOX")) .append(urlAppender.toString()); ajaxResponse.addEntry("url", url.toString()); } // keep order in session when user comes back from pp super.setSessionAttribute(Constants.ORDER, order, request); ajaxResponse.setStatus(AjaxResponse.RESPONSE_OPERATION_COMPLETED); } catch (Exception e) { ajaxResponse.setStatus(AjaxResponse.RESPONSE_STATUS_FAIURE); } } else if (paymentmethod.equals("stripe3")) { try { PaymentModule module = paymentService.getPaymentModule(paymentmethod); Stripe3Payment p = (Stripe3Payment) module; PaypalPayment payment = new PaypalPayment(); payment.setCurrency(store.getCurrency()); Transaction transaction = p.initTransaction(store, null, orderTotalSummary.getTotal(), null, config, integrationModule); transactionService.create(transaction); super.setSessionAttribute(Constants.INIT_TRANSACTION_KEY, transaction, request); // keep order in session when user comes back from pp super.setSessionAttribute(Constants.ORDER, order, request); ajaxResponse.setStatus(AjaxResponse.RESPONSE_OPERATION_COMPLETED); ajaxResponse.setDataMap(transaction.getTransactionDetails()); } catch (Exception e) { ajaxResponse.setStatus(AjaxResponse.RESPONSE_STATUS_FAIURE); } } } } catch (Exception e) { LOGGER.error("Error while performing payment action " + action + " for payment method " + paymentmethod, e); 
ajaxResponse.setErrorMessage(e); ajaxResponse.setStatus(AjaxResponse.RESPONSE_STATUS_FAIURE); } return ajaxResponse.toJSONString(); } // cancel - success paypal order @RequestMapping(value = { "/paypal/checkout.html/{code}" }, method = RequestMethod.GET) public String returnPayPalPayment(@PathVariable String code, HttpServletRequest request, HttpServletResponse response, Locale locale) throws Exception { if (Constants.SUCCESS.equals(code)) { return "redirect:" + Constants.SHOP_URI + "/order/commitPreAuthorized.html"; } else {// process as cancel return "redirect:" + Constants.SHOP_URI + "/order/checkout.html"; } } }
package net.ellitron.ldbcsnbimpls.interactive.torcdb2; import net.ellitron.ldbcsnbimpls.interactive.torcdb2.TorcDb2.*; import net.ellitron.ldbcsnbimpls.interactive.torcdb2.TorcDb2Client.*; import net.ellitron.ldbcsnbimpls.interactive.torcdb2.LdbcSerializableQueriesAndResults.*; import com.ldbc.driver.control.LoggingService; import com.ldbc.driver.Db; import com.ldbc.driver.DbConnectionState; import com.ldbc.driver.DbException; import com.ldbc.driver.Operation; import com.ldbc.driver.OperationHandler; import com.ldbc.driver.ResultReporter; import com.ldbc.driver.runtime.ConcurrentErrorReporter; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcNoResult; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery1; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery1Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery2; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery2Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery3; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery3Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery4; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery4Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery5; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery5Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery6; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery6Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery7; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery7Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery8; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery8Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery9; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery9Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery10; import 
com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery10Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery11; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery11Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery12; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery12Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery13; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery13Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery14; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcQuery14Result; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery1PersonProfile; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery1PersonProfileResult; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery2PersonPosts; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery2PersonPostsResult; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery3PersonFriends; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery3PersonFriendsResult; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery4MessageContent; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery4MessageContentResult; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery5MessageCreator; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery5MessageCreatorResult; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery6MessageForum; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery6MessageForumResult; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery7MessageReplies; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcShortQuery7MessageRepliesResult; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate1AddPerson; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate2AddPostLike; 
import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate3AddCommentLike; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate4AddForum; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate5AddForumMembership; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate6AddPost; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate7AddComment; import com.ldbc.driver.workloads.ldbc.snb.interactive.LdbcUpdate8AddFriendship; import org.docopt.Docopt; import java.io.*; import java.net.*; import java.util.*; /** * A multithreaded server that executes LDBC SNB Interactive Workload queries * against TorcDB2 on behalf of remote clients. * * @author Jonathan Ellithorpe ([email protected]) */ public class TorcDb2Server { private static final String doc = "TorcDb2Server: A multithreaded server that executes LDBC SNB\n" + "Interactive Workload queries against TorcDB2 on behalf of remote\n" + "clients.\n" + "\n" + "Usage:\n" + " TorcDb2Server [options] COORDLOC GRAPHNAME\n" + " TorcDb2Server (-h | --help)\n" + " TorcDb2Server --version\n" + "\n" + "Arguments:\n" + " COORDLOC RAMCloud coordinator locator string.\n" + " GRAPHNAME Name of TorcDB2 graph to execute queries against.\n" + "\n" + "Options:\n" + " --port=<n> Port on which to listen for new connections.\n" + " [default: 5577].\n" + " --verbose Print verbose output to stdout.\n" + " -h --help Show this screen.\n" + " --version Show version.\n" + "\n"; /** * Thread that listens for connections and spins off new threads to serve * client connections. */ private static class ListenerThread implements Runnable { // Port on which we listen for incoming connections. private final int port; // Passed off to each client thread for executing queries. private final TorcDb2ConnectionState connectionState; private final Map<Class<? 
extends Operation>, OperationHandler> queryHandlerMap; private final ConcurrentErrorReporter concurrentErrorReporter; private int clientID = 1; public ListenerThread(int port, TorcDb2ConnectionState connectionState, Map<Class<? extends Operation>, OperationHandler> queryHandlerMap, ConcurrentErrorReporter concurrentErrorReporter) { this.port = port; this.connectionState = connectionState; this.queryHandlerMap = queryHandlerMap; this.concurrentErrorReporter = concurrentErrorReporter; } @Override public void run() { try { ServerSocket server = new ServerSocket(port); System.out.println("Listening on: " + server.toString()); while (true) { Socket client = server.accept(); System.out.println("Client connected: " + client.toString()); Thread clientThread = new Thread(new ClientThread(client, concurrentErrorReporter, connectionState, queryHandlerMap, clientID)); clientThread.start(); clientID++; } // server.close(); } catch (Exception e) { } } } /** * Thread that receives requests from clients, executes them, and returns a * response. Handles requests for the lifetime of the connection to the * client. */ private static class ClientThread implements Runnable { private final Socket client; private final ConcurrentErrorReporter concurrentErrorReporter; private final ResultReporter resultReporter; private final TorcDb2ConnectionState connectionState; private final Map<Class<? extends Operation>, OperationHandler> queryHandlerMap; private final int clientID; public ClientThread(Socket client, ConcurrentErrorReporter concurrentErrorReporter, TorcDb2ConnectionState connectionState, Map<Class<? 
extends Operation>, OperationHandler> queryHandlerMap, int clientID) { this.client = client; this.concurrentErrorReporter = concurrentErrorReporter; this.resultReporter = new ResultReporter.SimpleResultReporter(concurrentErrorReporter); this.connectionState = connectionState; this.queryHandlerMap = queryHandlerMap; this.clientID = clientID; } public void run() { try { ObjectInputStream in = new ObjectInputStream(client.getInputStream()); ObjectOutputStream out = new ObjectOutputStream(client.getOutputStream()); while (true) { Object query = in.readObject(); System.out.println("Client " + clientID + " Received Query: " + query.toString()); if (query instanceof LdbcQuery1Serializable) { LdbcQuery1 op = ((LdbcQuery1Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery1Result> result = (List<LdbcQuery1Result>) resultReporter.result(); List<LdbcQuery1ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery1ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery2Serializable) { LdbcQuery2 op = ((LdbcQuery2Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery2Result> result = (List<LdbcQuery2Result>) resultReporter.result(); List<LdbcQuery2ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery2ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery3Serializable) { LdbcQuery3 op = ((LdbcQuery3Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery3Result> result = (List<LdbcQuery3Result>) resultReporter.result(); List<LdbcQuery3ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery3ResultSerializable(v)); }); out.writeObject(resp); 
out.flush(); } else if (query instanceof LdbcQuery4Serializable) { LdbcQuery4 op = ((LdbcQuery4Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery4Result> result = (List<LdbcQuery4Result>) resultReporter.result(); List<LdbcQuery4ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery4ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery5Serializable) { LdbcQuery5 op = ((LdbcQuery5Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery5Result> result = (List<LdbcQuery5Result>) resultReporter.result(); List<LdbcQuery5ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery5ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery6Serializable) { LdbcQuery6 op = ((LdbcQuery6Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery6Result> result = (List<LdbcQuery6Result>) resultReporter.result(); List<LdbcQuery6ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery6ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery7Serializable) { LdbcQuery7 op = ((LdbcQuery7Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery7Result> result = (List<LdbcQuery7Result>) resultReporter.result(); List<LdbcQuery7ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery7ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery8Serializable) { LdbcQuery8 op = ((LdbcQuery8Serializable) query).unpack(); 
queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery8Result> result = (List<LdbcQuery8Result>) resultReporter.result(); List<LdbcQuery8ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery8ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery9Serializable) { LdbcQuery9 op = ((LdbcQuery9Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery9Result> result = (List<LdbcQuery9Result>) resultReporter.result(); List<LdbcQuery9ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery9ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery10Serializable) { LdbcQuery10 op = ((LdbcQuery10Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery10Result> result = (List<LdbcQuery10Result>) resultReporter.result(); List<LdbcQuery10ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery10ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery11Serializable) { LdbcQuery11 op = ((LdbcQuery11Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery11Result> result = (List<LdbcQuery11Result>) resultReporter.result(); List<LdbcQuery11ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery11ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery12Serializable) { LdbcQuery12 op = ((LdbcQuery12Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery12Result> result = (List<LdbcQuery12Result>) 
resultReporter.result(); List<LdbcQuery12ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery12ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery13Serializable) { LdbcQuery13 op = ((LdbcQuery13Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); LdbcQuery13Result result = (LdbcQuery13Result) resultReporter.result(); LdbcQuery13ResultSerializable resp = new LdbcQuery13ResultSerializable(result); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcQuery14Serializable) { LdbcQuery14 op = ((LdbcQuery14Serializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcQuery14Result> result = (List<LdbcQuery14Result>) resultReporter.result(); List<LdbcQuery14ResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcQuery14ResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcShortQuery1PersonProfileSerializable) { LdbcShortQuery1PersonProfile op = ((LdbcShortQuery1PersonProfileSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); LdbcShortQuery1PersonProfileResult result = (LdbcShortQuery1PersonProfileResult) resultReporter.result(); LdbcShortQuery1PersonProfileResultSerializable resp = new LdbcShortQuery1PersonProfileResultSerializable(result); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcShortQuery2PersonPostsSerializable) { LdbcShortQuery2PersonPosts op = ((LdbcShortQuery2PersonPostsSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcShortQuery2PersonPostsResult> result = (List<LdbcShortQuery2PersonPostsResult>) resultReporter.result(); List<LdbcShortQuery2PersonPostsResultSerializable> resp = new 
ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcShortQuery2PersonPostsResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcShortQuery3PersonFriendsSerializable) { LdbcShortQuery3PersonFriends op = ((LdbcShortQuery3PersonFriendsSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcShortQuery3PersonFriendsResult> result = (List<LdbcShortQuery3PersonFriendsResult>) resultReporter.result(); List<LdbcShortQuery3PersonFriendsResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcShortQuery3PersonFriendsResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcShortQuery4MessageContentSerializable) { LdbcShortQuery4MessageContent op = ((LdbcShortQuery4MessageContentSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); LdbcShortQuery4MessageContentResult result = (LdbcShortQuery4MessageContentResult) resultReporter.result(); LdbcShortQuery4MessageContentResultSerializable resp = new LdbcShortQuery4MessageContentResultSerializable(result); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcShortQuery5MessageCreatorSerializable) { LdbcShortQuery5MessageCreator op = ((LdbcShortQuery5MessageCreatorSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); LdbcShortQuery5MessageCreatorResult result = (LdbcShortQuery5MessageCreatorResult) resultReporter.result(); LdbcShortQuery5MessageCreatorResultSerializable resp = new LdbcShortQuery5MessageCreatorResultSerializable(result); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcShortQuery6MessageForumSerializable) { LdbcShortQuery6MessageForum op = ((LdbcShortQuery6MessageForumSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, 
connectionState, resultReporter); LdbcShortQuery6MessageForumResult result = (LdbcShortQuery6MessageForumResult) resultReporter.result(); LdbcShortQuery6MessageForumResultSerializable resp = new LdbcShortQuery6MessageForumResultSerializable(result); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcShortQuery7MessageRepliesSerializable) { LdbcShortQuery7MessageReplies op = ((LdbcShortQuery7MessageRepliesSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); List<LdbcShortQuery7MessageRepliesResult> result = (List<LdbcShortQuery7MessageRepliesResult>) resultReporter.result(); List<LdbcShortQuery7MessageRepliesResultSerializable> resp = new ArrayList<>(); result.forEach((v) -> { resp.add(new LdbcShortQuery7MessageRepliesResultSerializable(v)); }); out.writeObject(resp); out.flush(); } else if (query instanceof LdbcUpdate1AddPersonSerializable) { LdbcUpdate1AddPerson op = ((LdbcUpdate1AddPersonSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); out.writeObject(LdbcNoResultSerializable.INSTANCE); out.flush(); } else if (query instanceof LdbcUpdate2AddPostLikeSerializable) { LdbcUpdate2AddPostLike op = ((LdbcUpdate2AddPostLikeSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); out.writeObject(LdbcNoResultSerializable.INSTANCE); out.flush(); } else if (query instanceof LdbcUpdate3AddCommentLikeSerializable) { LdbcUpdate3AddCommentLike op = ((LdbcUpdate3AddCommentLikeSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); out.writeObject(LdbcNoResultSerializable.INSTANCE); out.flush(); } else if (query instanceof LdbcUpdate4AddForumSerializable) { LdbcUpdate4AddForum op = ((LdbcUpdate4AddForumSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, 
connectionState, resultReporter); out.writeObject(LdbcNoResultSerializable.INSTANCE); out.flush(); } else if (query instanceof LdbcUpdate5AddForumMembershipSerializable) { LdbcUpdate5AddForumMembership op = ((LdbcUpdate5AddForumMembershipSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); out.writeObject(LdbcNoResultSerializable.INSTANCE); out.flush(); } else if (query instanceof LdbcUpdate6AddPostSerializable) { LdbcUpdate6AddPost op = ((LdbcUpdate6AddPostSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); out.writeObject(LdbcNoResultSerializable.INSTANCE); out.flush(); } else if (query instanceof LdbcUpdate7AddCommentSerializable) { LdbcUpdate7AddComment op = ((LdbcUpdate7AddCommentSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); out.writeObject(LdbcNoResultSerializable.INSTANCE); out.flush(); } else if (query instanceof LdbcUpdate8AddFriendshipSerializable) { LdbcUpdate8AddFriendship op = ((LdbcUpdate8AddFriendshipSerializable) query).unpack(); queryHandlerMap.get(op.getClass()).executeOperation(op, connectionState, resultReporter); out.writeObject(LdbcNoResultSerializable.INSTANCE); out.flush(); } else { throw new RuntimeException("Unrecognized query type."); } } } catch (Exception e) { } } } public static void main(String[] args) throws Exception { Map<String, Object> opts = new Docopt(doc).withVersion("TorcDb2Server 1.0").parse(args); // Arguments. final String coordinatorLocator = (String) opts.get("COORDLOC"); final String graphName = (String) opts.get("GRAPHNAME"); final int port = Integer.decode((String) opts.get("--port")); System.out.println(String.format("TorcDb2Server: {coordinatorLocator: %s, " + "graphName: %s, port: %d}", coordinatorLocator, graphName, port)); // Connect to database. 
Map<String, String> props = new HashMap<>(); props.put("coordinatorLocator", coordinatorLocator); props.put("graphName", graphName); System.out.println("Connecting to TorcDB2..."); TorcDb2ConnectionState connectionState = new TorcDb2ConnectionState(props); // Create mapping from op type to op handler for processing requests. Map<Class<? extends Operation>, OperationHandler> queryHandlerMap = new HashMap<>(); queryHandlerMap.put(LdbcQuery1.class, new TorcDb2.LdbcQuery1Handler()); queryHandlerMap.put(LdbcQuery2.class, new TorcDb2.LdbcQuery2Handler()); queryHandlerMap.put(LdbcQuery3.class, new TorcDb2.LdbcQuery3Handler()); queryHandlerMap.put(LdbcQuery4.class, new TorcDb2.LdbcQuery4Handler()); queryHandlerMap.put(LdbcQuery5.class, new TorcDb2.LdbcQuery5Handler()); queryHandlerMap.put(LdbcQuery6.class, new TorcDb2.LdbcQuery6Handler()); queryHandlerMap.put(LdbcQuery7.class, new TorcDb2.LdbcQuery7Handler()); queryHandlerMap.put(LdbcQuery8.class, new TorcDb2.LdbcQuery8Handler()); queryHandlerMap.put(LdbcQuery9.class, new TorcDb2.LdbcQuery9Handler()); queryHandlerMap.put(LdbcQuery10.class, new TorcDb2.LdbcQuery10Handler()); queryHandlerMap.put(LdbcQuery11.class, new TorcDb2.LdbcQuery11Handler()); queryHandlerMap.put(LdbcQuery12.class, new TorcDb2.LdbcQuery12Handler()); queryHandlerMap.put(LdbcQuery13.class, new TorcDb2.LdbcQuery13Handler()); queryHandlerMap.put(LdbcQuery14.class, new TorcDb2.LdbcQuery14Handler()); queryHandlerMap.put(LdbcShortQuery1PersonProfile.class, new TorcDb2.LdbcShortQuery1PersonProfileHandler()); queryHandlerMap.put(LdbcShortQuery2PersonPosts.class, new TorcDb2.LdbcShortQuery2PersonPostsHandler()); queryHandlerMap.put(LdbcShortQuery3PersonFriends.class, new TorcDb2.LdbcShortQuery3PersonFriendsHandler()); queryHandlerMap.put(LdbcShortQuery4MessageContent.class, new TorcDb2.LdbcShortQuery4MessageContentHandler()); queryHandlerMap.put(LdbcShortQuery5MessageCreator.class, new TorcDb2.LdbcShortQuery5MessageCreatorHandler()); 
queryHandlerMap.put(LdbcShortQuery6MessageForum.class, new TorcDb2.LdbcShortQuery6MessageForumHandler()); queryHandlerMap.put(LdbcShortQuery7MessageReplies.class, new TorcDb2.LdbcShortQuery7MessageRepliesHandler()); queryHandlerMap.put(LdbcUpdate1AddPerson.class, new TorcDb2.LdbcUpdate1AddPersonHandler()); queryHandlerMap.put(LdbcUpdate2AddPostLike.class, new TorcDb2.LdbcUpdate2AddPostLikeHandler()); queryHandlerMap.put(LdbcUpdate3AddCommentLike.class, new TorcDb2.LdbcUpdate3AddCommentLikeHandler()); queryHandlerMap.put(LdbcUpdate4AddForum.class, new TorcDb2.LdbcUpdate4AddForumHandler()); queryHandlerMap.put(LdbcUpdate5AddForumMembership.class, new TorcDb2.LdbcUpdate5AddForumMembershipHandler()); queryHandlerMap.put(LdbcUpdate6AddPost.class, new TorcDb2.LdbcUpdate6AddPostHandler()); queryHandlerMap.put(LdbcUpdate7AddComment.class, new TorcDb2.LdbcUpdate7AddCommentHandler()); queryHandlerMap.put(LdbcUpdate8AddFriendship.class, new TorcDb2.LdbcUpdate8AddFriendshipHandler()); // Presumably for reporting LDBC driver errors. ConcurrentErrorReporter concurrentErrorReporter = new ConcurrentErrorReporter(); // Listener thread accepts connections and spawns client threads. Thread listener = new Thread(new ListenerThread(port, connectionState, queryHandlerMap, concurrentErrorReporter)); listener.start(); listener.join(); } }
package edu.wustl.catissuecore.actionForm; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import org.apache.struts.action.ActionError; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionMapping; import edu.wustl.catissuecore.bean.DefinedArrayDetailsBean; import edu.wustl.catissuecore.bean.RequestDetailsBean; import edu.wustl.catissuecore.bizlogic.OrderBizLogic; import edu.wustl.catissuecore.domain.ConsentTierStatus; import edu.wustl.catissuecore.domain.DerivedSpecimenOrderItem; import edu.wustl.catissuecore.domain.ExistingSpecimenArrayOrderItem; import edu.wustl.catissuecore.domain.ExistingSpecimenOrderItem; import edu.wustl.catissuecore.domain.NewSpecimenArrayOrderItem; import edu.wustl.catissuecore.domain.OrderDetails; import edu.wustl.catissuecore.domain.OrderItem; import edu.wustl.catissuecore.domain.PathologicalCaseOrderItem; import edu.wustl.catissuecore.domain.Specimen; import edu.wustl.catissuecore.domain.SpecimenArray; import edu.wustl.catissuecore.domain.SpecimenOrderItem; import edu.wustl.catissuecore.util.IdComparator; import edu.wustl.catissuecore.util.OrderingSystemUtil; import edu.wustl.catissuecore.util.SpecimenComparator; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.common.actionForm.AbstractActionForm; import edu.wustl.common.domain.AbstractDomainObject; import edu.wustl.common.exception.BizLogicException; import edu.wustl.common.factory.AbstractFactoryConfig; import edu.wustl.common.factory.IFactory; import edu.wustl.common.util.MapDataParser; import edu.wustl.common.util.global.ApplicationProperties; import edu.wustl.common.util.global.Validator; import edu.wustl.common.util.logger.Logger; import edu.wustl.dao.DAO; import edu.wustl.dao.util.HibernateMetaData; /** * @author 
renuka_bajpai * */ public class RequestDetailsForm extends AbstractActionForm { private static final long serialVersionUID = 1L; /** * logger Logger - Generic logger. */ private static Logger logger = Logger.getCommonLogger(RequestDetailsForm.class); // The status which the user wants to update in one go. private String status; // The Map containg submitted values for 'assigned quantity', 'assigned status' and 'request for'. protected Map values = new LinkedHashMap(); // The administrator comments. private String administratorComments; // The Order Id required to retrieve the corresponding order items from the database. private long id; //The Site associated with the distribution. private String site; /** * The distribution protocol associated with that order. */ private String distributionProtocolId; /** * Name of the order */ private String orderName; /** * Requested Date. */ private String requestedDate; /** * * @return */ public String getOrderName() { return this.orderName; } /** * * @param orderName */ public void setOrderName(String orderName) { this.orderName = orderName; } private Boolean isDirectDistribution = Boolean.FALSE; public Boolean getIsDirectDistribution() { return this.isDirectDistribution; } /** * * @param isDirectDistribution */ public void setIsDirectDistribution(Boolean isDirectDistribution) { this.isDirectDistribution = isDirectDistribution; } /** * The tab page which should be visible to the user. */ private int tabIndex; /** * The map to display the list of specimens in request For drop down. */ private Map requestForDropDownMap = new LinkedHashMap(); private String specimenId; private Boolean mailNotification = Boolean.FALSE; /** Associates the specified object with the specified key in the map. * @param key the key to which the object is mapped. * @param value the object which is mapped. 
*/ public void setRequestFor(String key, List value) { if (this.isMutable()) { this.requestForDropDownMap.put(key, value); } } /** * Returns the object to which this map maps the specified key. * @param key the required key. * @return the object to which this map maps the specified key. */ public List getRequestFor(String key) { return ((List) (this.requestForDropDownMap.get(key))); } /** * @return Returns the mailNotification. */ public Boolean getMailNotification() { return this.mailNotification; } /** * @param mailNotification The mailNotification to set. */ public void setMailNotification(Boolean mailNotification) { this.mailNotification = mailNotification; } /** * @return the site */ public String getSite() { return this.site; } /** * @param site the site to set */ public void setSite(String site) { this.site = site; } //For 'EDIT' operation in CommonAddEditAction. /** * @return boolean. 'true' if operation is add. */ public boolean isAddOperation() { return false; } /** * @return the id */ public long getId() { return this.id; } /** * @param id the id to set */ public void setId(long id) { this.id = id; } /** * @return the administratorComments */ public String getAdministratorComments() { return this.administratorComments; } /** * @param administratorComments the administratorComments to set */ public void setAdministratorComments(String administratorComments) { this.administratorComments = administratorComments; } /** * @return Returns the values. */ public Collection getAllValues() { return this.values.values(); } /** Associates the specified object with the specified key in the map. * @param key the key to which the object is mapped. * @param value the object which is mapped. */ public void setValue(String key, Object value) { if (this.isMutable()) { this.values.put(key, value); } } /** * Returns the object to which this map maps the specified key. * @param key the required key. * @return the object to which this map maps the specified key. 
*/ public Object getValue(String key) { return this.values.get(key); } /** * @return values */ public Map getValues() { return this.values; } /** * @param values Map */ public void setValues(Map values) { this.values = values; } /** * @return int formId. */ public int getFormId() { return Constants.REQUEST_DETAILS_FORM_ID; } /** * reset. */ protected void reset() { } /** * @param abstractDomain object * @throws BizLogicException BizLogic Exception */ public void setAllValuesForOrder(AbstractDomainObject abstractDomain, HttpServletRequest request, DAO dao) throws BizLogicException { int requestDetailsBeanCounter = 0; int existingArrayBeanCounter = 0; final OrderDetails order = (OrderDetails) abstractDomain; final Collection<OrderItem> orderItemColl = order.getOrderItemCollection(); final List<OrderItem> orderItemList = new ArrayList<OrderItem> (orderItemColl); // Sorting by OrderItem.id Collections.sort(orderItemList, new IdComparator()); final Iterator<OrderItem> iter = orderItemList.iterator(); final List totalSpecimenListInRequestForDropDown = new ArrayList(); final Map definedArrayMap = new LinkedHashMap(); while (iter.hasNext()) { final OrderItem orderItem = (OrderItem) iter.next(); //Making keys String assignStatus = ""; String description = ""; String requestedItem = ""; String requestedQty = ""; String availableQty = ""; String specimenClass = ""; String specimenType = ""; String orderItemId = ""; String requestFor = ""; String assignQty = ""; String instanceOf = ""; String specimenId = ""; String distributedItemId = ""; String specimenList = ""; String specimenCollGrpId = ""; String consentVerificationkey = ""; String rowStatuskey = ""; String selectedSpecimenTypeKey = ""; String actualSpecimenClass = ""; String actualSpecimenType = ""; //For array String arrayId = ""; String canDistributeKey = ""; String selectedSpecimenQuantityUnit = ""; String selectedSpecimenQuantity = ""; if (((orderItem instanceof ExistingSpecimenOrderItem) || (orderItem instanceof 
DerivedSpecimenOrderItem) || (orderItem instanceof PathologicalCaseOrderItem))) { final SpecimenOrderItem specimenOrderItem = (SpecimenOrderItem) orderItem; if (specimenOrderItem.getNewSpecimenArrayOrderItem() == null) { assignStatus = "RequestDetailsBean:" + requestDetailsBeanCounter + "_assignedStatus"; description = "RequestDetailsBean:" + requestDetailsBeanCounter + "_description"; requestedQty = "RequestDetailsBean:" + requestDetailsBeanCounter + "_requestedQty"; orderItemId = "RequestDetailsBean:" + requestDetailsBeanCounter + "_orderItemId"; requestedItem = "RequestDetailsBean:" + requestDetailsBeanCounter + "_requestedItem"; availableQty = "RequestDetailsBean:" + requestDetailsBeanCounter + "_availableQty"; specimenClass = "RequestDetailsBean:" + requestDetailsBeanCounter + "_className"; specimenType = "RequestDetailsBean:" + requestDetailsBeanCounter + "_type"; requestFor = "RequestDetailsBean:" + requestDetailsBeanCounter + "_requestFor"; specimenId = "RequestDetailsBean:" + requestDetailsBeanCounter + "_specimenId"; consentVerificationkey = "RequestDetailsBean:" + requestDetailsBeanCounter + "_consentVerificationkey"; canDistributeKey = "RequestDetailsBean:" + requestDetailsBeanCounter + "_canDistribute"; rowStatuskey = "RequestDetailsBean:" + requestDetailsBeanCounter + "_rowStatuskey"; assignQty = "RequestDetailsBean:" + requestDetailsBeanCounter + "_assignedQty"; instanceOf = "RequestDetailsBean:" + requestDetailsBeanCounter + "_instanceOf"; distributedItemId = "RequestDetailsBean:" + requestDetailsBeanCounter + "_distributedItemId"; specimenCollGrpId = "RequestDetailsBean:" + requestDetailsBeanCounter + "_specimenCollGroupId"; specimenList = "RequestForDropDownList:" + requestDetailsBeanCounter; actualSpecimenClass = "RequestDetailsBean:" + requestDetailsBeanCounter + "_actualSpecimenClass"; actualSpecimenType = "RequestDetailsBean:" + requestDetailsBeanCounter + "_actualSpecimenType"; selectedSpecimenTypeKey = "RequestDetailsBean:" + 
requestDetailsBeanCounter + "_selectedSpecimenType"; selectedSpecimenQuantityUnit = "RequestDetailsBean:" + requestDetailsBeanCounter + "_specimenQuantityUnit"; selectedSpecimenQuantity = "RequestDetailsBean:" + requestDetailsBeanCounter + "_selectedSpecimenQuantity"; this.populateValuesMap(orderItem, requestedItem, availableQty, specimenClass, specimenType, requestFor, specimenId, assignQty, instanceOf, specimenList, specimenCollGrpId, totalSpecimenListInRequestForDropDown, actualSpecimenClass, actualSpecimenType, assignStatus, consentVerificationkey, canDistributeKey, rowStatuskey, selectedSpecimenTypeKey, selectedSpecimenQuantityUnit, selectedSpecimenQuantity, dao); requestDetailsBeanCounter++; } else { List defineArrayContentsList = null; if (definedArrayMap.get(specimenOrderItem.getNewSpecimenArrayOrderItem()) == null) { defineArrayContentsList = new ArrayList(); } else { defineArrayContentsList = (List) definedArrayMap.get(specimenOrderItem .getNewSpecimenArrayOrderItem()); } defineArrayContentsList.add(specimenOrderItem); definedArrayMap.put(specimenOrderItem.getNewSpecimenArrayOrderItem(), defineArrayContentsList); } } else if (orderItem instanceof ExistingSpecimenArrayOrderItem) { assignStatus = "ExistingArrayDetailsBean:" + existingArrayBeanCounter + "_assignedStatus"; description = "ExistingArrayDetailsBean:" + existingArrayBeanCounter + "_description"; requestedQty = "ExistingArrayDetailsBean:" + existingArrayBeanCounter + "_requestedQuantity"; orderItemId = "ExistingArrayDetailsBean:" + existingArrayBeanCounter + "_orderItemId"; requestedItem = "ExistingArrayDetailsBean:" + existingArrayBeanCounter + "_bioSpecimenArrayName"; arrayId = "ExistingArrayDetailsBean:" + existingArrayBeanCounter + "_arrayId"; assignQty = "ExistingArrayDetailsBean:" + existingArrayBeanCounter + "_assignedQuantity"; distributedItemId = "ExistingArrayDetailsBean:" + existingArrayBeanCounter + "_distributedItemId"; final ExistingSpecimenArrayOrderItem 
existingSpecimenArrayOrderItem = (ExistingSpecimenArrayOrderItem) orderItem; this.values.put(requestedItem, existingSpecimenArrayOrderItem.getSpecimenArray() .getName()); this.values.put(arrayId, existingSpecimenArrayOrderItem.getSpecimenArray().getId() .toString()); this.values.put(assignQty, "0.0"); existingArrayBeanCounter++; } /*else if (orderItem instanceof NewSpecimenArrayOrderItem) { NewSpecimenArrayOrderItem newSpecimenArrayOrderItem = (NewSpecimenArrayOrderItem) orderItem; List defineArrayContentsList = null; if (definedArrayMap.get(newSpecimenArrayOrderItem) == null) { defineArrayContentsList = new ArrayList(); definedArrayMap.put(newSpecimenArrayOrderItem, defineArrayContentsList); } }*/ this.putCommonValuesInValuesMap(orderItem, assignStatus, description, requestedQty, assignQty, orderItemId, distributedItemId); } // Putting defined array values in Values map if (definedArrayMap.size() > 0) { this.makeValuesMapForDefinedArray(definedArrayMap, totalSpecimenListInRequestForDropDown, dao); } request.getSession().removeAttribute("finalSpecimenList"); request.getSession().setAttribute("finalSpecimenList", totalSpecimenListInRequestForDropDown); } /** * @param definedArrayMap * @throws BizLogicException */ private void makeValuesMapForDefinedArray(Map definedArrayMap, List totalSpecimenListInRequestForDropDown, DAO dao) throws BizLogicException { final Set keySet = definedArrayMap.keySet(); final Iterator iter = keySet.iterator(); int arrayRequestBeanCounter = 0; int arrayDetailsBeanCounter = 0; while (iter.hasNext()) { final NewSpecimenArrayOrderItem newSpecimenArrayOrderItem = (NewSpecimenArrayOrderItem) iter .next(); this.makeKeysForNewSpecimenArrayOrderItem(arrayRequestBeanCounter, newSpecimenArrayOrderItem, dao); final List specimenOrderItemList = (List) definedArrayMap .get(newSpecimenArrayOrderItem); final String noOfItems = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_noOfItems"; this.values.put(noOfItems, "" + 
specimenOrderItemList.size()); final Iterator specimenItemListIter = specimenOrderItemList.iterator(); while (specimenItemListIter.hasNext()) { final SpecimenOrderItem specimenOrderItem = (SpecimenOrderItem) specimenItemListIter .next(); this.makeKeysForDefinedArrayContents(arrayDetailsBeanCounter, specimenOrderItem, totalSpecimenListInRequestForDropDown, dao); arrayDetailsBeanCounter++; } arrayRequestBeanCounter++; } } /** * @param arrayRequestBeanCounter * @param newSpecimenArrayOrderItem */ private void makeKeysForNewSpecimenArrayOrderItem(int arrayRequestBeanCounter, NewSpecimenArrayOrderItem newSpecimenArrayOrderItem, DAO dao) { final String assignStatus = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_assignedStatus"; final String orderItemId = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_orderItemId"; final String requestedItem = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_arrayName"; final String specimenClass = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_arrayClass"; final String specimenType = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_arrayType"; final String positionDimensionOne = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_oneDimensionCapacity"; final String positionDimensionTwo = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_twoDimensionCapacity"; final String arrayId = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_arrayId"; final String distributedItemId = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_distributedItemId"; final String createArrayCondition = "DefinedArrayRequestBean:" + arrayRequestBeanCounter + "_createArrayButtonDisabled"; this.values.put(requestedItem, newSpecimenArrayOrderItem.getName()); this.values.put(positionDimensionOne, newSpecimenArrayOrderItem.getSpecimenArrayType() .getCapacity().getOneDimensionCapacity().toString()); this.values.put(positionDimensionTwo, newSpecimenArrayOrderItem.getSpecimenArrayType() 
.getCapacity().getTwoDimensionCapacity().toString()); this.values.put(specimenClass, newSpecimenArrayOrderItem.getSpecimenArrayType() .getSpecimenClass()); this.values.put(specimenType, newSpecimenArrayOrderItem.getSpecimenArrayType().getName()); final SpecimenArray specimenArrayObj = newSpecimenArrayOrderItem.getSpecimenArray(); if (specimenArrayObj != null) { this.values.put(arrayId, specimenArrayObj.getId().toString()); } final Collection<SpecimenOrderItem> specimenOrderItemCollection = newSpecimenArrayOrderItem .getSpecimenOrderItemCollection(); //Calculating the condition to enable or disable "Create Array Button" String condition = OrderingSystemUtil .determineCreateArrayCondition(specimenOrderItemCollection); if (newSpecimenArrayOrderItem.getStatus().trim().equalsIgnoreCase( Constants.ORDER_REQUEST_STATUS_DISTRIBUTED) || (newSpecimenArrayOrderItem.getStatus().trim() .equalsIgnoreCase(Constants.ORDER_REQUEST_STATUS_DISTRIBUTED_AND_CLOSE))) { condition = "true"; } this.values.put(createArrayCondition, condition); this.putCommonValuesInValuesMap(newSpecimenArrayOrderItem, assignStatus, "", "", "", orderItemId, distributedItemId); } /** * @param arrayDetailsBeanCounter * @param specimenOrderItem * @param totalSpecimenListInRequestForDropDown * @throws BizLogicException BizLogic Exception */ private void makeKeysForDefinedArrayContents(int arrayDetailsBeanCounter, SpecimenOrderItem specimenOrderItem, List totalSpecimenListInRequestForDropDown, DAO dao) throws BizLogicException { final String assignStatus = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_assignedStatus"; final String description = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_description"; final String requestedQty = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_requestedQuantity"; final String orderItemId = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_orderItemId"; final String requestedItem = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + 
"_requestedItem"; final String availableQty = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_availableQuantity"; final String specimenClass = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_className"; final String specimenType = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_type"; final String requestFor = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_requestFor"; final String specimenId = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_specimenId"; final String assignQty = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_assignedQuantity"; final String instanceOf = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_instanceOf"; //distributedItemId = "DefinedArrayDetailsBean:"+arrayDetailsBeanCounter+"_distributedItemId"; final String specimenList = "RequestForDropDownListArray:" + arrayDetailsBeanCounter; final String specimenCollGrpId = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_specimenCollGroupId"; final String actualSpecimenClass = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_actualSpecimenClass"; final String actualSpecimenType = "DefinedArrayDetailsBean:" + arrayDetailsBeanCounter + "_actualSpecimenType"; this.populateValuesMap(specimenOrderItem, requestedItem, availableQty, specimenClass, specimenType, requestFor, specimenId, assignQty, instanceOf, specimenList, specimenCollGrpId, totalSpecimenListInRequestForDropDown, actualSpecimenClass, actualSpecimenType, assignStatus, "", "", "", "", "", "", dao); this.putCommonValuesInValuesMap(specimenOrderItem, assignStatus, description, requestedQty, assignQty, orderItemId, ""); } /** * @param orderItem * @param assignStatus * @param description * @param requestedQty * @param orderItemId * @param distributedItemId */ private void putCommonValuesInValuesMap(OrderItem orderItem, String assignStatus, String description, String requestedQty, String assignQty, String orderItemId, String distributedItemId) { if 
(this.values.get(assignStatus) == null) { this.values.put(assignStatus, orderItem.getStatus()); } this.values.put(description, orderItem.getDescription()); if (orderItem.getRequestedQuantity() != null) {//condition is for define array this.values.put(requestedQty, orderItem.getRequestedQuantity().toString()); } this.values.put(orderItemId, orderItem.getId()); if (orderItem.getDistributedItem() != null) { this.values.put(distributedItemId, orderItem.getDistributedItem().getId().toString()); } else { this.values.put(distributedItemId, ""); if (orderItem.getRequestedQuantity() != null) { this.values.put(assignQty, orderItem.getRequestedQuantity().toString()); } } } /** * @param orderItem * @param requestedItem * @param availableQty * @param specimenClass * @param specimenType * @throws BizLogicException BizLogic Exception */ private void populateValuesMap(OrderItem orderItem, String requestedItem, String availableQty, String specimenClass, String specimenType, String requestFor, String specimenId, String assignQty, String instanceOf, String specimenList, String specimenCollGrpId, List totalSpecimenListInRequestForDropDown, String actualSpecimenClass, String actualSpecimenType, String assignStatus, String consentVerificationkey, String canDistributeKey, String rowStatuskey, String selectedSpecimenTypeKey, String selectedSpecimenQuantityUnit, String selectedSpecimenQuantity, DAO dao) throws BizLogicException { if (orderItem instanceof ExistingSpecimenOrderItem) { ExistingSpecimenOrderItem existingSpecimenOrderItem = (ExistingSpecimenOrderItem) orderItem; // OrderBizLogic orderBizLogic = (OrderBizLogic) BizLogicFactory.getInstance().getBizLogic(Constants.REQUEST_LIST_FILTERATION_FORM_ID); existingSpecimenOrderItem = (ExistingSpecimenOrderItem) HibernateMetaData .getProxyObjectImpl(existingSpecimenOrderItem); //orderBizLogic.getSpecimen(existingSpecimenOrderItem.getSpecimen().getId(),dao); this.values.put(requestedItem, existingSpecimenOrderItem.getSpecimen().getLabel()); 
this.values.put(availableQty, existingSpecimenOrderItem.getSpecimen() .getAvailableQuantity()); this.values.put(specimenClass, existingSpecimenOrderItem.getSpecimen() .getSpecimenClass()); this.values .put(specimenType, existingSpecimenOrderItem.getSpecimen().getSpecimenType()); this.values.put(specimenId, existingSpecimenOrderItem.getSpecimen().getId().toString()); this.values.put(instanceOf, "Existing"); if (existingSpecimenOrderItem.getStatus().equals( Constants.ORDER_REQUEST_STATUS_DISTRIBUTED) || existingSpecimenOrderItem.getStatus().equals( Constants.ORDER_REQUEST_STATUS_DISTRIBUTED_AND_CLOSE)) { this.values.put(canDistributeKey, Constants.TRUE); this.values.put(rowStatuskey, "disable"); } else { this.values.put(canDistributeKey, Constants.FALSE); this.values.put(rowStatuskey, "enable"); } final Collection<ConsentTierStatus> col = existingSpecimenOrderItem.getSpecimen() .getConsentTierStatusCollection(); if(col!=null) { final Iterator<ConsentTierStatus> itr = col.iterator(); if (itr.hasNext()) { this.values.put(consentVerificationkey, Constants.VIEW_CONSENTS); } else { this.values.put(consentVerificationkey, Constants.NO_CONSENTS); } } //values.put(consentVerificationkey, "No Consents"); //Fix me second condition added by vaishali if (existingSpecimenOrderItem.getDistributedItem() != null && existingSpecimenOrderItem.getDistributedItem().getQuantity() != null) { this.values.put(assignQty, existingSpecimenOrderItem.getDistributedItem() .getQuantity().toString()); } this.values.put(actualSpecimenClass, existingSpecimenOrderItem.getSpecimen() .getSpecimenClass()); this.values.put(actualSpecimenType, existingSpecimenOrderItem.getSpecimen() .getSpecimenType()); // setting default status if (existingSpecimenOrderItem.getStatus().equals(Constants.ORDER_REQUEST_STATUS_NEW)) { if (existingSpecimenOrderItem.getNewSpecimenArrayOrderItem() == null) { this.values.put(assignStatus, Constants.ORDER_REQUEST_STATUS_PENDING_FOR_DISTRIBUTION); } else { 
this.values.put(assignStatus, Constants.ORDER_REQUEST_STATUS_READY_FOR_ARRAY_PREPARATION); } } this.values.put(requestFor, existingSpecimenOrderItem.getSpecimen().getId()); this.values.put(selectedSpecimenTypeKey, existingSpecimenOrderItem.getSpecimen() .getSpecimenType()); this.values.put(selectedSpecimenQuantity, existingSpecimenOrderItem.getSpecimen() .getAvailableQuantity().toString()); this.values.put(selectedSpecimenQuantityUnit, OrderingSystemUtil .getUnit(existingSpecimenOrderItem.getSpecimen())); List allSpecimen = new ArrayList(); allSpecimen = OrderingSystemUtil .getAllSpecimen(existingSpecimenOrderItem.getSpecimen()); final SpecimenComparator comparator = new SpecimenComparator(); Collections.sort(allSpecimen, comparator); final List childrenSpecimenListToDisplay = OrderingSystemUtil.getNameValueBeanList( allSpecimen, existingSpecimenOrderItem.getSpecimen()); this.requestForDropDownMap.put(specimenList, childrenSpecimenListToDisplay); } else if (orderItem instanceof DerivedSpecimenOrderItem) { final DerivedSpecimenOrderItem derivedSpecimenOrderItem = (DerivedSpecimenOrderItem) orderItem; this.values.put(requestedItem, derivedSpecimenOrderItem.getParentSpecimen().getLabel()); //Collection childrenSpecimenList = OrderingSystemUtil.getAllChildrenSpecimen(derivedSpecimenOrderItem.getParentSpecimen().getChildSpecimenCollection()); //List finalChildrenSpecimenList = OrderingSystemUtil.getChildrenSpecimenForClassAndType(childrenSpecimenList, derivedSpecimenOrderItem // .getSpecimenClass(), derivedSpecimenOrderItem.getSpecimenType()); //Iterator i = finalChildrenSpecimenList.iterator(); //while (i.hasNext()) //{// Ajax conditions //totalSpecimenListInRequestForDropDown.add(i.next()); List allSpecimen = new ArrayList(); if (derivedSpecimenOrderItem.getNewSpecimenArrayOrderItem() != null) { final Collection childrenSpecimenList = OrderingSystemUtil .getAllChildrenSpecimen(derivedSpecimenOrderItem.getParentSpecimen() .getChildSpecimenCollection()); allSpecimen = 
OrderingSystemUtil.getChildrenSpecimenForClassAndType( childrenSpecimenList, derivedSpecimenOrderItem.getSpecimenClass(), derivedSpecimenOrderItem.getSpecimenType()); } else { allSpecimen = OrderingSystemUtil.getAllSpecimen(derivedSpecimenOrderItem .getParentSpecimen()); } final SpecimenComparator comparator = new SpecimenComparator(); Collections.sort(allSpecimen, comparator); final List childrenSpecimenListToDisplay = OrderingSystemUtil.getNameValueBeanList( allSpecimen, null); this.values.put(availableQty, derivedSpecimenOrderItem.getParentSpecimen() .getAvailableQuantity().toString()); this.values.put(selectedSpecimenTypeKey, "NA"); if (orderItem.getStatus().equals(Constants.ORDER_REQUEST_STATUS_NEW)) { this.values.put(assignStatus, Constants.ORDER_REQUEST_STATUS_PENDING_FOR_DISTRIBUTION); } final Collection col = derivedSpecimenOrderItem.getParentSpecimen() .getConsentTierStatusCollection(); final Iterator itr = col.iterator(); if (!allSpecimen.isEmpty()) { this.values.put(availableQty, (((Specimen) allSpecimen.get(0)) .getAvailableQuantity().toString())); if (itr.hasNext()) { this.values.put(consentVerificationkey, Constants.VIEW_CONSENTS); } else { this.values.put(consentVerificationkey, Constants.NO_CONSENTS); } } else { this.values.put(availableQty, "");//derivedSpecimenorderItem.getSpecimen().getAvailableQuantity().getValue().toString() this.values.put(consentVerificationkey, Constants.NO_CONSENTS); } if (allSpecimen.size() != 0 && derivedSpecimenOrderItem.getNewSpecimenArrayOrderItem() != null) { final Specimen spec = ((Specimen) allSpecimen.get(0)); this.values.put(requestFor, spec.getId()); } else { this.values.put(requestFor, " } if (derivedSpecimenOrderItem.getStatus().equals( Constants.ORDER_REQUEST_STATUS_DISTRIBUTED) || derivedSpecimenOrderItem .getStatus() .equals( assignStatus .equalsIgnoreCase(Constants.ORDER_REQUEST_STATUS_DISTRIBUTED_AND_CLOSE))) { this.values.put(canDistributeKey, Constants.TRUE); this.values.put(rowStatuskey, "disable"); } 
else { this.values.put(canDistributeKey, Constants.FALSE); this.values.put(rowStatuskey, "enable"); } this.values.put(specimenClass, derivedSpecimenOrderItem.getSpecimenClass()); this.values.put(specimenType, derivedSpecimenOrderItem.getSpecimenType()); this.values.put(specimenId, derivedSpecimenOrderItem.getParentSpecimen().getId() .toString()); // values.put(consentVerificationkey, "View"); this.values.put(instanceOf, "Derived"); //fix me second condition added by vaishali if (derivedSpecimenOrderItem.getDistributedItem() != null && derivedSpecimenOrderItem.getDistributedItem().getQuantity() != null) { this.values.put(assignQty, derivedSpecimenOrderItem.getDistributedItem() .getQuantity().toString()); } this.values.put(actualSpecimenClass, derivedSpecimenOrderItem.getParentSpecimen() .getSpecimenClass()); this.values.put(actualSpecimenType, derivedSpecimenOrderItem.getParentSpecimen() .getSpecimenType()); this.requestForDropDownMap.put(specimenList, childrenSpecimenListToDisplay); } else if (orderItem instanceof PathologicalCaseOrderItem) { final PathologicalCaseOrderItem pathologicalCaseOrderItem = (PathologicalCaseOrderItem) orderItem; this.values.put(requestedItem, pathologicalCaseOrderItem.getSpecimenCollectionGroup() .getSurgicalPathologyNumber()); //Fetching requestFor list final List totalSpecimenColl = OrderingSystemUtil.getAllSpecimensForPathologicalCases( pathologicalCaseOrderItem.getSpecimenCollectionGroup(), pathologicalCaseOrderItem); final Iterator i = totalSpecimenColl.iterator(); while (i.hasNext()) {// Ajax conditions totalSpecimenListInRequestForDropDown.add(i.next()); } final List specimenListToDisplay = OrderingSystemUtil.getNameValueBeanList( totalSpecimenColl, null); Logger.out.debug("size of specimenListToDisplay :::" + specimenListToDisplay.size()); this.requestForDropDownMap.put(specimenList, specimenListToDisplay); this.values.put(specimenCollGrpId, pathologicalCaseOrderItem .getSpecimenCollectionGroup().getId().toString()); if 
(totalSpecimenColl.size() != 0) { final Specimen spec = ((Specimen) totalSpecimenColl.get(0)); final Collection col = spec.getConsentTierStatusCollection(); final Iterator itr = col.iterator(); if (itr.hasNext()) { this.values.put(consentVerificationkey, Constants.VIEW_CONSENTS); } else { this.values.put(consentVerificationkey, Constants.NO_CONSENTS); } this.values.put(requestFor, spec.getId()); this.values.put(selectedSpecimenTypeKey, spec.getSpecimenType()); this.values.put(selectedSpecimenQuantityUnit, OrderingSystemUtil.getUnit(spec)); this.values.put(selectedSpecimenQuantity, spec.getAvailableQuantity().toString()); } else { this.values.put(requestFor, " this.values.put(selectedSpecimenTypeKey, "NA"); this.values.put(consentVerificationkey, Constants.NO_CONSENTS); } if (specimenListToDisplay.isEmpty() || (pathologicalCaseOrderItem.getSpecimenClass() != null && pathologicalCaseOrderItem.getSpecimenType() != null && !pathologicalCaseOrderItem.getSpecimenClass().trim() .equalsIgnoreCase("") && !pathologicalCaseOrderItem .getSpecimenType().trim().equalsIgnoreCase(""))) { this.values.put(instanceOf, "DerivedPathological"); } else { this.values.put(instanceOf, "Pathological"); } if (pathologicalCaseOrderItem.getDistributedItem() != null) { this.values.put(assignQty, pathologicalCaseOrderItem.getDistributedItem() .getQuantity().toString()); } this.values.put(specimenClass, pathologicalCaseOrderItem.getSpecimenClass()); this.values.put(specimenType, pathologicalCaseOrderItem.getSpecimenType()); this.values.put(actualSpecimenClass, pathologicalCaseOrderItem.getSpecimenClass()); this.values.put(actualSpecimenType, pathologicalCaseOrderItem.getSpecimenType()); if (pathologicalCaseOrderItem.getStatus().equals( Constants.ORDER_REQUEST_STATUS_DISTRIBUTED) || pathologicalCaseOrderItem .getStatus() .equals( assignStatus .equalsIgnoreCase(Constants.ORDER_REQUEST_STATUS_DISTRIBUTED_AND_CLOSE))) { this.values.put(canDistributeKey, Constants.TRUE); this.values.put(rowStatuskey, 
"disable"); } else { this.values.put(canDistributeKey, Constants.FALSE); this.values.put(rowStatuskey, "enable"); } this.values.put(availableQty, ""); } } /** * @return the status */ public String getStatus() { return this.status; } /** * @param status the status to set */ public void setStatus(String status) { this.status = status; } /** * @return the distributionProtocolId */ public String getDistributionProtocolId() { return this.distributionProtocolId; } /** * @param distributionProtocolId the distributionProtocolId to set */ public void setDistributionProtocolId(String distributionProtocolId) { this.distributionProtocolId = distributionProtocolId; } /** * @return ActionErrors object * @param mapping ActionMapping * @param request HttpServletRequest */ public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) { final ActionErrors errors = new ActionErrors(); final Validator validator = new Validator(); final String noOfRecords = (String) request.getParameter("noOfRecords"); final int recordCount = Integer.parseInt(noOfRecords); if (this.getDistributionProtocolId() == null || this.getDistributionProtocolId().equalsIgnoreCase("") || this.getDistributionProtocolId().equalsIgnoreCase("-1")) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("errors.distribution.protocol")); } if (this.orderName == null || this.orderName.equals("")) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("errors.order.name")); } for (int i = 0; i < recordCount; i++) { final String consentVerificationkey = "RequestDetailsBean:" + i + "_consentVerificationkey"; final String verificationStatus = (String) this.getValue(consentVerificationkey); final String assignStatusKey = "RequestDetailsBean:" + i + "_assignedStatus"; final String assignStatus = (String) this.getValue(assignStatusKey); final String canDistribute = (String) this.getValue("RequestDetailsBean:" + i + "_canDistribute"); this.setValue("RequestDetailsBean:" + i + "_availableQty", this.getValue( 
"RequestDetailsBean:" + i + "_availableQty").toString()); this.setValue("RequestDetailsBean:" + i + "_requestFor", this.getValue( "RequestDetailsBean:" + i + "_requestFor").toString()); this.setValue("RequestDetailsBean:" + i + "_orderItemId", this.getValue( "RequestDetailsBean:" + i + "_orderItemId").toString()); if (verificationStatus != null) { if ((verificationStatus.equalsIgnoreCase(Constants.VIEW_CONSENTS) && assignStatus .equalsIgnoreCase(Constants.ORDER_REQUEST_STATUS_DISTRIBUTED)) || (verificationStatus.equalsIgnoreCase(Constants.VIEW_CONSENTS) && assignStatus .equalsIgnoreCase(Constants.ORDER_REQUEST_STATUS_DISTRIBUTED_AND_CLOSE))) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("errors.verify.Consent")); break; } } if (canDistribute != null && Constants.FALSE.equals(canDistribute) && (assignStatus.equalsIgnoreCase(Constants.ORDER_REQUEST_STATUS_DISTRIBUTED) || assignStatus .equalsIgnoreCase(Constants.ORDER_REQUEST_STATUS_DISTRIBUTED_AND_CLOSE))) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.distribution.quantity.should.equal")); break; } } //getting values from a map. RequestDetailsBean requestDetailsBean = null; DefinedArrayDetailsBean definedArrayDetailsBean = null; boolean specimenItem = false; boolean arrayDetailsItem = false; final MapDataParser mapDataParser = new MapDataParser("edu.wustl.catissuecore.bean"); Collection beanObjSet = null; try { beanObjSet = mapDataParser.generateData(this.values); } catch (final Exception e) { RequestDetailsForm.logger.info("in request details form:" + " map data parser exception:" + e.getMessage(), e); e.printStackTrace(); } final Iterator iter = beanObjSet.iterator(); while (iter.hasNext()) { final Object obj = iter.next(); //For specimen order item. if (obj instanceof RequestDetailsBean) { requestDetailsBean = (RequestDetailsBean) obj; specimenItem = true; } //For defined array details. 
else if (obj instanceof DefinedArrayDetailsBean) { definedArrayDetailsBean = (DefinedArrayDetailsBean) obj; arrayDetailsItem = true; } if (specimenItem) { if (requestDetailsBean.getAssignedQty() != null && !requestDetailsBean.getAssignedQty().equalsIgnoreCase("")) { if (!validator.isDouble(requestDetailsBean.getAssignedQty())) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("errors.item.format", ApplicationProperties.getValue("itemrecord.quantity"))); break; } } if (requestDetailsBean.getInstanceOf().equals("Derived") || requestDetailsBean.getInstanceOf().equals("Pathological") || requestDetailsBean.getInstanceOf().equals("DerivedPathological")) { try { final IFactory factory = AbstractFactoryConfig.getInstance() .getBizLogicFactory(); final OrderBizLogic orderBizLogic = (OrderBizLogic) factory .getBizLogic(Constants.REQUEST_LIST_FILTERATION_FORM_ID); Specimen specimen = null; if (requestDetailsBean.getRowStatuskey().equals("enable") && (requestDetailsBean.getAssignedStatus().equals( Constants.ORDER_REQUEST_STATUS_DISTRIBUTED) || requestDetailsBean .getAssignedStatus() .equals( Constants.ORDER_REQUEST_STATUS_DISTRIBUTED_AND_CLOSE))) { if (requestDetailsBean.getRequestFor().equals(" { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.distribution.derivedspecimen.no.specimen")); break; } else { specimen = (Specimen) orderBizLogic.getSpecimenObject(Long .valueOf(requestDetailsBean.getRequestFor())); if (!(specimen.getSpecimenClass().equals( requestDetailsBean.getClassName()) && specimen .getSpecimenType().equals(requestDetailsBean.getType()))) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.distribution.derivedspecimen.type.class")); break; } } } } catch (final BizLogicException e) { RequestDetailsForm.logger.error("Bizlogic exception while getting " + "IFactory instance:" + e, e); e.printStackTrace(); } catch (final NumberFormatException e) { RequestDetailsForm.logger.error("RequestDetailsForm.java"+e, e); e.printStackTrace(); } 
} else if (arrayDetailsItem) { if (definedArrayDetailsBean.getAssignedQuantity() != null && !definedArrayDetailsBean.getAssignedQuantity().equalsIgnoreCase("")) { if (!validator.isDouble(definedArrayDetailsBean.getAssignedQuantity())) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.item.format", ApplicationProperties .getValue("itemrecord.quantity"))); break; } } } } } return errors; } /** * @return the tabIndex */ public int getTabIndex() { return this.tabIndex; } /** * @param tabIndex the tabIndex to set */ public void setTabIndex(int tabIndex) { this.tabIndex = tabIndex; } /** * @return the requestForDropDownMap */ public Map getRequestForDropDownMap() { return this.requestForDropDownMap; } /** * @param requestForDropDownMap the requestForDropDownMap to set */ public void setRequestForDropDownMap(Map requestForDropDownMap) { this.requestForDropDownMap = requestForDropDownMap; } /** * @return the specimenId */ public String getSpecimenId() { return this.specimenId; } /** * @param specimenId the specimenId to set */ public void setSpecimenId(String specimenId) { this.specimenId = specimenId; } /* (non-Javadoc) * @see edu.wustl.common.actionForm.IValueObject#setAllValues(edu.wustl.common.domain.AbstractDomainObject) */ public void setAllValues(AbstractDomainObject arg0) { // TODO Auto-generated method stub } @Override public void setAddNewObjectIdentifier(String arg0, Long arg1) { // TODO Auto-generated method stub } /** * @param requestedDate the requestedDate to set */ public void setRequestedDate(String requestedDate) { this.requestedDate = requestedDate; } /** * @return the requestedDate */ public String getRequestedDate() { return requestedDate; } }
package gov.nih.nci.cananolab.service.admin.impl;

import gov.nih.nci.cananolab.domain.common.Author;
import gov.nih.nci.cananolab.domain.common.Datum;
import gov.nih.nci.cananolab.domain.common.ExperimentConfig;
import gov.nih.nci.cananolab.domain.common.File;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Instrument;
import gov.nih.nci.cananolab.domain.common.Organization;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.common.Protocol;
import gov.nih.nci.cananolab.domain.common.Publication;
import gov.nih.nci.cananolab.domain.function.Target;
import gov.nih.nci.cananolab.domain.function.TargetingFunction;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.domain.particle.ChemicalAssociation;
import gov.nih.nci.cananolab.domain.particle.ComposingElement;
import gov.nih.nci.cananolab.domain.particle.Function;
import gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity;
import gov.nih.nci.cananolab.domain.particle.NanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Sample;
import gov.nih.nci.cananolab.domain.particle.SampleComposition;
import gov.nih.nci.cananolab.exception.AdministrationException;
import gov.nih.nci.cananolab.exception.NoAccessException;
import gov.nih.nci.cananolab.exception.NotExistException;
import gov.nih.nci.cananolab.service.protocol.ProtocolService;
import gov.nih.nci.cananolab.service.protocol.helper.ProtocolServiceHelper;
import gov.nih.nci.cananolab.service.protocol.impl.ProtocolServiceLocalImpl;
import gov.nih.nci.cananolab.service.publication.PublicationService;
import gov.nih.nci.cananolab.service.publication.helper.PublicationServiceHelper;
import gov.nih.nci.cananolab.service.publication.impl.PublicationServiceLocalImpl;
import gov.nih.nci.cananolab.service.sample.SampleService;
import gov.nih.nci.cananolab.service.sample.impl.SampleServiceLocalImpl;
import gov.nih.nci.cananolab.service.security.SecurityService;
import gov.nih.nci.cananolab.system.applicationservice.CustomizedApplicationService;
import gov.nih.nci.cananolab.util.Constants;
import gov.nih.nci.system.client.ApplicationServiceProvider;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;
import org.hibernate.FetchMode;
import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Property;

/**
 * Service methods for update createdBy field.
 *
 * <p>Administrative, one-off ownership migration: re-points the
 * {@code createdBy} audit field from one account name to another across a
 * user's samples (including the full composition/characterization object
 * graph), protocols, and publications. Each {@code update(...)} overload
 * returns the number of records that FAILED to update (0 means complete
 * success); per-record failures are logged and skipped so one bad record
 * does not abort the whole migration.
 *
 * @author lethai, pansu
 */
public class UpdateCreatedByServiceImpl {

	private static Logger logger = Logger
			.getLogger(UpdateCreatedByServiceImpl.class);

	// special mappings for characterization and composition data that have
	// different created_by than sample.
	// Maps legacy/alias account names to their canonical replacement; consulted
	// by newCreatedBy() when the existing owner doesn't match the account
	// being migrated.
	private static Map<String, String> specialMappings = new HashMap<String, String>();
	static {
		specialMappings.put("michal", "lijowskim");
		specialMappings.put("jennifer", "halljennifer");
		specialMappings.put("skoczen", "skoczens");
		specialMappings.put("clogston", "clogstonj");
		specialMappings.put("admin", "canano_admin");
		specialMappings.put("janedoe", "canano_admin");
		specialMappings.put("sharon", "gaheens");
		specialMappings.put("johndoe", "canano_guest");
		specialMappings.put("SPREAD_SHEET_PARSER_4_STANSHAW_DATA", "canano_admin");
		specialMappings.put("SPREAD_SHEET_PARSER_4_KELLY_DATA", "canano_admin");
		specialMappings.put("DATA_MIGRATION", "canano_admin");
		specialMappings.put("data_migration", "canano_admin");
	}

	/**
	 * Updates the createdBy field on every sample in {@code sampleIds} and on
	 * the entire object graph hanging off each sample: points of contact and
	 * their organizations, composition files, chemical associations,
	 * functionalizing entities (with files and functions), nanomaterial
	 * entities (files, composing elements, inherent functions, and targeting
	 * targets), and characterizations (experiment configs with technique and
	 * instruments, findings with data and files). Each touched entity is
	 * saved individually via {@code appService.saveOrUpdate}.
	 *
	 * <p>NOTE(review): {@code sampleService} is never used inside this method
	 * — presumably kept only so the three private overloads have parallel
	 * signatures; confirm before removing.
	 *
	 * <p>NOTE(review): the characterization loop is nested inside the
	 * {@code sampleComposition != null} guard even though it reads
	 * {@code domain.getCharacterizationCollection()}, so characterizations of
	 * a sample WITHOUT a composition are never updated — confirm this is
	 * intended.
	 *
	 * @param sampleService unused (see note above)
	 * @param sampleIds ids of the samples to migrate
	 * @param currentCreatedBy the account name being migrated away from
	 * @param newCreatedBy the replacement account name
	 * @return the number of samples that could not be updated
	 * @throws AdministrationException if the whole batch fails (e.g. the
	 *         application service cannot be obtained)
	 * @throws NoAccessException declared for signature symmetry with the
	 *         public entry point
	 */
	private int update(SampleService sampleService, List<String> sampleIds,
			String currentCreatedBy, String newCreatedBy)
			throws AdministrationException, NoAccessException {
		// failure counter: incremented once per sample whose update throws
		int i = 0;
		try {
			CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
					.getApplicationService();
			for (String sampleId : sampleIds) {
				try {
					// eagerly load the whole graph so lazy collections below
					// are populated
					Sample domain = this.findFullyLoadedSampleById(sampleId);
					domain.setCreatedBy(newCreatedBy(domain.getCreatedBy(),
							currentCreatedBy, newCreatedBy));
					appService.saveOrUpdate(domain);
					SampleComposition sampleComposition = domain
							.getSampleComposition();
					// placeholders; reassigned from the composition below when
					// the sample has one
					Collection<ChemicalAssociation> chemicalAssociation = new ArrayList<ChemicalAssociation>();
					Collection<FunctionalizingEntity> functionalizingEntity = new ArrayList<FunctionalizingEntity>();
					Collection<NanomaterialEntity> nanomaterialEntity = new ArrayList<NanomaterialEntity>();
					Collection<Characterization> characterization = new ArrayList<Characterization>();
					// point of contact
					PointOfContact poc = domain.getPrimaryPointOfContact();
					if (poc != null) {
						poc.setCreatedBy(newCreatedBy(poc.getCreatedBy(),
								currentCreatedBy, newCreatedBy));
						// organization
						Organization organization = poc.getOrganization();
						if (organization != null) {
							organization.setCreatedBy(newCreatedBy(organization
									.getCreatedBy(), currentCreatedBy,
									newCreatedBy));
						}
						appService.saveOrUpdate(poc);
					}
					if (domain.getOtherPointOfContactCollection() != null) {
						for (PointOfContact otherpoc : domain
								.getOtherPointOfContactCollection()) {
							otherpoc.setCreatedBy(newCreatedBy(otherpoc
									.getCreatedBy(), currentCreatedBy,
									newCreatedBy));
							Organization org = otherpoc.getOrganization();
							if (org != null) {
								org.setCreatedBy(newCreatedBy(org
										.getCreatedBy(), currentCreatedBy,
										newCreatedBy));
							}
							appService.saveOrUpdate(otherpoc);
						}
					}
					// updating Sample Composition
					if (sampleComposition != null) {
						// composition-level files
						if (sampleComposition.getFileCollection() != null) {
							for (File file : sampleComposition
									.getFileCollection()) {
								file.setCreatedBy(newCreatedBy(file
										.getCreatedBy(), currentCreatedBy,
										newCreatedBy));
								appService.saveOrUpdate(file);
							}
						}
						chemicalAssociation = sampleComposition
								.getChemicalAssociationCollection();
						functionalizingEntity = sampleComposition
								.getFunctionalizingEntityCollection();
						nanomaterialEntity = sampleComposition
								.getNanomaterialEntityCollection();
						// NOTE(review): read from the sample, not the
						// composition, yet only reached when the composition
						// exists — see class-level note
						characterization = domain
								.getCharacterizationCollection();
						if (chemicalAssociation != null) {
							for (ChemicalAssociation ca : chemicalAssociation) {
								ca.setCreatedBy(newCreatedBy(ca.getCreatedBy(),
										currentCreatedBy, newCreatedBy));
								Collection<File> fileCollection = ca
										.getFileCollection();
								if (fileCollection != null) {
									for (File file : fileCollection) {
										if (file != null) {
											file.setCreatedBy(newCreatedBy(file
													.getCreatedBy(),
													currentCreatedBy,
													newCreatedBy));
										}
									}
								}
								// files are cascaded with the association save
								appService.saveOrUpdate(ca);
							}
						}
						if (functionalizingEntity != null) {
							for (FunctionalizingEntity fe : functionalizingEntity) {
								fe.setCreatedBy(newCreatedBy(fe.getCreatedBy(),
										currentCreatedBy, newCreatedBy));
								Collection<File> fileCollection = fe
										.getFileCollection();
								if (fileCollection != null) {
									for (File file : fileCollection) {
										file
												.setCreatedBy(newCreatedBy(file
														.getCreatedBy(),
														currentCreatedBy,
														newCreatedBy));
									}
								}
								Collection<Function> functionCollection = fe
										.getFunctionCollection();
								if (functionCollection != null) {
									for (Function f : functionCollection) {
										f
												.setCreatedBy(newCreatedBy(f
														.getCreatedBy(),
														currentCreatedBy,
														newCreatedBy));
									}
								}
								appService.saveOrUpdate(fe);
							}
						}
						if (nanomaterialEntity != null) {
							for (NanomaterialEntity ne : nanomaterialEntity) {
								ne.setCreatedBy(newCreatedBy(ne.getCreatedBy(),
										currentCreatedBy, newCreatedBy));
								Collection<File> fileCollection = ne
										.getFileCollection();
								if (fileCollection != null) {
									for (File file : fileCollection) {
										file
												.setCreatedBy(newCreatedBy(file
														.getCreatedBy(),
														currentCreatedBy,
														newCreatedBy));
									}
								}
								// composing elements -> inherent functions ->
								// (targeting functions only) targets
								if (ne.getComposingElementCollection() != null) {
									for (ComposingElement ce : ne
											.getComposingElementCollection()) {
										ce
												.setCreatedBy(newCreatedBy(ce
														.getCreatedBy(),
														currentCreatedBy,
														newCreatedBy));
										if (ce.getInherentFunctionCollection() != null) {
											for (Function function : ce
													.getInherentFunctionCollection()) {
												function
														.setCreatedBy(newCreatedBy(
																function
																		.getCreatedBy(),
																currentCreatedBy,
																newCreatedBy));
												if (function instanceof TargetingFunction) {
													TargetingFunction tFunc = (TargetingFunction) function;
													if (tFunc
															.getTargetCollection() != null) {
														for (Target target : tFunc
																.getTargetCollection()) {
															target
																	.setCreatedBy(newCreatedBy(
																			target
																					.getCreatedBy(),
																			currentCreatedBy,
																			newCreatedBy));
														}
													}
												}
											}
										}
									}
								}
								appService.saveOrUpdate(ne);
							}
						}
						if (characterization != null) {
							for (Characterization c : characterization) {
								c.setCreatedBy(newCreatedBy(c.getCreatedBy(),
										currentCreatedBy, newCreatedBy));
								appService.saveOrUpdate(c);
								Collection<ExperimentConfig> experimentConfigCollection = c
										.getExperimentConfigCollection();
								if (experimentConfigCollection != null) {
									for (ExperimentConfig expConfig : experimentConfigCollection) {
										expConfig
												.setCreatedBy(newCreatedBy(
														expConfig
																.getCreatedBy(),
														currentCreatedBy,
														newCreatedBy));
										if (expConfig.getTechnique() != null) {
											expConfig
													.getTechnique()
													.setCreatedBy(
															newCreatedBy(
																	expConfig
																			.getTechnique()
																			.getCreatedBy(),
																	currentCreatedBy,
																	newCreatedBy));
										}
										if (expConfig.getInstrumentCollection() != null) {
											for (Instrument instrument : expConfig
													.getInstrumentCollection()) {
												instrument
														.setCreatedBy(newCreatedBy(
																instrument
																		.getCreatedBy(),
																currentCreatedBy,
																newCreatedBy));
											}
										}
										appService.saveOrUpdate(expConfig);
									}
								}
								Collection<Finding> findingCollection = c
										.getFindingCollection();
								if (findingCollection != null) {
									for (Finding f : findingCollection) {
										f
												.setCreatedBy(newCreatedBy(f
														.getCreatedBy(),
														currentCreatedBy,
														newCreatedBy));
										Collection<Datum> datumCollection = f
												.getDatumCollection();
										if (datumCollection != null) {
											for (Datum d : datumCollection) {
												d.setCreatedBy(newCreatedBy(d
														.getCreatedBy(),
														currentCreatedBy,
														newCreatedBy));
											}
										}
										Collection<File> fileCollection = f
												.getFileCollection();
										if (fileCollection != null) {
											for (File file : fileCollection) {
												file.setCreatedBy(newCreatedBy(
														file.getCreatedBy(),
														currentCreatedBy,
														newCreatedBy));
											}
										}
										appService.saveOrUpdate(f);
									}
								}
							}
						}
					}
				} catch (Exception e) {
					// count and log the failure, continue with the next sample
					i++;
					String error = "Error updating createdBy field for sample: "
							+ sampleId;
					logger.error(error, e);
				}
			}
		} catch (Exception e) {
			String error = "Error updating createdBy field for samples";
			logger.error(error, e);
			throw new AdministrationException(error, e);
		}
		return i;
	}

	/**
	 * Updates the createdBy field on each publication in
	 * {@code publicationIds} and on its authors.
	 *
	 * @param publicationService used only to borrow its SecurityService (must
	 *        be a {@link PublicationServiceLocalImpl})
	 * @param publicationIds ids of the publications to migrate
	 * @param currentCreatedBy the account name being migrated away from
	 * @param newCreatedBy the replacement account name
	 * @return the number of publications that could not be updated
	 * @throws AdministrationException if the whole batch fails
	 * @throws NoAccessException declared for signature symmetry
	 */
	private int update(PublicationService publicationService,
			List<String> publicationIds, String currentCreatedBy,
			String newCreatedBy) throws AdministrationException,
			NoAccessException {
		SecurityService securityService = ((PublicationServiceLocalImpl) publicationService)
				.getSecurityService();
		// failure counter
		int i = 0;
		try {
			CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
					.getApplicationService();
			PublicationServiceHelper helper = new PublicationServiceHelper(
					securityService);
			for (String publicationId : publicationIds) {
				try {
					Publication publication = helper
							.findPublicationById(publicationId);
					publication.setCreatedBy(newCreatedBy(publication
							.getCreatedBy(), currentCreatedBy, newCreatedBy));
					appService.saveOrUpdate(publication);
					// author
					Collection<Author> authorCollection = publication
							.getAuthorCollection();
					for (Author a : authorCollection) {
						if (a != null) {
							a.setCreatedBy(newCreatedBy(a.getCreatedBy(),
									currentCreatedBy, newCreatedBy));
							appService.saveOrUpdate(a);
						}
					}
				} catch (Exception e) {
					// count and log, continue with the next publication
					i++;
					String error = "Error updating createdBy field for publication: "
							+ publicationId;
					logger.error(error, e);
				}
			}
		} catch (Exception e) {
			String error = "Error updating createdBy field for publications";
			logger.error(error, e);
			throw new AdministrationException(error, e);
		}
		return i;
	}

	/**
	 * Updates the createdBy field on each protocol in {@code protocolIds} and
	 * on its attached file, if any.
	 *
	 * @param protocolService used only to borrow its SecurityService (must be
	 *        a {@link ProtocolServiceLocalImpl})
	 * @param protocolIds ids of the protocols to migrate
	 * @param currentCreatedBy the account name being migrated away from
	 * @param newCreatedBy the replacement account name
	 * @return the number of protocols that could not be updated
	 * @throws AdministrationException if the whole batch fails
	 * @throws NoAccessException declared for signature symmetry
	 */
	private int update(ProtocolService protocolService,
			List<String> protocolIds, String currentCreatedBy,
			String newCreatedBy) throws AdministrationException,
			NoAccessException {
		SecurityService securityService = ((ProtocolServiceLocalImpl) protocolService)
				.getSecurityService();
		// failure counter
		int i = 0;
		try {
			CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
					.getApplicationService();
			ProtocolServiceHelper helper = new ProtocolServiceHelper(
					securityService);
			for (String protocolId : protocolIds) {
				try {
					Protocol protocol = helper.findProtocolById(protocolId);
					protocol.setCreatedBy(newCreatedBy(protocol.getCreatedBy(),
							currentCreatedBy, newCreatedBy));
					appService.saveOrUpdate(protocol);
					// file
					File file = protocol.getFile();
					if (file != null) {
						file.setCreatedBy(newCreatedBy(file.getCreatedBy(),
								currentCreatedBy, newCreatedBy));
						appService.saveOrUpdate(file);
					}
				} catch (Exception e) {
					// count and log, continue with the next protocol
					i++;
					String error = "Error updating createdBy field for protocol: "
							+ protocolId;
					logger.error(error, e);
				}
			}
		} catch (Exception e) {
			String error = "Error updating createdBy field for protocols";
			logger.error(error, e);
			throw new AdministrationException(error, e);
		}
		return i;
	}

	/**
	 * Public entry point: migrates ownership of ALL samples, protocols, and
	 * publications owned by {@code currentCreatedBy} to {@code newCreatedBy}.
	 *
	 * <p>NOTE(review): access is restricted by hard-coded login names
	 * ("lethai", "pansu") rather than a role check — presumably acceptable for
	 * this one-off admin tool, but worth confirming.
	 *
	 * @param securityService security context of the caller
	 * @param currentCreatedBy the account name being migrated away from
	 * @param newCreatedBy the replacement account name
	 * @return the total number of records (across all three types) that failed
	 *         to update
	 * @throws AdministrationException if any batch fails wholesale
	 * @throws NoAccessException if the caller is not one of the two permitted
	 *         administrators
	 */
	public int update(SecurityService securityService, String currentCreatedBy,
			String newCreatedBy) throws AdministrationException,
			NoAccessException {
		String userLoginName = securityService.getUserBean().getLoginName();
		if (!("lethai".equals(userLoginName) || "pansu".equals(userLoginName))) {
			throw new NoAccessException();
		}
		int numFailures = 0;
		try {
			SampleService sampleService = new SampleServiceLocalImpl(
					securityService);
			List<String> sampleIds = sampleService
					.findSampleIdsByOwner(currentCreatedBy);
			numFailures = this.update(sampleService, sampleIds,
					currentCreatedBy, newCreatedBy);
			ProtocolService protocolService = new ProtocolServiceLocalImpl(
					securityService);
			List<String> protocolIds = protocolService
					.findProtocolIdsByOwner(currentCreatedBy);
			numFailures += this.update(protocolService, protocolIds,
					currentCreatedBy, newCreatedBy);
			PublicationService publicationService = new PublicationServiceLocalImpl(
					securityService);
			List<String> publicationIds = publicationService
					.findPublicationIdsByOwner(currentCreatedBy);
			numFailures += this.update(publicationService, publicationIds,
					currentCreatedBy, newCreatedBy);
		} catch (Exception e) {
			String error = "Error in updating createBy field " + e;
			logger.error(error, e);
			throw new AdministrationException(error, e);
		}
		return numFailures;
	}

	/**
	 * Computes the replacement owner for a single record.
	 *
	 * <p>Resolution order: (1) owners carrying the auto-copy prefix are kept
	 * but prefixed with "newOwner:"; (2) an exact match of the migrated
	 * account is replaced outright; (3) otherwise the legacy-alias table
	 * {@link #specialMappings} is consulted; (4) failing all of those, the
	 * existing owner is returned unchanged.
	 *
	 * @param existingOwner the record's current createdBy value
	 * @param currentOwner the account being migrated
	 * @param newOwner the replacement account
	 * @return the createdBy value the record should carry after migration
	 */
	private String newCreatedBy(String existingOwner, String currentOwner,
			String newOwner) {
		// if existingOwner starts with COPY, prefix it with newOwner
		if (existingOwner.startsWith(Constants.AUTO_COPY_ANNOTATION_PREFIX)) {
			return newOwner + ":" + existingOwner;
		}
		// if existingOwner is the same as currentOwner
		if (existingOwner.equals(currentOwner)) {
			return newOwner;
		}
		// if existingOwner is not the same as currentOwner, check
		// specialMappings
		if (specialMappings.get(existingOwner) != null) {
			return specialMappings.get(existingOwner);
		}
		return existingOwner;
	}

	/**
	 * Loads a sample with its points of contact, keywords, and publications
	 * eagerly fetched, then separately attaches its fully-loaded composition
	 * and characterizations.
	 *
	 * @param sampleId the sample id (parsed as a Long)
	 * @return the fully loaded sample; characterization collection is set to
	 *         null when the sample has none
	 * @throws NotExistException if no sample with that id exists
	 * @throws Exception if the query fails
	 */
	private Sample findFullyLoadedSampleById(String sampleId) throws Exception {
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		// load composition and characterization separate because of Hibernate
		// join limitation
		DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
				Property.forName("id").eq(new Long(sampleId)));
		Sample sample = null;
		// load composition and characterization separate because of
		// Hibernate join limitation
		crit.setFetchMode("primaryPointOfContact", FetchMode.JOIN);
		crit.setFetchMode("primaryPointOfContact.organization", FetchMode.JOIN);
		crit.setFetchMode("otherPointOfContactCollection", FetchMode.JOIN);
		crit.setFetchMode("otherPointOfContactCollection.organization",
				FetchMode.JOIN);
		crit.setFetchMode("keywordCollection", FetchMode.JOIN);
		crit.setFetchMode("publicationCollection", FetchMode.JOIN);
		crit.setFetchMode("publicationCollection.authorCollection",
				FetchMode.JOIN);
		crit.setFetchMode("publicationCollection.keywordCollection",
				FetchMode.JOIN);
		// joins fan out rows; collapse back to distinct root entities
		crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
		List result = appService.query(crit);
		if (!result.isEmpty()) {
			sample = (Sample) result.get(0);
		}
		if (sample == null) {
			throw new NotExistException("Sample doesn't exist in the database");
		}
		// fully load composition
		SampleComposition comp = this
				.loadComposition(sample.getId().toString());
		sample.setSampleComposition(comp);
		// fully load characterizations
		List<Characterization> chars = this.loadCharacterizations(sample
				.getId().toString());
		if (chars != null && !chars.isEmpty()) {
			sample.setCharacterizationCollection(new HashSet<Characterization>(
					chars));
		} else {
			sample.setCharacterizationCollection(null);
		}
		return sample;
	}

	/**
	 * Loads a sample's composition with all nested entity/file/function
	 * collections eagerly join-fetched.
	 *
	 * @param sampleId the owning sample's id (parsed as a Long)
	 * @return the composition, or null if the sample has none
	 * @throws Exception if the query fails
	 */
	private SampleComposition loadComposition(String sampleId) throws Exception {
		SampleComposition composition = null;
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		DetachedCriteria crit = DetachedCriteria
				.forClass(SampleComposition.class);
		crit.createAlias("sample", "sample");
		crit.add(Property.forName("sample.id").eq(new Long(sampleId)));
		crit.setFetchMode("nanomaterialEntityCollection", FetchMode.JOIN);
		crit.setFetchMode("nanomaterialEntityCollection.fileCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"nanomaterialEntityCollection.fileCollection.keywordCollection",
						FetchMode.JOIN);
		crit.setFetchMode(
				"nanomaterialEntityCollection.composingElementCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
						FetchMode.JOIN);
		crit
				.setFetchMode(
						"nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection.targetCollection",
						FetchMode.JOIN);
		crit.setFetchMode("functionalizingEntityCollection", FetchMode.JOIN);
		crit.setFetchMode("functionalizingEntityCollection.fileCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"functionalizingEntityCollection.fileCollection.keywordCollection",
						FetchMode.JOIN);
		crit.setFetchMode("functionalizingEntityCollection.functionCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"functionalizingEntityCollection.functionCollection.targetCollection",
						FetchMode.JOIN);
		crit.setFetchMode("functionalizingEntityCollection.activationMethod",
				FetchMode.JOIN);
		crit.setFetchMode("chemicalAssociationCollection", FetchMode.JOIN);
		crit.setFetchMode("chemicalAssociationCollection.fileCollection",
				FetchMode.JOIN);
		crit
				.setFetchMode(
						"chemicalAssociationCollection.fileCollection.keywordCollection",
						FetchMode.JOIN);
		crit.setFetchMode("chemicalAssociationCollection.associatedElementA",
				FetchMode.JOIN);
		crit.setFetchMode("chemicalAssociationCollection.associatedElementB",
				FetchMode.JOIN);
		crit.setFetchMode("fileCollection", FetchMode.JOIN);
		crit.setFetchMode("fileCollection.keywordCollection", FetchMode.JOIN);
		// joins fan out rows; collapse back to distinct root entities
		crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
		List result = appService.query(crit);
		if (!result.isEmpty()) {
			composition = (SampleComposition) result.get(0);
		}
		return composition;
	}

	/**
	 * Loads a sample's characterizations with experiment configs, findings,
	 * protocol, and all nested files/data eagerly join-fetched.
	 *
	 * @param sampleId the owning sample's id (parsed as a Long)
	 * @return the characterizations; empty list if the sample has none
	 * @throws Exception if the query fails
	 */
	private List<Characterization> loadCharacterizations(String sampleId)
			throws Exception {
		List<Characterization> chars = new ArrayList<Characterization>();
		CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
				.getApplicationService();
		DetachedCriteria crit = DetachedCriteria
				.forClass(Characterization.class);
		crit.createAlias("sample", "sample");
		crit.add(Property.forName("sample.id").eq(new Long(sampleId)));
		// fully load characterization
		crit.setFetchMode("pointOfContact", FetchMode.JOIN);
		crit.setFetchMode("pointOfContact.organization", FetchMode.JOIN);
		crit.setFetchMode("protocol", FetchMode.JOIN);
		crit.setFetchMode("protocol.file", FetchMode.JOIN);
		crit.setFetchMode("protocol.file.keywordCollection", FetchMode.JOIN);
		crit.setFetchMode("experimentConfigCollection", FetchMode.JOIN);
		crit.setFetchMode("experimentConfigCollection.technique",
				FetchMode.JOIN);
		crit.setFetchMode("experimentConfigCollection.instrumentCollection",
				FetchMode.JOIN);
		crit.setFetchMode("findingCollection", FetchMode.JOIN);
		crit.setFetchMode("findingCollection.datumCollection", FetchMode.JOIN);
		crit.setFetchMode(
				"findingCollection.datumCollection.conditionCollection",
				FetchMode.JOIN);
		crit.setFetchMode("findingCollection.fileCollection", FetchMode.JOIN);
		crit.setFetchMode("findingCollection.fileCollection.keywordCollection",
				FetchMode.JOIN);
		// joins fan out rows; collapse back to distinct root entities
		crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
		List results = appService.query(crit);
		for (Object obj : results) {
			Characterization achar = (Characterization) obj;
			chars.add(achar);
		}
		return chars;
	}
}
package com.mmnaseri.utils.spring.data.proxy.impl;

import com.mmnaseri.utils.spring.data.domain.*;
import com.mmnaseri.utils.spring.data.domain.impl.MethodQueryDescriptionExtractor;
import com.mmnaseri.utils.spring.data.domain.impl.key.NoOpKeyGenerator;
import com.mmnaseri.utils.spring.data.proxy.*;
import com.mmnaseri.utils.spring.data.proxy.impl.resolvers.DefaultDataOperationResolver;
import com.mmnaseri.utils.spring.data.query.DataFunctionRegistry;
import com.mmnaseri.utils.spring.data.store.DataStore;
import com.mmnaseri.utils.spring.data.store.DataStoreOperation;
import com.mmnaseri.utils.spring.data.store.DataStoreRegistry;
import com.mmnaseri.utils.spring.data.store.impl.DefaultDataStoreEventListenerContext;
import com.mmnaseri.utils.spring.data.store.impl.EventPublishingDataStore;
import com.mmnaseri.utils.spring.data.store.impl.MemoryDataStore;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.io.Serializable;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * <p>This class is the entry point to this framework as a whole. Using this class, you can mock a repository
 * interface by passing the proper set of configurations and parameters.</p>
 *
 * <p>All collaborators (metadata resolver, query-description extractor, function registry, data store
 * registry, adapter/type-mapping contexts, and the non-data-operation invocation handler) are taken from
 * the single {@link RepositoryFactoryConfiguration} passed at construction time.</p>
 *
 * @author Milad Naseri ([email protected])
 * @since 1.0 (9/29/15)
 */
public class DefaultRepositoryFactory implements RepositoryFactory {

    private static final Log log = LogFactory.getLog(DefaultRepositoryFactory.class);
    private final RepositoryMetadataResolver repositoryMetadataResolver;
    // cache of resolved metadata, keyed by repository interface
    private final Map<Class<?>, RepositoryMetadata> metadataMap = new ConcurrentHashMap<>();
    private final MethodQueryDescriptionExtractor descriptionExtractor;
    private final DataFunctionRegistry functionRegistry;
    private final DataStoreRegistry dataStoreRegistry;
    private final ResultAdapterContext adapterContext;
    private final TypeMappingContext typeMappingContext;
    private final RepositoryFactoryConfiguration configuration;
    private final NonDataOperationInvocationHandler operationInvocationHandler;

    /**
     * Creates a factory whose collaborators are all drawn from the given configuration.
     *
     * @param configuration the configuration supplying every collaborator used by this factory
     */
    public DefaultRepositoryFactory(RepositoryFactoryConfiguration configuration) {
        this.configuration = configuration;
        this.repositoryMetadataResolver = configuration.getRepositoryMetadataResolver();
        this.descriptionExtractor = configuration.getDescriptionExtractor();
        this.functionRegistry = configuration.getFunctionRegistry();
        this.dataStoreRegistry = configuration.getDataStoreRegistry();
        this.adapterContext = configuration.getResultAdapterContext();
        this.typeMappingContext = configuration.getTypeMappingContext();
        this.operationInvocationHandler = configuration.getOperationInvocationHandler();
    }

    /**
     * Creates a mocked instance of the given repository interface.
     *
     * <p>Steps: pick the effective key generator (argument, else the configuration's default, else a
     * {@link NoOpKeyGenerator}); resolve repository metadata; obtain (or create) the backing data store;
     * collect the type mappings (including any user-supplied implementation classes); resolve an invocation
     * mapping for every interface method; build a JDK dynamic proxy around a
     * {@code DataOperationInvocationHandler}; and finally inject the proxy, the repository configuration,
     * and this factory into any mapping implementations that declare the corresponding *Aware interfaces.</p>
     *
     * @param keyGenerator        the key generator to use, or {@code null} to fall back to the default
     * @param repositoryInterface the repository interface to mock
     * @param implementations     concrete classes whose methods should back matching interface methods
     * @param <E>                 the repository interface type
     * @return a proxy implementing {@code repositoryInterface}
     */
    @Override
    public <E> E getInstance(KeyGenerator<? extends Serializable> keyGenerator, Class<E> repositoryInterface, Class... implementations) {
        final KeyGenerator<? extends Serializable> actualKeyGenerator;
        if (keyGenerator == null) {
            if (configuration.getDefaultKeyGenerator() != null) {
                //if no key generator is passed and there is a default key generator specified, we fall back to that
                actualKeyGenerator = configuration.getDefaultKeyGenerator();
            } else {
                //otherwise, let's assume that no key generation is required
                actualKeyGenerator = new NoOpKeyGenerator<>();
            }
        } else {
            actualKeyGenerator = keyGenerator;
        }
        log.info("We are going to create a proxy instance of type " + repositoryInterface + " using key generator " + actualKeyGenerator + " and binding the implementations to " + Arrays.toString(implementations));
        //figure out the repository metadata
        log.info("Resolving repository metadata for " + repositoryInterface);
        final RepositoryMetadata metadata = getRepositoryMetadata(repositoryInterface);
        //get the underlying data store
        log.info("Resolving the data store for " + repositoryInterface);
        final DataStore<Serializable, Object> dataStore = getDataStore(metadata);
        //figure out type mappings
        log.info("Trying to find all the proper type mappings for entity repository " + repositoryInterface);
        final List<TypeMapping<?>> typeMappings = getTypeMappings(metadata, dataStore, actualKeyGenerator, implementations);
        //set up the data operation resolver
        final DataOperationResolver operationResolver = new DefaultDataOperationResolver(typeMappings, descriptionExtractor, metadata, functionRegistry, configuration);
        //get all of this repository's methods
        final Method[] methods = repositoryInterface.getMethods();
        //get mappings for the repository methods
        log.info("Trying to find all the invocation mappings for methods declared on " + repositoryInterface);
        final List<InvocationMapping<? extends Serializable, ?>> invocationMappings = getInvocationMappings(operationResolver, methods);
        //extract the bound implementation types
        final List<Class<?>> boundImplementations = new LinkedList<>();
        for (TypeMapping<?> mapping : typeMappings) {
            boundImplementations.add(mapping.getType());
        }
        //set up the repository configuration
        final RepositoryConfiguration repositoryConfiguration = new ImmutableRepositoryConfiguration(metadata, actualKeyGenerator, boundImplementations);
        //create the interceptor
        //noinspection unchecked
        final InvocationHandler interceptor = new DataOperationInvocationHandler(repositoryConfiguration, invocationMappings, dataStore, adapterContext, operationInvocationHandler);
        //create a proxy for the repository
        log.info("Instantiating the proxy using the provided configuration");
        final Object instance = Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{repositoryInterface}, interceptor);
        //for each type mapping, inject proper dependencies
        for (TypeMapping<?> typeMapping : typeMappings) {
            log.info("Injecting all the required dependencies into the repository mapping implementations");
            if (typeMapping.getInstance() instanceof RepositoryAware<?>) {
                //noinspection unchecked
                ((RepositoryAware) typeMapping.getInstance()).setRepository(instance);
            }
            if (typeMapping.getInstance() instanceof RepositoryConfigurationAware) {
                ((RepositoryConfigurationAware) typeMapping.getInstance()).setRepositoryConfiguration(repositoryConfiguration);
            }
            if (typeMapping.getInstance() instanceof RepositoryFactoryAware) {
                ((RepositoryFactoryAware) typeMapping.getInstance()).setRepositoryFactory(this);
            }
        }
        //return the repository instance
        return repositoryInterface.cast(instance);
    }

    /**
     * @return the configuration this factory was constructed with
     */
    @Override
    public RepositoryFactoryConfiguration getConfiguration() {
        return configuration;
    }

    /**
     * <p>Given a repository metadata, it will find out all the proper type mappings bound as implementations to the repository. These will come from the
     * {@link TypeMappingContext}, overridden by the implementations provided by the user for this specific case.</p>
     *
     * <p>If the mapped concrete class needs to know anything from the current mocking context, it can implement one of the
     * various {@link org.springframework.beans.factory.Aware aware} interfaces to be given the proper piece of contextual
     * information.</p>
     *
     * @param metadata        the repository metadata
     * @param dataStore       the data store
     * @param keyGenerator    the key generator
     * @param implementations the implementations specified by the user
     * @return the resolved list of type mappings
     */
    private List<TypeMapping<?>> getTypeMappings(RepositoryMetadata metadata, DataStore<Serializable, Object> dataStore, KeyGenerator<? extends Serializable> keyGenerator, Class[] implementations) {
        final List<TypeMapping<?>> typeMappings = new LinkedList<>();
        // a local child context so user-supplied implementations override the shared context
        // without polluting it
        final TypeMappingContext localContext = new DefaultTypeMappingContext(typeMappingContext);
        for (Class implementation : implementations) {
            localContext.register(metadata.getRepositoryInterface(), implementation);
        }
        typeMappings.addAll(localContext.getMappings(metadata.getRepositoryInterface()));
        // satisfy each *Aware interface the mapping implementation declares
        for (TypeMapping<?> mapping : typeMappings) {
            if (mapping.getInstance() instanceof DataStoreAware<?, ?>) {
                DataStoreAware instance = (DataStoreAware<?, ?>) mapping.getInstance();
                instance.setDataStore(dataStore);
            }
            if (mapping.getInstance() instanceof RepositoryMetadataAware) {
                RepositoryMetadataAware instance = (RepositoryMetadataAware) mapping.getInstance();
                instance.setRepositoryMetadata(metadata);
            }
            if (mapping.getInstance() instanceof KeyGeneratorAware) {
                KeyGeneratorAware instance = (KeyGeneratorAware) mapping.getInstance();
                //noinspection unchecked
                instance.setKeyGenerator(keyGenerator);
            }
            if (mapping.getInstance() instanceof RepositoryFactoryConfigurationAware) {
                RepositoryFactoryConfigurationAware instance = (RepositoryFactoryConfigurationAware) mapping.getInstance();
                instance.setRepositoryFactoryConfiguration(configuration);
            }
        }
        return typeMappings;
    }

    /**
     * Given a repository interface, it will resolve the metadata for that interface.
     *
     * <p>NOTE(review): the containsKey/get/put sequence on the concurrent map is not atomic, so two
     * threads may both resolve and cache metadata for the same interface — presumably benign (the
     * resolver should be deterministic), but confirm if strict single-resolution matters.</p>
     *
     * @param repositoryInterface the interface
     * @param <E>                 the type of the interface
     * @return the repository metadata associated with the interface
     */
    private <E> RepositoryMetadata getRepositoryMetadata(Class<E> repositoryInterface) {
        final RepositoryMetadata metadata;
        if (metadataMap.containsKey(repositoryInterface)) {
            metadata = metadataMap.get(repositoryInterface);
        } else {
            metadata = repositoryMetadataResolver.resolve(repositoryInterface);
            metadataMap.put(repositoryInterface, metadata);
        }
        return metadata;
    }

    /**
     * <p>Given a repository metadata, it will return the data store instance associated with the entity type for that repository.</p>
     *
     * <p>If the data store is not an instance of {@link EventPublishingDataStore} it will wrap it in one, thus enabling event processing
     * for this repository.</p>
     *
     * <p>It will also register the data store instance to let the user access the data store, as well as cache it for future use.</p>
     *
     * @param metadata the metadata
     * @return the data store
     */
    private DataStore<Serializable, Object> getDataStore(RepositoryMetadata metadata) {
        DataStore<Serializable, Object> dataStore;
        if (dataStoreRegistry.has(metadata.getEntityType())) {
            //noinspection unchecked
            dataStore = (DataStore<Serializable, Object>) dataStoreRegistry.getDataStore(metadata.getEntityType());
        } else {
            // no store registered for this entity type yet: fall back to an in-memory store
            //noinspection unchecked
            dataStore = new MemoryDataStore<>((Class<Object>) metadata.getEntityType());
        }
        if (!(dataStore instanceof EventPublishingDataStore)) {
            dataStore = new EventPublishingDataStore<>(dataStore, metadata, new DefaultDataStoreEventListenerContext(configuration.getEventListenerContext()));
        }
        // re-register so the (possibly wrapped) store is the one callers see from now on
        dataStoreRegistry.register(dataStore);
        return dataStore;
    }

    /**
     * Given a set of methods, it will rely on a {@link DataOperationResolver} to find the mappings for each of the methods.
     *
     * @param operationResolver the resolver to use
     * @param methods           the array of methods
     * @return resolved invocations
     */
    private List<InvocationMapping<? extends Serializable, ?>> getInvocationMappings(DataOperationResolver operationResolver, Method[] methods) {
        final List<InvocationMapping<? extends Serializable, ?>> invocationMappings = new LinkedList<>();
        for (Method method : methods) {
            final DataStoreOperation<?, ?, ?> operation = operationResolver.resolve(method);
            //noinspection unchecked
            invocationMappings.add(new ImmutableInvocationMapping<>(method, (DataStoreOperation<?, Serializable, Object>) operation));
        }
        return invocationMappings;
    }
}
package real.estate.ui;

// NOTE(review): neither of these imports appears to be referenced in this class —
// presumably leftovers from an earlier revision; confirm with the rest of the
// package before removing.
import real.estate.dao.List;
import real.estate.dao.Listable;

/**
 * Swing data-entry form for real-estate listings (lot number, owner name, price,
 * square footage, bedroom count) with Find/Add/Delete/Next/Clear/Reset controls.
 *
 * <p>NOTE(review): only the "Clear" button (jButton5) has working behavior; the
 * Next/Reset/Find/Add/Delete handlers are empty TODO stubs. Field-to-label pairing
 * (by layout coordinates): jTextField5 = Lot Number, jTextField4 = First Name,
 * jTextField3 = Last Name, jTextField2 = Price, jTextField1 = Square Feet,
 * jTextField6 = Number of Bedrooms.</p>
 *
 * @author Chathu
 */
public class RealEsateUI extends javax.swing.JFrame {

    /**
     * Creates new form RealEsateUI
     */
    public RealEsateUI() {
        initComponents();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jPanel1 = new javax.swing.JPanel();
        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        jTextField1 = new javax.swing.JTextField();
        jTextField2 = new javax.swing.JTextField();
        jTextField3 = new javax.swing.JTextField();
        jTextField4 = new javax.swing.JTextField();
        jTextField5 = new javax.swing.JTextField();
        jTextField6 = new javax.swing.JTextField();
        jButton1 = new javax.swing.JButton();
        jButton2 = new javax.swing.JButton();
        jButton3 = new javax.swing.JButton();
        jButton4 = new javax.swing.JButton();
        jButton5 = new javax.swing.JButton();
        jButton6 = new javax.swing.JButton();
        jButton7 = new javax.swing.JButton();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        getContentPane().setLayout(new org.netbeans.lib.awtextra.AbsoluteLayout());

        jPanel1.setLayout(new org.netbeans.lib.awtextra.AbsoluteLayout());

        jLabel1.setFont(new java.awt.Font("Tahoma", 0, 12)); // NOI18N
        jLabel1.setText("Number of Bedrooms:");
        jPanel1.add(jLabel1, new org.netbeans.lib.awtextra.AbsoluteConstraints(30, 260, -1, -1));

        jLabel2.setFont(new java.awt.Font("Tahoma", 0, 12)); // NOI18N
        jLabel2.setText("Square Feet:");
        jPanel1.add(jLabel2, new org.netbeans.lib.awtextra.AbsoluteConstraints(80, 220, -1, -1));

        jLabel3.setFont(new java.awt.Font("Tahoma", 0, 12)); // NOI18N
        jLabel3.setText("Price:");
        jPanel1.add(jLabel3, new org.netbeans.lib.awtextra.AbsoluteConstraints(120, 180, -1, -1));

        jLabel4.setFont(new java.awt.Font("Tahoma", 0, 12)); // NOI18N
        jLabel4.setText("Last Name:");
        jPanel1.add(jLabel4, new org.netbeans.lib.awtextra.AbsoluteConstraints(90, 140, -1, -1));

        jLabel5.setFont(new java.awt.Font("Tahoma", 0, 12)); // NOI18N
        jLabel5.setText("First Name:");
        jPanel1.add(jLabel5, new org.netbeans.lib.awtextra.AbsoluteConstraints(90, 100, -1, -1));

        jLabel6.setFont(new java.awt.Font("Tahoma", 0, 12)); // NOI18N
        jLabel6.setText("Lot Number:");
        jPanel1.add(jLabel6, new org.netbeans.lib.awtextra.AbsoluteConstraints(80, 60, -1, -1));
        jPanel1.add(jTextField1, new org.netbeans.lib.awtextra.AbsoluteConstraints(179, 220, 190, -1));
        jPanel1.add(jTextField2, new org.netbeans.lib.awtextra.AbsoluteConstraints(179, 180, 190, -1));
        jPanel1.add(jTextField3, new org.netbeans.lib.awtextra.AbsoluteConstraints(179, 140, 190, -1));
        jPanel1.add(jTextField4, new org.netbeans.lib.awtextra.AbsoluteConstraints(179, 100, 190, -1));

        jTextField5.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jTextField5ActionPerformed(evt);
            }
        });
        jPanel1.add(jTextField5, new org.netbeans.lib.awtextra.AbsoluteConstraints(179, 60, 190, -1));
        jPanel1.add(jTextField6, new org.netbeans.lib.awtextra.AbsoluteConstraints(179, 260, 190, -1));

        jButton1.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
        jButton1.setText("Find");
        jPanel1.add(jButton1, new org.netbeans.lib.awtextra.AbsoluteConstraints(220, 420, 160, -1));

        jButton2.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
        jButton2.setText("Delete");
        jPanel1.add(jButton2, new org.netbeans.lib.awtextra.AbsoluteConstraints(220, 380, 160, -1));

        jButton3.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
        jButton3.setText("Next");
        jButton3.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton3ActionPerformed(evt);
            }
        });
        jPanel1.add(jButton3, new org.netbeans.lib.awtextra.AbsoluteConstraints(220, 340, 160, -1));

        jButton4.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
        jButton4.setText("Add");
        jPanel1.add(jButton4, new org.netbeans.lib.awtextra.AbsoluteConstraints(30, 380, 170, -1));

        jButton5.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
        jButton5.setText("Clear");
        jButton5.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton5ActionPerformed(evt);
            }
        });
        jPanel1.add(jButton5, new org.netbeans.lib.awtextra.AbsoluteConstraints(30, 420, 170, -1));

        jButton6.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
        jButton6.setText("Reset");
        jButton6.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton6ActionPerformed(evt);
            }
        });
        jPanel1.add(jButton6, new org.netbeans.lib.awtextra.AbsoluteConstraints(30, 340, 170, -1));

        // NOTE(review): jButton7 still has the default generated label and is placed at
        // (70, 340), overlapping jButton6 (30, 340, width 170) — looks like an accidental
        // drag in the Form Editor; remove it via the Form Editor, not by hand.
        jButton7.setText("jButton1");
        jPanel1.add(jButton7, new org.netbeans.lib.awtextra.AbsoluteConstraints(70, 340, -1, -1));

        getContentPane().add(jPanel1, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 0, 420, 480));

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // "Clear" button: blanks all six input fields. (setText(null) is treated by
    // JTextComponent as an empty document.)
    private void jButton5ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton5ActionPerformed
        // TODO add your handling code here:
        jTextField1.setText(null);
        jTextField2.setText(null);
        jTextField3.setText(null);
        jTextField4.setText(null);
        jTextField5.setText(null);
        jTextField6.setText(null);
    }//GEN-LAST:event_jButton5ActionPerformed

    // "Reset" button: not implemented yet.
    private void jButton6ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton6ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jButton6ActionPerformed

    // "Next" button: not implemented yet.
    private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jButton3ActionPerformed

    // Enter pressed in the Lot Number field: not implemented yet.
    private void jTextField5ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jTextField5ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jTextField5ActionPerformed

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(RealEsateUI.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(RealEsateUI.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(RealEsateUI.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(RealEsateUI.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new RealEsateUI().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton2;
    private javax.swing.JButton jButton3;
    private javax.swing.JButton jButton4;
    private javax.swing.JButton jButton5;
    private javax.swing.JButton jButton6;
    private javax.swing.JButton jButton7;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JTextField jTextField1;
    private javax.swing.JTextField jTextField2;
    private javax.swing.JTextField jTextField3;
    private javax.swing.JTextField jTextField4;
    private javax.swing.JTextField jTextField5;
    private javax.swing.JTextField jTextField6;
    // End of variables declaration//GEN-END:variables
}
package org.objectweb.proactive.extra.gcmdeployment.GCMApplication;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.objectweb.proactive.core.runtime.ProActiveRuntimeImpl;
import org.objectweb.proactive.extra.gcmdeployment.GCMDeployment.Executor;
import org.objectweb.proactive.extra.gcmdeployment.GCMDeployment.GCMDeploymentDescriptor;
import org.objectweb.proactive.extra.gcmdeployment.GCMDeployment.GCMDeploymentDescriptorImpl;
import org.objectweb.proactive.extra.gcmdeployment.GCMDeployment.GCMDeploymentResources;
import org.objectweb.proactive.extra.gcmdeployment.Helpers;
import org.objectweb.proactive.extra.gcmdeployment.core.DeploymentNode;
import org.objectweb.proactive.extra.gcmdeployment.core.DeploymentTree;
import org.objectweb.proactive.extra.gcmdeployment.core.VMNodes;
import org.objectweb.proactive.extra.gcmdeployment.core.VirtualNode;
import org.objectweb.proactive.extra.gcmdeployment.core.VirtualNodeInternal;
import org.objectweb.proactive.extra.gcmdeployment.process.Bridge;
import org.objectweb.proactive.extra.gcmdeployment.process.CommandBuilder;
import org.objectweb.proactive.extra.gcmdeployment.process.Group;
import org.objectweb.proactive.extra.gcmdeployment.process.HostInfo;
import org.objectweb.proactive.extra.gcmdeployment.process.hostinfo.HostInfoImpl;

/**
 * Implementation of a GCM Application descriptor.
 *
 * <p>Parses the GCM Application descriptor file, selects the GCM Deployment
 * Descriptors needed to satisfy the declared Virtual Nodes, builds the runtime
 * deployment tree and starts the deployment — all from the constructor.</p>
 */
public class GCMApplicationDescriptorImpl implements GCMApplicationDescriptor {

    /** The descriptor file */
    private File gadFile = null;

    /** A parser dedicated to this GCM Application descriptor */
    private GCMApplicationParser gadParser = null;

    /** All the Virtual Nodes defined in this application */
    private Map<String, VirtualNodeInternal> virtualNodes = null;

    /** Runtime tree mirroring where processes are launched. */
    private DeploymentTree deploymentTree;

    /** The GCM Deployment Descriptors chosen to fulfill the Virtual Nodes. */
    private Map<String, GCMDeploymentDescriptor> selectedDeploymentDesc;

    /** Path (stack of ids) of the node currently being built in the tree. */
    private ArrayList<String> currentDeploymentPath;

    /**
     * Builds the descriptor from a file name.
     *
     * @param filename path to the GCM Application descriptor file
     * @throws IllegalArgumentException if the file does not exist or cannot be parsed
     */
    public GCMApplicationDescriptorImpl(String filename) throws IllegalArgumentException {
        this(new File(filename));
    }

    /**
     * Builds the descriptor and immediately starts the deployment.
     *
     * @param file the GCM Application descriptor file
     * @throws IllegalArgumentException if the file does not exist or cannot be parsed
     */
    public GCMApplicationDescriptorImpl(File file) throws IllegalArgumentException {
        currentDeploymentPath = new ArrayList<String>();
        gadFile = Helpers.checkDescriptorFileExist(file);
        try {
            gadParser = new GCMApplicationParserImpl(gadFile);
        } catch (IOException e) {
            // Preserve the cause so callers can diagnose the parse failure.
            throw new IllegalArgumentException(e);
        }

        // 1. Load all GCM Deployment Descriptors
        Map<String, GCMDeploymentDescriptor> gdds;
        gdds = gadParser.getResourceProviders();

        // 2. Get Virtual Nodes and Command Builder
        virtualNodes = gadParser.getVirtualNodes();
        CommandBuilder commandBuilder = gadParser.getCommandBuilder();

        // 3. Select the GCM Deployment Descriptors to be used
        selectedDeploymentDesc = selectGCMD(virtualNodes, gdds);

        // 4. Build the runtime tree
        buildDeploymentTree();

        // 5. Start the deployment
        for (GCMDeploymentDescriptor gdd : selectedDeploymentDesc.values()) {
            gdd.start(commandBuilder);
        }

        /*
         * If this GCMA describes a distributed application, the Runtime has been
         * started and will populate Virtual Nodes etc. We let the user code
         * interact with its Middleware.
         *
         * If a "script" is described, the command has been started on each
         * machine/VM/core and we can safely return.
         */
    }

    /**
     * Builds the deployment tree: a root node for the local JVM, then one leaf
     * per Group/Bridge declared by each selected GCM Deployment Descriptor.
     */
    protected void buildDeploymentTree() {
        deploymentTree = new DeploymentTree();

        // Make the root node from the local JVM.
        DeploymentNode rootNode = new DeploymentNode();
        rootNode.setDeploymentDescriptorPath(""); // no deployment descriptor here
        try {
            rootNode.setApplicationDescriptorPath(gadFile.getCanonicalPath());
        } catch (IOException e) {
            // Best effort: fall back to an empty path if the canonical path fails.
            rootNode.setApplicationDescriptorPath("");
        }

        currentDeploymentPath.clear();
        ProActiveRuntimeImpl proActiveRuntime = ProActiveRuntimeImpl.getProActiveRuntime();
        VMNodes vmNodes = new VMNodes(proActiveRuntime.getVMInformation());
        currentDeploymentPath.add(proActiveRuntime.getVMInformation().getName());
        // vmNodes.addNode(<something>); - TODO cmathieu
        rootNode.addVMNodes(vmNodes);
        rootNode.setDeploymentPath(getCurrentDeploymentPath());
        deploymentTree.setRootNode(rootNode);

        // Build leaf nodes.
        for (GCMDeploymentDescriptor gdd : selectedDeploymentDesc.values()) {
            GCMDeploymentDescriptorImpl gddi = (GCMDeploymentDescriptorImpl) gdd;
            GCMDeploymentResources resources = gddi.getResources();
            for (Group group : resources.getGroups()) {
                buildGroupTreeNode(rootNode, group);
            }
            for (Bridge bridge : resources.getBridges()) {
                buildBridgeTree(rootNode, bridge);
            }
        }
    }

    /**
     * Returns a copy of the current deployment path.
     *
     * @return a snapshot of the path (safe to keep, independent of later pushes/pops)
     */
    private List<String> getCurrentDeploymentPath() {
        return new ArrayList<String>(currentDeploymentPath);
    }

    /**
     * Adds a tree node for a Group below the given parent node.
     *
     * @param rootNode the parent node
     * @param group the group to attach
     */
    private void buildGroupTreeNode(DeploymentNode rootNode, Group group) {
        DeploymentNode deploymentNode = new DeploymentNode();
        deploymentNode.setDeploymentDescriptorPath(rootNode.getDeploymentDescriptorPath());
        HostInfoImpl hostInfo = (HostInfoImpl) group.getHostInfo();
        pushDeploymentPath(hostInfo.getId());
        hostInfo.setNodeId(deploymentNode.getId());
        deploymentTree.addNode(deploymentNode, rootNode);
        popDeploymentPath();
    }

    /**
     * Recursively adds tree nodes for a Bridge: its host info (if any), its
     * groups, then its sub-bridges.
     *
     * @param baseNode the parent node
     * @param bridge the bridge to attach
     */
    private void buildBridgeTree(DeploymentNode baseNode, Bridge bridge) {
        DeploymentNode deploymentNode = new DeploymentNode();
        deploymentNode.setDeploymentDescriptorPath(baseNode.getDeploymentDescriptorPath());

        pushDeploymentPath(bridge.getId());
        // first look for a host info...
        if (bridge.getHostInfo() != null) {
            HostInfoImpl hostInfo = (HostInfoImpl) bridge.getHostInfo();
            pushDeploymentPath(hostInfo.getId());
            hostInfo.setNodeId(deploymentNode.getId());
            deploymentTree.addNode(deploymentNode, baseNode);
            popDeploymentPath();
        }
        // then groups...
        if (bridge.getGroups() != null) {
            for (Group group : bridge.getGroups()) {
                buildGroupTreeNode(deploymentNode, group);
            }
        }
        // then bridges (and recurse)
        if (bridge.getBridges() != null) {
            for (Bridge subBridge : bridge.getBridges()) {
                buildBridgeTree(deploymentNode, subBridge);
            }
        }
        popDeploymentPath();
    }

    /** Pushes one element onto the current deployment path. */
    private boolean pushDeploymentPath(String pathElement) {
        return currentDeploymentPath.add(pathElement);
    }

    /** Pops the last element off the current deployment path. */
    private void popDeploymentPath() {
        currentDeploymentPath.remove(currentDeploymentPath.size() - 1);
    }

    /**
     * Select the GCM Deployment descriptor to be used.
     *
     * A Virtual Node is a consumer, and a GCM Deployment Descriptor a producer.
     * We try to fulfill the consumers needs with as few as possible producer.
     *
     * @param vns Virtual Nodes asking for some resources
     * @param gdds GCM Deployment Descriptor providing some resources
     * @return the selected descriptors (currently: all of them — selection not implemented)
     */
    static private Map<String, GCMDeploymentDescriptor> selectGCMD(
            Map<String, VirtualNodeInternal> vns, Map<String, GCMDeploymentDescriptor> gdds) {
        // TODO: Implement this method
        return gdds;
    }

    /**
     * Sums the capacity required by every Virtual Node.
     *
     * @return the total required capacity
     */
    private long getRequiredCapacity() {
        // FIX(review): accumulate in a long — the previous int accumulator could
        // overflow before being widened to the long return type.
        long cap = 0;
        for (VirtualNodeInternal vn : virtualNodes.values()) {
            cap += vn.getRequiredCapacity();
        }
        return cap;
    }

    /**
     * Looks up a Virtual Node by name.
     *
     * @param vnName the Virtual Node name
     * @return the Virtual Node
     * @throws IllegalArgumentException if no Virtual Node with that name exists
     */
    public VirtualNode getVirtualNode(String vnName) throws IllegalArgumentException {
        VirtualNode ret = virtualNodes.get(vnName);
        if (ret == null) {
            throw new IllegalArgumentException("Virtual Node " + vnName + " does not exist");
        }
        return ret;
    }

    /**
     * Returns all Virtual Nodes defined by this application.
     *
     * <p>NOTE(review): this exposes the internal mutable map directly; wrap in
     * {@code Collections.unmodifiableMap} if callers must not mutate it —
     * confirm against the {@code GCMApplicationDescriptor} contract first.</p>
     *
     * @return the Virtual Nodes, keyed by name
     */
    public Map<String, ? extends VirtualNode> getVirtualNodes() {
        return virtualNodes;
    }

    /** Kills the deployment. Not implemented yet. */
    public void kill() {
        // TODO Auto-generated method stub
    }

    @SuppressWarnings("unused")
    static public class TestGCMApplicationDescriptorImpl {
    }

    /**
     * @return whether every deployed process has exited (currently always false — stub)
     */
    public boolean allProcessExited() {
        // TODO Auto-generated method stub
        return false;
    }

    /** Blocks until the deployment executor has terminated. */
    public void awaitTermination() {
        try {
            Executor.getExecutor().awaitTermination();
        } catch (InterruptedException e) {
            // FIX(review): restore the interrupt status instead of swallowing it,
            // so callers up the stack can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }
}
package com.alibaba.akita.util; /** * Log * @author zhe.yangz 2011-11-25 04:06:57 */ public class Log { static final boolean SHOW_LOG = false; public static void i(String tag, String string) { if (SHOW_LOG) android.util.Log.i(tag, string); } public static void e(String tag, String string) { if (SHOW_LOG) android.util.Log.e(tag, string); } public static void e(String tag, String string, Throwable tr) { if (SHOW_LOG) android.util.Log.e(tag, string, tr); } public static void d(String tag, String string) { if (SHOW_LOG) android.util.Log.d(tag, string); } public static void w(String tag, String string) { if (SHOW_LOG) android.util.Log.w(tag, string); } public static void w(String tag, String string, Throwable tr) { if (SHOW_LOG) android.util.Log.w(tag, string, tr); } public static void v(String tag, String string) { if (SHOW_LOG) android.util.Log.v(tag, string); } public static void v(String tag, String string, Throwable tr) { if (SHOW_LOG) android.util.Log.v(tag, string, tr); } }
package gui; import java.awt.EventQueue; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.border.EmptyBorder; import javax.swing.JTextField; import javax.swing.JButton; import java.awt.event.ActionListener; import java.awt.event.ActionEvent; import javax.swing.JTabbedPane; import javax.swing.GroupLayout; import javax.swing.GroupLayout.Alignment; import javax.swing.JLayeredPane; import javax.swing.JOptionPane; import javax.swing.JLabel; import javax.swing.SwingConstants; import java.awt.Font; import java.awt.Color; import java.awt.SystemColor; import javax.swing.JTable; import javax.swing.border.LineBorder; import control.Constant; import jdbc.UpdateDocument; import vo.Author; import vo.Book; import vo.Branch; import vo.ChiefEditor; import vo.ConferenceProceeding; import vo.JournalVolume; import vo.Publisher; import vo.Reader; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import javax.swing.JRadioButton; public class LibraryManagement extends JFrame { private static final long serialVersionUID = 1331809788690220988L; private JPanel contentPane; private JTextField txtReadername; private JTextField txtReaderid; private JTextField txtPublisherName; private JTextField txtAuthorName; private JTextField txtTitle; private JTable table; private JTextField txtDescriptor; private JTextField txtWelcomeToThe; private JTextField textField_R_Type; private JTextField textField_R_ReaderName; private JTextField textField_R_Address; private JTextField textField_R_PhoneNum; private JTextField textField_D_PublisherName; private JTextField textField_D_Title; private JTextField textField_D_PDate; private JTextField textField_D_ISBN; private JTextField textField_D_VolumeNo; private JTextField textField_D_ChiefEditor; private JTextField textField_C_DocId; private JTextField textField_C_LibId; private JTextField textField_C_Position; private JTextField textField_Name; private JTextField textField_Location; private JTextField 
textFieldRReaderId; private JTextField textFieldRType; private JTextField textFieldRReaderName; private JTextField textFieldAddress; private JTextField textFieldPhoneNum; private JTextField textFieldLibId; private JTextField textFieldLibName; private JTextField textFieldLibLocation; private JTextField textField_Descriptor; private JTextField textField_AuthorName; private JTextField textField_CDate; private JTextField textField_CLocation; private JRadioButton rdbtnBook; private JRadioButton rdbtnJournal; private JRadioButton rdbtnCP; private int docType; private JTextField textField_CEditor; private JLabel lblCEditor; /** * Launch the application. */ public static void main(String[] args) { EventQueue.invokeLater(new Runnable() { public void run() { try { LibraryManagement frame = new LibraryManagement(); frame.setVisible(true); } catch (Exception e) { e.printStackTrace(); } } }); } /** * Create the frame. */ public LibraryManagement() { setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); setBounds(100, 100, 950, 630); contentPane = new JPanel(); contentPane.setBorder(new EmptyBorder(5, 5, 5, 5)); setContentPane(contentPane); JLayeredPane layeredPane = new JLayeredPane(); GroupLayout groupLayout = new GroupLayout(contentPane); groupLayout.setHorizontalGroup( groupLayout.createParallelGroup(Alignment.LEADING) .addGroup(groupLayout.createSequentialGroup() .addContainerGap() .addComponent(layeredPane, GroupLayout.DEFAULT_SIZE, 904, Short.MAX_VALUE) .addContainerGap()) ); groupLayout.setVerticalGroup( groupLayout.createParallelGroup(Alignment.LEADING) .addComponent(layeredPane, GroupLayout.DEFAULT_SIZE, 581, Short.MAX_VALUE) ); Publisher publisher = new Publisher(); Book book = new Book(); ChiefEditor chiefEditor = new ChiefEditor(); JournalVolume journalVolume = new JournalVolume(); ConferenceProceeding cp = new ConferenceProceeding(); JLayeredPane layeredPane_Welcome = new JLayeredPane(); layeredPane_Welcome.setBounds(148, 0, 780, 586); 
layeredPane.add(layeredPane_Welcome); txtWelcomeToThe = new JTextField(); txtWelcomeToThe.setBackground(SystemColor.menu); txtWelcomeToThe.setForeground(new Color(0, 0, 128)); txtWelcomeToThe.setEditable(false); txtWelcomeToThe.setHorizontalAlignment(SwingConstants.CENTER); txtWelcomeToThe.setFont(new Font("Comic Sans MS", Font.PLAIN, 24)); txtWelcomeToThe.setText("Welcome to the library management system!"); txtWelcomeToThe.setBounds(49, 115, 665, 216); layeredPane_Welcome.add(txtWelcomeToThe); txtWelcomeToThe.setColumns(10); JLayeredPane layeredPane_BR = new JLayeredPane(); layeredPane_BR.setBounds(148, 6, 767, 580); layeredPane.add(layeredPane_BR); layeredPane_BR.setVisible(false); JPanel panel = new JPanel(); panel.setBorder(new LineBorder(Color.GRAY)); panel.setBounds(10, 2, 741, 75); layeredPane_BR.add(panel); panel.setLayout(null); JLabel lblReaderid = new JLabel("Reader Id"); lblReaderid.setFont(new Font("Times New Roman", Font.PLAIN, 12)); lblReaderid.setBounds(21, 26, 74, 35); panel.add(lblReaderid); txtReaderid = new JTextField(); txtReaderid.setBounds(105, 25, 120, 35); panel.add(txtReaderid); txtReaderid.setForeground(SystemColor.desktop); txtReaderid.setHorizontalAlignment(SwingConstants.CENTER); txtReaderid.setFont(new Font("Lucida Grande", Font.PLAIN, 15)); txtReaderid.setColumns(10); JLabel lblReadername = new JLabel("Reader Name"); lblReadername.setFont(new Font("Times New Roman", Font.PLAIN, 12)); lblReadername.setBounds(247, 26, 93, 35); panel.add(lblReadername); txtReadername = new JTextField(); txtReadername.setBounds(350, 25, 120, 35); panel.add(txtReadername); txtReadername.setForeground(SystemColor.desktop); txtReadername.setHorizontalAlignment(SwingConstants.CENTER); txtReadername.setFont(new Font("Lucida Grande", Font.PLAIN, 15)); txtReadername.setColumns(10); JButton btnNewButton_4 = new JButton("SEARCH"); btnNewButton_4.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { //fuzzy search for readerId or 
readerName, and show result //TODO } }); btnNewButton_4.setBounds(595, 26, 120, 35); panel.add(btnNewButton_4); btnNewButton_4.setForeground(Color.BLUE); btnNewButton_4.setFont(new Font("Times New Roman", Font.PLAIN, 13)); JPanel panel_1 = new JPanel(); panel_1.setBorder(new LineBorder(Color.GRAY)); panel_1.setBounds(10, 83, 741, 487); layeredPane_BR.add(panel_1); panel_1.setLayout(null); JLabel lblPublisherName = new JLabel("<html>Publisher<br>Name<html>"); lblPublisherName.setFont(new Font("Times New Roman", Font.PLAIN, 12)); lblPublisherName.setBounds(21, 11, 74, 35); panel_1.add(lblPublisherName); txtPublisherName = new JTextField(); txtPublisherName.setBounds(105, 11, 120, 35); panel_1.add(txtPublisherName); txtPublisherName.setForeground(SystemColor.desktop); txtPublisherName.setFont(new Font("Lucida Grande", Font.PLAIN, 16)); txtPublisherName.setHorizontalAlignment(SwingConstants.CENTER); txtPublisherName.setColumns(10); JLabel lblNewLabel = new JLabel("<html>Author<br>Name<html>"); lblNewLabel.setFont(new Font("Times New Roman", Font.PLAIN, 12)); lblNewLabel.setBounds(247, 11, 74, 35); panel_1.add(lblNewLabel); txtAuthorName = new JTextField(); txtAuthorName.setBounds(350, 11, 120, 35); panel_1.add(txtAuthorName); txtAuthorName.setForeground(SystemColor.desktop); txtAuthorName.setFont(new Font("Lucida Grande", Font.PLAIN, 16)); txtAuthorName.setHorizontalAlignment(SwingConstants.CENTER); txtAuthorName.setColumns(10); JLabel lblTitle = new JLabel("Title"); lblTitle.setFont(new Font("Times New Roman", Font.PLAIN, 12)); lblTitle.setBounds(21, 57, 74, 35); panel_1.add(lblTitle); txtTitle = new JTextField(); txtTitle.setBounds(105, 55, 120, 35); panel_1.add(txtTitle); txtTitle.setForeground(SystemColor.desktop); txtTitle.setFont(new Font("Lucida Grande", Font.PLAIN, 16)); txtTitle.setHorizontalAlignment(SwingConstants.CENTER); txtTitle.setColumns(10); JLabel lblDescriptor = new JLabel("Descriptor"); lblDescriptor.setFont(new Font("Times New Roman", Font.PLAIN, 
12)); lblDescriptor.setBounds(247, 57, 74, 35); panel_1.add(lblDescriptor); txtDescriptor = new JTextField(); txtDescriptor.setBounds(350, 55, 120, 35); panel_1.add(txtDescriptor); txtDescriptor.setForeground(SystemColor.desktop); txtDescriptor.setHorizontalAlignment(SwingConstants.CENTER); txtDescriptor.setFont(new Font("Lucida Grande", Font.PLAIN, 16)); txtDescriptor.setColumns(10); JButton btnSearch = new JButton("SEARCH"); btnSearch.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { //fuzzy search for publisher name, author name, title or descriptor and show result //TODO } }); btnSearch.setBounds(595, 57, 117, 35); panel_1.add(btnSearch); btnSearch.setForeground(Color.BLUE); btnSearch.setFont(new Font("Times New Roman", Font.PLAIN, 13)); table = new JTable(); table.setBounds(9, 102, 722, 330); panel_1.add(table); JButton btnNewButton_5 = new JButton("BORROW"); btnNewButton_5.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { //Borrow event //TODO } }); btnNewButton_5.setFont(new Font("Times New Roman", Font.PLAIN, 13)); btnNewButton_5.setBounds(458, 442, 117, 35); panel_1.add(btnNewButton_5); btnNewButton_5.setForeground(Color.BLUE); JButton btnNewButton_6 = new JButton("RESERVE"); btnNewButton_6.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { //Reserve event //TODO } }); btnNewButton_6.setFont(new Font("Times New Roman", Font.PLAIN, 13)); btnNewButton_6.setBounds(597, 442, 117, 35); panel_1.add(btnNewButton_6); btnNewButton_6.setForeground(Color.BLUE); JLayeredPane layeredPane_New = new JLayeredPane(); layeredPane_New.setBounds(148, 6, 774, 580); layeredPane.add(layeredPane_New); layeredPane_New.setVisible(false); JTabbedPane tabbedPane_New = new JTabbedPane(JTabbedPane.TOP); tabbedPane_New.setBounds(6, 19, 762, 555); layeredPane_New.add(tabbedPane_New); JPanel panel_N_Reader = new JPanel(); tabbedPane_New.addTab("Reader", null, panel_N_Reader, null); textField_R_Type = new 
JTextField(); textField_R_Type.setBounds(48, 88, 93, 39); textField_R_Type.setColumns(10); textField_R_ReaderName = new JTextField(); textField_R_ReaderName.setBounds(189, 88, 93, 39); textField_R_ReaderName.setColumns(10); JButton R_add = new JButton("ADD"); R_add.setForeground(Color.BLUE); R_add.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { Reader reader = new Reader(); reader.setPhoneNum(textField_R_Type.getText()); reader.setName(textField_R_ReaderName.getText()); reader.setAddress(textField_R_Address.getText()); reader.setPhoneNum(textField_R_PhoneNum.getText()); } }); R_add.setFont(new Font("Times New Roman", Font.PLAIN, 13)); R_add.setBounds(612, 89, 93, 38); panel_N_Reader.setLayout(null); JLabel lblType = new JLabel("Type"); lblType.setFont(new Font("Arial", Font.PLAIN, 11)); lblType.setBounds(77, 63, 36, 15); lblType.setHorizontalAlignment(SwingConstants.CENTER); panel_N_Reader.add(lblType); JLabel lblReadername_ReaderName = new JLabel("Reader Name"); lblReadername_ReaderName.setFont(new Font("Arial", Font.PLAIN, 11)); lblReadername_ReaderName.setBounds(199, 63, 83, 15); panel_N_Reader.add(lblReadername_ReaderName); JLabel lblAdress = new JLabel("Address"); lblAdress.setFont(new Font("Arial", Font.PLAIN, 11)); lblAdress.setBounds(350, 63, 54, 15); panel_N_Reader.add(lblAdress); JLabel lblPhonenum = new JLabel("Phone Number"); lblPhonenum.setFont(new Font("Arial", Font.PLAIN, 11)); lblPhonenum.setBounds(481, 63, 70, 14); panel_N_Reader.add(lblPhonenum); panel_N_Reader.add(textField_R_Type); panel_N_Reader.add(textField_R_ReaderName); textField_R_Address = new JTextField(); textField_R_Address.setBounds(330, 88, 93, 39); textField_R_Address.setColumns(10); panel_N_Reader.add(textField_R_Address); textField_R_PhoneNum = new JTextField(); textField_R_PhoneNum.setBounds(471, 88, 93, 39); textField_R_PhoneNum.setColumns(10); panel_N_Reader.add(textField_R_PhoneNum); panel_N_Reader.add(R_add); JPanel panel_N_Document = new 
JPanel(); tabbedPane_New.addTab("Document", null, panel_N_Document, null); JLabel lblIsbn = new JLabel("ISBN"); lblIsbn.setFont(new Font("Arial", Font.PLAIN, 11)); lblIsbn.setBounds(55, 139, 23, 14); textField_D_ISBN = new JTextField(); textField_D_ISBN.setBounds(29, 161, 90, 35); textField_D_ISBN.setColumns(10); JLabel lblVolumeno = new JLabel("Volume Number"); lblVolumeno.setFont(new Font("Arial", Font.PLAIN, 11)); lblVolumeno.setBounds(29, 139, 76, 14); textField_D_VolumeNo = new JTextField(); textField_D_VolumeNo.setBounds(29, 161, 90, 35); textField_D_VolumeNo.setColumns(10); JLabel lblChiefEditor = new JLabel("Chief Editor"); lblChiefEditor.setFont(new Font("Arial", Font.PLAIN, 11)); lblChiefEditor.setBounds(149, 139, 62, 14); textField_D_ChiefEditor = new JTextField(); textField_D_ChiefEditor.setBounds(149, 163, 90, 35); textField_D_ChiefEditor.setColumns(10); JButton D_add = new JButton("ADD"); D_add.setForeground(Color.BLUE); D_add.setFont(new Font("Times New Roman", Font.PLAIN, 13)); D_add.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { if(docType == 0){ String[] authorArray = textField_AuthorName.getText().split(","); for(int i=0; i<authorArray.length; i++){ Author author = new Author(); author.setAuName(authorArray[i]); book.addAuthor(author); } String[] descriptorArray = textField_Descriptor.getText().split(","); for(int i=0; i<descriptorArray.length; i++){ book.addDescriptor(descriptorArray[i]); } publisher.setPubName(textField_D_PublisherName.getText()); book.setPublisher(publisher); book.setTitle(textField_D_Title.getText()); book.setPubDate(textField_D_PDate.getText()); book.setIsbn(textField_D_ISBN.getText()); UpdateDocument.newBook(book); } if(docType == 1){ String[] authorArray = textField_AuthorName.getText().split(","); for(int i=0; i<authorArray.length; i++){ Author author = new Author(); author.setAuName(authorArray[i]); journalVolume.addAuthor(author); } String[] descriptorArray = 
textField_Descriptor.getText().split(","); for(int i=0; i<descriptorArray.length; i++){ journalVolume.addDescriptor(descriptorArray[i]); } publisher.setPubName(textField_D_PublisherName.getText()); journalVolume.setPublisher(publisher); journalVolume.setTitle(textField_D_Title.getText()); journalVolume.setPubDate(textField_D_PDate.getText()); journalVolume.setVolNum(textField_D_VolumeNo.getText()); chiefEditor.setCeName(lblChiefEditor.getText()); journalVolume.setEditor(chiefEditor); UpdateDocument.newJournalVolume(journalVolume); } if(docType == 2){ String[] authorArray = textField_AuthorName.getText().split(","); for(int i=0; i<authorArray.length; i++){ Author author = new Author(); author.setAuName(authorArray[i]); cp.addAuthor(author); } String[] descriptorArray = textField_Descriptor.getText().split(","); for(int i=0; i<descriptorArray.length; i++){ cp.addDescriptor(descriptorArray[i]); } publisher.setPubName(textField_D_PublisherName.getText()); cp.setPublisher(publisher); cp.setTitle(textField_D_Title.getText()); cp.setPubDate(textField_D_PDate.getText()); cp.setConDate(textField_CDate.getText()); cp.setConLocation(textField_CLocation.getText()); cp.setConEditor(textField_CEditor.getText()); UpdateDocument.newConferenceProceeding(cp); } } }); D_add.setBounds(624, 160, 90, 36); panel_N_Document.setLayout(null); JLabel lblAddPublisherName = new JLabel("Publisher Name"); lblAddPublisherName.setFont(new Font("Arial", Font.PLAIN, 11)); lblAddPublisherName.setBounds(29, 61, 84, 14); panel_N_Document.add(lblAddPublisherName); JLabel lblTitle_Title = new JLabel("Title"); lblTitle_Title.setFont(new Font("Arial", Font.PLAIN, 11)); lblTitle_Title.setBounds(157, 61, 26, 14); panel_N_Document.add(lblTitle_Title); JLabel lblBdate = new JLabel("Publish Date"); lblBdate.setFont(new Font("Arial", Font.PLAIN, 11)); lblBdate.setBounds(267, 61, 59, 14); panel_N_Document.add(lblBdate); panel_N_Document.add(lblIsbn); panel_N_Document.add(lblVolumeno); 
panel_N_Document.add(lblChiefEditor); textField_D_PublisherName = new JTextField(); textField_D_PublisherName.setBounds(29, 86, 90, 35); textField_D_PublisherName.setColumns(10); panel_N_Document.add(textField_D_PublisherName); textField_D_Title = new JTextField(); textField_D_Title.setBounds(149, 85, 90, 35); textField_D_Title.setColumns(10); panel_N_Document.add(textField_D_Title); textField_D_PDate = new JTextField(); textField_D_PDate.setBounds(267, 86, 90, 35); textField_D_PDate.setColumns(10); panel_N_Document.add(textField_D_PDate); panel_N_Document.add(textField_D_ISBN); panel_N_Document.add(textField_D_VolumeNo); panel_N_Document.add(textField_D_ChiefEditor); panel_N_Document.add(D_add); JLabel lblNewDescriptor = new JLabel("Descriptor"); lblNewDescriptor.setFont(new Font("Arial", Font.PLAIN, 11)); lblNewDescriptor.setBounds(386, 61, 60, 15); panel_N_Document.add(lblNewDescriptor); textField_Descriptor = new JTextField(); textField_Descriptor.setBounds(386, 86, 90, 35); panel_N_Document.add(textField_Descriptor); textField_Descriptor.setColumns(10); JLabel lblNewAuthorName = new JLabel("Author Name"); lblNewAuthorName.setFont(new Font("Arial", Font.PLAIN, 11)); lblNewAuthorName.setBounds(502, 61, 63, 14); panel_N_Document.add(lblNewAuthorName); textField_AuthorName = new JTextField(); textField_AuthorName.setBounds(502, 85, 90, 36); panel_N_Document.add(textField_AuthorName); textField_AuthorName.setColumns(10); JLabel lblCDate = new JLabel("Conference Date"); lblCDate.setFont(new Font("Arial", Font.PLAIN, 11)); lblCDate.setBounds(29, 139, 84, 15); panel_N_Document.add(lblCDate); textField_CDate = new JTextField(); textField_CDate.setBounds(29, 161, 88, 35); panel_N_Document.add(textField_CDate); textField_CDate.setColumns(10); JLabel lblCLocation = new JLabel("Conference Location"); lblCLocation.setFont(new Font("Arial", Font.PLAIN, 11)); lblCLocation.setBounds(149, 139, 106, 15); panel_N_Document.add(lblCLocation); textField_CLocation = new JTextField(); 
textField_CLocation.setBounds(149, 163, 90, 35); panel_N_Document.add(textField_CLocation); textField_CLocation.setColumns(10); rdbtnBook = new JRadioButton("Book"); rdbtnBook.setSelected(true); rdbtnBook.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { docType = Constant.TYPE_BOOK; rdbtnBook.setSelected(true); rdbtnJournal.setSelected(false); rdbtnCP.setSelected(false); lblIsbn.setVisible(true); lblVolumeno.setVisible(false); lblChiefEditor.setVisible(false); lblCDate.setVisible(false); lblCEditor.setVisible(false); lblCLocation.setVisible(false); textField_D_ISBN.setVisible(true); textField_D_VolumeNo.setVisible(false); textField_D_ChiefEditor.setVisible(false); textField_CDate.setVisible(false); textField_CLocation.setVisible(false); textField_CEditor.setVisible(false); } }); lblCEditor = new JLabel("Conference Editor"); lblCEditor.setFont(new Font("Arial", Font.PLAIN, 11)); lblCEditor.setBounds(267, 138, 102, 15); panel_N_Document.add(lblCEditor); textField_CEditor = new JTextField(); textField_CEditor.setBounds(267, 161, 90, 35); panel_N_Document.add(textField_CEditor); textField_CEditor.setColumns(10); textField_CEditor.setVisible(false); rdbtnBook.setBounds(29, 20, 84, 23); panel_N_Document.add(rdbtnBook); rdbtnJournal = new JRadioButton("Journal"); rdbtnJournal.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { docType = Constant.TYPE_JOURNAL_VOLUME; rdbtnBook.setSelected(false); rdbtnJournal.setSelected(true); rdbtnCP.setSelected(false); lblIsbn.setVisible(false); lblVolumeno.setVisible(true); lblChiefEditor.setVisible(true); lblCDate.setVisible(false); lblCLocation.setVisible(false); lblCEditor.setVisible(false); textField_D_ISBN.setVisible(false); textField_D_VolumeNo.setVisible(true); textField_D_ChiefEditor.setVisible(true); textField_CDate.setVisible(false); textField_CLocation.setVisible(false); textField_CEditor.setVisible(false); } }); rdbtnJournal.setBounds(149, 20, 90, 23); 
panel_N_Document.add(rdbtnJournal); rdbtnCP = new JRadioButton("Conference Proceeding"); rdbtnCP.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { docType = Constant.TYPE_CONFERENCE_PROCEEDING; rdbtnBook.setSelected(false); rdbtnJournal.setSelected(false); rdbtnCP.setSelected(true); lblIsbn.setVisible(false); lblVolumeno.setVisible(false); lblChiefEditor.setVisible(false); lblCDate.setVisible(true); lblCLocation.setVisible(true); lblCEditor.setVisible(true); textField_D_ISBN.setVisible(false); textField_D_VolumeNo.setVisible(false); textField_D_ChiefEditor.setVisible(false); textField_CDate.setVisible(true); textField_CLocation.setVisible(true); textField_CEditor.setVisible(true); } }); rdbtnCP.setBounds(267, 20, 179, 23); panel_N_Document.add(rdbtnCP); lblIsbn.setVisible(true); lblVolumeno.setVisible(false); lblChiefEditor.setVisible(false); lblCDate.setVisible(false); lblCLocation.setVisible(false); lblCEditor.setVisible(false); textField_D_ISBN.setVisible(true); textField_D_VolumeNo.setVisible(false); textField_D_ChiefEditor.setVisible(false); textField_CDate.setVisible(false); textField_CLocation.setVisible(false); textField_CEditor.setVisible(false); JPanel panel_N_LibraryBranch = new JPanel(); tabbedPane_New.addTab("Library Branch", null, panel_N_LibraryBranch, null); textField_Name = new JTextField(); textField_Name.setBounds(49, 89, 97, 36); textField_Name.setColumns(10); textField_Location = new JTextField(); textField_Location.setBounds(200, 89, 97, 36); textField_Location.setColumns(10); JButton L_add = new JButton("ADD"); L_add.setForeground(Color.BLUE); L_add.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { Branch branch = new Branch(); branch.setName(textField_Name.getText()); branch.setLocation(textField_Location.getText()); } }); L_add.setFont(new Font("Times New Roman", Font.PLAIN, 13)); L_add.setBounds(596, 89, 97, 36); L_add.addActionListener(new ActionListener() { public void 
actionPerformed(ActionEvent e) { } }); panel_N_LibraryBranch.setLayout(null); JLabel lblNewLabel_1 = new JLabel("Library Name"); lblNewLabel_1.setFont(new Font("Arial", Font.PLAIN, 11)); lblNewLabel_1.setBounds(64, 65, 64, 14); panel_N_LibraryBranch.add(lblNewLabel_1); JLabel lblNewLabel_2 = new JLabel("Location"); lblNewLabel_2.setFont(new Font("Arial", Font.PLAIN, 11)); lblNewLabel_2.setBounds(222, 65, 52, 14); panel_N_LibraryBranch.add(lblNewLabel_2); panel_N_LibraryBranch.add(textField_Name); panel_N_LibraryBranch.add(textField_Location); panel_N_LibraryBranch.add(L_add); JPanel panel_N_Copy = new JPanel(); tabbedPane_New.addTab("Copy", null, panel_N_Copy, null); JLabel lblDocid = new JLabel("Documentation Name"); lblDocid.setFont(new Font("Arial", Font.PLAIN, 11)); lblDocid.setBounds(115, 66, 101, 14); textField_C_DocId = new JTextField(); textField_C_DocId.setBounds(115, 90, 87, 36); textField_C_DocId.setColumns(10); JLabel lblLibid = new JLabel("Library Name"); lblLibid.setFont(new Font("Arial", Font.PLAIN, 11)); lblLibid.setBounds(115, 161, 64, 14); textField_C_LibId = new JTextField(); textField_C_LibId.setBounds(115, 185, 87, 36); textField_C_LibId.setColumns(10); JLabel lblPosition = new JLabel("Library Position"); lblPosition.setFont(new Font("Arial", Font.PLAIN, 11)); lblPosition.setBounds(115, 257, 74, 14); textField_C_Position = new JTextField(); textField_C_Position.setBounds(115, 281, 87, 36); textField_C_Position.setColumns(10); panel_N_Copy.setLayout(null); panel_N_Copy.add(lblDocid); panel_N_Copy.add(lblLibid); panel_N_Copy.add(lblPosition); panel_N_Copy.add(textField_C_DocId); panel_N_Copy.add(textField_C_LibId); panel_N_Copy.add(textField_C_Position); JButton btnSearch_1 = new JButton("SEARCH"); btnSearch_1.setForeground(Color.BLUE); btnSearch_1.setFont(new Font("Times New Roman", Font.PLAIN, 13)); btnSearch_1.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent arg0) { //search for doc name and choose one //TODO } }); 
btnSearch_1.setBounds(542, 90, 87, 36); panel_N_Copy.add(btnSearch_1); JButton btnSearch_2 = new JButton("SEARCH"); btnSearch_2.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { //search for library name //TODO } }); btnSearch_2.setForeground(Color.BLUE); btnSearch_2.setFont(new Font("Times New Roman", Font.PLAIN, 13)); btnSearch_2.setBounds(542, 185, 87, 36); panel_N_Copy.add(btnSearch_2); JButton C_add = new JButton("ADD"); C_add.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { //after select doc name and library name, add position //TODO } }); C_add.setForeground(Color.BLUE); C_add.setFont(new Font("Times New Roman", Font.PLAIN, 13)); C_add.setBounds(542, 281, 87, 36); panel_N_Copy.add(C_add); layeredPane_New.setVisible(false); JLayeredPane layeredPane_Update = new JLayeredPane(); layeredPane_Update.setBounds(148, 6, 774, 580); layeredPane.add(layeredPane_Update); layeredPane_Update.setVisible(false); JTabbedPane tabbedPane_Update = new JTabbedPane(JTabbedPane.TOP); tabbedPane_Update.setBounds(6, 17, 762, 557); layeredPane_Update.add(tabbedPane_Update); JPanel panel_U_Reader = new JPanel(); tabbedPane_Update.addTab("Reader", null, panel_U_Reader, null); JLabel lblReaderid_1 = new JLabel("Reader Id"); lblReaderid_1.setFont(new Font("Arial", Font.PLAIN, 11)); lblReaderid_1.setBounds(38, 62, 70, 15); JLabel lblType_1 = new JLabel("Type"); lblType_1.setFont(new Font("Arial", Font.PLAIN, 11)); lblType_1.setBounds(163, 62, 53, 15); JLabel lblReadname = new JLabel("Reader Name"); lblReadname.setFont(new Font("Arial", Font.PLAIN, 11)); lblReadname.setBounds(286, 62, 70, 15); JLabel lblAddress = new JLabel("Address"); lblAddress.setFont(new Font("Arial", Font.PLAIN, 11)); lblAddress.setBounds(407, 62, 63, 15); JLabel lblPhonenum_1 = new JLabel("Phone No."); lblPhonenum_1.setFont(new Font("Arial", Font.PLAIN, 11)); lblPhonenum_1.setBounds(522, 62, 70, 15); textFieldRReaderId = new JTextField(); 
textFieldRReaderId.setBounds(28, 87, 93, 39); textFieldRReaderId.setColumns(10); textFieldRType = new JTextField(); textFieldRType.setBounds(149, 87, 93, 39); textFieldRType.setColumns(10); textFieldRReaderName = new JTextField(); textFieldRReaderName.setBounds(274, 87, 93, 39); textFieldRReaderName.setColumns(10); textFieldAddress = new JTextField(); textFieldAddress.setBounds(395, 87, 93, 39); textFieldAddress.setColumns(10); textFieldPhoneNum = new JTextField(); textFieldPhoneNum.setBounds(512, 87, 93, 39); textFieldPhoneNum.setColumns(10); Reader rd = new Reader(); //wrong //Search for existing readerID and if they are same, update type, name, address and phone number //TODO JButton btnUpdate_2 = new JButton("UPDATE"); btnUpdate_2.setForeground(Color.BLUE); btnUpdate_2.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent arg0) { String readerId = textFieldRReaderId.getText(); if(readerId.isEmpty() == false){ if(readerId == rd.getId()){ rd.setType(textFieldRType.getText()); rd.setName(textFieldRReaderName.getText()); rd.setAddress(textFieldAddress.getText()); rd.setPhoneNum(textFieldPhoneNum.getText()); } }else{ JOptionPane.showMessageDialog(null, "Reader Id cannot be empty.", "Check Input", JOptionPane.WARNING_MESSAGE); } //System.out.println(readerId); } }); btnUpdate_2.setFont(new Font("Times New Roman", Font.PLAIN, 13)); btnUpdate_2.setBounds(633, 90, 93, 39); panel_U_Reader.setLayout(null); panel_U_Reader.add(lblReaderid_1); panel_U_Reader.add(textFieldRReaderId); panel_U_Reader.add(lblType_1); panel_U_Reader.add(lblReadname); panel_U_Reader.add(textFieldRType); panel_U_Reader.add(textFieldRReaderName); panel_U_Reader.add(textFieldAddress); panel_U_Reader.add(lblAddress); panel_U_Reader.add(btnUpdate_2); panel_U_Reader.add(textFieldPhoneNum); panel_U_Reader.add(lblPhonenum_1); JPanel panel_U_Branch = new JPanel(); tabbedPane_Update.addTab("Library Branch", null, panel_U_Branch, null); JLabel lblLibid_2 = new JLabel("LIBID"); 
lblLibid_2.setFont(new Font("Arial", Font.PLAIN, 12)); lblLibid_2.setBounds(94, 64, 30, 15); textFieldLibId = new JTextField(); textFieldLibId.setBounds(62, 89, 111, 39); textFieldLibId.setColumns(10); JLabel lblNewLabel_5 = new JLabel("NAME"); lblNewLabel_5.setFont(new Font("Arial", Font.PLAIN, 12)); lblNewLabel_5.setBounds(266, 64, 48, 15); textFieldLibName = new JTextField(); textFieldLibName.setBounds(235, 89, 111, 39); textFieldLibName.setColumns(10); JLabel lblLocation = new JLabel("LOCATION"); lblLocation.setFont(new Font("Arial", Font.PLAIN, 12)); lblLocation.setBounds(423, 64, 69, 15); textFieldLibLocation = new JTextField(); textFieldLibLocation.setBounds(408, 89, 111, 39); textFieldLibLocation.setColumns(10); Branch br = new Branch();//Wrong //Search for library branch Id, if they are same, update library name and library location //TODO JButton btnUpdate_1 = new JButton("UPDATE"); btnUpdate_1.setForeground(Color.BLUE); btnUpdate_1.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent arg0) { String lb = textFieldLibId.getText(); if(lb.isEmpty() == false){ if(lb == br.getId()){ br.setName(textFieldLibName.getText()); br.setLocation(textFieldLibLocation.getText()); } }else{ JOptionPane.showMessageDialog(null, "Library Id cannot be empty.", "Check Input", JOptionPane.WARNING_MESSAGE); } } }); btnUpdate_1.setFont(new Font("Times New Roman", Font.PLAIN, 13)); btnUpdate_1.setBounds(581, 90, 111, 37); panel_U_Branch.setLayout(null); panel_U_Branch.add(textFieldLibId); panel_U_Branch.add(textFieldLibName); panel_U_Branch.add(textFieldLibLocation); panel_U_Branch.add(btnUpdate_1); panel_U_Branch.add(lblLibid_2); panel_U_Branch.add(lblNewLabel_5); panel_U_Branch.add(lblLocation); JButton btnNewButton_new = new JButton("New"); btnNewButton_new.setFont(new Font("Dialog", Font.PLAIN, 14)); btnNewButton_new.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { layeredPane_Welcome.setVisible(false); 
layeredPane_BR.setVisible(false); layeredPane_New.setVisible(true); layeredPane_Update.setVisible(false); } }); JButton btnNewButton_br = new JButton("Borrow/reserve"); btnNewButton_br.setFont(new Font("Dialog", Font.PLAIN, 14)); btnNewButton_br.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { layeredPane_Welcome.setVisible(false); layeredPane_BR.setVisible(true); layeredPane_New.setVisible(false); layeredPane_Update.setVisible(false); } }); btnNewButton_br.setBounds(9, 62, 131, 42); layeredPane.add(btnNewButton_br); btnNewButton_new.setBounds(6, 133, 134, 39); layeredPane.add(btnNewButton_new); JButton btnNewButton_quit = new JButton("Quit"); btnNewButton_quit.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { int choice = JOptionPane.showOptionDialog(null, "You really want to quit?", "Quit?", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, null, null); if (choice == JOptionPane.YES_OPTION) { System.exit(0); } } }); JButton btnNewButton_update = new JButton("Update"); btnNewButton_update.setFont(new Font("Dialog", Font.PLAIN, 14)); btnNewButton_update.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { layeredPane_Welcome.setVisible(false); layeredPane_BR.setVisible(false); layeredPane_New.setVisible(false); layeredPane_Update.setVisible(true); } }); btnNewButton_update.setBounds(9, 200, 134, 39); layeredPane.add(btnNewButton_update); btnNewButton_quit.setFont(new Font("Dialog", Font.PLAIN, 14)); btnNewButton_quit.setBounds(9, 267, 131, 40); layeredPane.add(btnNewButton_quit); contentPane.setLayout(groupLayout); } }
package se.raddo.raddose3D;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Crystal abstract class.
 * Defines the contract for crystal geometry implementations: voxel lookup,
 * dose/fluence bookkeeping, and exposing the crystal to a beam along a wedge.
 */
public abstract class Crystal {

  /** Constant for data fields in Map constructors: X dimension in um. */
  public static final String CRYSTAL_DIM_X = "DIM_X";

  /** Constant for data fields in Map constructors: Y dimension in um. */
  public static final String CRYSTAL_DIM_Y = "DIM_Y";

  /** Constant for data fields in Map constructors: Z dimension in um. */
  public static final String CRYSTAL_DIM_Z = "DIM_Z";

  /** Constant for data fields in Map constructors: Crystal resolution. */
  public static final String CRYSTAL_RESOLUTION = "RES";

  /** Constant for data fields in Map constructors: P angle (in degrees!). */
  public static final String CRYSTAL_ANGLE_P = "P";

  /** Constant for data fields in Map constructors: L angle (in degrees!). */
  public static final String CRYSTAL_ANGLE_L = "L";

  /** Constant for data fields in Map constructors: Coefficient Model. */
  public static final String CRYSTAL_COEFCALC = "COEFCALC";

  /** Constant for data fields in Map constructors: Dose Decay Model. */
  public static final String CRYSTAL_DDM = "DECAYMODEL";

  /** Constant for data fields in Map constructors: Wireframe type. */
  public static final String CRYSTAL_WIREFRAME_TYPE = "WIREFRAME_TYPE";

  /** Constant for data fields in Map constructors: Wireframe file. */
  public static final String CRYSTAL_WIREFRAME_FILE = "WIREFRAME_FILE";

  /** Constant for data fields in Map constructors: Photoelectron escape. */
  public static final String CRYSTAL_ELECTRON_ESCAPE = "PHESCAPE";

  /** Default recommended voxel resolution in voxels/micrometre. */
  protected static final Double CRYSTAL_RESOLUTION_DEF = 0.5d;

  /** Number of exposure-steps when crystal is exposed without rotation. */
  public static final int STATICEXPOSURE = 100;

  /** Conversion factor from Gy to MGy. */
  private static final double GY_TO_MGY = 1e-6;

  /**
   * Unit conversion to get voxel mass in kg.
   */
  private static final double UNIT_CONVERSION = 1e-15;

  /**
   * Upper voxel limit for default resolution.
   * Resolution will be reduced if voxel number would otherwise exceed this.
   */
  private static final Long CRYSTAL_RESOLUTION_DEF_VOXLIM = 1000000L;

  /** The dose decay model used by this Crystal instance. */
  private final DDM ddm;

  /** The CoefCalc method being employed to generate crystal coefficients. */
  private final CoefCalc coefCalc;

  /**
   * Cumulative dose lost from crystal due to photoelectron escape.
   */
  private double totalEscapedDose = 0;

  // Cumulative dose that may leave the crystal via fluorescence (accumulated
  // in expose()).
  private double fluorescentEscapedDose = 0;

  /**
   * Cumulative dose both remaining in crystal and lost through photoelectron
   * escape.
   */
  private double totalCrystalDose = 0;

  /** The mass of each voxel in the crystal. */
  private double voxelMass = 0;

  /**
   * Whether photoelectron escape should be included.
   */
  private final boolean photoElectronEscape;

  /**
   * List of registered exposureObservers. Registered objects will be notified
   * of individual voxel exposure events and can also inspect the Crystal
   * object after each image and and wedge.
   */
  private final List<ExposeObserver> exposureObservers =
      new ArrayList<ExposeObserver>();

  /**
   * An single, common ExposureSummary object to which a reference can be
   * obtained via getExposureSummary().
   */
  private ExposureSummary exposureSummaryObserver;

  /**
   * Generic property constructor for crystal classes.
   * Sets the DDM object if defined, a reasonable default otherwise.
   * Sets the CoefCalc object if defined, a reasonable default otherwise.
   *
   * @param properties
   *          Map of type <Object, Object> that contains all crystal
   *          properties. The keys of the Map are defined by the constants in
   *          the Crystal class.
   */
  public Crystal(final Map<Object, Object> properties) {
    // Fall back to simple defaults when the optional models are not supplied.
    if (properties.get(Crystal.CRYSTAL_DDM) == null) {
      ddm = new DDMSimple();
    } else {
      ddm = (DDM) properties.get(Crystal.CRYSTAL_DDM);
    }
    if (properties.get(Crystal.CRYSTAL_COEFCALC) == null) {
      coefCalc = new CoefCalcAverage();
    } else {
      coefCalc = (CoefCalc) properties.get(Crystal.CRYSTAL_COEFCALC);
    }
    // Photoelectron escape is enabled only by the literal string "TRUE";
    // "TRUE".equals(pEE) is null-safe when the property is absent.
    String pEE = (String) properties.get(CRYSTAL_ELECTRON_ESCAPE);
    photoElectronEscape = ("TRUE".equals(pEE));
  }

  // Prepare depth-finding data structures for the given rotation and wedge.
  public abstract void setupDepthFinding(double angrad, Wedge wedge);

  /**
   * This should take a set of xyz coordinates (a voxel coordinate), the
   * current orientation of the crystal, and the wedge we are exposing and
   * return the depth of the coordinate in micrometres from the surface of the
   * crystal along the [0 0 1] unit vector.
   *
   * @param voxCoord coordinates of voxel
   * @param deltaPhi change in phi
   * @param myWedge Wedge object
   * @return depth
   */
  public abstract double findDepth(double[] voxCoord, double deltaPhi,
      Wedge myWedge);

  // TODO change the deltaphi from absolute to how far along
  // the wedge, since this is more general.

  /**
   * Return the coordinates, in micrometres from the origin (centre of
   * rotation and beam intercept) of voxel ijk.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   * @return crystal coordinates
   */
  public abstract double[] getCrystCoord(int i, int j, int k);

  /**
   * returns TRUE if there is a crystal at the coordinates i, j, k.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   * @return TRUE if crystal present at coords
   */
  public abstract boolean isCrystalAt(int i, int j, int k);

  /**
   * Get the escape factor for a voxel in the crystal.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   * @return escapeFactor at crystal coordinates x, y, z
   */
  public abstract double getEscapeFactor(int i, int j, int k);

  /**
   * Should increment the dose array element ijk by doseVox.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   * @param doseIncrease amount of dose to add to the voxel
   */
  public abstract void addDose(int i, int j, int k, double doseIncrease);

  /**
   * Should increment the dose array element ijk by doseVox.
   * This accounts for PE energy transfer to nearby voxels.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   * @param doseIncrease amount of dose deposited
   * @return voxel dose lost from crystal
   */
  public abstract double addDoseAfterPE(int i, int j, int k,
      double doseIncrease);

  // As addDoseAfterPE, but accounting for fluorescence (FL) transfer.
  public abstract double addDoseAfterFL(int i, int j, int k,
      double doseIncrease);

  /**
   * set new photoelectron trajectory parameters for current beam.
   *
   * @param beamEnergy photon energy of the current beam
   */
  public abstract void setPEparamsForCurrentBeam(double beamEnergy);

  /**
   * Should increment the fluence array element ijk by fluenceVox.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   * @param fluenceIncrease amount to add to the voxel fluence
   */
  public abstract void addFluence(int i, int j, int k,
      double fluenceIncrease);

  /**
   * Should increment the elastic yield array element ijk by fluenceVox.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   * @param elasticIncrease amount to add to the voxel elastic yield
   */
  public abstract void addElastic(int i, int j, int k,
      double elasticIncrease);

  /**
   * Return a description of at least crystal size, initial orientation, and
   * the voxel resolution.
   */
  public abstract String crystalInfo();

  /**
   * Return the size of the bounding box of the crystal in voxels.
   */
  public abstract int[] getCrystSizeVoxels();

  /**
   * Return the size of the bounding box of the crystal in um.
   */
  public abstract double[] getCrystSizeUM();

  /**
   * Return the dose at voxel ijk in MGy.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   */
  public abstract double getDose(int i, int j, int k);

  /**
   * Return the fluence at voxel ijk.
   *
   * @param i i coord
   * @param j j coord
   * @param k k coord
   */
  public abstract double getFluence(int i, int j, int k);

  /**
   * Return the total elastic scattering from voxel ijk.
* * @param i i coord * @param j j coord * @param k k coord */ public abstract double getElastic(int i, int j, int k); /** * Return the resolution of the crystal in 1/um. * @return resolution of crystal in 1 / um. */ public abstract double getCrystalPixPerUM(); /** * Return the coefCalc object that is being used to calculate coefficients. * * @return * CoefCalc object. */ public final CoefCalc getCoefCalc() { return coefCalc; } /** * Retrieve the DoseDecayModel object of the crystal. * * @return * DoseDecayModel object. */ public final DDM getDDM() { return ddm; } /** * Register an observer for crystal exposures. * If the observer has already been registered it will not be registered * again. * * @param e * The observer class to be registered */ public void addObserver(final ExposeObserver e) { if (!exposureObservers.contains(e)) { exposureObservers.add(e); e.register(this); } } /** * Expose this crystal to a given beam according to a strategy. * * @param beam * Beam object describing the used beam. * @param wedge * Wedge object describing the exposure strategy including * translational and rotational information. */ public void expose(final Beam beam, final Wedge wedge) { // Update coefficients in case the beam energy has changed. 
coefCalc.updateCoefficients(beam); setPEparamsForCurrentBeam(beam.getPhotonEnergy()); double[][] fluorEscapeFactors = coefCalc.getFluorescentEscapeFactors(beam); //Takes the fluorescent escape factors and calculates the energy that can escape by fluorescence double[][] fluorescentescapefactors = coefCalc.getFluorescentEscapeFactors(beam); double energy = beam.getPhotonEnergy(); double length = fluorescentescapefactors.length; for (int i = 0; i < length; i++){ //loops over each atom type double muratio = fluorescentescapefactors[i][0]; // uj/upe double K1, L1, L2, L3; double K1px = fluorescentescapefactors[i][2]*fluorescentescapefactors[i][3]*fluorescentescapefactors[i][4]; //K-shell ionization x K-shell fluorescence yield x fluorescentX-ray escape probability double L1px = fluorescentescapefactors[i][6]*fluorescentescapefactors[i][7]*fluorescentescapefactors[i][8]; double L2px = fluorescentescapefactors[i][10]*fluorescentescapefactors[i][11]*fluorescentescapefactors[i][12]; double L3px = fluorescentescapefactors[i][14]*fluorescentescapefactors[i][15]*fluorescentescapefactors[i][16]; K1 = fluorescentescapefactors[i][1] * K1px; // K1px is multiplied by the K1 edge energy L1 = fluorescentescapefactors[i][5] * L1px; L2 = fluorescentescapefactors[i][9] * L2px; L3 = fluorescentescapefactors[i][13] * L3px; energy = (energy - K1 - L1 - L2 - L3) * muratio; // beam energy minus K1, L1, L2 and L3 multipled my uj/upe fluorescentEscapedDose = fluorescentEscapedDose + energy; // Adds energy that can escape for each atom type } double beamEnergyInJoules = beam.getPhotonEnergy() * Beam.KEVTOJOULES; fluorescentEscapedDose = fluorescentEscapedDose * Beam.KEVTOJOULES; // Set up angles to iterate over. 
double[] angles; if (Math.abs(wedge.getStartAng() - wedge.getEndAng()) < wedge.getAngRes()) { angles = new double[STATICEXPOSURE]; // TODO: something clever for (int i = 0; i < angles.length; i++) { angles[i] = wedge.getStartAng(); } } else { Integer sign = 1; if (wedge.getEndAng() < wedge.getStartAng()) { sign = -1; } angles = new double[sign * (int) ((wedge.getEndAng() - wedge.getStartAng()) / wedge.getAngRes() + 1)]; for (int i = 0; i < angles.length; i++) { angles[i] = wedge.getStartAng() + sign * i * wedge.getAngRes(); } } for (ExposeObserver eo : exposureObservers) { eo.exposureStart(angles.length); } // The main meat of it: for (int n = 0; n < angles.length; n++) { // Expose one angle exposeAngle(angles[n], beam, wedge, n, angles.length, beamEnergyInJoules, fluorEscapeFactors); for (ExposeObserver eo : exposureObservers) { eo.imageComplete(n, angles[n]); } } // end of looping over angles double fractionEscapedDose = totalEscapedDose/totalCrystalDose; //!!!!!!!!!Whats this? This towards the end? 
    // Report every crystal voxel's final dose to the registered observers.
    for (int i = 0; i < getCrystSizeVoxels()[0]; i++) {
      for (int j = 0; j < getCrystSizeVoxels()[1]; j++) {
        for (int k = 0; k < getCrystSizeVoxels()[2]; k++) {
          if (isCrystalAt(i, j, k)) {
            for (ExposeObserver eo : exposureObservers) {
              eo.summaryObservation(i, j, k, getDose(i, j, k));
            }
          }
        }
      }
    }
    for (ExposeObserver eo : exposureObservers) {
      eo.exposureComplete();
    }
    if (photoElectronEscape) {
      // NOTE(review): fractionEscapedDose is a ratio (escaped / total dose)
      // but the message says "Energy" — confirm the intended label/units.
      System.out.println(String.format(
          "\nEnergy that may escape by Photoelectron Escape: %.2e",
          fractionEscapedDose));
      System.out.print(String.format(
          "Total energy that may escape by Fluorescent Escape: %.2e",
          fluorescentEscapedDose));
      System.out.println(" J.\n");
    }
    // END OF EXPOSE METHOD
  }

  // Exposes the crystal at a single rotation angle: rotates/translates each
  // voxel into the beam frame and deposits dose, fluence and elastic yield.
  private void exposeAngle(final double angle, final Beam beam,
      final Wedge wedge, final int anglenum, final int anglecount,
      final double beamEnergy, final double[][] fluorEscapeFactors) {
    final int[] crystalSize = getCrystSizeVoxels();
    final Double[] wedgeStart = wedge.getStartVector();
    final Double[] wedgeTranslation = wedge.getTranslationVector(angle);
    final double anglecos = Math.cos(angle);
    final double anglesin = Math.sin(angle);
    setupDepthFinding(angle, wedge);
    // absorption fraction of the beam by a voxel
    final double absorptionFraction = 1 - Math.exp(-1
        * coefCalc.getAbsorptionCoefficient() / getCrystalPixPerUM());
    // exposure for the Voxel (J) * fraction absorbed by voxel
    // / Voxel mass: 1um^3/1m/ml (= 1e-18/1e3)
    //   [volume (um^-3) * density (g/ml)]
    // * 1e-6 for MGy
    final double fluenceToDoseFactorCompton = -1 * Math.expm1(-1
        * coefCalc.getInelasticCoefficient() / getCrystalPixPerUM())
        / (1e-15 * (Math.pow(getCrystalPixPerUM(), -3) * coefCalc
            .getDensity()))
        * 1e-6; // MGy
    // Calculate voxel mass (kg) = voxelVolume (um^3) * density (g/cm^3) *
    // unitConversionFactor
    voxelMass = UNIT_CONVERSION * (Math.pow(getCrystalPixPerUM(), -3)
        * coefCalc.getDensity());
    final double absorptionFractionPerKg = absorptionFraction / voxelMass
        * GY_TO_MGY;
    final double fluenceToElasticFactor = -1 * Math.expm1(-1 *
coefCalc.getElasticCoefficient() / getCrystalPixPerUM()) // exposure for the Voxel (J) * fraction scattered by the voxel // = J scattered / (beam.getPhotonEnergy() * Beam.KEVTOJOULES); // J scattered / [(keV/photon) / (J/keV)] = photons scattered final double beamAttenuationFactor = Math.pow(getCrystalPixPerUM(), -2) * wedge.getTotSec() / anglecount; // Area in um^2 of a voxel * time per angular step final double beamAttenuationExpFactor = -coefCalc .getAttenuationCoefficient(); double[] crystCoords; double[] translateRotateCoords = new double[3]; for (int i = 0; i < crystalSize[0]; i++) { for (int j = 0; j < crystalSize[1]; j++) { for (int k = 0; k < crystalSize[2]; k++) { if (isCrystalAt(i, j, k)) { // Rotate crystal into position crystCoords = getCrystCoord(i, j, k); // Translate Y translateRotateCoords[1] = crystCoords[1] + wedgeStart[1] + wedgeTranslation[1]; // Translate X double translateCoordX = crystCoords[0] + wedgeStart[0] + wedgeTranslation[0]; // Translate Z double translateCoordZ = crystCoords[2] + wedgeStart[2] + wedgeTranslation[2]; /* Rotate clockwise when y axis points away from observer */ // Rotate X translateRotateCoords[0] = translateCoordX * anglecos + translateCoordZ * anglesin; // Rotate Z translateRotateCoords[2] = -1 * translateCoordX * anglesin + translateCoordZ * anglecos; /* Unattenuated beam intensity (J/um^2/s) */ double unattenuatedBeamIntensity = beam.beamIntensity( translateRotateCoords[0], translateRotateCoords[1], wedge.getOffAxisUm()); if (unattenuatedBeamIntensity > 0d) { double depth = findDepth(translateRotateCoords, angle, wedge); /* * Assigning exposure (joules incident) and dose (J/kg absorbed) * to the voxel. 
*/ double voxImageFluence = (unattenuatedBeamIntensity) * beamAttenuationFactor * Math.exp(depth * beamAttenuationExpFactor) * (beamEnergy);// - fluorescentEscapedDose); // Attenuates the beam for absorption /* * I think that we need to reduce the voxImageEnergy due to * X-ray fluorescence escape in the first line below this * comment! * * One other consideration is whether you put the code for * Energy reduction by X-ray fluorescence before the code to * reduce the dose due to photoelectron escape (PE). This is * because the inner shell electron has to be ejected before * X-ray fluorescence takes place. Hence there's an argument * for doing PE first */ double electronweight = 9.10938356E-31; double csquared = 3E8*3E8; double beamenergy = (beam.getPhotonEnergy() * Beam.KEVTOJOULES); double mcsquared = electronweight * csquared; double voxImageElectronEnergyDose = mcsquared / (2*beamenergy + mcsquared); voxImageElectronEnergyDose = (beamenergy * (1 - (Math.pow(voxImageElectronEnergyDose, 0.5)))); //Compton electron energy in joules double numberofphotons = voxImageFluence / beamenergy; //This gives I0 in equation 9 in Karthik 2010, dividing by beam energy leaves photons per um^2/s double voxImageEnergy = voxImageFluence; double voxImageComptonFluence = numberofphotons * voxImageElectronEnergyDose; //Re-calculate voxImageFluence using Compton electron energy double voxImageDoseCompton = fluenceToDoseFactorCompton * voxImageComptonFluence; double voxImageDose = absorptionFractionPerKg * voxImageEnergy; // MGy double voxElasticYield = fluenceToElasticFactor * /** * Returns a common ExposureSummary object registered to this crystal. * * @return * An ExposureSummary object that keeps a list of automatically * generated metrics regarding exposures of this crystal. 
*/ public synchronized ExposureSummary getExposureSummary() { if (exposureSummaryObserver == null) { exposureSummaryObserver = new ExposureSummary(); addObserver(exposureSummaryObserver); } return exposureSummaryObserver; } /** * This function recommends an experimental resolution (in voxels per * micrometres) for the cases where the user did not explicitly specify one. * A built-in default resolution is considered, but the total number of voxels * is limited. * * @param x * crystal length in micrometres. * @param y * crystal width in micrometres. * @param z * crystal depth in micrometres. * @return * Recommended resolution in voxels per micrometre. */ public Double getDefaultLimitedResolution(final Double x, final Double y, final Double z) { if ((x * CRYSTAL_RESOLUTION_DEF) * (y * CRYSTAL_RESOLUTION_DEF) * (z * CRYSTAL_RESOLUTION_DEF) <= CRYSTAL_RESOLUTION_DEF_VOXLIM) { return CRYSTAL_RESOLUTION_DEF; } Double reductionFactor = CRYSTAL_RESOLUTION_DEF_VOXLIM / (x * y * z); reductionFactor = Math.pow(reductionFactor, 1d / 3d); return reductionFactor; } }
package dynamake.commands;

import java.io.Serializable;
import java.util.ArrayDeque;

import dynamake.models.CompositeLocation;
import dynamake.models.Location;
import dynamake.models.ModelRootLocation;

/**
 * Per-execution working state for commands: a LIFO "production" stack used to
 * pass values from producing commands to later consuming commands, plus a
 * current location offset that is pushed and popped as execution moves into
 * and out of nested model scopes.
 */
public class ExecutionScope implements Serializable {
	private static final long serialVersionUID = 1L;

	/**
	 * Sentinel stored in place of null, because ArrayDeque rejects null
	 * elements and a stored null could not be told apart from "stack empty".
	 * Must itself be Serializable: ExecutionScope is Serializable, so
	 * serializing a scope whose production stack currently holds a produced
	 * null would otherwise fail with a NotSerializableException.
	 */
	private static class NullWrapper implements Serializable {
		private static final long serialVersionUID = 1L;
	}

	/** LIFO stack of produced-but-not-yet-consumed values. */
	private ArrayDeque<Object> production = new ArrayDeque<Object>();
	/** Current accumulated location offset; starts at the model root. */
	private Location offset = new ModelRootLocation();

	/**
	 * Pushes a value onto the production stack. A null value is replaced by
	 * the NullWrapper sentinel so it can be stored and later handed back as
	 * null from consume().
	 *
	 * @param value the value to hand over to a later consume() call; may be null
	 */
	public void produce(Object value) {
		if(value == null)
			value = new NullWrapper();
		production.addLast(value);
	}

	/**
	 * Pops the most recently produced value.
	 *
	 * @return the popped value; null either when a produced null (NullWrapper)
	 *         is popped or when the stack is empty (pollLast returns null) —
	 *         the two cases are deliberately indistinguishable to callers
	 */
	public Object consume() {
		// If pollLast returns null, then ambiguity of the return result of consume occurs, because
		// the cause of null is not NullWrapper.
		Object value = production.pollLast();
		return value instanceof NullWrapper ? null : value;
	}

	/**
	 * Extends the current offset with a further (nested) location.
	 *
	 * @param offset the location to append to the current offset
	 */
	public void pushOffset(Location offset) {
		this.offset = new CompositeLocation(this.offset, offset);
	}

	/**
	 * Undoes the most recent pushOffset, restoring the previous offset.
	 * Precondition: at least one pushOffset call must be outstanding —
	 * otherwise the cast below throws ClassCastException (the initial offset
	 * is a ModelRootLocation, not a CompositeLocation).
	 *
	 * @return the location that the last pushOffset appended
	 */
	public Location popOffset() {
		CompositeLocation offsetAsComposite = (CompositeLocation)offset;
		Location poppedOffset = offsetAsComposite.getTail();
		offset = offsetAsComposite.getHead();
		return poppedOffset;
	}

	/**
	 * @return the current accumulated location offset
	 */
	public Location getOffset() {
		return offset;
	}
}
package se.raddo.raddose3D;

/**
 * This is a Dose Decay Model class that calculates the Relative
 * Diffraction Efficiency (RDE) according to the model from the
 * Leal et al. (2012) paper. The paper describes the loss of
 * scattering power of a crystal as a product of the expected
 * intensity, the Debye-Waller factor and an empirically derived
 * scale factor (equation 4 of the paper).
 */
public class DDMLeal implements DDM {
  /*
   * Decay parameters used in Leal et al. (2012) (eqn 4). All three are set
   * to 0 by the constructor when any of them is missing (null).
   */
  /** Decay parameter beta (dose-dependent B-factor term, eqn 4). */
  private final double BETA;
  /** Decay parameter b0 (B-factor at zero dose, eqn 4). */
  private final double B0;
  /** Decay parameter gamma (scale-factor decay rate, eqn 4). */
  private final double GAMMA;

  /**
   * Array that stores the BEST intensity data (300 resolution bins):
   * column 1 - h^2 values, where h = 1/d and d is the spacing between
   * successive Bragg planes (i.e. resolution);
   * column 2 - J values (expected intensity values).
   */
  private static final double[][] BEST_DATA = new double[][] {
      { 0.009000, 117970.000000 }, { 0.013100, 100512.023438 }, { 0.017200, 80882.992188 },
      { 0.021300, 62948.515625 }, { 0.025400, 57507.757813 }, { 0.029500, 61357.429688 },
      { 0.033500, 72234.062500 }, { 0.037600, 89858.945313 }, { 0.041700, 109460.929688 },
      { 0.045800, 126917.039063 }, { 0.049900, 137405.062500 }, { 0.054000, 139655.375000 },
      { 0.058100, 137483.218750 }, { 0.062200, 133394.875000 }, { 0.066300, 129394.328125 },
      { 0.070400, 125762.617188 }, { 0.074500, 121035.289063 }, { 0.078600, 116051.804688 },
      { 0.082600, 110836.078125 }, { 0.086700, 104613.296875 }, { 0.090800, 97322.054688 },
      { 0.094900, 89836.304688 }, { 0.099000, 83216.187500 }, { 0.103100, 78146.273438 },
      { 0.107200, 73459.671875 }, { 0.111300, 69471.023438 }, { 0.115400, 65299.644531 },
      { 0.119500, 61581.441406 }, { 0.123600, 58510.613281 }, { 0.127700, 55865.179688 },
      { 0.131800, 53658.789063 }, { 0.135800, 52101.019531 }, { 0.139900, 51070.417969 },
      { 0.144000, 50092.042969 }, { 0.148100, 49350.722656 }, { 0.152200, 48151.910156 },
      { 0.156300, 47058.906250 }, { 0.160400, 46675.406250 }, { 0.164500, 46597.675781 },
      { 0.168600, 45924.046875 }, { 0.172700, 46080.671875 }, { 0.176800, 45937.621094 },
      { 0.180900, 46096.023438 }, { 0.184900, 45896.964844 }, { 0.189000, 45990.093750 },
      { 0.193100, 46123.292969 }, { 0.197200, 46343.515625 }, { 0.201300, 45936.539063 },
      { 0.205400, 45715.695313 }, { 0.209500, 45109.164063 }, { 0.213600, 44549.132813 },
      { 0.217700, 43634.820313 }, { 0.221800, 43566.085938 }, { 0.225900, 43451.015625 },
      { 0.230000, 42696.292969 }, { 0.234100, 41173.980469 }, { 0.238100, 39972.753906 },
      { 0.242200, 39166.628906 }, { 0.246300, 38020.367188 }, { 0.250400, 36810.992188 },
      { 0.254500, 35497.308594 }, { 0.258600, 34194.906250 }, { 0.262700, 32992.742188 },
      { 0.266800, 31585.996094 }, { 0.270900, 30211.492188 }, { 0.275000, 29119.816406 },
      { 0.279100, 28151.564453 }, { 0.283200, 27386.414063 }, { 0.287300, 26232.775391 },
      { 0.291300, 25235.693359 }, { 0.295400, 24318.244141 }, { 0.299500, 23707.949219 },
      { 0.303600, 22821.910156 }, { 0.307700, 22182.095703 }, { 0.311800, 21694.740234 },
      { 0.315900, 21236.888672 }, { 0.320000, 20733.123047 }, { 0.324100, 20323.289063 },
      { 0.328200, 20073.404297 }, { 0.332300, 19932.156250 }, { 0.336400, 19631.480469 },
      { 0.340400, 19223.189453 }, { 0.344500, 18920.273438 }, { 0.348600, 18557.662109 },
      { 0.352700, 18134.789063 }, { 0.356800, 17926.917969 }, { 0.360900, 17909.144531 },
      { 0.365000, 17908.371094 }, { 0.369100, 17781.652344 }, { 0.373200, 17634.251953 },
      { 0.377300, 17607.757813 }, { 0.381400, 17273.970703 }, { 0.385500, 17132.121094 },
      { 0.389600, 16953.238281 }, { 0.393600, 16883.560547 }, { 0.397700, 16615.091797 },
      { 0.401800, 16435.376953 }, { 0.405900, 16423.140625 }, { 0.410000, 16351.833008 },
      { 0.414100, 16278.805664 }, { 0.418200, 15998.300781 }, { 0.422300, 15795.753906 },
      { 0.426400, 15589.185547 }, { 0.430500, 15561.383789 }, { 0.434600, 15467.072266 },
      { 0.438700, 15476.588867 }, { 0.442800, 15331.998047 }, { 0.446800, 15028.963867 },
      { 0.450900, 14745.987305 }, { 0.455000, 14509.141602 }, { 0.459100, 14445.925781 },
      { 0.463200, 14254.642578 }, { 0.467300, 14111.920898 }, { 0.471400, 13900.478516 },
      { 0.475500, 13785.526367 }, { 0.479600, 13686.092773 }, { 0.483700, 13464.845703 },
      { 0.487800, 13304.157227 }, { 0.491900, 13084.092773 }, { 0.495900, 13114.880859 },
      { 0.500000, 13089.595703 }, { 0.504100, 13244.094727 }, { 0.508200, 13117.398438 },
      { 0.512300, 13140.625977 }, { 0.516400, 13031.726563 }, { 0.520500, 12999.481445 },
      { 0.524600, 12835.458008 }, { 0.528700, 12954.440430 }, { 0.532800, 12937.747070 },
      { 0.536900, 12936.303711 }, { 0.541000, 12825.827148 }, { 0.545100, 12995.077148 },
      { 0.549100, 12994.031250 }, { 0.553200, 13036.256836 }, { 0.557300, 13006.765625 },
      { 0.561400, 13057.585938 }, { 0.565500, 13010.015625 }, { 0.569600, 12891.707031 },
      { 0.573700, 12966.081055 }, { 0.577800, 13114.422852 }, { 0.581900, 13119.473633 },
      { 0.586000, 13065.753906 }, { 0.590100, 13052.747070 }, { 0.594200, 13214.619141 },
      { 0.598200, 13376.884766 }, { 0.602300, 13386.037109 }, { 0.606400, 13244.183594 },
      { 0.610500, 13225.625000 }, { 0.614600, 13203.177734 }, { 0.618700, 13157.918945 },
      { 0.622800, 13058.344727 }, { 0.626900, 13089.546875 }, { 0.631000, 13236.269531 },
      { 0.635100, 13356.927734 }, { 0.639200, 13294.084961 }, { 0.643300, 13322.505859 },
      { 0.647400, 13356.877930 }, { 0.651400, 13574.700195 }, { 0.655500, 13741.788086 },
      { 0.659600, 13988.012695 }, { 0.663700, 14126.933594 }, { 0.667800, 14226.778320 },
      { 0.671900, 14096.913086 }, { 0.676000, 14083.927734 }, { 0.680100, 14170.342773 },
      { 0.684200, 14351.646484 }, { 0.688300, 14494.584961 }, { 0.692400, 14485.082031 },
      { 0.696500, 14514.433594 }, { 0.700600, 14622.690430 }, { 0.704600, 14725.596680 },
      { 0.708700, 14840.912109 }, { 0.712800, 14869.136719 }, { 0.716900, 14947.928711 },
      { 0.721000, 15039.328125 }, { 0.725100, 15069.899414 }, { 0.729200, 15058.230469 },
      { 0.733300, 14892.115234 }, { 0.737400, 14829.183594 }, { 0.741500, 14854.609375 },
      { 0.745600, 14911.042969 }, { 0.749700, 14950.721680 }, { 0.753700, 15113.783203 },
      { 0.757800, 15211.773438 }, { 0.761900, 15205.695313 }, { 0.766000, 15024.023438 },
      { 0.770100, 14926.859375 }, { 0.774200, 14948.205078 }, { 0.778300, 14968.500000 },
      { 0.782400, 14961.653320 }, { 0.786500, 14880.744141 }, { 0.790600, 14853.396484 },
      { 0.794700, 14715.400391 }, { 0.798800, 14625.747070 }, { 0.802900, 14476.197266 },
      { 0.806900, 14315.362305 }, { 0.811000, 14115.835938 }, { 0.815100, 14177.434570 },
      { 0.819200, 14214.168945 }, { 0.823300, 13756.127930 }, { 0.827400, 13478.938477 },
      { 0.831500, 13409.521484 }, { 0.835600, 13313.304688 }, { 0.839700, 13191.076172 },
      { 0.843800, 13068.227539 }, { 0.847900, 13143.240234 }, { 0.852000, 13034.021484 },
      { 0.856100, 12844.786133 }, { 0.860100, 12565.625977 }, { 0.864200, 12494.125977 },
      { 0.868300, 12431.333008 }, { 0.872400, 12224.258789 }, { 0.876500, 12045.228516 },
      { 0.880600, 11934.916992 }, { 0.884700, 11999.309570 }, { 0.888800, 12092.721680 },
      { 0.892900, 12073.926758 }, { 0.897000, 12000.385742 }, { 0.901100, 11492.284180 },
      { 0.905200, 11340.666016 }, { 0.909200, 11261.278320 }, { 0.913300, 11170.411133 },
      { 0.917400, 11033.553711 }, { 0.921500, 10920.555664 }, { 0.925600, 10805.260742 },
      { 0.929700, 10749.541992 }, { 0.933800, 10633.936523 }, { 0.937900, 10553.670898 },
      { 0.942000, 10396.851563 }, { 0.946100, 10345.898438 }, { 0.950200, 10439.532227 },
      { 0.954300, 10444.083984 }, { 0.958400, 10338.727539 }, { 0.962400, 10137.357422 },
      { 0.966500, 10024.374023 }, { 0.970600, 9960.443359 }, { 0.974700, 9843.068359 },
      { 0.978800, 9813.852539 }, { 0.982900, 9774.963867 }, { 0.987000, 9722.901367 },
      { 0.991100, 9668.754883 }, { 0.995200, 9489.758789 }, { 0.999300, 9437.469727 },
      { 1.003400, 9337.846680 }, { 1.007500, 9232.355469 }, { 1.011500, 9143.000977 },
      { 1.015600, 8946.202148 }, { 1.019700, 9061.576172 }, { 1.023800, 8927.707031 },
      { 1.027900, 8833.817383 }, { 1.032000, 8559.502930 }, { 1.036100, 8737.791016 },
      { 1.040200, 8741.252930 }, { 1.044300, 8734.716797 }, { 1.048400, 8730.012695 },
      { 1.052500, 8553.071289 }, { 1.056600, 8567.203125 }, { 1.060700, 8448.906250 },
      { 1.064700, 8348.450195 }, { 1.068800, 8372.744141 }, { 1.072900, 8420.621094 },
      { 1.077000, 8534.404297 }, { 1.081100, 8515.739258 }, { 1.085200, 8391.372070 },
      { 1.089300, 8376.128906 }, { 1.093400, 8364.005859 }, { 1.097500, 8370.607422 },
      { 1.101600, 8053.081055 }, { 1.105700, 7885.546875 }, { 1.109800, 7949.569824 },
      { 1.113900, 8098.683105 }, { 1.117900, 8009.884766 }, { 1.122000, 7884.853027 },
      { 1.126100, 7912.110840 }, { 1.130200, 7977.089844 }, { 1.134300, 8038.597168 },
      { 1.138400, 7984.880859 }, { 1.142500, 7943.616211 }, { 1.146600, 8002.785156 },
      { 1.150700, 7840.146973 }, { 1.154800, 7771.714844 }, { 1.158900, 7704.839844 },
      { 1.163000, 7606.397949 }, { 1.167000, 7499.033203 }, { 1.171100, 7380.200195 },
      { 1.175200, 7353.042969 }, { 1.179300, 7373.826172 }, { 1.183400, 7386.295898 },
      { 1.187500, 7445.311035 }, { 1.191600, 7298.761230 }, { 1.195700, 7163.548828 },
      { 1.199800, 6936.292969 }, { 1.203900, 6920.410156 }, { 1.208000, 6888.470215 },
      { 1.212100, 7020.129883 }, { 1.216200, 6991.485840 }, { 1.220200, 6970.270020 },
      { 1.224300, 6894.088867 }, { 1.228400, 6915.407227 }, { 1.232500, 6934.170898 } };

  /**
   * Interpolated midpoint values between adjacent BEST_DATA rows, filled by
   * the constructor:
   * column 1 - interpolated (midpoint) h^2 values;
   * column 2 - interpolated (midpoint) J values;
   * column 3 - dh, the difference between successive h (= sqrt(h^2)) values,
   * used as the integration step width in getIntegratedIntensity().
   */
  final double[][] interpolatedValues;

  /**
   * Constructor for the DDMLeal class that takes in the three decay
   * parameters (defined in Leal et al. 2012, equation 4). If any parameter
   * is null, all three are set to 0 (decay effectively disabled) and a
   * message is printed to stdout.
   *
   * @param gamma gamma
   * @param b0 b0
   * @param beta beta
   */
  public DDMLeal(final Double gamma, final Double b0, final Double beta) {
    if (gamma == null || b0 == null || beta == null) {
      // A single missing parameter disables the whole model: every term in
      // eqn 4 that depends on dose becomes constant.
      this.BETA = 0;
      this.B0 = 0;
      this.GAMMA = 0;
      System.out.print("No decay parameter values given. ");
      System.out.println("All decay parameters set to 0.");
    } else {
      this.BETA = beta;
      this.B0 = b0;
      this.GAMMA = gamma;
      /* Print the parameter values to the screen. */
      System.out.println("Using decay parameter values:");
      System.out.println("Gamma = " + this.GAMMA);
      System.out.println("B0 = " + this.B0);
      System.out.println("Beta = " + this.BETA);
    }

    /*
     * column 1 - interpolated h^2 values
     * column 2 - interpolated J values
     * column 3 - difference between h values
     */
    interpolatedValues = new double[BEST_DATA.length - 1][3];

    /*
     * Pre-compute the midpoint h^2 and J between each pair of successive
     * BEST resolution bins, and the dh step widths (differences between
     * successive h = sqrt(h^2) values) used for the numerical integration
     * in getIntegratedIntensity().
     */
    for (int i = 0; i < BEST_DATA.length - 1; i++) {
      interpolatedValues[i][0] = (BEST_DATA[i][0] + BEST_DATA[i + 1][0]) / 2;
      interpolatedValues[i][1] = (BEST_DATA[i][1] + BEST_DATA[i + 1][1]) / 2;
      interpolatedValues[i][2] = Math.sqrt(BEST_DATA[i + 1][0])
          - Math.sqrt(BEST_DATA[i][0]);
    }
  }

  /**
   * Print string to tell user the type of dose decay model being used.
   *
   * @return Informative string about the dose decay model being used.
   */
  @Override
  public String toString() {
    return "Dose Decay Model from Leal et al. 2012 is being used.";
  }

  /**
   * Method to calculate the Relative Diffraction Efficiency (RDE).
   * The model used is from the Leal et al. 2012 paper that describes
   * the loss of scattering power of a crystal as a product of the
   * expected intensity, the Debye-Waller factor and an empirically
   * derived scale factor.
   *
   * @param dose
   *          This is the absorbed dose within the crystal voxel
   * @return The Relative Diffraction Efficiency
   */
  @Override
  public double calcDecay(final double dose) {
    /*
     * Relative intensity is the integrated intensity calculated using the
     * current dose divided by the integrated intensity at dose = 0 MGy.
     * Note: the zero-dose denominator is recomputed on every call.
     */
    double relativeIntensityDecay = getIntegratedIntensity(dose)
        / getIntegratedIntensity(0);
    return relativeIntensityDecay;
  }

  /**
   * Method to calculate the expected integrated intensity.
   * The integrated intensity can be found in the Leal et al. 2012
   * paper equation 4. The integral over h is approximated as a sum over the
   * pre-computed midpoint bins in interpolatedValues, weighted by each
   * bin's dh step width.
   *
   * @param dose
   *          This is the absorbed dose within the crystal voxel
   * @return The integrated intensity
   */
  public double getIntegratedIntensity(final double dose) {
    /* The integrated intensity according to Leal et al. 2012 (eqn 4). */
    double integratedIntensity;

    /*
     * Approximate the integral of eqn 4 (Leal et al. 2012):
     * sum over bins of h^2 * J(h) * exp(-0.5 * h^2 * (B0 + BETA*dose)) * dh.
     */
    double integralSum = 0;
    double eachTerm;
    for (int j = 0; j < interpolatedValues.length; j++) {
      eachTerm = (interpolatedValues[j][0] * interpolatedValues[j][1]
          * Math.exp(-0.5 * (interpolatedValues[j][0])
              * (this.B0 + this.BETA * dose)))
          * interpolatedValues[j][2];
      integralSum = integralSum + eachTerm;
    }

    /*
     * Scale the sum by the empirical Gaussian scale factor
     * exp(-(GAMMA*dose)^2) to obtain the integrated intensity of eqn 4.
     */
    integratedIntensity = Math.exp(-Math.pow(this.GAMMA * dose, 2))
        * integralSum;

    return integratedIntensity;
  }

  /*
   * NOTE(review): the Javadoc below describes a method ("extract the BEST
   * intensity data" from a csv file) that is not present in this class —
   * the data is instead compiled in as BEST_DATA above. Kept for reference;
   * consider deleting.
   *
   * Method to extract the BEST intensity data (Popov & Bourenkov 2003).
   * The intensity data is stored in a csv file in 2 columns:
   * column 1 are h^2 values (h = 1/d and d is the resolution in Angstroms)
   * column 2 are the expected intensity values (denoted J in the file).
   * The file contains intensity values for each of the 300 resolution bins
   * (i.e. 300 rows).
   */
}
package se.raddo.raddose3D; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.FileNotFoundException; import java.io.OutputStreamWriter; import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.Keys; //import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.interactions.Actions; import org.openqa.selenium.chrome.ChromeDriver; //import org.openqa.selenium.support.ui.ExpectedCondition; //import org.openqa.selenium.support.ui.WebDriverWait; import java.text.*; import java.awt.Toolkit; import java.awt.datatransfer.*; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; public class MicroED { //polyhderon variables public double[][] verticesEM; public int[][] indicesEM; public double[][][][] crystCoordEM; public double crystalPixPerUMEM; public int[] crystalSizeVoxelsEM; public boolean[][][][] crystOccEM; /** * Normal array holding normalised direction vectors for * each triangle specified by the index array. * Contains an i, j, k vector per triangle. * Should have same no. of entries as the indices array. */ private double[][] normals, rotatedNormals; private TreeMap<Double, double[]>[] lowEnergyAngles; private TreeMap<Double, double[]>[] highEnergyAngles; /** * Distances from origin for each of the triangle planes. * Should have same no. of entries as the indices array. 
*/ private double[] originDistances, rotatedOriginDistances; public double crystalSurfaceArea; public double sampleThickness; public double crystalVolume; //dm^3 public double XDimension; public double YDimension; public double ZDimension; public String crystalTypeEM; private long numSimulatedElectrons; private double numElectrons; private double numberElastic; private double numberSingleElastic; private double numberNotInelasticEqu; private double numberNotInelasticRatio; private double numberProductive; private double stoppingPowerESTAR; private double MonteCarloRuntime; //to see if multislice is necessary at all private final int numberSlices = 1; private double MonteCarloDose; private double MonteCarloImageDose; private double MonteCarloTotElasticCount; private double MonteCarloSingleElasticCount; private double MonteCarloFSEEscape; private double MonteCarloFSEEntry; private double MonteCarloFlEscape; private double MonteCarloAugerEscape; private double MonteCarloAugerEntry; private double MonteCarloProductive; private double MonteCarloProductiveImage; private double extraFlEscape; private double extraAugerEscape; private double newMonteCarloFSEEscape; private double FSEsum; private int FSEcount; private double lowEnDose; private double MonteCarloElectronsExited; private double MonteCarloElectronsEntered; private double MonteCarloCharge; private double MonteCarloChargeDensity; private double elasticEnergyTot; private double displacementEnergy; private double totFSEEnergy; private double totAugerEnergy; private double totShellEnergy; private double totPlasmonEnergy; private double totBreakdownEnergy; private int numAuger; private int numFL; private int numFSEFromSurr; private int numFSEFromSample; private double[][][] voxelCharge; private double[][][] voxelDose; private double avgVoxelDose; private double maxX, maxY, maxZ; private double[] regionDose; private double[] regionVolume; private double avgRegionDose; public final double Wcc = 0.0; private double 
MonteCarloGOSDose; private double MonteCarloGOSEscape; private double energyLostGOS; private double avgW; private double Wcount; private double avgShell; private boolean GOS = true; protected static final int NUM_REGIONS = 10; protected static final long NUM_MONTE_CARLO_ELECTRONS = 100000; protected static final double c = 299792458; protected static final double m = 9.10938356E-31; protected static final double CUTOFF = 0.0001; public double energyCUTOFF = 3; public final boolean considerCharge = false; protected static final int BIN_DIVISION = 2; //how many bins to divide the dose deposition into @SuppressWarnings("unchecked") public MicroED(double vertices[][], int[][] indices, double[][][][] crystCoord, double crystalPixPerUM, int[] crystSizeVoxels, boolean[][][][] crystOcc, String crystalType) { verticesEM = vertices; indicesEM = indices; crystCoordEM = crystCoord; crystalPixPerUMEM = crystalPixPerUM; crystalSizeVoxelsEM = crystSizeVoxels; crystOccEM = crystOcc; crystalTypeEM = crystalType; double[] xMinMax = this.minMaxVertices(0, vertices); double[] yMinMax = this.minMaxVertices(1, vertices); double[] zMinMax = this.minMaxVertices(2, vertices); XDimension = 1000 * (xMinMax[1] - xMinMax[0]); YDimension = 1000 * (yMinMax[1] - yMinMax[0]); ZDimension = 1000 * (zMinMax[1] - zMinMax[0]); crystalSurfaceArea = XDimension * YDimension * 1E02; //convert from nm^2 to A^2 if (crystalTypeEM == "CYLINDER") { crystalSurfaceArea = (Math.PI * (XDimension/2) * (YDimension/2)) * 1E02; } sampleThickness = ZDimension; crystalVolume = (crystalSurfaceArea * (sampleThickness * 10) * 1E-27); //A^3 to dm^3 if (crystalTypeEM == "SPHERICAL") { crystalVolume = ((4/3) * Math.PI * (XDimension/2) * (YDimension/2) * (ZDimension/2)) * 1E-24; //nm^3 } //note the volume would need to be updated for a polyhedron!!! 
- currently just a cube or cylinder //although it isn't used lowEnergyAngles = new TreeMap[95]; highEnergyAngles = new TreeMap[95]; //initialise voxel dose and charge int[] maxVoxel = getMaxPixelCoordinates(); voxelCharge = new double[maxVoxel[0]][maxVoxel[1]][maxVoxel[2]]; voxelDose = new double[maxVoxel[0]][maxVoxel[1]][maxVoxel[2]]; maxX = maxVoxel[0]; maxY = maxVoxel[1]; maxZ = maxVoxel[2]; regionDose = new double[NUM_REGIONS]; regionVolume = new double[NUM_REGIONS]; populateRegionVolumes(); } public void getCSDArange(CoefCalc coefCalc) { double en = 100; int divisions = 100; double distance = 0; double energyStep = en/divisions; //get distance to lose the energy step while (en > 0.05) { double stoppingPower = coefCalc.getStoppingPower(en, false); distance += energyStep/stoppingPower; en -= energyStep; } distance = distance /1000; } public void CalculateEM(Beam beam, Wedge wedge, CoefCalc coefCalc) { // also pass in crystal dimensions // Just to be clear these are all dose of the exposed volume // testingXFELQuick(beam, coefCalc); //get a CSDA range for any given electron energy // getCSDArange(coefCalc); //getGOSinel // double test = coefCalc.getGOSInel(false); double wavelength = getWavelength(beam); double resRough = getResolutionRough(wavelength); double maxRes = getMaxRes(wavelength); System.out.println(String.format("The rough maximum resolution is: %.2e", resRough)); System.out.println(String.format("The max res is: %.2e", maxRes)); double dose1 = EMLETWay(beam, wedge, coefCalc); System.out.print(String.format("\nThe Dose in the exposed area by LET: %.8e", dose1)); System.out.println(" MGy\n"); double dose2 = EMEquationWay(beam, wedge, coefCalc, true); System.out.print(String.format("\nThe Dose in the exposed area by equation: %.8e", dose2)); System.out.println(" MGy\n"); dose2 = EMEquationWay(beam, wedge, coefCalc, false); System.out.print(String.format("\nThe Dose in the exposed area by 3:1: %.8e", dose2)); System.out.println(" MGy\n"); //calculate 
Sternheimer adjustment factor double dose3 = EMStoppingPowerWay(beam, wedge, coefCalc); System.out.print(String.format("\nThe Dose in the exposed area by stopping power: %.8e", dose3)); System.out.println(" MGy\n"); //start the Monte carlo stuff long start = System.nanoTime(); startMonteCarlo(coefCalc, beam); double[] dose4 = processMonteCarloDose(beam, coefCalc); System.out.print(String.format("\nThe Dose in the exposed area by Monte Carlo: %.8e", dose4[0])); System.out.println(" MGy\n"); System.out.print(String.format("The Dose in the imaged area by Monte Carlo: %.8e", dose4[1])); System.out.println(" MGy\n"); long runtime = System.nanoTime() - start; System.out.println(String.format("The Monte Carlo runtime in seconds was: %.8e", runtime/1E9)); MonteCarloRuntime = runtime/1E9; /* accessESTAR(coefCalc, beam.getPhotonEnergy()); double dose4 = getESTARDose(coefCalc, beam); System.out.print(String.format("\nThe Dose in the exposed area by ESTAR: %.8e", dose4)); System.out.println(" MGy\n"); */ System.out.println("\nNumber elastic events: " + numberElastic); System.out.println("Number single elastic events: " + numberSingleElastic); System.out.println("Number productive events: " + numberProductive); System.out.println("Number elastic events Monte Carlo: " + MonteCarloTotElasticCount); System.out.println("Number single elastic events Monte Carlo: " + MonteCarloSingleElasticCount); System.out.println("Number of productive electrons Monte Carlo: " + MonteCarloProductive); System.out.println("\nCharge buildup: " + MonteCarloCharge); System.out.println("Charge density " + MonteCarloChargeDensity); try { WriterFile("outputMicroED.CSV", dose4[0]); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } private double getWavelength(Beam beam) { double h = 6.626070040E-34; double c = 299792458; double csquared = c*c; double m0 = 9.109383356E-31; double V0 = beam.getPhotonEnergy()*Beam.KEVTOJOULES; double lambda = (h*c)/Math.pow(Math.pow(V0, 2) + 
2*V0*m0*csquared, 0.5); // in m lambda *= 1E10; //convert m to A return lambda; } private double getResolutionRough(double wavelength) { double a = 0.01; //radians double n = 1; return (wavelength / (2*n*Math.sin(a))); } private double getMaxRes(double wavelength) { double Cs = 1E7; double res = Math.pow(Cs*Math.pow(wavelength, 3)/6, 0.25); return res; } private double EMLETWay(Beam beam, Wedge wedge, CoefCalc coefCalc) { // double electronNumber = beam.getPhotonsPerSec() * wedge.getTotSec(); // total incident electrons double exposure = beam.getExposure(); //check if the beam is bigger or smaller than the sample - need to check in x and in y (x = horizontal, y = vertical) double exposedAreaY = getExposedY(beam); double exposedAreaX = getExposedX(beam); double totExposedArea = 0; if (beam.getIsCircular() == false) { totExposedArea = (exposedAreaX * exposedAreaY) * 1E08; //convert um^2 to A^2 } else { totExposedArea = Math.PI * ((exposedAreaX/2) * (exposedAreaY/2)) * 1E08; //convert um^2 to A^2 } // double electronNumber = exposure * totExposedArea; //Reduce electron number if beam bigger than the sample /* if (totExposedArea < (beam.getBeamX()*beam.getBeamY() * 1E08)) { double fractionFlux = totExposedArea / (beam.getBeamX()*beam.getBeamY() * 1E08); electronNumber = electronNumber * fractionFlux; //convert total electron number to electron incident on the sample } */ // double exposure = electronNumber/totExposedArea; //exposure in e/A^2 double beamEnergy = beam.getPhotonEnergy(); double baseDose = 0; double theDose = 0; //set case exposure = 1 if (beamEnergy == 100) { baseDose = 6.6; } else if (beamEnergy == 200) { baseDose = 4.5; } else if (beamEnergy == 300) { baseDose = 3.7; } theDose = baseDose * exposure; return theDose; } private double EMEquationWay(Beam beam, Wedge wedge, CoefCalc coefCalc, boolean useInelEqu) { double exposure = beam.getExposure(); // double energyPerEvent = 0.02; //in keV double energyPerEvent = (7 * coefCalc.getZav())/1000; //in keV 
//Change this to 7* Zav energyPerEvent = 0.025; //will need to edit when I add in circular double exposedArea = 0; if (beam.getIsCircular() == false) { exposedArea = (getExposedX(beam) * getExposedY(beam)); //um^2 } else { exposedArea = Math.PI * ((getExposedX(beam)/2) * (getExposedY(beam)/2)); //um^2 } double electronNumber = exposure * (exposedArea * 1E08); numElectrons = electronNumber; double exposedVolume = exposedArea * (sampleThickness/1000) * 1E-15; //exposed volume in dm^3 // double electronNumber = getElectronNumber(beam, wedge, exposedArea); // double solventFraction = coefCalc.getEMSolventFraction(); //now I need to calcWaters here as don't have access to crystal properties in coefCalcEM //way 1 - density //way 2 = their way // coefCalc.calculateSolventWaterEM(solventFraction, exposedVolume); //density // coefCalc.calculateDensityEM(exposedVolume); // System.out.println(String.format("\nDensity: %.2e", coefCalc.getDensity())); //Elastic collisions // put in multislice here as well double elasticProb = 0; double avgEnergy = beam.getPhotonEnergy(); for (int i = 1; i <= numberSlices; i++) { double elasticProbOverT = coefCalc.getElectronElastic(avgEnergy); elasticProb += elasticProbOverT * (sampleThickness/numberSlices); //I need to update the electron energy, will do this with the stopping power for consistency double stoppingPower = coefCalc.getStoppingPower(avgEnergy, false); //send it electron energy double energyPerEl = stoppingPower * (sampleThickness/numberSlices); avgEnergy -= energyPerEl; } numberElastic = elasticProb * electronNumber; numberSingleElastic = electronNumber * Math.exp(-elasticProb) * (Math.pow(elasticProb, 1) / 1); //Poisson distribution //inelastic double inelasticProbOverT = 0; double inelasticProb = 0; avgEnergy = beam.getPhotonEnergy(); if (useInelEqu == true) { for (int i = 1; i <= numberSlices; i++) { inelasticProbOverT = coefCalc.getElectronInelastic(avgEnergy, exposedVolume); inelasticProb += inelasticProbOverT * 
(sampleThickness/numberSlices); //testing this double startingLambda_el = coefCalc.getElectronElasticMFPL(avgEnergy, false); inelasticProb = sampleThickness/coefCalc.getElectronInelasticMFPL(avgEnergy, false); //I need to update the electron energy, will do this with the stopping power for consistency double stoppingPower = coefCalc.getStoppingPower(avgEnergy, false); //send it electron energy double energyPerEl = stoppingPower * (sampleThickness/numberSlices); avgEnergy -= energyPerEl; } numberNotInelasticEqu = Math.exp(-inelasticProb) * electronNumber; } else { inelasticProb = elasticProb * 3; numberNotInelasticRatio = Math.exp(-inelasticProb) * electronNumber; } numberProductive = numberSingleElastic* numberNotInelasticEqu / electronNumber; //calculate backscattering coefficient - Use Heinrichs equation as a start double eta = coefCalc.getEta(); double numberBackscattered = electronNumber * (eta / 100); //how I deal with backscattering in terms of dose and productive is really not trivial!!!! //If I take them off at the start, they don't contribute to dose so that is understimated //If I take them off just the productive, I'm overestimating dose a little //How do I take off - I need to take it off single elastic via a probability //Indep of beam energy also scares me a bit //I should defo do % that were elastically scattered within the specified angle from the //objective aperture as this is much better!!!!!!!!!!!!!!!!! //Am I doing the mass right???? What is dose it is energy per mass of all right not just protein.... 
// --- tail of the preceding analytical EM-dose method (its opening lines are above this chunk) ---
// Convert the expected number of inelastic events into an absorbed dose in MGy.
double numberInelasticEvents = (inelasticProb * electronNumber);
double energyDeposited = (energyPerEvent * numberInelasticEvents) * Beam.KEVTOJOULES; //in J
double exposedMass = (((coefCalc.getDensity()*1000) * exposedVolume) / 1000); //in Kg
double dose = (energyDeposited/exposedMass) / 1E06; //dose in MGy
//thickness isn't making a difference on dose as mass increases with it
return dose;
}

/**
 * Estimates dose (MGy) from the collision stopping power alone: the sample is split
 * into {@code numberSlices} slices, the mean electron energy is degraded slice by
 * slice, and each slice's energy deposit is accumulated into the dose.
 * NOTE(review): the volume arithmetic (um^2 area * (sampleThickness/1000) * 1E-15
 * labelled dm^3) implies sampleThickness is in nm — confirm against the field's
 * other uses before relying on this.
 *
 * @param beam     supplies exposure and the incident energy (getPhotonEnergy() is
 *                 reused here for the electron energy)
 * @param wedge    not used by this method
 * @param coefCalc supplies density and stopping powers
 * @return dose in MGy
 */
private double EMStoppingPowerWay(Beam beam, Wedge wedge, CoefCalc coefCalc) {
  double exposedArea = 0;
  double exposure = beam.getExposure();
  if (beam.getIsCircular() == false) {
    exposedArea = (getExposedX(beam) * getExposedY(beam)); //um^2
  }
  else {
    // elliptical footprint: pi * semiaxis_x * semiaxis_y
    exposedArea = Math.PI * ((getExposedX(beam)/2) * (getExposedY(beam)/2)); //um^2
  }
  double electronNumber = exposure * (exposedArea * 1E08);
  double exposedVolume = exposedArea * ((sampleThickness/1000)) * 1E-15; //exposed volume in dm^3
  double exposedMass = (((coefCalc.getDensity()*1000) * exposedVolume) / 1000); //in Kg
  double stoppingPower = 0, energyDeposited = 0, dose = 0;
  double avgEnergy = beam.getPhotonEnergy();
  for (int i = 1; i <= numberSlices; i++) {
    // need to get the stopping power from coefcalc
    stoppingPower = coefCalc.getStoppingPower(avgEnergy, false); //send it electron energy
    double energyPerEl = stoppingPower * (sampleThickness/numberSlices);
    avgEnergy -= energyPerEl; // degrade mean energy before the next slice
    energyDeposited = electronNumber * energyPerEl * Beam.KEVTOJOULES; //in J, currently per electron
    dose += (energyDeposited/exposedMass) / 1E06; //dose in MGy
  }
  return dose;
}

/**
 * Returns the exposed extent in the x dimension of the sample in um:
 * the smaller of the beam width and the sample's X dimension.
 *
 * @param beam source of the beam X extent (um)
 * @return exposedAreaX exposed X extent in um
 */
private double getExposedX(Beam beam) {
  double exposedAreaX;
  double beamX = beam.getBeamX();
  // XDimension appears to be in nm here (divided by 1000 to compare with um) — TODO confirm
  if (XDimension/1000 > beamX) {
    exposedAreaX = beamX;
  }
  else {
    exposedAreaX = XDimension/1000;
  }
  return exposedAreaX;
}

/**
 * Returns the exposed extent in the y dimension of the sample in um:
 * the smaller of the beam height and the sample's Y dimension.
 *
 * @param beam source of the beam Y extent (um)
 * @return exposedAreaY exposed Y extent in um
 */
private double getExposedY(Beam beam) {
  double exposedAreaY;
  double beamY = beam.getBeamY();
  if (YDimension/1000 > beamY) {
    exposedAreaY = beamY;
  }
  else {
    exposedAreaY = YDimension/1000;
  }
  return exposedAreaY;
}

/**
 * Writes a one-line summary (header + values) of the run to {@code filename} in
 * UTF-8: dose, electrons simulated, runtime, electron counts and productive count.
 * NOTE(review): close() sits in its own try so a failed write still attempts to
 * release the handle, but a non-IOException would skip it — try-with-resources
 * would be safer.
 *
 * @param filename output path
 * @param dose4    dose value to record
 * @throws IOException if the file cannot be opened
 */
private void WriterFile(final String filename, final double dose4) throws IOException {
  BufferedWriter outFile;
  outFile = new BufferedWriter(new OutputStreamWriter(
      new FileOutputStream(filename), "UTF-8"));
  try {
    outFile.write("dose, numSimulated, runtime, total electrons, total elastic, single elastic, productive\n");
    outFile.write(String.format(
        " %f, %d, %f, %f, %f, %f, %f%n",
        dose4, numSimulatedElectrons, MonteCarloRuntime, numElectrons, MonteCarloTotElasticCount, MonteCarloSingleElasticCount, MonteCarloProductive));
  } catch (IOException e) {
    e.printStackTrace();
    System.err.println("WriterFile: Could not write to file " + filename);
  }
  try {
    outFile.close();
  } catch (IOException e) {
    e.printStackTrace();
    System.err.println("WriterFile: Could not close file " + filename);
  }
}

/**
 * Dumps the voxel dose grid as an X,Y,Z,scalar CSV in UTF-8, converting voxel
 * indices to Cartesian coordinates via convertToCartesianCoordinates.
 * (Error messages say "WriterFile" — copied from the method above; the text is
 * left unchanged here.)
 *
 * @param filename output path
 * @throws IOException if the file cannot be opened
 */
private void writeDoseCSV(final String filename) throws IOException {
  BufferedWriter outFile;
  outFile = new BufferedWriter(new OutputStreamWriter(
      new FileOutputStream(filename), "UTF-8"));
  try {
    outFile.write("X,Y,Z,scalar\n");
    for (int i = 0; i < maxX; i++) {
      for (int j = 0; j < maxY; j++) {
        for (int k = 0; k < maxZ; k++) {
          double[] coords = convertToCartesianCoordinates(i, j, k);
          outFile.write(String.format(
              "%f,%f,%f,%f%n",
              coords[0], coords[1], coords[2], voxelDose[i][j][k]));
        }
      }
    }
  } catch (IOException e) {
    e.printStackTrace();
    System.err.println("WriterFile: Could not write to file " + filename);
  }
  try {
    outFile.close();
  } catch (IOException e) {
    e.printStackTrace();
    System.err.println("WriterFile: Could not close file " + filename);
  }
}

/**
 * Drives the NIST ESTAR web form with Selenium/ChromeDriver to obtain the total
 * stopping power for the sample composition at {@code avgElectronEnergy}, storing
 * the scraped value in the {@code stoppingPowerESTAR} field (see continuation of
 * this method below). Requires the bundled chromedriver executable and network
 * access; fragile against any change to the ESTAR page layout.
 *
 * @param coefCalc          supplies density and element mass fractions
 * @param avgElectronEnergy electron energy in keV (converted to MeV for the form)
 */
public void accessESTAR(CoefCalc coefCalc, double avgElectronEnergy) {
  String exePath = "lib\\selenium\\chromedriver.exe";
  System.setProperty("webdriver.chrome.driver", exePath);
  // Create a new instance of the Firefox driver
  // (comment is stale — this actually launches Chrome)
  WebDriver driver = new ChromeDriver();
  //Launch the Website
  driver.get("https://physics.nist.gov/PhysRefData/Star/Text/ESTAR-u.html");
  //Enter material name
  WebElement name = driver.findElement(By.name("Name"));
  name.sendKeys("Protein");
  //Enter density
  double densityNum = coefCalc.getDensity();
  String densityString = Double.toString(densityNum);
  WebElement density = driver.findElement(By.name("Density"));
  density.sendKeys(densityString);
  //Enter element fractions
  Map<String, Double> fractionElementEM = new HashMap<String, Double>();
  fractionElementEM = coefCalc.getFractionElementEM();
  WebElement fractions = driver.findElement(By.name("Formulae"));
  NumberFormat formatNoSF = new DecimalFormat();
  formatNoSF = new DecimalFormat("0.000000"); //will break if in standard form
  for (String elementName : fractionElementEM.keySet()) {
    String fractionElement = formatNoSF.format(fractionElementEM.get(elementName));
    String toSend = (elementName + " " + fractionElement);
    //Write this in the textbox
    fractions.sendKeys(toSend);
    fractions.sendKeys(Keys.RETURN);
  }
  //submit this information
  WebElement submit = driver.findElement(By.cssSelector("input[value='Submit']"));
  submit.click();
  //enter the beam energy
  String beamMeV = Double.toString((avgElectronEnergy / 1000)); // keV -> MeV for the ESTAR form
  WebElement energy = driver.findElement(By.name("Energies"));
  energy.sendKeys(beamMeV);
  //uncheck default energies
  WebElement checkBox = driver.findElement(By.cssSelector("input[value='on']"));
  checkBox.click();
  //remove the graph as unnecessary
  WebElement radioButton = driver.findElement(By.cssSelector("input[value='None']"));
  radioButton.click();
  //submit this page
  submit = driver.findElement(By.cssSelector("input[value='Submit']"));
  submit.click();
  //select to output total stopping power
  checkBox = driver.findElement(By.name("total"));
  checkBox.click();
  //Download data
  submit = driver.findElement(By.cssSelector("input[value='Download data']"));
  submit.click();
  //copy and paste whole page
  Actions action = new Actions(driver);
  // '\u0061' is 'a' and '\u0063' is 'c': Ctrl+A (select all) then Ctrl+C (copy),
  // so the rendered results page ends up on the system clipboard.
  action.keyDown(Keys.CONTROL).sendKeys(String.valueOf('\u0061')).perform();
  action.keyUp(Keys.CONTROL).perform();
  action.keyDown(Keys.CONTROL).sendKeys(String.valueOf('\u0063')).perform();
  String wholeTable = getSysClipboardText();
  //get beam energy in a string
  double MeV = avgElectronEnergy/1000;
  NumberFormat formatter = new DecimalFormat();
  // format must match how ESTAR prints energies so indexOf can find the row
  formatter = new DecimalFormat("0.000E00");
  String beamEnergy = formatter.format(MeV);
  // search using beam energy
  int beamEnergyIndex = wholeTable.indexOf(beamEnergy);
  String numbers = wholeTable.substring(beamEnergyIndex);
  //find stopping power by the space
  int spaceIndex = numbers.indexOf(" ");
  String stoppingPowerString = numbers.substring(spaceIndex + 1);
  stoppingPowerString = stoppingPowerString.trim();
  // result is cached on the instance for getESTARDose to consume
  stoppingPowerESTAR = Double.parseDouble(stoppingPowerString);
  driver.quit(); // close all windows opened by selenium
}

/**
 * get string from Clipboard
 *
 * @return clipboard contents as a String, or "" if unavailable/non-textual
 */
public static String getSysClipboardText() {
  String ret = "";
  Clipboard sysClip = Toolkit.getDefaultToolkit().getSystemClipboard();
  Transferable clipTf = sysClip.getContents(null);
  if (clipTf != null) {
    if (clipTf.isDataFlavorSupported(DataFlavor.stringFlavor)) {
      try {
        ret = (String) clipTf.getTransferData(DataFlavor.stringFlavor);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }
  return ret;
}

/**
 * Computes dose (MGy) from the ESTAR-scraped total stopping power (field
 * {@code stoppingPowerESTAR}, filled by accessESTAR): energy deposited along the
 * full sample thickness divided by the exposed mass.
 * NOTE(review): assumes accessESTAR has already run; stoppingPowerESTAR units are
 * converted here via density to keV/nm per the inline comment — verify scaling.
 *
 * @param coefCalc supplies density
 * @param beam     supplies exposure and footprint
 * @return dose in MGy
 */
private double getESTARDose(CoefCalc coefCalc, Beam beam) {
  double exposedArea = 0;
  if (beam.getIsCircular() == false) {
    exposedArea = (getExposedX(beam) * getExposedY(beam)); //um^2
  }
  else {
    exposedArea = Math.PI * ((getExposedX(beam)/2) * (getExposedY(beam)/2)); //um^2
  }
  double exposedVolume = exposedArea * (sampleThickness/1000) * 1E-15; //exposed volume in dm^3
  double exposure = beam.getExposure();
  double electronNumber = exposure * (exposedArea * 1E08);
  double stoppingPower = (stoppingPowerESTAR * coefCalc.getDensity() * 1000) / 1E7; //keV/nm
  double energyDeposited = electronNumber * stoppingPower * sampleThickness * Beam.KEVTOJOULES; //in J, currently per electron
  double
  exposedMass = (((coefCalc.getDensity()*1000) * exposedVolume) / 1000); //in Kg
  double dose = (energyDeposited/exposedMass) / 1E06; //dose in MGy
  return dose;
}

// Everything below will be the Monte Carlo section of the code

/**
 * Runs the electron-tracking Monte Carlo: simulates {@code numSimulatedElectrons}
 * primaries through the sample (and, when cryo, the surrounding medium), drawing
 * free paths from combined elastic/inelastic mean free paths, dispatching events
 * to doPrimaryElastic/doPrimaryInelastic, and accumulating the results into the
 * MonteCarlo* fields (dose, elastic counts, productive counts, escapes).
 * This method only accumulates raw per-simulated-electron totals; scaling to the
 * real electron fluence happens later in processMonteCarloDose.
 * NOTE(review): relies heavily on instance fields (GOS flag, dimensions,
 * sampleThickness, accumulators) — not thread-safe, and statement order matters.
 *
 * @param coefCalc cross-section/stopping-power provider
 * @param beam     beam geometry, energy and exposure
 */
private void startMonteCarlo(CoefCalc coefCalc, Beam beam) {
  // boolean GOS = true;
  int triggered = 0; //testing
  int thisTriggered = 0; //testing
  //get number of electrons to simulate
  long numSim = coefCalc.getNumberSimulatedElectrons();
  if (numSim == 0) {
    numSim = NUM_MONTE_CARLO_ELECTRONS; // fall back to the class default
  }
  numSimulatedElectrons = numSim;
  //set up for one electron to start with and then test how many needed to get little deviation and then scale up
  int numberBackscattered = 0;
  //Start stuff to make quicker
  // These are for the sample, need to do them all again for the surrounding
  double startingEnergy = beam.getPhotonEnergy();
  double startingStoppingPower = coefCalc.getStoppingPower(startingEnergy, false);
  double startingLambda_el = coefCalc.getElectronElasticMFPL(startingEnergy, false);
  Map<ElementEM, Double> elasticProbs = coefCalc.getElasticProbs(false);
  //the FSE stuff
  double startingFSExSection = getFSEXSection(startingEnergy);
  double startingFSELambda = coefCalc.getFSELambda(startingFSExSection, false);
  //Inner shell ionisation x section
  coefCalc.populateCrossSectionCoefficients();
  double startingInnerShellLambda = coefCalc.betheIonisationxSection(startingEnergy, false);
  // Map<Element, Double> ionisationProbs = coefCalc.getInnerShellProbs(); //Really need to make sure that these are in the same order
  Map<Element, double[]> ionisationProbs = coefCalc.getAllShellProbs(false); //Really need to make sure that these are in the same order
  //plasmon stuff
  double startingPlasmonLambda = coefCalc.getPlasmaMFPL(startingEnergy, false);
  double plasmaEnergy = coefCalc.getPlasmaFrequency(false)/1000.0; //in keV
  double gosInelasticLambda = 0, gosInnerLambda = 0, gosOuterLambda = 0;
  Map<Element, double[]> gosIonisationProbs = null;
  Map<Element, Double> gosOuterIonisationProbs = null;
  // if (surrounding == false) {
  // GOS (generalised oscillator strength) path: combine outer-shell GOS lambda
  // with the Bethe inner-shell lambda when the latter is available.
  if (GOS == true) {
    gosInelasticLambda = coefCalc.getGOSInel(false, startingEnergy);
    gosInnerLambda = coefCalc.getGOSInnerLambda(false);
    gosOuterLambda = coefCalc.getGOSOuterLambda(false);
    gosOuterIonisationProbs = coefCalc.getGOSOuterShellProbs(false, gosOuterLambda); //note atm this does not work with plasma in this way
    gosIonisationProbs = coefCalc.getGOSShellProbs(false, gosInelasticLambda);
    if (startingInnerShellLambda > 0) {
      gosInelasticLambda = 1/(1/gosOuterLambda + 1/startingInnerShellLambda);
    }
    else {
      gosInelasticLambda = gosOuterLambda;
    }
  }
  //tot inelastic
  double startingInelasticLambda = coefCalc.getElectronInelasticMFPL(startingEnergy, false);
  Map<ElementEM, Double> elasticProbsSurrounding = null;
  Map<Element, double[]> ionisationProbsSurrounding = null;
  Map<Element, double[]> gosIonisationProbsSur = null;
  Map<Element, Double> gosOuterIonisationProbsSur = null;
  //now do all of the starting stuff again for electrons in the surrounding
  double startingStoppingPowerSurrounding = 0, startingLambda_elSurrounding = 0, startingInelasticLambdaSurrounding = 0;
  double startingFSELambdaSurrounding = 0, startingInnershellLambdaSurrounding = 0, gosInelasticLambdaSur = 0, gosInnerLambdaSur = 0, gosOuterLambdaSur = 0;
  if (coefCalc.isCryo()) {
    //stopping power
    startingStoppingPowerSurrounding = coefCalc.getStoppingPower(startingEnergy, true);
    //elastic
    startingLambda_elSurrounding = coefCalc.getElectronElasticMFPL(startingEnergy, true);
    elasticProbsSurrounding = coefCalc.getElasticProbs(true);
    //total inelastic
    startingInelasticLambdaSurrounding = coefCalc.getElectronInelasticMFPL(startingEnergy, true);
    //FSE stuff
    startingFSELambdaSurrounding = coefCalc.getFSELambda(startingFSExSection, true); //xSection per electron is the same
    //inner shell ionisation
    startingInnershellLambdaSurrounding = coefCalc.betheIonisationxSection(startingEnergy, true);
    ionisationProbsSurrounding = coefCalc.getAllShellProbs(true);
    // continuation of startMonteCarlo: GOS setup for the surrounding medium,
    // mirroring the sample-side setup above (true = surrounding flavour).
    if (GOS == true) {
      gosInelasticLambdaSur = coefCalc.getGOSInel(true, startingEnergy);
      gosInnerLambdaSur = coefCalc.getGOSInnerLambda(true);
      gosOuterLambdaSur = coefCalc.getGOSOuterLambda(true);
      gosOuterIonisationProbsSur = coefCalc.getGOSOuterShellProbs(true, gosOuterLambdaSur); //note atm this does not work with plasma in this way
      gosIonisationProbsSur = coefCalc.getGOSShellProbs(true, gosInelasticLambdaSur);
      if (startingInnershellLambdaSurrounding > 0) {
        gosInelasticLambdaSur = 1/(1/gosOuterLambdaSur + 1/startingInnershellLambdaSurrounding);
      }
      else {
        gosInelasticLambdaSur = gosOuterLambdaSur;
      }
    }
  }
  //not going to change the direction of the program yet going to write it separately and then
  //incorporate it in -remember that lambda_el now needs to always be lambda_t!!!!!!!!!
  //test ELSEPA
  // startingLambda = 236;
  double progress = 0, lastProgress = 0;
  for (int i = 0; i < numSimulatedElectrons; i++) { //for every electron to simulate
    // console progress indicator, printed every 5%
    progress = ((double)i)/numSimulatedElectrons;
    if (progress - lastProgress >= 0.05) {
      lastProgress = progress;
      System.out.print((int)(progress*100) + "% ");
    }
    //position stuff first
    //Need to change these to a uniform beam
    double previousX = 0, previousY = 0; //atm starting going straight
    double xNorm = 0.0000, yNorm = 0.0000, zNorm = 1.0; //direction cosine are such that just going down in one
    double theta = 0, phi = 0, previousTheta = 0, previousPhi = 0, thisTheta = 0;
    double previousZ = -ZDimension/2; //dodgy if specimen not flat - change for concave holes
    //position
    double RNDx = Math.random();
    double beamX = beam.getBeamX()*1000;
    // NOTE(review): the first assignment (sample-area placement) is immediately
    // overwritten by the beam-area placement — dead store kept as-is.
    previousX = (RNDx * XDimension) - (XDimension/2); //places on sample
    previousX = (RNDx * beamX) - (beamX/2); //places in beam area
    double RNDy = Math.random();
    double beamY = beam.getBeamY()*1000;
    previousY = (RNDy * YDimension) - (YDimension/2);
    if (beam.getIsCircular()) {
      //reduce Y limits so you can't put it out of the circle / ellipse
      double fractionLimit = Math.pow(1 - Math.pow(previousX/beamX, 2), 0.5);
      RNDy *= fractionLimit;
    }
    previousY = (RNDy * beamY) - (beamY/2);
    //direction
    double[] directionVector = getElectronStartingDirection(beam, previousX, previousY, previousZ);
    xNorm = directionVector[0];
    yNorm = directionVector[1];
    zNorm = directionVector[2];
    //need to update theta and phi for these direction vectors
    // theta angle between 0 0 1 and vector, phi angle between 1 0 0 and vector
    double[] zaxis = {0, 0, 1};
    theta = Math.acos(Vector.dotProduct(zaxis, directionVector));
    double[] xaxis = {1, 0, 0};
    double[] phiVector = {xNorm, yNorm, 0};
    //test
    // double[] phiVector = {0, -1, 0};
    double phiVectorMag = Vector.vectorMagnitude(phiVector);
    for (int m = 0; m <= 2; m++) {
      phiVector[m] /= phiVectorMag; // normalise the in-plane projection
    }
    phi = Math.acos(Vector.dotProduct(xaxis, phiVector));
    if (yNorm < 0) {
      phi = 2*Math.PI - phi; //so phi can be between 0 and 2pi not just pi
    }
    //determine if the electron is incident on the sample or not -
    boolean surrounding = !isMicrocrystalAt(previousX, previousY, 0); //Z = 0 as just looking at x and y
    // if it is a certain distance away from the sample ignore it entirely - if it is times 2?
    // continuation of startMonteCarlo per-electron loop: decide whether an
    // electron that starts in the surrounding is close enough to be worth tracking.
    boolean track = false;
    if (surrounding == true) {
      double distanceFrom = Math.pow(Math.pow(Math.abs(previousX) - (XDimension/2), 2) + Math.pow(Math.abs(previousY) - (YDimension/2), 2), 0.5);
      double distanceOf = Math.pow(Math.pow(XDimension, 2) + Math.pow(YDimension, 2), 0.5);
      if (distanceFrom < distanceOf) {
        track = true;
      }
    }
    //if it isn't I need to track it, untrack if exits this boundary I set up, deposit dose if something goes in the xtal
    //secondaries from the surrounding should only be tracked if they are heading in the right direction initially
    //remember to adjust how the Monte Carlo dose is adjusted accordingly
    if (coefCalc.isCryo() == false) {
      track = false; // no surrounding medium to track through
    }
    // per-electron state: counters, current energy and event probabilities
    boolean inelastic = false;
    boolean backscattered = false;
    int elasticCount = 0;
    double electronEnergy = startingEnergy;
    double energyLost = 0;
    double stoppingPower = 0, lambdaT = 0, PEL = 0, Pinel = 0, Pfse = 0, s = 0, xn = 0, yn = 0, zn = 0, Pinner=0;
    boolean entered = false;
    if (surrounding == true) {
      // electron starts in the surrounding: use surrounding-medium MFPs
      stoppingPower = startingStoppingPowerSurrounding;
      // lambdaT = 1 / (1/startingLambda_elSurrounding + 1/startingInelasticLambdaSurrounding);
      // lambdaT = 1 / (1/startingLambda_elSurrounding + 1/gosInelasticLambdaSur);
      lambdaT = 1 / (1/startingLambda_elSurrounding + 1/startingInelasticLambdaSurrounding);
      if (GOS == true) {
        if (gosInelasticLambdaSur > 0) {
          lambdaT = 1 / (1/startingLambda_elSurrounding + 1/gosInelasticLambdaSur);
        }
        else {
          lambdaT = startingLambda_elSurrounding;
        }
      }
      PEL = lambdaT / startingLambda_elSurrounding;
      Pinel = 1 - (lambdaT / startingLambda_elSurrounding);
      if (GOS == true) {
        if (startingInnershellLambdaSurrounding > 0) {
          Pinner = (gosInelasticLambdaSur/startingInnershellLambdaSurrounding);
        }
      }
      Pfse = startingInelasticLambdaSurrounding/startingFSELambdaSurrounding;
      // draw the first free path length from the exponential distribution
      double testRND = Math.random();
      s = -lambdaT*Math.log(testRND);
      // I need to check if it's going to intersect and what the distance is
      double intersectionDistance = 1000*getIntersectionDistance(previousX, previousY, previousZ, xNorm, yNorm, zNorm);
      double[] intersectionPoint = getIntersectionPoint(intersectionDistance, previousX, previousY, previousZ, xNorm, yNorm, zNorm);
      boolean pointInCrystal = isIntersectionInCrystal(intersectionPoint);
      if (intersectionDistance < s && pointInCrystal == true) {
        //then need to change region here and reset stuff
        surrounding = false;
        entered = true;
        if (GOS == false) {
          electronEnergy -= intersectionDistance * stoppingPower; //taken out for GOS
        }
        // intersection point comes back in um; positions are kept in nm
        previousX = intersectionPoint[0]*1000;
        previousY = intersectionPoint[1]*1000;
        previousZ = intersectionPoint[2]*1000;
      }
    }
    if (surrounding == false) {
      // electron is (now) in the crystal: use sample MFPs
      if (electronEnergy < startingEnergy) { //if this electron has entered from the surrounding
        stoppingPower = coefCalc.getStoppingPower(electronEnergy, false);
      }
      else {
        stoppingPower = startingStoppingPower;
      }
      // double lambdaT = startingLambda_el; //lambda in nm -//Just elastic
      // double lambdaT = 1 / (1/startingLambda_el + 1/startingFSELambda); //FSE one
      // double lambdaT = 1 / (1/startingLambda_el + 1/startingFSELambda + 1/startingPlasmonLambda);
      // double lambdaT = 1 / (1/startingLambda_el + 1/startingInnerShellLambda + 1/startingFSELambda);
      lambdaT = 1 / (1/startingLambda_el + 1/startingInelasticLambda);
      // lambdaT = 1 / (1/startingLambda_el + 1/gosInelasticLambda);
      // lambdaT = 1 / (1/startingLambda_el + 1/startingFSELambda);
      if (GOS == true) {
        if (gosInelasticLambda > 0) {
          lambdaT = 1 / (1/startingLambda_el + 1/gosInelasticLambda);
        }
        else {
          lambdaT = startingLambda_el;
        }
        if (startingInnerShellLambda > 0) {
          Pinner = (gosInelasticLambda/startingInnerShellLambda);
        }
      }
      PEL = lambdaT / startingLambda_el;
      Pinel = 1 - (lambdaT / startingLambda_el);
      // double Pplasmon = startingPlasmonLambda/ (startingFSELambda + startingPlasmonLambda);
      // double PinnerShell = startingFSELambda/(startingInnerShellLambda + startingFSELambda); //this is not making sense, it's all innner shell now
      // Pfse = startingInelasticLambda/startingFSELambda;
      Pfse = 1;
      double testRND =
      Math.random();
      s = -lambdaT*Math.log(testRND); // first free path length in the crystal
      //now I'm going to go through the coordinates
      // double ca = cx;
      // double cb = cy;
      // double cc = cz;
    }
    // candidate next position after travelling the drawn path length s
    xn = previousX + s * xNorm;
    yn = previousY + s * yNorm;
    zn = previousZ + s * zNorm;
    boolean exited = false;
    boolean scattered = false;
    int timesScattered = 0;
    //check if the electron has left the sample, if it has just do the dose of Z
    //if it has not left move onto the loop
    while (exited == false) {
      if (isMicrocrystalAt(xn, yn, zn) == true) {
        // event happens inside the crystal
        if (surrounding == true) {
          entered = true;
        }
        surrounding = false;
        ElementEM elasticElement = null;
        scattered = true;
        thisTriggered += 1;
        //update dose and energy and stoppingPower
        energyLost = s * stoppingPower; //will need to split this energy lost up to get the full spatially resolved dose model
        MonteCarloDose += energyLost; //keV
        //split the dose up into voxels
        // addDoseToVoxels(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost, beam, coefCalc);
        addDoseToRegion(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost);
        addDoseToImagedRegion(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost, beam);
        //reset
        previousTheta = theta;
        previousPhi = phi;
        previousX = xn;
        previousY = yn;
        previousZ = zn;
        //add an elastic collision
        //Determining if the scattering event was inelastic or elastic
        double RNDscatter = Math.random();
        // RNDscatter = 0; // test
        // double phi = 0, cosPhi = 1, psi = 0, AN = 0, AM = 0, V1 = 0, V2 = 0, V3 = 0, V4 = 0;
        if (RNDscatter < Pinel) { //Do inelastic
          inelastic = true;
          /*
          //if plasmon do plasmon
          double RNDplasmon = Math.random();
          if (RNDplasmon > Pplasmon) {
          theta = 0;
          totPlasmonEnergy += plasmaEnergy;
          }
          */
          //if no secondary elecrton produced (other type of inelastic interaction such as a plasmon)
          double RNDFSE = Math.random();
          // if (RNDFSE > Pfse) { //this was another interaction
          // theta = 0;
          // else { //else produce an FSE
          triggered += 1;
          // doPrimaryInelastic returns the polar deflection of the primary;
          // with GOS the energy transfer is reported via the energyLostGOS field.
          theta = doPrimaryInelastic(coefCalc, previousX, previousY, previousZ, electronEnergy, ionisationProbs, false, beam, i, previousTheta, previousPhi, Pinner, gosOuterIonisationProbs);
          if (GOS == true) {
            electronEnergy -= energyLostGOS;
          }
          // } //end if not plasmon
        } //end if inelastic scatter
        else { //else it stays false and the collision will be elastic
          elasticCount += 1;
          timesScattered += 1;
          MonteCarloTotElasticCount += 1;
          //reupdate elastic probs because Monte carlo seconadry may have messed it up
          double fix = coefCalc.getElectronElasticMFPL(electronEnergy, false);
          elasticProbs = coefCalc.getElasticProbs(false);
          theta = doPrimaryElastic(electronEnergy, elasticProbs, false);
        }
        //now further update the primary
        // compose the event deflection with the previous direction and wrap to [0, 2pi)
        phi = 2 * Math.PI * Math.random();
        theta = previousTheta + theta;
        if (theta >= (2 * Math.PI)) {
          theta -= 2*Math.PI;
        }
        phi = previousPhi + phi;
        if (phi >= (2 * Math.PI)) {
          phi -= 2*Math.PI;
        }
        xNorm = Math.sin(theta) * Math.cos(phi);
        yNorm = Math.sin(theta) * Math.sin(phi);
        zNorm = Math.cos(theta);
        //update stopping powers
        //get new stoppingPower
        // electronEnergy -= energyLost; //this needs to be changed for GOS
        stoppingPower = coefCalc.getStoppingPower(electronEnergy, false);
        //get new lambdaT
        double FSExSection = getFSEXSection(electronEnergy);
        double FSELambda = coefCalc.getFSELambda(FSExSection, false);
        double lambdaEl = coefCalc.getElectronElasticMFPL(electronEnergy, false);
        double lambdaInel = coefCalc.getElectronInelasticMFPL(electronEnergy, false);
        double innerShellLambda = coefCalc.betheIonisationxSection(electronEnergy, false);
        double plasmonLambda = coefCalc.getPlasmaMFPL(electronEnergy, false);
        // lambdaT = 1 / (1/lambdaEl + 1/FSELambda);
        // lambdaT = 1 / (1/lambdaEl + 1/FSELambda + 1/plasmonLambda);
        // lambdaT = 1 / (1/lambdaEl + 1/innerShellLambda + 1/FSELambda);
        // lambdaT = 1 / (1/lambdaEl);
        lambdaT = 1 / (1/lambdaEl + 1/lambdaInel);
        if (GOS == true) {
          gosInelasticLambda = coefCalc.getGOSInel(false, electronEnergy);
          gosOuterLambda = coefCalc.getGOSOuterLambda(false);
          gosOuterIonisationProbs =
          coefCalc.getGOSOuterShellProbs(false, gosOuterLambda);
          gosIonisationProbs = coefCalc.getGOSShellProbs(false, gosInelasticLambda);
          if (innerShellLambda > 0) {
            gosInelasticLambda = 1/(1/gosOuterLambda + 1/innerShellLambda);
          }
          else {
            gosInelasticLambda = gosOuterLambda;
          }
          lambdaT = 1 / (1/lambdaEl + 1/gosInelasticLambda);
        }
        // draw the next free path at the updated (post-event) energy
        s = -lambdaT*Math.log(Math.random());
        PEL = lambdaT / lambdaEl;
        Pinel = 1 - (lambdaT / lambdaEl);
        // Pfse = lambdaInel / FSELambda;
        Pfse = 1;
        if (GOS == true) {
          if (innerShellLambda > 0) {
            Pinner = gosInelasticLambda / innerShellLambda;
          }
        }
        // Pplasmon = plasmonLambda/ (FSELambda + plasmonLambda);
        // PinnerShell = FSELambda/(innerShellLambda + FSELambda);
        ionisationProbs = coefCalc.getAllShellProbs(false);
        elasticProbs = coefCalc.getElasticProbs(false);
        //update to new position
        xn = previousX + s * xNorm;
        yn = previousY + s * yNorm;
        zn = previousZ + s * zNorm;
      }
      else {
        // next event point is outside the crystal
        if (surrounding == false) {
          // electron leaves the crystal: deposit dose along the path to the
          // surface, then stop tracking it (exited = true).
          exited = true;
          //find the plane it is crossing somehow
          s = 1000 * getIntersectionDistance(previousX, previousY, previousZ, xNorm, yNorm, zNorm);
          //I'm going to get the point as well for now as it may be useful when doing apertures and stuff
          //It's also useful for backscattering!!!!
          double[] intersectionPoint = getIntersectionPoint(s, previousX, previousY, previousZ, xNorm, yNorm, zNorm);
          energyLost = s * stoppingPower;
          MonteCarloDose += energyLost; //keV
          //split the dose up into voxels
          // addDoseToVoxels(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost, beam, coefCalc);
          addDoseToRegion(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost);
          addDoseToImagedRegion(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost, beam);
          // leaving through the entrance face (or travelling upwards) counts as backscatter
          if (1000*intersectionPoint[2] == -ZDimension/2 || zNorm < 0) {
            numberBackscattered += 1;
            backscattered = true;
          }
        }
        else { // surrounding = true
          // track this electron until track = false - it is out of Z area or the extended area
          //check whether to track it or not
          if (track == true){
            double distanceFrom = Math.pow(Math.pow(Math.abs(xn) - (XDimension/2), 2) + Math.pow(Math.abs(yn) - (YDimension/2), 2), 0.5);
            double distanceOf = Math.pow(Math.pow(XDimension, 2) + Math.pow(YDimension, 2), 0.5);
            if (distanceFrom > distanceOf || zn > ZDimension/2 || zn < -ZDimension/2) {
              track = false;
            }
          }
          if (track == true) {
            // scattering event in the surrounding medium (mirrors the in-crystal
            // branch above but with the surrounding=true coefficient sets)
            previousTheta = theta;
            previousPhi = phi;
            previousX = xn;
            previousY = yn;
            previousZ = zn;
            //update dose and energy and stoppingPower
            energyLost = s * stoppingPower;
            double RNDscatter = Math.random();
            // RNDscatter = 0; // test
            // double phi = 0, cosPhi = 1, psi = 0, AN = 0, AM = 0, V1 = 0, V2 = 0, V3 = 0, V4 = 0;
            if (RNDscatter < Pinel) { //Do inelastic
              inelastic = true;
              /*
              //if plasmon do plasmon
              double RNDplasmon = Math.random();
              if (RNDplasmon > Pplasmon) {
              theta = 0;
              totPlasmonEnergy += plasmaEnergy;
              }
              */
              //if no secondary elecrton produced (other type of inelastic interaction such as a plasmon)
              double RNDFSE = Math.random();
              // if (RNDFSE > Pfse) { //this was another interaction
              // theta = 0;
              // else { //else produce an FSE
              triggered += 1;
              theta = doPrimaryInelastic(coefCalc, previousX, previousY, previousZ, electronEnergy, ionisationProbsSurrounding, true, beam, i,
              previousTheta, previousPhi, Pinner, gosOuterIonisationProbsSur);
              if (GOS == true) {
                electronEnergy -= energyLostGOS;
              }
              // } //end if not plasmon
            } //end if inelastic scatter
            else { //else it stays false and the collision will be elastic
              elasticCount += 1;
              timesScattered += 1;
              MonteCarloTotElasticCount += 1;
              double fix = coefCalc.getElectronElasticMFPL(electronEnergy, true);
              elasticProbsSurrounding = coefCalc.getElasticProbs(true);
              theta = doPrimaryElastic(electronEnergy, elasticProbsSurrounding, true);
            }
            //now further update the primary
            phi = 2 * Math.PI * Math.random();
            theta = previousTheta + theta;
            if (theta >= (2 * Math.PI)) {
              theta -= 2*Math.PI;
            }
            phi = previousPhi + phi;
            if (phi >= (2 * Math.PI)) {
              phi -= 2*Math.PI;
            }
            xNorm = Math.sin(theta) * Math.cos(phi);
            yNorm = Math.sin(theta) * Math.sin(phi);
            zNorm = Math.cos(theta);
            //update stopping powers
            //get new stoppingPower
            if (GOS == false) {
              electronEnergy -= energyLost; //not here for the GOS
            }
            stoppingPower = coefCalc.getStoppingPower(electronEnergy, true);
            //get new lambdaT
            double FSExSection = getFSEXSection(electronEnergy);
            double FSELambda = coefCalc.getFSELambda(FSExSection, true);
            double lambdaEl = coefCalc.getElectronElasticMFPL(electronEnergy, true);
            double lambdaInel = coefCalc.getElectronInelasticMFPL(electronEnergy, true);
            double innerShellLambdaSurrounding = coefCalc.betheIonisationxSection(electronEnergy, true);
            double plasmonLambda = coefCalc.getPlasmaMFPL(electronEnergy, true);
            lambdaT = 1 / (1/lambdaEl + 1/lambdaInel);
            if (GOS == true) {
              gosInelasticLambdaSur = coefCalc.getGOSInel(true, electronEnergy);
              gosOuterLambdaSur = coefCalc.getGOSOuterLambda(true);
              gosOuterIonisationProbsSur = coefCalc.getGOSOuterShellProbs(true, gosOuterLambdaSur);
              gosIonisationProbsSur = coefCalc.getGOSShellProbs(true, gosInelasticLambdaSur);
              if (innerShellLambdaSurrounding > 0) {
                gosInelasticLambdaSur = 1/(1/gosOuterLambdaSur + 1/innerShellLambdaSurrounding);
              }
              else {
                gosInelasticLambdaSur = gosOuterLambdaSur;
              }
              lambdaT = 1 / (1/lambdaEl + 1/gosInelasticLambdaSur);
            }
            // lambdaT = 1 / (1/lambdaEl + 1/FSELambda);
            // lambdaT = 1 / (1/lambdaEl + 1/FSELambda + 1/plasmonLambda);
            // lambdaT = 1 / (1/lambdaEl + 1/innerShellLambda + 1/FSELambda);
            // lambdaT = 1 / (1/lambdaEl);
            s = -lambdaT*Math.log(Math.random());
            // Pplasmon = plasmonLambda/ (FSELambda + plasmonLambda);
            // PinnerShell = FSELambda/(innerShellLambda + FSELambda);
            ionisationProbsSurrounding = coefCalc.getAllShellProbs(true);
            elasticProbsSurrounding = coefCalc.getElasticProbs(true);
            if (GOS == true) {
              if (innerShellLambdaSurrounding > 0) {
                Pinner = gosInelasticLambdaSur / innerShellLambdaSurrounding;
              }
            }
            //need to check if it crosses before it reaches s again and if it does update to this point
            double intersectionDistance = 1000*getIntersectionDistance(previousX, previousY, previousZ, xNorm, yNorm, zNorm);
            double[] intersectionPoint = getIntersectionPoint(intersectionDistance, previousX, previousY, previousZ, xNorm, yNorm, zNorm);
            boolean pointInCrystal = isIntersectionInCrystal(intersectionPoint);
            if (intersectionDistance < s && pointInCrystal == true) {
              //then need to change region here and reset stuff
              // re-entry into the crystal: switch back to sample coefficients
              surrounding = false;
              entered = true;
              // electronEnergy -= intersectionDistance * stoppingPower; //removed for GOS model
              double innerShellLambda = coefCalc.betheIonisationxSection(electronEnergy, false);
              if (GOS == true) {
                gosInelasticLambda = coefCalc.getGOSInel(false, electronEnergy);
                gosOuterLambda = coefCalc.getGOSOuterLambda(false);
                gosOuterIonisationProbs = coefCalc.getGOSOuterShellProbs(false, gosOuterLambda);
                gosIonisationProbs = coefCalc.getGOSShellProbs(false, gosInelasticLambda);
                if (innerShellLambda > 0) {
                  gosInelasticLambda = 1/(1/gosOuterLambda + 1/innerShellLambda);
                }
                else {
                  gosInelasticLambda = gosOuterLambda;
                }
              }
              previousX = intersectionPoint[0]*1000;
              previousY = intersectionPoint[1]*1000;
              previousZ = intersectionPoint[2]*1000;
              stoppingPower = coefCalc.getStoppingPower(electronEnergy, false);
              // crystal re-entry continued: rebuild sample-side MFPs at the current energy
              FSExSection = getFSEXSection(electronEnergy);
              FSELambda = coefCalc.getFSELambda(FSExSection, false);
              lambdaEl = coefCalc.getElectronElasticMFPL(electronEnergy, false);
              lambdaInel = coefCalc.getElectronInelasticMFPL(electronEnergy, false);
              lambdaT = 1 / (1/lambdaEl + 1/lambdaInel);
              //lambdaT = 1 / (1/lambdaEl + 1/FSELambda);
              if (GOS == true) {
                lambdaT = 1 / (1/lambdaEl + 1/gosInelasticLambda);
              }
              s = -lambdaT*Math.log(Math.random());
              if (GOS == true) {
                if (innerShellLambda > 0) {
                  Pinner = gosInelasticLambda / innerShellLambda;
                }
              }
              ionisationProbs = coefCalc.getAllShellProbs(false);
              elasticProbs = coefCalc.getElasticProbs(false);
            }
            PEL = lambdaT / lambdaEl;
            Pinel = 1 - (lambdaT / lambdaEl);
            Pfse = lambdaInel / FSELambda;
            //update to new position
            xn = previousX + s * xNorm;
            yn = previousY + s * yNorm;
            zn = previousZ + s * zNorm;
            //need to also check whether to track the primary electron anymore or give up on it
          }
          else {
            exited = true; // untracked surrounding electron: stop following it
          }
        }
      }
      // low-energy cut-off: below 0.05 keV stop tracking; remaining energy is
      // deposited locally if the electron is still inside the crystal
      if (electronEnergy < 0.05) {
        exited = true;
        if (isMicrocrystalAt(previousX, previousY, previousZ) == true) {
          MonteCarloDose += electronEnergy;
          // lowEnDose += electronEnergy;
        }
      }
    }
    if (timesScattered == 1) {
      MonteCarloSingleElasticCount += 1;
    }
    //check if this was a productive electron
    // productive = exactly one elastic event, no inelastic, no backscatter, and
    // never left/entered via the surrounding
    if (elasticCount == 1 && backscattered == false && inelastic == false && surrounding == false && entered == false) {
      MonteCarloProductive += 1;
    }
  } //end looping through electrons
  //Will need to do something about exiting the correct plane here
  //Will also need to add in inel scattering here for productive (and then FSE stuff)
  System.out.print(MonteCarloElectronsExited);
  totBreakdownEnergy = totFSEEnergy + totShellEnergy + totPlasmonEnergy;
  FSEsum = FSEsum/FSEcount;
  double fraction = newMonteCarloFSEEscape/MonteCarloDose;
  double fraction2 = newMonteCarloFSEEscape/totFSEEnergy;
  //MonteCarloDose -= MonteCarloFSEEscape;
  //MonteCarloDose -= newMonteCarloFSEEscape;
  //MonteCarloDose -= MonteCarloAugerEscape;
  //MonteCarloDose -= MonteCarloFlEscape;
  //MonteCarloDose += MonteCarloAugerEntry;
  MonteCarloDose += MonteCarloFSEEntry;
}

/**
 * Walks the cumulative shell-probability array for element {@code e} and returns
 * the index of the first entry exceeding {@code elementRND}, i.e. the ionised
 * shell selected by that random number; -1 if none (this element not ionised).
 *
 * @param e               element whose shell probabilities to search
 * @param ionisationProbs map of element -> cumulative shell probabilities
 * @param elementRND      uniform random number used for the selection
 * @return selected shell index, or -1
 */
private int findIfElementIonised(Element e, Map<Element, double[]> ionisationProbs, double elementRND) {
  double[] elementShellProbs = ionisationProbs.get(e);
  int shell = -1;
  for (int k = 0; k < elementShellProbs.length; k++) {
    if (elementShellProbs[k] > elementRND) { //Then this element is the one that was ionised
      shell = k;
      break;
    }
  }
  return shell;
}

/**
 * Scales the raw per-simulated-electron Monte Carlo accumulators up to the real
 * electron fluence and converts them to doses (MGy), charges and counts.
 * (Method continues beyond this chunk.)
 *
 * @param beam     beam geometry/exposure used for the scaling
 * @param coefCalc supplies density for the mass conversion
 */
private double[] processMonteCarloDose(Beam beam, CoefCalc coefCalc) {
  double exposedArea = 0;
  if (beam.getIsCircular() == false) {
    exposedArea = (getExposedX(beam) * getExposedY(beam)); //um^2
  }
  else {
    exposedArea = Math.PI * ((getExposedX(beam)/2) * (getExposedY(beam)/2)); //um^2
  }
  if (exposedArea > crystalSurfaceArea) {
    exposedArea = crystalSurfaceArea; // beam bigger than the crystal face
  }
  double imageArea = beam.getImageX() * beam.getImageY(); //um^2
  double imageVolume = imageArea * (sampleThickness/1000) * 1E-15; //dm^3
  double exposedVolume = exposedArea * (sampleThickness/1000) * 1E-15; //exposed volume in dm^3
  if (exposedVolume > crystalVolume) {
    exposedVolume = crystalVolume;
  }
  double exposure = beam.getExposure();
  double electronNumber = exposure * (exposedArea * 1E08);
  //change electron number now simulating whole area of beam
  double beamArea = beam.getBeamArea();
  electronNumber = exposure * (beamArea * 1E08);
  //do the elastic stuff
  // scale factor (electronNumber / numSimulatedElectrons) converts simulated
  // counts/energies to the real fluence; KEVTOJOULES converts keV to J
  MonteCarloTotElasticCount = MonteCarloTotElasticCount * (electronNumber / numSimulatedElectrons);
  MonteCarloSingleElasticCount = MonteCarloSingleElasticCount * (electronNumber / numSimulatedElectrons);
  MonteCarloProductiveImage = ((electronNumber/ numSimulatedElectrons) * (imageArea/beamArea)) * MonteCarloProductive;
  MonteCarloProductive = MonteCarloProductive * (electronNumber/ numSimulatedElectrons);
  MonteCarloGOSDose= (MonteCarloGOSDose * (electronNumber / numSimulatedElectrons)) * Beam.KEVTOJOULES;
  MonteCarloDose = (MonteCarloDose * (electronNumber / numSimulatedElectrons)) * Beam.KEVTOJOULES;
  MonteCarloImageDose =
        (MonteCarloImageDose * (electronNumber / numSimulatedElectrons)) * Beam.KEVTOJOULES;
    newMonteCarloFSEEscape = (newMonteCarloFSEEscape * (electronNumber / numSimulatedElectrons)) * Beam.KEVTOJOULES;
    MonteCarloFSEEntry = (MonteCarloFSEEntry * (electronNumber / numSimulatedElectrons)) * Beam.KEVTOJOULES;
    elasticEnergyTot = (elasticEnergyTot * (electronNumber / numSimulatedElectrons)) * Beam.KEVTOJOULES;
    totFSEEnergy = (totFSEEnergy * (electronNumber / numSimulatedElectrons)) * Beam.KEVTOJOULES;
    // Mass of the exposed volume: density g/cm^3 * 1000 -> g/dm^3, * dm^3 -> g, / 1000 -> kg.
    double exposedMass = (((coefCalc.getDensity()*1000) * exposedVolume) / 1000);  //in Kg
    double dose = (MonteCarloDose/exposedMass) / 1E06; //dose in MGy
    // NOTE(review): gosDose, totFSEDose, doseExited and doseEntered are computed but never
    // used below — presumably retained for debugger inspection; confirm before removing.
    double gosDose = (MonteCarloGOSDose/exposedMass) / 1E06; //dose in MGy
    double totFSEDose = (totFSEEnergy/exposedMass) / 1E06; //dose in MGy
    // NOTE(review): if Wcount is 0 these become NaN (double division) — no crash, but verify.
    avgW = avgW / Wcount;
    avgShell = avgShell / Wcount;
    double imageMass = (((coefCalc.getDensity()*1000) * imageVolume) / 1000);  //in Kg
    double imageDose = (MonteCarloImageDose/imageMass) / 1E06; //dose in MGy
    double doseExited = (newMonteCarloFSEEscape/exposedMass) / 1E06; //dose in MGy
    double doseEntered = (MonteCarloFSEEntry/exposedMass) / 1E06; //dose in MGy
    //charge stuff: net electrons lost from the sample, scaled to the real electron count.
    MonteCarloCharge = (MonteCarloElectronsExited - MonteCarloElectronsEntered) * (electronNumber / numSimulatedElectrons) * Beam.ELEMENTARYCHARGE;
    //need to add in Auger to these
    MonteCarloChargeDensity = MonteCarloCharge / (exposedVolume/1000); // C/m^3
    //process voxel dose
    /* (condensed commented-out code, see version control for the original)
       Per-voxel processing: convert each voxelDose[i][j][k] from keV to MGy via
       convertVoxEnergyToDose, average, rescale so the voxel average matches the bulk
       dose, then dump to "outputVoxDose.CSV" via writeDoseCSV (IOException printed). */
    //process region dose
    // For every region, convert keV to dose, average, then rescale so the region
    // average agrees with the bulk dose computed above.
    int count = 0;
    double sumDose = 0;
    for (int i = 0; i < NUM_REGIONS; i++) {
      regionDose[i] = convertRegionEnergyToDose(regionDose[i], i, beam, coefCalc);
      sumDose += regionDose[i];
      count += 1;
    }
    avgRegionDose = sumDose / count;
    double scaleFactor = dose / avgRegionDose;
    for (int i = 0; i < NUM_REGIONS; i++) {
      regionDose[i] *= scaleFactor;
    }
    double[] doses = {dose, imageDose};
    return doses;
  }

  /**
   * Total cross section for producing a fast secondary electron (FSE), integrated
   * analytically from a low-energy cut-off up to half the primary energy (Moller-type
   * relativistic form, after Murata et al.).
   *
   * @param electronEnergy primary electron energy in keV
   * @return cross section — comments below suggest cm^2/atom, but see the unit caveats
   */
  private double getFSEXSection(double electronEnergy) {
    double elementaryCharge = 4.80320425E-10; //units = esu = g^0.5 cm^1.5 s^-1
    double m = 9.10938356E-28; // in g
    double c = 29979245800.0; //in cm
    //classical for now
    //find the electron velocity in cm/s
    // double vsquared = ((electronEnergy*Beam.KEVTOJOULES * 2) / (m/1000)) * 10000; //(cm/s)^2
    // Relativistic beta^2 from total energy; csquared here is (c in m/s)^2.
    double csquared = Math.pow(c/100, 2);
    double Vo = electronEnergy * Beam.KEVTOJOULES;
    double betaSquared = 1- Math.pow((m/1000)*csquared/(Vo + (m/1000)*csquared), 2);
    double vsquared = (betaSquared * csquared)*10000;
    //the v squared being relativistic or not is what makes the difference
    double constant = (2* Math.PI * Math.pow(elementaryCharge, 4)) / (m*vsquared * (Vo*1000*10000));
    //So the equation in Murata is cross section per electron (i assume cm^2/electron). So need to
    //1) Work out electrons per unit volume
    //2) multiply to get cross section in cm^-1 and convert to nm^-1
    /* (condensed commented-out code, see version control)
       Earlier attempts kept for reference: a closed-form classical integral, a
       trapezoid-rule classical integral, and a trapezoid-rule relativistic (Murata)
       integral over omega = W/E. All superseded by the analytic form below. */
    //numerical integral of this
    double energyCutOff;
    // Lower integration limit as a fraction of the primary energy; comment in the
    // original claims this corresponds to a 14eV cut off, the hydrogen K shell energy.
    energyCutOff = (energyCUTOFF/1000.0)/electronEnergy;
    double restMassEnergy = 511; //keV
    double tau = electronEnergy/restMassEnergy;
    // Analytic antiderivative of the relativistic Moller form, evaluated between
    // omega = energyCutOff and omega = 0.5 (indistinguishability upper limit).
    double crossSection = (((2*tau+1)/Math.pow(tau+1, 2))*(Math.log((1/0.5)-1)) + Math.pow(tau/(tau+1), 2) - (1/0.5) - (1/(0.5-1)))
        - (((2*tau+1)/Math.pow(tau+1, 2))*(Math.log((1/energyCutOff)-1)) + Math.pow(tau/(tau+1), 2) - (1/energyCutOff) - (1/(energyCutOff-1)));
    crossSection*= constant;
    //Book classical
    /* (condensed commented-out code, see version control)
       "Book" classical and relativistic trapezoid integrals over recoil energy Q,
       from 2T/1000 to T/2 — both fully commented out in the original. */
    //book non-rel
    //book very rel
    return crossSection; //cm^2/atom //nm^2 per atom???
//Really not sure about units here } private double getRutherfordScreeningElement(ElementEM e, double electronEnergy) { double alpha = 0; alpha = 3.4E-3 * (Math.pow(e.getAtomicNumber(), 0.67)/electronEnergy); return alpha; } private int getNumberOfBins(double s) { double pixelDivisionSize = (1/crystalPixPerUMEM)/2; int pixelDivisionNumber = (int) StrictMath.round(s/pixelDivisionSize); return Math.max(pixelDivisionNumber, BIN_DIVISION); } private void inelasticFSEProduced(double electronEnergy) { //next stage is to track the FSE // 1) Use vector stuff to draw vector to point and determine energy by stopping power // make sure that I don't go to a negative energy // 2) Track the FSE by Monte Carlo //Use two methods to incorporate this // Method 1 - Only update the primary electron energy using the stopping power and subtract escape energy from the end //Method 2 - try incorporating the direct energy losses from this // - would expect these energy losses to be much lower but need to quantify } private double getShellBindingEnergy(Element collidedElement, int collidedShell) { double shellBindingEnergy = 0; switch (collidedShell) { case 0: shellBindingEnergy = collidedElement.getKEdge(); break; case 1: shellBindingEnergy = collidedElement.getL1Edge(); break; case 2: shellBindingEnergy = collidedElement.getL2Edge(); break; case 3: shellBindingEnergy = collidedElement.getL3Edge(); break; case 4: shellBindingEnergy = collidedElement.getM1Edge(); break; case 5: shellBindingEnergy = collidedElement.getM2Edge(); break; case 6: shellBindingEnergy = collidedElement.getM3Edge(); break; case 7: shellBindingEnergy = collidedElement.getM4Edge(); break; case 8: shellBindingEnergy = collidedElement.getM5Edge(); break; } return shellBindingEnergy; } private double doPrimaryInelastic(CoefCalc coefCalc, double previousX, double previousY, double previousZ, double electronEnergy, Map<Element, double[]> ionisationProbs, boolean surrounding, Beam beam, int i, double previousTheta, double 
      previousPhi, double Pinner, Map<Element, Double> gosOuterIonisationProbs) {
    //I'm now going to change this for the GOS model
    double theta = 0;
    double W = 0;  // energy loss of the primary for this event, keV
    boolean innerShell = false;  // NOTE(review): never read after initialisation
    double shellBindingEnergy = 0;
    Element collidedElement = null;
    int collidedShell = -1;
    //did this come from an inner shell?
    double RNDinnerShell = Math.random();
    // if (RNDinnerShell < PinnerShell) { //they're all going to be coming from inner shells now
    //Then this secondary electron came from an inner shell
    // innerShell = true;
    //determine which elemental shell it came from
    double elementRND = Math.random();
    boolean plasmon = false;
    if (GOS == true) {
      if (RNDinnerShell < Pinner) {
        // Inner-shell ionisation: walk the cumulative element/shell probabilities.
        for (Element e : ionisationProbs.keySet()) {
          collidedShell = findIfElementIonised(e, ionisationProbs, elementRND);
          if (collidedShell >= 0) {
            collidedElement = e;
            break;
          }
        }
      }
      else { //outer shell
        // NOTE(review): collidedShell is assigned BEFORE the ionisation test, so after
        // this loop it can be >= 0 even when no element matched (collidedElement stays
        // null) — that makes the plasmon branch (collidedShell == -1) below unreachable
        // via this path. Confirm whether the assignment belongs inside the if.
        for (Element e : gosOuterIonisationProbs.keySet()) {
          int[] electrons = coefCalc.getNumValenceElectronsSubshells(e);
          int numInnerShells = electrons[1];
          collidedShell = numInnerShells;
          if (findIfOuterShellIonised(e, gosOuterIonisationProbs, elementRND) == true){
            collidedElement = e;
            break;
          }
        }
      }
      if (collidedShell == -1) { //then this is a collision with the conduction band
        plasmon = true;
      }
      else {
        //shellBindingEnergy = getShellBindingEnergyGOS(collidedElement, collidedShell);
        shellBindingEnergy = getShellBindingEnergy(collidedElement, collidedShell);
      }
      //get the type of collision (distant longitudinal / distant transverse / close)
      int type = 0;
      if (plasmon == false) {
        type = getGOSInelasticType(coefCalc.getGOSVariable(surrounding).get(collidedElement), collidedShell);
      }
      else {
        type = getGOSInelasticTypePlasmon(coefCalc.getPlasmonVariable(surrounding));
      }
      //get energy loss
      double a = coefCalc.returnAdjustment();
      double Uk = 0;  // shell binding energy in eV (0 for plasmon)
      // double Uk = shellBindingEnergy*1000;
      double Wk = 0, Qak = 0, Q = 0;
      if (plasmon == false) {
        Uk = shellBindingEnergy*1000;
        Wk = coefCalc.getWkMolecule(a, collidedElement, collidedShell, surrounding);
        Qak = getQak(electronEnergy, Wk, Uk);
      }
      else {
        Uk = 0;
        Wk = coefCalc.getWcbAll(surrounding);
        Qak = Wk;
      }
      double Wak = WkToWak(electronEnergy, Wk, Uk);
      double Wdis = 3*Wak - 2*Uk;
      if (type == 0 || type == 1) { //then this was a distant collision
        //get recoil energy
        if (plasmon == true) {
          W = Wk/1000;
        }
        else {
          W = getEnergyLossDistant(Wdis, Uk)/1000; //in keV
        }
        Q = coefCalc.getRecoilEnergyDistant(electronEnergy, Wak, Qak);
        //get theta (new to add on to previous)
        if (type == 1) { //transverse — no deflection of the primary
          theta = previousTheta;
        }
        else { //longitudinal
          theta = getGOSPrimaryThetaLong(electronEnergy, Q, Wak, previousTheta);
        }
      }
      else { //a close collision
        if (plasmon == true) {
          W = Wk/1000;
        }
        else {
          // double k = samplek(electronEnergy, Qak);
          double k = samplek(electronEnergy, Uk);
          W = k*(electronEnergy+Uk/1000); //keV
        }
        theta = getGOSPrimaryThetaClose(electronEnergy, W, previousTheta);
      }
      //now I need to send out the secondary electron
      //get an angle and energy then recursively call ofc
      double minTrackEnergy = 0.05;  // keV; secondaries below this are not tracked
      double SEPreviousTheta = previousTheta;
      double SEPreviousPhi = previousPhi;
      double SEEnergy = W - Uk/1000;
      double SETheta = 0, SEPhi = 0, SExNorm = 0, SEyNorm = 0, SEzNorm = 0;
      if (SEEnergy > 0) {
        // totalIonisationEvents[doseTime] += 1; //is this right??? need to sort this as well!!!
//don't do this above if calling recursively as done at the top if (SEEnergy > minTrackEnergy) { // gosElectronDose[doseTime] += Uk/1000; //get theta if (type == 0 || type == 1) { //distant SETheta = secondaryThetaDistant(electronEnergy, Wak, Q, previousTheta); } else { //close SETheta = secondaryThetaClose(electronEnergy, W, SEPreviousTheta); } //get phi SEPhi = 2 * Math.PI * Math.random(); SEPhi = SEPreviousPhi + SEPhi; if (SEPhi >= (2 * Math.PI)) { SEPhi -= 2*Math.PI; } //now get normals SExNorm = Math.sin(SETheta) * Math.cos(SEPhi); SEyNorm = Math.sin(SETheta) * Math.sin(SEPhi); SEzNorm = Math.cos(SETheta); //send it out with the correct timestamp if (surrounding == false) { MonteCarloGOSDose += W; avgW += W; Wcount += 1; avgShell += shellBindingEnergy; } MonteCarloSecondaryElastic(coefCalc, SEEnergy, previousX, previousY, previousZ, SETheta, SEPhi, surrounding, beam, i); } else { //too low energy to track - work out what exactly I'm doing with dose! - need an SP way and a W way if (surrounding == false) { MonteCarloGOSDose += W; avgW += W; Wcount += 1; avgShell += shellBindingEnergy; } if (collidedElement.getAtomicNumber() > 2 && collidedShell < 4) { //only do fl or Auger if K or L shell and not H or He K shells FlAugerMonteCarlo(collidedElement, previousX, previousY, previousZ, collidedShell, coefCalc, surrounding, beam); } } // electronEnergy -= W; // lossSinceLastUpdate += W; //produce Auger electon should only be if it is from an inner shell of an element more than 2 // produceAugerElectron(coefCalc, timeStamp, collidedShell, collidedElement, xn, yn, zn, surrounding); //need to do an Auger here in some way } } //end if GOS == true /* // shellBindingEnergy = getShellBindingEnergy(collidedElement, collidedShell); double FSEtheta = 0, FSEphi = 0, FSEpreviousTheta = 0, FSEpreviousPhi = 0, FSExNorm = 0, FSEyNorm = 0, FSEzNorm = 0; FSEpreviousTheta = previousTheta; FSEpreviousPhi = previousPhi; //firstly calculate the FSE energy double epsilon = 
    getFSEEnergy(electronEnergy, shellBindingEnergy);
    double FSEEnergy = epsilon * electronEnergy;// - shellBindingEnergy;
    if (FSEEnergy > 0) { //so only if it happened
      totFSEEnergy += FSEEnergy; //tot energy of all
      totShellEnergy += shellBindingEnergy;
    }
    */
    /* (condensed commented-out legacy code, see version control for the original)
       Pre-GOS FSE treatment, all commented out in the original: Fl/Auger generation for
       K/L-shell ionisations of Z > 2; FSE and primary scattering angles from Moller
       kinematics (sin^2(alpha), sin^2(gamma) with t in rest-mass units); straight-line
       escape-distance tracking of the FSE with multi-slice stopping-power integration
       feeding MonteCarloFSEEscape; Fl/Auger tracking from the secondary; and the
       primary-angle update now done elsewhere. Superseded by the GOS path above. */
    //track the primary electron
    if (GOS == true) {
      // Expose this event's energy loss to the caller via the field the stepping loop reads.
      energyLostGOS = W;
    }
    return theta;
  }

  /**
   * Handles one elastic event of the primary electron: picks the scattering element,
   * samples the polar deflection (ELSEPA tables for 0.05-300 keV, screened Rutherford
   * otherwise) and, inside the crystal, tallies elastic knock-on/displacement energy.
   *
   * @return new polar angle theta (radians)
   */
  private double doPrimaryElastic(double electronEnergy, Map<ElementEM, Double> elasticProbs, boolean surrounding) {
    //now start the loop - clean up the first iteration into this later
    //Determine what element elastically scattered the electron so can choose an alpha correctly
    double elasticElementRND = Math.random();
    ElementEM elasticElement = null;
    for (ElementEM e : elasticProbs.keySet()) {
      if (elasticProbs.get(e) > elasticElementRND) { //Then this element is the one that was ionised
        elasticElement = e;
        break;
      }
    }
    //get the angles
    double alpha =
getRutherfordScreeningElement(elasticElement, electronEnergy); double RND = Math.random(); /* cosPhi = 1 - ((2*alpha * Math.pow(RND, 2))/(1+alpha-RND)); phi = Math.acos(cosPhi); */ double theta = Math.acos(1 - ((2*alpha * Math.pow(RND, 2))/(1+alpha-RND))); //get angle by ELSEPA stuff if ((electronEnergy <= 300) && (electronEnergy >= 0.05)) { theta = getPrimaryElasticScatteringAngle(electronEnergy, elasticElement.getAtomicNumber()); } double thisTheta = theta; //Impart elastic knock-on energy??? if (surrounding == false) { double Emax = electronEnergy * (1 + electronEnergy/1022)/(456*elasticElement.getAtomicWeight()); double m = 9.10938356E-28; double u = 1.660539040E-27; //kg/(g/mol) double restEnergy = 511; Emax = (2/elasticElement.getAtomicWeight())*(m/u)*electronEnergy*(2+electronEnergy/(restEnergy)); double Ed = 35; /* double nuclearMass = elasticElement.getAtomicWeight() * 1.660539040E-27; double c = 299792458; double csquared = c*c; double Mcsquared = nuclearMass * csquared; double KinE = electronEnergy * Beam.KEVTOJOULES; double sintheta = Math.pow(Math.sin(theta/2), 2); double energyTransmitted = (Math.pow(KinE, 2) / (Mcsquared)) * ((2 * sintheta) / (1 + sintheta *(2*(KinE)/Mcsquared) )); energyTransmitted /= Beam.KEVTOJOULES; elasticEnergyTot += energyTransmitted; */ double sintheta = Math.pow(Math.sin(thisTheta/2), 2); double en = (Emax/1000) * sintheta; elasticEnergyTot += en; if (Emax > Ed) { displacementEnergy += en; } } return theta; } private void MonteCarloSecondaryElastic(CoefCalc coefCalc, double FSEenergy, double previousX, double previousY, double previousZ, double FSEtheta, double FSEphi, boolean surrounding, Beam beam, int numSimSoFar) { //Will need to combine this with the inner shell stuff as well - means re-updating the inner shell x sections after I mess with them if (surrounding == true) { numFSEFromSurr += 1; } else { numFSEFromSample += 1; } //find the pixel that the electron is staring in int[] startingPixel = 
convertToPixelCoordinates(previousX, previousY, previousZ); int[] thisPixel = startingPixel; double energyLost = 0; double theta = FSEtheta; double phi = FSEphi; double electronEnergy = FSEenergy; double startingEnergy = FSEenergy; double startingStoppingPower = coefCalc.getStoppingPower(startingEnergy, surrounding); double stoppingPower = startingStoppingPower; //remove the starting dose from the original voxel /* if (surrounding == false) { voxelDose[startingPixel[0]][startingPixel[1]][startingPixel[2]] -= FSEenergy; } */ double startingLambda_el = coefCalc.getElectronElasticMFPL(startingEnergy, surrounding); Map<ElementEM, Double> elasticProbs = coefCalc.getElasticProbs(surrounding); double startingInnerShellLambda = coefCalc.betheIonisationxSection(startingEnergy, surrounding); // Map<Element, Double> ionisationProbs = coefCalc.getInnerShellProbs(); Map<Element, double[]> ionisationProbs = coefCalc.getAllShellProbs(surrounding); //Just do elastic for now and then incorporate inner shell // double lambdaT = startingLambda_el; double lambdaT = 0; if (startingInnerShellLambda > 0) { lambdaT = 1/((1/startingLambda_el) + (1/startingInnerShellLambda)); } else{ lambdaT = startingLambda_el; //should probably change the whole thing to the FSe model and just not track the extra electrons } double testRND = Math.random(); double s = -lambdaT*Math.log(testRND); double Pinel = 1 - (lambdaT / startingLambda_el); double xNorm = Math.sin(theta) * Math.cos(phi); double yNorm = Math.sin(theta) * Math.sin(phi); double zNorm = Math.cos(theta); if (Double.isNaN(xNorm)){ System.out.println("test"); } boolean track = true; boolean entered = false; double entryEnergy = 0; //determine if it crosses into the crystal before s // I need to check if it's going to intersect and what the distance is if (surrounding == true) { // this could be a potential source of error as it could deflect in so need to test this to make sure it is valid double intersectionDistance = 
1000*getIntersectionDistance(previousX, previousY, previousZ, xNorm, yNorm, zNorm); Double distanceObject = Double.valueOf(intersectionDistance); if (intersectionDistance < 0 || distanceObject.isNaN() || distanceObject.isInfinite()) { track = false; } if (track == true) { double[] intersectionPoint = getIntersectionPoint(intersectionDistance, previousX, previousY, previousZ, xNorm, yNorm, zNorm); boolean pointInCrystal = isIntersectionInCrystal(intersectionPoint); if (intersectionDistance < s && pointInCrystal == true) { //then need to change region here and reset stuff surrounding = false; entered = true; electronEnergy -= intersectionDistance * stoppingPower; entryEnergy = electronEnergy; previousX = intersectionPoint[0]*1000; previousY = intersectionPoint[1]*1000; previousZ = intersectionPoint[2]*1000; //update the stopping power and stuff stoppingPower = coefCalc.getStoppingPower(startingEnergy, surrounding); startingLambda_el = coefCalc.getElectronElasticMFPL(startingEnergy, surrounding); elasticProbs = coefCalc.getElasticProbs(surrounding); startingInnerShellLambda = coefCalc.betheIonisationxSection(startingEnergy, surrounding); // Map<Element, Double> ionisationProbs = coefCalc.getInnerShellProbs(); ionisationProbs = coefCalc.getAllShellProbs(surrounding); //Just do elastic for now and then incorporate inner shell // double lambdaT = startingLambda_el; if (startingInnerShellLambda > 0) { lambdaT = 1/((1/startingLambda_el) + (1/startingInnerShellLambda)); } else{ lambdaT = startingLambda_el; //should probably change the whole thing to the FSe model and just not track the extra electrons } testRND = Math.random(); s = -lambdaT*Math.log(testRND); Pinel = 1 - (lambdaT / startingLambda_el); } } } //Coulomb's law stuff will need to happen before here double electronNumber = beam.getExposure() * (beam.getBeamArea()*1E8); double[] electronPosition = {previousX, previousY, previousZ}; double[] chargePosition = {0, 0, 0}; double csquared = Math.pow(c, 2); double gamma 
= 0, newKineticEnergy = 0, kineticEnergyLossByCharge = 0, newVelocityMagnitude = 0; double[] newVelocityVector = new double[3]; double[] newVelocityUnitVector = new double[3]; MonteCarloCharge = (MonteCarloElectronsExited - MonteCarloElectronsEntered) * (electronNumber / numSimulatedElectrons) * ((double)numSimSoFar/numSimulatedElectrons) * Beam.ELEMENTARYCHARGE; if (considerCharge == true) { // MonteCarloCharge = 0; /* if (MonteCarloCharge != 0) { newVelocityVector = adjustVelocityVectorByCharge(electronPosition, chargePosition, s, electronEnergy, xNorm, yNorm, zNorm, coefCalc); newVelocityMagnitude = Vector.vectorMagnitude(newVelocityVector) /1E9; //m/s newVelocityUnitVector = Vector.normaliseVector(newVelocityVector); //update new xNorm. yNorm, zNorm xNorm = newVelocityUnitVector[0]; yNorm = newVelocityUnitVector[1]; zNorm = newVelocityUnitVector[2]; //update theta and phi theta = Math.acos(zNorm); phi = Math.asin(yNorm/Math.sin(theta)); if (Double.isNaN(xNorm)){ System.out.println("test"); } //work out the new kinetic energy gamma = 1 / Math.pow(1 - (Math.pow(newVelocityMagnitude, 2)/Math.pow(c, 2)), 0.5); //so if the electron is really close to the charge, the velocity becomes more than the speed of light and this break... 
newKineticEnergy = (gamma - 1) * m * Math.pow(c, 2)/Beam.KEVTOJOULES; // in keV kineticEnergyLossByCharge = electronEnergy - newKineticEnergy; //in keV } */ } double[] newTotalVelocityVector = new double[3]; if (considerCharge == true) { //now do the voxel charge here for (int i = 0; i < maxX; i++) { for (int j = 0; j < maxY; j++) { for (int k = 0; k < maxZ; k++) { if (voxelCharge[i][j][k] != 0) { int[] voxCoord = {i, j, k}; if (thisPixel != voxCoord) { //convert pixel coord to a cartesian coord chargePosition = convertToCartesianCoordinates(i, j, k); newVelocityVector = adjustVelocityVectorByCharge(electronPosition, chargePosition, s, electronEnergy, xNorm, yNorm, zNorm, coefCalc); for (int m = 0; m < 3; m++) { newTotalVelocityVector[m] += newVelocityVector[m]; //is this even right??? don't I want to some charge not velocity vectors } } } } } } newVelocityMagnitude = Vector.vectorMagnitude(newVelocityVector) /1E9; if (newVelocityMagnitude > 0) { //so there is a charge pulling newVelocityUnitVector = Vector.normaliseVector(newVelocityVector); //update new xNorm. 
yNorm, zNorm xNorm = newVelocityUnitVector[0]; yNorm = newVelocityUnitVector[1]; zNorm = newVelocityUnitVector[2]; //update theta and phi theta = Math.acos(zNorm); phi = Math.asin(yNorm/Math.sin(theta)); //work out the new kinetic energy gamma = 1 / Math.pow(1 - (Math.pow(newVelocityMagnitude, 2)/Math.pow(c, 2)), 0.5); // newKineticEnergy = ((gamma - 1) * m * Math.pow(c, 2)); // in Joules newKineticEnergy = ((gamma - 1) * m * Math.pow(c, 2))/Beam.KEVTOJOULES; // in keV // kineticEnergyLossByCharge = ((electronEnergy*Beam.KEVTOJOULES) - newKineticEnergy)/Beam.KEVTOJOULES; //in keV kineticEnergyLossByCharge = electronEnergy - newKineticEnergy; } } double xn = previousX + s * xNorm; double yn = previousY + s * yNorm; double zn = previousZ + s * zNorm; boolean exited = false, scattered = false; double previousTheta = 0, previousPhi = 0; if (electronEnergy < 0.05) { exited = true; lowEnDose -= electronEnergy; } while (exited == false) { if (isMicrocrystalAt(xn, yn, zn) == true) { //reset if (surrounding == true) { entered = true; entryEnergy = electronEnergy; } surrounding = false; scattered = true; //update dose and energy and stoppingPower energyLost = s * stoppingPower; //split the dose up into voxels // addDoseToVoxels(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost, beam, coefCalc); addDoseToRegion(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost); addDoseToImagedRegion(s, xNorm, yNorm, zNorm, previousX, previousY, previousZ, energyLost, beam); //energy lost from charge - charge energy not appropriate to count towards dose or get negative dose energyLost += kineticEnergyLossByCharge; //update position and angle previousTheta = theta; previousPhi = phi; previousX = xn; previousY = yn; previousZ = zn; double RNDscatter = Math.random(); if (RNDscatter < Pinel) { // If the scatter was an inner shell ionisation double shellBindingEnergy = 0; Element collidedElement = null; int collidedShell = -1; double elementRND = Math.random(); 
for (Element e : ionisationProbs.keySet()) { collidedShell = findIfElementIonised(e, ionisationProbs, elementRND); if (collidedShell >= 0) { collidedElement = e; break; } } shellBindingEnergy = getShellBindingEnergy(collidedElement, collidedShell); if (collidedElement.getAtomicNumber() > 2 && collidedShell < 4) { double shellFluorescenceYield = 0; double flauEnergy = 0; if (collidedShell == 0) { shellFluorescenceYield = collidedElement.getKShellFluorescenceYield(); flauEnergy = collidedElement.getKFluorescenceAverage(); } else if (collidedShell == 1) { shellFluorescenceYield = collidedElement.getL1ShellFluorescenceYield(); flauEnergy = collidedElement.getLFluorescenceAverage(); } else if (collidedShell == 2) { shellFluorescenceYield = collidedElement.getL2ShellFluorescenceYield(); flauEnergy = collidedElement.getLFluorescenceAverage(); } else if (collidedShell == 3){ shellFluorescenceYield = collidedElement.getL3ShellFluorescenceYield(); flauEnergy = collidedElement.getLFluorescenceAverage(); } if (electronEnergy > shellBindingEnergy && flauEnergy > 0 && !Double.isNaN(flauEnergy)) { //only a collision if it is physically possible //Do Fl or Auger //remove the flauenergy from this pixel int[] getPixel = convertToPixelCoordinates(xn, yn, zn); // voxelDose[getPixel[0]][getPixel[1]][getPixel[2]] -= flauEnergy; //RND for FL or Auger given it was that element double fluoresenceYieldKRND = Math.random(); // double KshellFluorescenceYield = collidedElement.getKShellFluorescenceYield(); if (fluoresenceYieldKRND <= shellFluorescenceYield) { //then it's fluorescence // get the absorption coefficient of the crystal // double flEnergy = collidedElement.getKFluorescenceAverage(); double absCoef = coefCalc.getEMFlAbsCoef(flauEnergy); //units um^-1 //get a random direction vector double SExNorm = Math.random(); double SEyNorm = Math.random(); double SEzNorm = Math.random(); //Draw the vector to the edge double flEscapeDist = getIntersectionDistance(previousX, previousY, previousZ, 
SExNorm, SEyNorm, SEzNorm); double escapeFraction = Math.exp(-absCoef * flEscapeDist); MonteCarloFlEscape += escapeFraction * flauEnergy; extraFlEscape += escapeFraction * flauEnergy; //add dose to voxels double energyRemained = 1- (escapeFraction * flauEnergy); // addDoseToVoxels(flEscapeDist, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, energyRemained, beam, coefCalc); addDoseToRegion(flEscapeDist, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, energyRemained); addDoseToImagedRegion(flEscapeDist, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, energyRemained, beam); } else { //need to do Auger electrons //Auger electron energy equals flEnergy - shell binding energy of Auger electron //for now ignore the shell binding energy so overestimating their significance // double augerEnergy = collidedElement.getKFluorescenceAverage(); // totAugerEnergy += flauEnergy; //get a random direction vector double SExNorm = Math.random(); double SEyNorm = Math.random(); double SEzNorm = Math.random(); //Draw the vector to the edge double augerEscapeDist = 1000 * getIntersectionDistance(previousX, previousY, previousZ, SExNorm, SEyNorm, SEzNorm); double augerStoppingPower = coefCalc.getStoppingPower(flauEnergy, false); double augerEnergyToEdge = augerStoppingPower * augerEscapeDist; if (augerEnergyToEdge < flauEnergy){ MonteCarloAugerEscape += flauEnergy - augerEnergyToEdge; extraAugerEscape += flauEnergy - augerEnergyToEdge; } //redistribute the dose double distanceToStop = flauEnergy/augerStoppingPower; double trackDistance = 0, augerEnergyLoss = 0; //Math.min(distanceToStop, augerEscapeDist); if (distanceToStop < augerEscapeDist) { //so loses all it's energy in the sample augerEnergyLoss = flauEnergy; trackDistance = distanceToStop; } else { //it escapes augerEnergyLoss = augerEnergyToEdge; trackDistance = augerEscapeDist; } // addDoseToVoxels(trackDistance, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, augerEnergyLoss, beam, 
coefCalc); addDoseToRegion(trackDistance, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, augerEnergyLoss); addDoseToImagedRegion(trackDistance, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, augerEnergyLoss, beam); } } } } else { double elasticElementRND = Math.random(); ElementEM elasticElement = null; for (ElementEM e : elasticProbs.keySet()) { if (elasticProbs.get(e) > elasticElementRND) { //Then this element is the one that was ionised elasticElement = e; break; } } //get the angles // double phi = 0, cosPhi = 1, psi = 0, AN = 0, AM = 0, V1 = 0, V2 = 0, V3 = 0, V4 = 0; // double alpha = getRutherfordScreeningElement(elasticElement, electronEnergy); double RND = Math.random(); /* cosPhi = 1 - ((2*alpha * Math.pow(RND, 2))/(1+alpha-RND)); phi = Math.acos(cosPhi); */ // theta = Math.acos(1 - ((2*alpha * Math.pow(RND, 2))/(1+alpha-RND))); //ELSEPA stuff theta = getPrimaryElasticScatteringAngle(electronEnergy, elasticElement.getAtomicNumber()); theta = previousTheta + theta; if (theta >= (2 * Math.PI)) { theta -= 2*Math.PI; } phi = 2 * Math.PI * Math.random(); phi = previousPhi + phi; if (phi >= (2 * Math.PI)) { phi -= 2*Math.PI; } //now further update the primary // psi = 2 * Math.PI * Math.random(); /* //x and y are the same as in Joy, so x will be the rotation axis AN = -(cx/cz); // will need to catch an error here if = 0 AM = 1 / (Math.pow(1 + AN*AN, 0.5)); V1 = AN * Math.sin(phi); V2 = AN*AM*Math.sin(phi); V3 = Math.cos(psi); V4 = Math.sin(psi); ca = (cx*cosPhi) + (V1*V3) + (cy*V2*V4); cb = (cy*cosPhi) + (V4*(cz*V1 - cx*V2)); cc = (cz*cosPhi) + (V2*V3) - (cy*V1*V4); */ xNorm = Math.sin(theta) * Math.cos(phi); yNorm = Math.sin(theta) * Math.sin(phi); zNorm = Math.cos(theta); if (Double.isNaN(xNorm)){ System.out.println("test"); } } //update stopping powers //get new stoppingPower electronEnergy -= energyLost; stoppingPower = coefCalc.getStoppingPower(electronEnergy, false); //get new lambdaT double lambdaEl = 
coefCalc.getElectronElasticMFPL(electronEnergy, false); double innerShellLambda = coefCalc.betheIonisationxSection(electronEnergy, false); if (innerShellLambda > 0) { lambdaT = 1 / ((1/lambdaEl)+(1/innerShellLambda)); } else { lambdaT = lambdaEl; } s = -lambdaT*Math.log(Math.random()); elasticProbs = coefCalc.getElasticProbs(false); ionisationProbs = coefCalc.getAllShellProbs(false); Pinel = 1 - (lambdaT / lambdaEl); //update the position and kinetic energy from the charge if (electronEnergy >= 0.05) { MonteCarloCharge = (MonteCarloElectronsExited - MonteCarloElectronsEntered) * (electronNumber / numSimulatedElectrons) * ((double)numSimSoFar/numSimulatedElectrons) * Beam.ELEMENTARYCHARGE; // if (considerCharge == false) { // MonteCarloCharge = 0; if (considerCharge == true) { /* if (MonteCarloCharge != 0) { electronPosition[0] = previousX; electronPosition[1] = previousY; electronPosition[2] = previousZ; //chargePosition = {0, 0, 0}; newVelocityVector = adjustVelocityVectorByCharge(electronPosition, chargePosition, s, electronEnergy, xNorm, yNorm, zNorm, coefCalc); newVelocityMagnitude = Vector.vectorMagnitude(newVelocityVector) /1E9; newVelocityUnitVector = Vector.normaliseVector(newVelocityVector); //update new xNorm. 
yNorm, zNorm xNorm = newVelocityUnitVector[0]; yNorm = newVelocityUnitVector[1]; zNorm = newVelocityUnitVector[2]; //update theta and phi theta = Math.acos(zNorm); phi = Math.asin(yNorm/Math.sin(theta)); if (Double.isNaN(xNorm)){ System.out.println("test"); } //work out the new kinetic energy gamma = 1 / Math.pow(1 - (Math.pow(newVelocityMagnitude, 2)/Math.pow(c, 2)), 0.5); newKineticEnergy = (gamma - 1) * m * Math.pow(c, 2)/Beam.KEVTOJOULES; // in keV kineticEnergyLossByCharge = electronEnergy - newKineticEnergy; //in keV } else { kineticEnergyLossByCharge = 0; } */ } if (considerCharge == true) { for (int i = 0; i < maxX; i++) { for (int j = 0; j < maxY; j++) { for (int k = 0; k < maxZ; k++) { if (voxelCharge[i][j][k] != 0) { int[] voxCoord = {i, j, k}; if (thisPixel != voxCoord) { //convert pixel coord to a cartesian coord chargePosition = convertToCartesianCoordinates(i, j, k); newVelocityVector = adjustVelocityVectorByCharge(electronPosition, chargePosition, s, electronEnergy, xNorm, yNorm, zNorm, coefCalc); for (int m = 0; m < 3; m++) { newTotalVelocityVector[m] += newVelocityVector[m]; } } } } } } newVelocityMagnitude = Vector.vectorMagnitude(newVelocityVector) /1E9; if (newVelocityMagnitude > 0) { //so there is a charge pulling newVelocityUnitVector = Vector.normaliseVector(newVelocityVector); //update new xNorm. 
yNorm, zNorm xNorm = newVelocityUnitVector[0]; yNorm = newVelocityUnitVector[1]; zNorm = newVelocityUnitVector[2]; //update theta and phi theta = Math.acos(zNorm); phi = Math.asin(yNorm/Math.sin(theta)); //work out the new kinetic energy gamma = 1 / Math.pow(1 - (Math.pow(newVelocityMagnitude, 2)/Math.pow(c, 2)), 0.5); // newKineticEnergy = ((gamma - 1) * m * Math.pow(c, 2)); // in Joules newKineticEnergy = ((gamma - 1) * m * Math.pow(c, 2))/Beam.KEVTOJOULES; // in keV // kineticEnergyLossByCharge = ((electronEnergy*Beam.KEVTOJOULES) - newKineticEnergy)/Beam.KEVTOJOULES; //in keV kineticEnergyLossByCharge = electronEnergy - newKineticEnergy; } else { kineticEnergyLossByCharge = 0; } } } //update to new position xn = previousX + s * xNorm; yn = previousY + s * yNorm; zn = previousZ + s * zNorm; } else { if (surrounding == false) { exited = true; //I need to add the distance bit here - multislice double escapeDist = 1000 * getIntersectionDistance(previousX, previousY, previousZ, xNorm, yNorm, zNorm); double FSEStoppingPower = coefCalc.getStoppingPower(electronEnergy, false); double energyToEdge = FSEStoppingPower * escapeDist; if (energyToEdge < electronEnergy){ //the FSE has escaped double energyLostStep = 0, totFSEenLostLastStep = 0; double newEnergy = electronEnergy; for (int j = 0; j < 10; j++) { //I will need to play around with the amount of slicing when I am writing up energyLostStep = (escapeDist/10) * FSEStoppingPower; newEnergy -= energyLostStep; totFSEenLostLastStep += energyLostStep; FSEStoppingPower = coefCalc.getStoppingPower(newEnergy, false); if (newEnergy < 0.05) { if (newEnergy > 0) { totFSEenLostLastStep += newEnergy; lowEnDose -= newEnergy; } break; } } if (newEnergy > 0) { // MonteCarloFSEEscape += newEnergy; if (entered == false) { //it started here newMonteCarloFSEEscape += newEnergy; MonteCarloElectronsExited += 1; //add this charge to the pixel it came from voxelCharge[startingPixel[0]][startingPixel[1]][startingPixel[2]] += 
Beam.ELEMENTARYCHARGE * (electronNumber / numSimulatedElectrons); //split the dose up into voxels // addDoseToVoxels(escapeDist, xNorm, yNorm, zNorm, previousX, previousY, previousZ, totFSEenLostLastStep, beam, coefCalc); addDoseToRegion(escapeDist, xNorm, yNorm, zNorm, previousX, previousY, previousZ, totFSEenLostLastStep); addDoseToImagedRegion(escapeDist, xNorm, yNorm, zNorm, previousX, previousY, previousZ, totFSEenLostLastStep, beam); } else { MonteCarloFSEEntry += entryEnergy - newEnergy; //here the entered FSE has escaped again //split the dose up into voxels // addDoseToVoxels(escapeDist, xNorm, yNorm, zNorm, previousX, previousY, previousZ, totFSEenLostLastStep, beam, coefCalc); addDoseToRegion(escapeDist, xNorm, yNorm, zNorm, previousX, previousY, previousZ, totFSEenLostLastStep); addDoseToImagedRegion(escapeDist, xNorm, yNorm, zNorm, previousX, previousY, previousZ, totFSEenLostLastStep, beam); } } } else { //FSE has stopped in the sample...just /* if (entered == true && electronEnergy > 0.05) { // here the entered FSE has stopped in the sample so all energy stays in sample MonteCarloFSEEntry += entryEnergy; MonteCarloElectronsEntered += 1; //add negative charge to this pixel thisPixel = convertToPixelCoordinates(xn, yn, zn); voxelCharge[thisPixel[0]][thisPixel[1]][thisPixel[2]] += Beam.ELEMENTARYCHARGE * (electronNumber / numSimulatedElectrons); addDoseToVoxels(escapeDist, xNorm, yNorm, zNorm, previousX, previousY, previousZ, electronEnergy, beam, coefCalc); } */ } } else { //surrounding = true double distanceFrom = Math.pow(Math.pow(Math.abs(xn) - (XDimension/2), 2) + Math.pow(Math.abs(yn) - (YDimension/2), 2), 0.5); double distanceOf = Math.pow(Math.pow(XDimension, 2) + Math.pow(YDimension, 2), 0.5); if (distanceFrom > distanceOf || zn > ZDimension/2 || zn < -ZDimension/2) { track = false; } if (track == true) { previousTheta = theta; previousPhi = phi; previousX = xn; previousY = yn; previousZ = zn; //update dose and energy and stoppingPower 
energyLost = s * stoppingPower; energyLost += kineticEnergyLossByCharge; double RNDscatter = Math.random(); if (RNDscatter < Pinel) { // If the scatter was an inner shell ionisation //do nothing } //end if inelastic scatter else { //else it stays false and the collision will be elastic //elastic just want to get the angle double elasticElementRND = Math.random(); ElementEM elasticElement = null; for (ElementEM e : elasticProbs.keySet()) { if (elasticProbs.get(e) > elasticElementRND) { //Then this element is the one that was ionised elasticElement = e; break; } } //get the angles // double phi = 0, cosPhi = 1, psi = 0, AN = 0, AM = 0, V1 = 0, V2 = 0, V3 = 0, V4 = 0; double alpha = getRutherfordScreeningElement(elasticElement, electronEnergy); double RND = Math.random(); /* cosPhi = 1 - ((2*alpha * Math.pow(RND, 2))/(1+alpha-RND)); phi = Math.acos(cosPhi); */ theta = Math.acos(1 - ((2*alpha * Math.pow(RND, 2))/(1+alpha-RND))); //ELSEPA stuff theta = getPrimaryElasticScatteringAngle(electronEnergy, elasticElement.getAtomicNumber()); theta = previousTheta + theta; if (theta >= (2 * Math.PI)) { theta -= 2*Math.PI; } phi = 2 * Math.PI * Math.random(); phi = previousPhi + phi; if (phi >= (2 * Math.PI)) { phi -= 2*Math.PI; } xNorm = Math.sin(theta) * Math.cos(phi); yNorm = Math.sin(theta) * Math.sin(phi); zNorm = Math.cos(theta); if (Double.isNaN(xNorm)){ System.out.println("test"); } } //update stopping powers //get new stoppingPower electronEnergy -= energyLost; stoppingPower = coefCalc.getStoppingPower(electronEnergy, true); //get new lambdaT double FSExSection = getFSEXSection(electronEnergy); double lambdaEl = coefCalc.getElectronElasticMFPL(electronEnergy, true); double innerShellLambda = coefCalc.betheIonisationxSection(electronEnergy, true); if (innerShellLambda > 0) { lambdaT = 1 / (1/lambdaEl + 1/innerShellLambda); } else { lambdaT = 1 / (1/lambdaEl); } s = -lambdaT*Math.log(Math.random()); ionisationProbs = coefCalc.getAllShellProbs(true); elasticProbs = 
coefCalc.getElasticProbs(true); //need to check if it crosses before it reaches s again and if it does update to this point double intersectionDistance = 1000*getIntersectionDistance(previousX, previousY, previousZ, xNorm, yNorm, zNorm); double[] intersectionPoint = getIntersectionPoint(intersectionDistance, previousX, previousY, previousZ, xNorm, yNorm, zNorm); boolean pointInCrystal = isIntersectionInCrystal(intersectionPoint); if (intersectionDistance < s && pointInCrystal == true) { //then need to change region here and reset stuff surrounding = false; entered = true; electronEnergy -= intersectionDistance * stoppingPower; entryEnergy = electronEnergy; previousX = intersectionPoint[0]*1000; previousY = intersectionPoint[1]*1000; previousZ = intersectionPoint[2]*1000; stoppingPower = coefCalc.getStoppingPower(electronEnergy, false); FSExSection = getFSEXSection(electronEnergy); lambdaEl = coefCalc.getElectronElasticMFPL(electronEnergy, false); innerShellLambda = coefCalc.betheIonisationxSection(electronEnergy, false); if (innerShellLambda > 0) { lambdaT = 1 / (1/lambdaEl + 1/innerShellLambda); } else { lambdaT = 1 / (1/lambdaEl); } s = -lambdaT*Math.log(Math.random()); elasticProbs = coefCalc.getElasticProbs(surrounding); ionisationProbs = coefCalc.getAllShellProbs(surrounding); } Pinel = 1 - (lambdaT / lambdaEl); //Charge stuff if (electronEnergy >= 0.05) { MonteCarloCharge = (MonteCarloElectronsExited - MonteCarloElectronsEntered) * (electronNumber / numSimulatedElectrons) * ((double)numSimSoFar/numSimulatedElectrons) * Beam.ELEMENTARYCHARGE; // if (considerCharge == false) { // MonteCarloCharge = 0; if (considerCharge == true) { /* if (MonteCarloCharge != 0) { electronPosition[0] = previousX; electronPosition[1] = previousY; electronPosition[2] = previousZ; //chargePosition = {0, 0, 0}; newVelocityVector = adjustVelocityVectorByCharge(electronPosition, chargePosition, s, electronEnergy, xNorm, yNorm, zNorm, coefCalc); newVelocityMagnitude = 
Vector.vectorMagnitude(newVelocityVector)/1E9; newVelocityUnitVector = Vector.normaliseVector(newVelocityVector); //update new xNorm. yNorm, zNorm xNorm = newVelocityUnitVector[0]; yNorm = newVelocityUnitVector[1]; zNorm = newVelocityUnitVector[2]; //update theta and phi theta = Math.acos(zNorm); phi = Math.asin(yNorm/Math.sin(theta)); if (Double.isNaN(xNorm)){ System.out.println("test"); } //work out the new kinetic energy gamma = 1 / Math.pow(1 - (Math.pow(newVelocityMagnitude, 2)/Math.pow(c, 2)), 0.5); newKineticEnergy = (gamma - 1) * m * Math.pow(c, 2)/Beam.KEVTOJOULES; // in keV kineticEnergyLossByCharge = electronEnergy - newKineticEnergy; //in keV } else { kineticEnergyLossByCharge = 0; } */ } if (considerCharge == true) { for (int i = 0; i < maxX; i++) { for (int j = 0; j < maxY; j++) { for (int k = 0; k < maxZ; k++) { if (voxelCharge[i][j][k] != 0) { int[] voxCoord = {i, j, k}; if (thisPixel != voxCoord) { //convert pixel coord to a cartesian coord chargePosition = convertToCartesianCoordinates(i, j, k); newVelocityVector = adjustVelocityVectorByCharge(electronPosition, chargePosition, s, electronEnergy, xNorm, yNorm, zNorm, coefCalc); for (int m = 0; m < 3; m++) { newTotalVelocityVector[m] += newVelocityVector[m]; } } } } } } newVelocityMagnitude = Vector.vectorMagnitude(newVelocityVector) /1E9; if (newVelocityMagnitude > 0) { //so there is a charge pulling newVelocityUnitVector = Vector.normaliseVector(newVelocityVector); //update new xNorm. 
yNorm, zNorm xNorm = newVelocityUnitVector[0]; yNorm = newVelocityUnitVector[1]; zNorm = newVelocityUnitVector[2]; //update theta and phi theta = Math.acos(zNorm); phi = Math.asin(yNorm/Math.sin(theta)); //work out the new kinetic energy gamma = 1 / Math.pow(1 - (Math.pow(newVelocityMagnitude, 2)/Math.pow(c, 2)), 0.5); // newKineticEnergy = ((gamma - 1) * m * Math.pow(c, 2)); // in Joules newKineticEnergy = ((gamma - 1) * m * Math.pow(c, 2))/Beam.KEVTOJOULES; // in keV // kineticEnergyLossByCharge = ((electronEnergy*Beam.KEVTOJOULES) - newKineticEnergy)/Beam.KEVTOJOULES; //in keV kineticEnergyLossByCharge = electronEnergy - newKineticEnergy; } else { kineticEnergyLossByCharge = 0; } } } //update to new position xn = previousX + s * xNorm; yn = previousY + s * yNorm; zn = previousZ + s * zNorm; if (Double.isNaN(xNorm)){ System.out.println("test"); } //need to also check whether to track the primary electron anymore or } else { exited = true; } } } if (electronEnergy < 0.05) { // play with this and maybe graph it exited = true; thisPixel = convertToPixelCoordinates(previousX, previousY, previousZ); lowEnDose -= electronEnergy; if (surrounding == false && entered == false) { //the FSE was from the sample and never left //redistribute it's charge within the sample if (thisPixel != startingPixel) { //add negative charge to this pixel voxelCharge[thisPixel[0]][thisPixel[1]][thisPixel[2]] -= Beam.ELEMENTARYCHARGE * (electronNumber / numSimulatedElectrons); //add positive charge to the original pixel voxelCharge[startingPixel[0]][startingPixel[1]][startingPixel[2]] += Beam.ELEMENTARYCHARGE * (electronNumber / numSimulatedElectrons); } //add last bit of dose to voxel it stopped in if (Double.isNaN(electronEnergy)) { System.out.println("Test"); } voxelDose[thisPixel[0]][thisPixel[1]][thisPixel[2]] += electronEnergy; if (Double.isNaN(voxelDose[thisPixel[0]][thisPixel[1]][thisPixel[2]])) { System.out.println("Test"); } } if (surrounding == false && entered == true) { // here 
            // (comment continued from previous line) the entered FSE has
            // stopped in the sample so all energy stays in sample
            MonteCarloFSEEntry += entryEnergy;
            MonteCarloElectronsEntered += 1;
            //add negative charge to this pixel
            voxelCharge[thisPixel[0]][thisPixel[1]][thisPixel[2]] += Beam.ELEMENTARYCHARGE * (electronNumber / numSimulatedElectrons);
            // debug trap: flag NaN energies before they poison the dose grid
            if (Double.isNaN(electronEnergy)) {
              System.out.println("Test");
            }
            // electron has thermalised: deposit its remaining energy in this voxel
            voxelDose[thisPixel[0]][thisPixel[1]][thisPixel[2]] += electronEnergy;
            if (Double.isNaN(voxelDose[thisPixel[0]][thisPixel[1]][thisPixel[2]])) {
              System.out.println("Test");
            }
          }
        }
      }

  /**
   * Chooses the starting direction vector for an electron entering the sample
   * at (previousX, previousY, previousZ). A random point in the aperture plane
   * above the entry point is chosen and the normalised vector from that point
   * to the entry point is returned, modelling the beam convergence semi-angle.
   *
   * @param beam      beam description (semi-angle in mrad, aperture radius, beam X/Y size)
   * @param previousX entry x coordinate
   * @param previousY entry y coordinate
   * @param previousZ entry z coordinate
   * @return unit direction vector {x, y, z}
   */
  private double[] getElectronStartingDirection(Beam beam, double previousX, double previousY, double previousZ) {
    double beamSemiAngle = beam.getSemiAngle(); //in mrad
    if (beamSemiAngle == 0) {
      // fallback when the beam supplies no semi-angle — NOTE(review): 10 mrad assumed, confirm
      beamSemiAngle = 10;
    }
    double beamApertureRadius = beam.getApertureRadius();
    if (beamApertureRadius == 0) {
      beamApertureRadius = 1.2; // how many times bigger the aperture is than the beam
    }
    double beamRadius = Math.pow(Math.pow((beam.getBeamX()/2),2) + Math.pow(beam.getBeamY()/2, 2), 0.5);
    double extraLength = beamRadius * (beamApertureRadius - 1);
    //now get the aperture point
    double signX = (previousX >= 0) ? 1 : -1;
    double signY = (previousY >= 0) ? 1 : -1;
    double apertureX = signX*(Math.random() * beamApertureRadius) + previousX;
    double apertureY = signY*(Math.random() * beamApertureRadius) + previousY;
    double apertureZ = previousZ - (extraLength/Math.tan(beamSemiAngle/1000));
    double[] coordinateFrom = {apertureX, apertureY, apertureZ};
    double[] coordinateTo = {previousX, previousY, previousZ};
    double[] directionVector = Vector.vectorBetweenPoints(coordinateFrom, coordinateTo);
    double magnitude = Vector.vectorMagnitude(directionVector);
    for (int v = 0; v <= 2; v ++) {
      directionVector[v] /= magnitude; //this is to normalise it
    }
    return directionVector;
  }

  /**
   * Samples an elastic (deflection) angle for the primary electron from
   * tabulated differential cross sections, lazily loading the per-element
   * angle table from file the first time it is needed. Tables are split into
   * a low-energy and a high-energy (> 20 keV) set.
   *
   * @param electronEnergy electron energy in keV
   * @param atomicNumber   element whose table to sample from
   * @return sampled deflection angle in radians
   */
  private double getPrimaryElasticScatteringAngle(double electronEnergy, int atomicNumber){
    boolean highEnergy = false;
    if (electronEnergy > 20) {
      highEnergy = true;
    }
    //determine if need to get data from file or it's already loaded
    boolean getFile = mapPopulated(highEnergy, atomicNumber);
    //get the right file if I need to
    if (getFile == true) {
      TreeMap<Double, double[]> elementData = new TreeMap<Double, double[]>();
      try {
        elementData = getAngleFileData(highEnergy, atomicNumber);
      } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
      }
      //now add the file data to the global array
      if (highEnergy == true) {
        highEnergyAngles[atomicNumber] = elementData;
      } else {
        lowEnergyAngles[atomicNumber] = elementData;
      }
    }
    //Now use the data in the global array to work out the angle
    //get nearest energy
    Double energyKey = returnNearestEnergy(highEnergy, atomicNumber, electronEnergy);
    //should probably interpolate the values here tbh....
    // will do at some point
    //get the differential cross sections for that energy of the element
    double[] energyAngleProbs = null;
    if (highEnergy == true) {
      energyAngleProbs = highEnergyAngles[atomicNumber].get(energyKey);
    } else {
      energyAngleProbs = lowEnergyAngles[atomicNumber].get(energyKey);
    }
    //get the angle from this
    double deflectionAngle = returnDeflectionAngle(highEnergy, energyAngleProbs);
    // debug trap for NaN angles
    if (Double.isNaN(deflectionAngle)){
      System.out.println("test");
    }
    return deflectionAngle;
  }

  /**
   * Samples the fractional energy epsilon = W/E transferred to a fast
   * secondary electron (FSE) for a primary of the given energy (keV),
   * using a relativistic (Moller-type) energy-loss distribution with a
   * low-energy cut-off of energyCUTOFF eV.
   * Also accumulates FSEcount/FSEsum running statistics as a side effect.
   *
   * NOTE(review): the parameter shellBindingEnergy and the local variable
   * 'omega' are both unused here — confirm whether they are leftovers.
   *
   * @param electronEnergy     primary electron energy in keV
   * @param shellBindingEnergy binding energy of the ionised shell (currently unused)
   * @return epsilon, the sampled fraction of the primary energy given to the FSE
   */
  private double getFSEEnergy(double electronEnergy, double shellBindingEnergy) {
    double RNDFSEEnergy = Math.random();
    double energyCutOff = (energyCUTOFF/1000.0)/electronEnergy;
    double tau = electronEnergy/511;
    double alphaParam = Math.pow(tau/(tau+1), 2);
    double betaParam = (2*tau + 1)/Math.pow(tau+1, 2);
    double gammaParam = (1/energyCutOff)-(1/(1-energyCutOff))-(alphaParam*energyCutOff)-(betaParam*Math.log((1-energyCutOff)/((electronEnergy*energyCutOff)/511)));
    double omegaParam = RNDFSEEnergy*(gammaParam + (alphaParam/2)) - gammaParam;
    double epsilon = (omegaParam-2-betaParam+Math.pow(Math.pow(omegaParam-2-betaParam, 2) + 4*(omegaParam+alphaParam-2*betaParam), 0.5)) / (2*(omegaParam+alphaParam-2*betaParam));
    double omega = 1 / ((1/energyCutOff) - ((1/energyCutOff)-2)*RNDFSEEnergy);
    // double omega = 1 / (100 - 98*Math.random());
    FSEcount +=1;
    FSEsum += epsilon*electronEnergy;
    return epsilon;
  }

  /**
   * Decides whether an inner-shell vacancy relaxes by fluorescence or by
   * Auger emission, and tracks the escape/entry of the resulting photon or
   * electron so the deposited dose is corrected accordingly.
   *
   * @param collidedElement element whose shell was ionised
   * @param previousX       x coordinate of the ionisation event
   * @param previousY       y coordinate of the ionisation event
   * @param previousZ       z coordinate of the ionisation event
   * @param collidedShell   shell index: 0 = K, 1 = L1, 2 = L2, 3 = L3
   * @param coefCalc        coefficient calculator for absorption/stopping powers
   * @param surrounding     true when the event occurred in the surrounding, not the crystal
   * @param beam            beam description (used for dose bookkeeping)
   */
  private void FlAugerMonteCarlo(Element collidedElement, double previousX, double previousY, double previousZ,
      int collidedShell, CoefCalc coefCalc, boolean surrounding, Beam beam) {
    double shellFluorescenceYield = 0;
    double flauEnergy = 0;
    // map the shell index to its fluorescence yield and mean photon energy
    if (collidedShell == 0) {
      shellFluorescenceYield = collidedElement.getKShellFluorescenceYield();
      flauEnergy = collidedElement.getKFluorescenceAverage();
    } else if (collidedShell == 1) {
      shellFluorescenceYield = collidedElement.getL1ShellFluorescenceYield();
      flauEnergy = collidedElement.getLFluorescenceAverage();
    } else if (collidedShell == 2) {
shellFluorescenceYield = collidedElement.getL2ShellFluorescenceYield(); flauEnergy = collidedElement.getLFluorescenceAverage(); } else if (collidedShell == 3){ shellFluorescenceYield = collidedElement.getL3ShellFluorescenceYield(); flauEnergy = collidedElement.getLFluorescenceAverage(); } //Do Fl or Auger //RND for FL or Auger given it was that element double fluoresenceYieldKRND = Math.random(); // double KshellFluorescenceYield = collidedElement.getKShellFluorescenceYield(); if(flauEnergy > 0 && !Double.isNaN(flauEnergy)) { //subtract dose from the voxel that's this is in if (surrounding == false) { int[] getPixel = convertToPixelCoordinates(previousX, previousY, previousZ); // voxelDose[getPixel[0]][getPixel[1]][getPixel[2]] -= flauEnergy; } if (fluoresenceYieldKRND <= shellFluorescenceYield) { //then it's fluorescence // get the absorption coefficient of the crystal // double flEnergy = collidedElement.getKFluorescenceAverage(); if (surrounding == false) { double absCoef = coefCalc.getEMFlAbsCoef(flauEnergy); //units um^-1 //get a random direction vector double SExNorm = Math.random(); double SEyNorm = Math.random(); double SEzNorm = Math.random(); //Draw the vector to the edge double flEscapeDist = getIntersectionDistance(previousX, previousY, previousZ, SExNorm, SEyNorm, SEzNorm); double escapeFraction = Math.exp(-absCoef * flEscapeDist); MonteCarloFlEscape += escapeFraction * flauEnergy; numFL += 1; //add dose to voxels double energyRemained = flauEnergy- (escapeFraction * flauEnergy); // addDoseToVoxels(flEscapeDist, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, energyRemained, beam, coefCalc); addDoseToRegion(flEscapeDist, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, energyRemained); addDoseToImagedRegion(flEscapeDist, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, energyRemained, beam); } //if it's in the surrounding don't bother with fluorescence } else { //need to do Auger electrons //Auger electron energy 
equals flEnergy - shell binding energy of Auger electron //for now ignore the shell binding energy so overestimating their significance // double augerEnergy = collidedElement.getKFluorescenceAverage(); totAugerEnergy += flauEnergy; numAuger += 1; //get a random direction vector double SExNorm = Math.random(); double SEyNorm = Math.random(); double SEzNorm = Math.random(); //Draw the vector to the edge if (surrounding == false) { double augerEscapeDist = 1000* getIntersectionDistance(previousX, previousY, previousZ, SExNorm, SEyNorm, SEzNorm); double augerStoppingPower = coefCalc.getStoppingPower(flauEnergy, false); double augerEnergyToEdge = augerStoppingPower * augerEscapeDist; if (augerEnergyToEdge < flauEnergy){ MonteCarloAugerEscape += flauEnergy - augerEnergyToEdge; MonteCarloElectronsExited += 1; } //redistribute the dose double distanceToStop = flauEnergy/augerStoppingPower; double trackDistance = 0, augerEnergyLoss = 0; //Math.min(distanceToStop, augerEscapeDist); if (distanceToStop < augerEscapeDist) { //so loses all it's energy in the sample augerEnergyLoss = flauEnergy; trackDistance = distanceToStop; } else { //it escapes augerEnergyLoss = augerEnergyToEdge; trackDistance = augerEscapeDist; } // addDoseToVoxels(trackDistance, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, augerEnergyLoss, beam, coefCalc); addDoseToRegion(trackDistance, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, augerEnergyLoss); addDoseToImagedRegion(trackDistance, SExNorm, SEyNorm, SEzNorm, previousX, previousY, previousZ, augerEnergyLoss, beam); } else { //surrounding = true Double augerEntryDist = 1000* getIntersectionDistance(previousX, previousY, previousZ, SExNorm, SEyNorm, SEzNorm); boolean pointInCrystal = false; if (augerEntryDist <= 0 || augerEntryDist.isNaN() || augerEntryDist.isInfinite()){ //do nothing } else { //see if in crystal double[] interSectionPoint = getIntersectionPoint(augerEntryDist, previousX, previousY, previousZ, SExNorm, 
SEyNorm, SEzNorm); } if (pointInCrystal == true) { double augerStoppingPower = coefCalc.getStoppingPower(flauEnergy, true); double augerEnergyToEdge = augerStoppingPower * augerEntryDist; if (augerEnergyToEdge < flauEnergy){ MonteCarloAugerEntry += flauEnergy - augerEnergyToEdge; MonteCarloElectronsEntered += 1; } } //I am overestimating here a bit because the Auger can't enter and come back out again, //given the small significance of Auger this doesn't matter much //in the voxel model I'm not going to have auger electrons enter at all for now! } } } } private InputStreamReader locateFile(String filePath) throws UnsupportedEncodingException, FileNotFoundException{ InputStream is = getClass().getResourceAsStream("/" + filePath); if (is == null) { is = new FileInputStream(filePath); } return new InputStreamReader(is, "US-ASCII"); } private boolean mapPopulated(boolean highEnergy, int atomicNumber) { if (highEnergy == true) { if (highEnergyAngles[atomicNumber] == null) { return true; } else { return false; } } else { if (lowEnergyAngles[atomicNumber] == null) { return true; } else { return false; } } } //--put it in here when I have copy and paste back private TreeMap<Double, double[]> getAngleFileData(boolean highEnergy, int atomicNum) throws IOException{ String elementNum = String.valueOf(atomicNum) + ".csv"; String filePath = ""; if (highEnergy == true) { filePath = "constants/above_20000/" + elementNum; } else { filePath = "constants/below_20000/" + elementNum; } InputStreamReader isr = locateFile(filePath); BufferedReader br = new BufferedReader(isr); TreeMap<Double, double[]> elementData = new TreeMap<Double, double[]>(); String line; String[] components; int count = -1; while ((line = br.readLine()) != null) { count +=1 ; components = line.split(","); if (count > 0) { //if this is not the first line Double energy = Double.valueOf(components[0]); String[] angleProbsString = Arrays.copyOfRange(components, 1, components.length); double[] angleProbs = new 
double[angleProbsString.length];
        for (int i = 0; i < angleProbsString.length; i++) {
          angleProbs[i] = Double.parseDouble(angleProbsString[i]);
        }
        //Now add this to the local treemap
        elementData.put(energy, angleProbs);
      }
    }
    // NOTE(review): br is never closed here — consider try-with-resources
    return elementData;
  }

  /**
   * Returns the tabulated energy key nearest to electronEnergy in the cached
   * angle table for this element. Valid sampling range is 0.05–300 keV;
   * energies above 300 keV are clamped to the 300 keV entry, and a missing
   * floor/ceiling key falls back to whichever neighbour exists.
   *
   * @param highEnergy     selects the high- or low-energy table
   * @param atomicNumber   element whose table to search
   * @param electronEnergy electron energy in keV
   * @return nearest tabulated energy key (0. if energy is below range)
   */
  private Double returnNearestEnergy(boolean highEnergy, int atomicNumber, double electronEnergy) {
    Double nearestEnergy = 0.;
    if (electronEnergy >= 0.05 && electronEnergy <= 300) {
      Double beforeKey = 0.;
      Double afterKey = 0.;
      if (highEnergy == true) {
        beforeKey = highEnergyAngles[atomicNumber].floorKey(electronEnergy);
        afterKey = highEnergyAngles[atomicNumber].ceilingKey(electronEnergy);
      } else {
        beforeKey = lowEnergyAngles[atomicNumber].floorKey(electronEnergy);
        afterKey = lowEnergyAngles[atomicNumber].ceilingKey(electronEnergy);
      }
      // floorKey/ceilingKey return null off the ends of the map
      if (beforeKey == null) {
        beforeKey = 0.;
      }
      if (afterKey == null) {
        afterKey = 0.;
      }
      beforeKey = (beforeKey == 0.) ? afterKey: beforeKey;
      afterKey = (afterKey == 0.) ? beforeKey: afterKey;
      if (Math.abs(electronEnergy - beforeKey) <= Math.abs(electronEnergy-afterKey)) {
        nearestEnergy = beforeKey;
      } else {
        nearestEnergy = afterKey;
      }
    }
    if (electronEnergy > 300) {
      nearestEnergy = 300.0;
    }
    return nearestEnergy;
  }

  /**
   * Converts the tabulated per-angle probabilities into a cumulative
   * distribution, samples an index from it, then maps that index onto a
   * deflection angle in radians. High-energy tables use a piecewise
   * logarithmic index-to-degrees mapping; low-energy tables are 1 degree
   * per index.
   *
   * @param highEnergy      whether the high-energy index mapping applies
   * @param energyAngleProbs per-angle (unnormalised) probabilities
   * @return sampled deflection angle in radians
   */
  private double returnDeflectionAngle(boolean highEnergy, double[] energyAngleProbs) {
    double totalProb = 0;
    for (int i = 0; i < energyAngleProbs.length; i++) {
      totalProb += energyAngleProbs[i];
    }
    double[] probPerAngle = new double[energyAngleProbs.length];
    double sumProb = 0;
    // build the normalised cumulative distribution
    for (int j = 0; j < energyAngleProbs.length; j++) {
      sumProb += energyAngleProbs[j];
      probPerAngle[j] = sumProb/totalProb;
    }
    double RND = Math.random();
    double index = 0;
    for (int k = 0; k < probPerAngle.length; k++) {
      if (probPerAngle[k] >= RND) {
        index = k;
        break;
      }
    }
    //convert the index to an angle
    double angleDegrees = 0;
    if (highEnergy == true) {
      double startFactor = 0.;
      int factor = 0;
      double divideFactor = 4;
      double minusFactor = 0;
      double modFactor = 0;
      if (index >=1 && index < 146) {
        minusFactor = 1;
        modFactor = 36;
        factor = (int) ((int) (index - minusFactor)/modFactor);
        startFactor = Math.pow(10, factor) * 0.0001;
        divideFactor = 4;
      } else if (index >= 146 && index < 236) {
        // factor = (int) (index-146)/100;
        startFactor = 1;
        divideFactor = 10;
        minusFactor = 146;
        modFactor = 90;
      } else if (index >= 236 && index <= 296) {
        startFactor = 10; //go until 25
        divideFactor = 40;
        minusFactor = 236;
        modFactor = 60;
      } else if (index > 296) {
        startFactor = 25;
        divideFactor = 50;
        minusFactor = 296;
        modFactor = 1000000; //just anything super high as all but first one
      }
      angleDegrees = startFactor + (((index-minusFactor)%modFactor)*(startFactor/divideFactor));
      if (Double.isNaN(angleDegrees)){
        // System.out.println("test");
        angleDegrees = 0;
      }
    } else {
      // low-energy tables are tabulated at one-degree steps
      angleDegrees = 1.0 * index;
    }
    double angleRadians = angleDegrees * Math.PI/180;
    /*
    if (index > 296 && highEnergy == true) {
      System.out.println("test");
    }
    */
    return angleRadians;
  }

  /**
   * Returns true when the cumulative ionisation probability of element e
   * exceeds the random draw, i.e. e is the element whose outer shell was
   * ionised in this collision.
   */
  private boolean findIfOuterShellIonised(Element e, Map<Element, Double> ionisationProbs, double elementRND) {
    boolean hit = false;
    double elementShellProbs = ionisationProbs.get(e);
    if (elementShellProbs > elementRND) { //Then this element is the one that was ionised
      hit = true;
    }
    return hit;
  }

  /**
   * Samples which GOS inelastic collision type (index 0–2) occurred for the
   * given shell; column 3 of shellProbs holds the normalising total.
   */
  private int getGOSInelasticType(double[][] shellProbs, int shellIndex) {
    double runningSum = 0;
    double RND = Math.random();
    int type = 0;
    for (int i = 0; i < 3; i++) {
      runningSum += shellProbs[shellIndex][i]/shellProbs[shellIndex][3];
      if (runningSum > RND) { //then this type of collision
        type = i;
        break;
      }
    }
    return type;
  }

  /** Same sampling as getGOSInelasticType but for the plasmon channel. */
  private int getGOSInelasticTypePlasmon(double[] plasmonProbs) {
    double runningSum = 0;
    double RND = Math.random();
    int type = 0;
    for (int i = 0; i < 3; i++) {
      runningSum += plasmonProbs[i]/plasmonProbs[3];
      if (runningSum > RND) { //then this type of collision
        type = i;
        break;
      }
    }
    return type;
  }

  /**
   * Effective resonance energy Wak for a GOS shell: Wk when the primary
   * energy is high enough, otherwise interpolated towards the threshold.
   * E is in keV; Wk and Uk are in eV (hence the E*1000).
   */
  public double WkToWak(double E, double Wk, double Uk) {
    if (E*1000 > 3*Wk - 2*Uk) {
      return Wk;
    } else {
      return (E*1000+2*Uk)/3;
    }
  }

  public double getQak(double E, double
Wk, double Uk) {
    // effective Qak: Uk at high primary energy, scaled down near threshold
    // (E in keV; Wk, Uk in eV, hence E*1000)
    if (E*1000 > 3*Wk - 2*Uk) {
      return Uk;
    } else {
      return Uk * (E*1000/(3*Wk-2*Uk));
    }
  }

  /**
   * Samples the energy loss W for a distant collision, distributed between
   * Uk and Wdis.
   *
   * @param Wdis distant resonance energy (eV)
   * @param Uk   shell binding energy (eV)
   * @return sampled energy loss in eV
   */
  public double getEnergyLossDistant(double Wdis, double Uk){
    double RND = Math.random();
    double W = Wdis - Math.pow(RND*Math.pow(Wdis-Uk, 2), 0.5);
    return W; //returning eV
  }

  /**
   * Relativistic factor a = (V0/(V0+mc^2))^2 used in the close-collision
   * sampling formulas.
   */
  public double getClosea(double E) { // E in keV
    double m = 9.10938356E-31; // in Kg
    double c = 299792458;
    double csquared = c*c; // (m/s)^2
    double Vo = (E) * Beam.KEVTOJOULES;
    double a = Math.pow(Vo/(Vo+m*csquared), 2);
    return a;
  }

  /**
   * Polar scattering angle of the primary electron after a distant
   * longitudinal GOS collision, accumulated onto previousTheta (wrapped
   * at 2*pi).
   *
   * @param EkeV          primary energy in keV
   * @param Q             recoil energy (Joules — NOTE(review): confirm units)
   * @param WakeV         energy transfer in eV
   * @param previousTheta current polar angle in radians
   * @return updated polar angle in radians
   */
  public double getGOSPrimaryThetaLong(double EkeV, double Q, double WakeV, double previousTheta) {
    double m = 9.10938356E-31; // in Kg
    double c = 299792458;
    double csquared = c*c; // (m/s)^2
    double theta = 0;
    //again make sure I sort out units in here
    double E = EkeV * Beam.KEVTOJOULES;
    double Wak = (WakeV/1000)*Beam.KEVTOJOULES;
    double numerator = E*(E+2*m*csquared) + (E-Wak)*(E-Wak+2*m*csquared) - Q*(Q+2*m*csquared);
    double denominator = 2*Math.pow(E*(E+2*m*csquared)*(E-Wak)*(E-Wak+2*m*csquared), 0.5);
    double cosTheta = numerator/denominator;
    theta = Math.acos(cosTheta);
    theta = previousTheta + theta;
    if (theta >= (2 * Math.PI)) {
      theta -= 2*Math.PI;
    }
    return theta;
  }

  /**
   * Draws a candidate reduced energy loss k for a close collision
   * (pre-sample for the rejection loop in samplek).
   */
  public double getRandomk(double E, double Qk) { //E in keV and Qk in eV
    double kc = Math.max(Qk, Wcc) / (E*1000 + Qk); //get units right ofc
    double k = 0;
    double a = getClosea(E);
    double RND = Math.random();
    double zeta = RND * (1.0+5.0*a*kc/2.0);
    if (zeta < 1) {
      k = kc / (1-zeta*(1-2*kc));
    } else {
      k = kc + (zeta-1)*(1-2*kc)/(5*a*kc);
    }
    return k; //dimensionless
  }

  /**
   * Unnormalised PDF of the reduced energy loss k for close collisions,
   * with hard cutoffs at k = kc and k = 0.5 via the Heaviside factors.
   */
  public double getPDFk(double E, double k, double Qk) {
    double kc = Math.max(Qk, Wcc) / (E*1000 + Qk); //get units right ofc
    double a = getClosea(E); //assume this is the gamma one not sturnheimer one
    double PDF = (Math.pow(k, -2) + Math.pow(1-k, -2) - 1/(k*(1-k)) + a*(1+1/(k*(1-k)))) * heavisideStepFunction(k-kc) * heavisideStepFunction(0.5-k);
    return PDF;
  }

  //now do the rejection algorithm
  public double samplek(double E, double Qk) {
double a = getClosea(E); boolean exit = false; double k = 0; int count = 0; while (exit == false) { k = getRandomk(E, Qk); double RND = Math.random(); double LHS = RND * (1 + 5*a*Math.pow(k, 2)); double RHS = Math.pow(k, 2) * getPDFk(E, k, Qk); if (LHS < RHS) { exit = true; } // testing clause count += 1; if (count > 10000) { System.out.println("the random sampling of k is always being rejected"); break; } } return k; } private int heavisideStepFunction(double x) { if (x >= 0) { return 1; } else { return 0; } } private double getGOSPrimaryThetaClose(double EkeV, double WkeV, double previousTheta) { double m = 9.10938356E-31; // in Kg double c = 299792458; double csquared = c*c; // (m/s)^2 double E = EkeV*Beam.KEVTOJOULES; double W = WkeV*Beam.KEVTOJOULES; double cosTheta = Math.pow(((E-W)/E) * ((E+ 2*m*csquared)/ (E-W+2*m*csquared)),0.5); double theta = Math.acos(cosTheta); theta = previousTheta + theta; if (theta >= (2 * Math.PI)) { theta -= 2*Math.PI; } return theta; } public double secondaryThetaClose(double EkeV, double WkeV, double previousTheta) { double m = 9.10938356E-31; // in Kg double c = 299792458; double csquared = c*c; // (m/s)^2 double W = WkeV * Beam.KEVTOJOULES; double E = EkeV * Beam.KEVTOJOULES; double cosTheta = Math.pow((W/E)*((E+2*m*csquared)/(W+2*m*csquared)), 0.5); double theta = Math.acos(cosTheta); theta = previousTheta + theta; if (theta >= (2 * Math.PI)) { theta -= 2*Math.PI; } return theta; } public double secondaryThetaDistant(double E, double WakeV, double Q, double previousTheta) { double m = 9.10938356E-31; // in Kg double c = 299792458; double csquared = c*c; // (m/s)^2 double Vo = E * Beam.KEVTOJOULES; double betaSquared = 1- Math.pow(m*csquared/(Vo + m*csquared), 2); double Wak = (WakeV/1000)*Beam.KEVTOJOULES; double cosTheta = Math.pow(((Math.pow(Wak, 2)/betaSquared)/(Q*(Q+2*m*csquared))) *Math.pow(1+(Q*(Q+2*m*csquared)-Math.pow(Wak, 2))/(2*Wak*(Vo+m*csquared)), 2),0.5); double theta = Math.acos(cosTheta); theta = previousTheta 
+ theta; if (theta >= (2 * Math.PI)) { theta -= 2*Math.PI; } return theta; } private boolean isMicrocrystalAt(final double x, final double y, final double z) { //Note that this is absolutely only right for a cuboid at the moment //This can stay as a quick test //this quick test actually messes with the program and it's imperfect placing of pixels if ((x > XDimension/2) || (x < -XDimension/2)) { return false; } if ((y > YDimension/2) || (y < -YDimension/2)) { return false; } if ((z > ZDimension/2) || (z < -ZDimension/2)) { return false; } //now do the crystal occupancy stuff //convert xyz to ijk int[] pixelCoords = convertToPixelCoordinates(x, y, z); boolean[] occ = crystOccEM[pixelCoords[0]][pixelCoords[1]][pixelCoords[2]]; //This means that if has already been done don't do it again // Really needed to speed up Monte Carlo if (!occ[0]) { occ[1] = calculateCrystalOccupancy(x, y, z); occ[0] = true; } return occ[1]; } private int[] convertToPixelCoordinates(final double x, final double y, final double z) { double[] xMinMax = this.minMaxVertices(0, verticesEM); double[] yMinMax = this.minMaxVertices(1, verticesEM); double[] zMinMax = this.minMaxVertices(2, verticesEM); int i = (int) StrictMath.round(((x/1000) - xMinMax[0]) * crystalPixPerUMEM); int j = (int) StrictMath.round(((y/1000) - yMinMax[0]) * crystalPixPerUMEM); int k = (int) StrictMath.round(((z/1000) - zMinMax[0]) * crystalPixPerUMEM); int[] pixelCoords = {i, j, k}; return pixelCoords; } private double[] convertToCartesianCoordinates(final int i, final int j, final int k) { double[] xMinMax = this.minMaxVertices(0, verticesEM); double[] yMinMax = this.minMaxVertices(1, verticesEM); double[] zMinMax = this.minMaxVertices(2, verticesEM); double x = ((i/crystalPixPerUMEM) + xMinMax[0])*1000; double y = ((j/crystalPixPerUMEM) + yMinMax[0])*1000; double z = ((k/crystalPixPerUMEM) + zMinMax[0])*1000; double[] cartesianCoords = {x, y, z}; return cartesianCoords; } private int[] getMaxPixelCoordinates() { double[] 
xMinMax = this.minMaxVertices(0, verticesEM); double[] yMinMax = this.minMaxVertices(1, verticesEM); double[] zMinMax = this.minMaxVertices(2, verticesEM); Double xdim = xMinMax[1] - xMinMax[0]; Double ydim = yMinMax[1] - yMinMax[0]; Double zdim = zMinMax[1] - zMinMax[0]; int nx = (int) StrictMath.round(xdim * crystalPixPerUMEM) + 1; int ny = (int) StrictMath.round(ydim * crystalPixPerUMEM) + 1; int nz = (int) StrictMath.round(zdim * crystalPixPerUMEM) + 1; int[] maxCoord = {nx, ny, nz}; return maxCoord; } private void addDoseToVoxels(double s, double xNorm, double yNorm, double zNorm, double previousX, double previousY, double previousZ , double energyLost, Beam beam, CoefCalc coefCalc) { int numberBins = getNumberOfBins(s); double binLength = s / numberBins; double energyDivision = energyLost/numberBins; double xPos, yPos, zPos = 0; for (int j = 1; j <= numberBins; j++) { xPos = previousX + (binLength *j) * xNorm; yPos = previousY + (binLength *j) * yNorm; zPos = previousZ + (binLength *j) * zNorm; if (isMicrocrystalAt(xPos, yPos, zPos) == true) { // needed for electrons that enter from the surrounding addDoseToPosition(xPos, yPos, zPos, energyDivision, beam, coefCalc); } } } private void addDoseToRegion(double s, double xNorm, double yNorm, double zNorm, double previousX, double previousY, double previousZ , double energyLost) { if (energyLost > 0) { double regionBinDistance = Math.min((XDimension/2)/10, (YDimension/2)/10); int numBins = (int) Math.ceil(s/regionBinDistance); double binLength = s/numBins; double energyDivision = energyLost/numBins; double xPos, yPos, zPos = 0; for (int j = 1; j <= numBins; j++) { xPos = previousX + (binLength *j) * xNorm; yPos = previousY + (binLength *j) * yNorm; zPos = previousZ + (binLength *j) * zNorm; if (isMicrocrystalAt(xPos, yPos, zPos) == true) { // needed for electrons that enter from the surrounding //find region int indexX = (int) ((((XDimension/2)-Math.abs(xPos)))/(0.5*XDimension/NUM_REGIONS)); int indexY = (int) 
((((YDimension/2)-Math.abs(yPos)))/(0.5*YDimension/NUM_REGIONS)); int index = Math.min(indexX, indexY); if (index == 10) { //stops it breaking if it's exactly in 0,0,0 index -= 1; } //add energy to this region regionDose[index] += energyDivision; } } } else { System.out.println("Test"); } } private void addDoseToImagedRegion(double s, double xNorm, double yNorm, double zNorm, double previousX, double previousY, double previousZ , double energyLost, Beam beam) { int numBins = 10; double xPos, yPos; if (energyLost > 0) { //split up in the track to a certain number of bins and split up the energy into that number of bins double binDistance = s/numBins; double energyLostBin = energyLost / numBins; for (int i = 1; i <= numBins; i++) { xPos = previousX + (binDistance *i) * xNorm; yPos = previousY + (binDistance *i) * yNorm; //if this bin is located in the imaged region, then add the dose to this region, if it isn't don't do anything //the imaged region is assumed to be centered on 0 0, like the beam is assumed to be centred on 0 0 //test double x = beam.getImageX(); double y = beam.getImageY(); if (Math.abs(xPos)/1000 <= beam.getImageX()/2 && Math.abs(yPos)/1000 <= beam.getImageY()/2) { //then in imaged region MonteCarloImageDose += energyLostBin; } } } } private void addDoseToPosition(double x, double y, double z, double keV, Beam beam, CoefCalc coefCalc) { int[] voxCoord = convertToPixelCoordinates(x, y, z); if (Double.isNaN(keV)) { System.out.println("Test"); } voxelDose[voxCoord[0]][voxCoord[1]][voxCoord[2]] += keV; //probably easier to do keV and process at the end if (Double.isNaN(voxelDose[voxCoord[0]][voxCoord[1]][voxCoord[2]])) { System.out.println("Test"); } /* double electronNumber = beam.getExposure() * (beam.getBeamArea()*1E8); double totalJ = (keV * (electronNumber/numSimulatedElectrons))*Beam.KEVTOJOULES; double voxelVolume = Math.pow((1/crystalPixPerUMEM) /1E4,3); // voxel volume in cm^3 double voxelMass = (coefCalc.getDensity() * voxelVolume)/1000; 
//voxel mass in Kg double dose = (totalJ/voxelMass) / 1E6; //dose in MGy voxelDose[voxCoord[0]][voxCoord[1]][voxCoord[2]] += dose; */ } private double convertVoxEnergyToDose(double energy, Beam beam, CoefCalc coefCalc) { double electronNumber = beam.getExposure() * (beam.getBeamArea()*1E8); double totalJ = (energy * (electronNumber/numSimulatedElectrons))*Beam.KEVTOJOULES; double voxelVolume = Math.pow((1/crystalPixPerUMEM) /1E4,3); // voxel volume in cm^3 double voxelMass = (coefCalc.getDensity() * voxelVolume)/1000; //voxel mass in Kg double dose = (totalJ/voxelMass) / 1E6; //dose in MGy if (Double.isNaN(dose)) { System.out.println("Test"); } return dose; } private double convertRegionEnergyToDose(double energy, int index,Beam beam, CoefCalc coefCalc) { //find total energy in the region double electronNumber = beam.getExposure() * (beam.getBeamArea()*1E8); double totalJ = (energy * (electronNumber/numSimulatedElectrons))*Beam.KEVTOJOULES; double volume = regionVolume[index] / 1E21; //in cm^3 double regionMass = (coefCalc.getDensity()*volume) / 1000; double dose = (totalJ/regionMass) / 1E6; //MGy return dose; } private void populateRegionVolumes() { //find the volume of the region //for now I'm just going to deal with cubes but will need to change this later double totalVolume = XDimension*YDimension*ZDimension; double sumVolume = 0; for (int i=0; i < NUM_REGIONS; i++) { double innerVolume = (XDimension - (i+1)*(XDimension/NUM_REGIONS)) * (YDimension-(i+1)*(YDimension/NUM_REGIONS)) * ZDimension; regionVolume[i] = totalVolume - (innerVolume + sumVolume); sumVolume += regionVolume[i]; } } private boolean isIntersectionInCrystal(double[] intersectionPoint) { //fudge the point for (int j = 0; j < 3; j++) { if (intersectionPoint[j] < 0) { intersectionPoint[j] += 0.000001; } else { intersectionPoint[j] -= 0.000001; } } boolean pointInCrystal = isMicrocrystalAt(intersectionPoint[0]*1000, intersectionPoint[1]*1000, intersectionPoint[2]*1000); return pointInCrystal; } 
private double getIntersectionDistance(double x, double y, double z, double ca, double cb, double cc) { if (normals == null) { calculateNormals(false); } double[] directionVector = {ca, cb, cc}; //the actual direction vector double minIntersect = 0; double[] origin = new double[3]; origin[0] = x/1000; origin[1] = y/1000; origin[2] = z/1000; double intersectionDistance = 0; for (int l = 0; l < indicesEM.length; l++) { intersectionDistance = Vector.rayTraceDistance(normals[l], directionVector, origin, originDistances[l]); Double distanceObject = Double.valueOf(intersectionDistance); if (intersectionDistance < 0 || distanceObject.isNaN() || distanceObject.isInfinite()) { //do nothing } else { // break; //maybe should just be closest, or an issue with the rayTRace if (minIntersect == 0) { minIntersect = intersectionDistance; } else { double min = Math.min(minIntersect, intersectionDistance); minIntersect = min; } } } return minIntersect; } private double[] getIntersectionPoint(double intersectionDistance, double x, double y, double z, double ca, double cb, double cc) { double[] directionVector = {ca, cb, cc}; //the actual direction vector double[] origin = new double[3]; origin[0] = x/1000; origin[1] = y/1000; origin[2] = z/1000; double distance = intersectionDistance / 1000; double[] intersectionPoint = Vector.rayTraceToPointWithDistance( directionVector, origin, distance); return intersectionPoint; } /** * Returns the minimum and maximum values of a vertex array * given chosen dimension (0 = x, 1 = y, 2 = z). 
* * @param dimension 0 = x, 1 = y, 2 = z * @param vertices vertices to be examined * @return double array, first element minimum, second element maximum */ public double[] minMaxVertices(final int dimension, final double[][] vertices) { double min = java.lang.Double.POSITIVE_INFINITY; double max = java.lang.Double.NEGATIVE_INFINITY; for (int i = 0; i < vertices.length; i++) { if (vertices[i][dimension] < min) { min = vertices[i][dimension]; } if (vertices[i][dimension] > max) { max = vertices[i][dimension]; } } double[] result = { min, max }; return result; } /** * Calculates normal array from index and vertex arrays. * Also calculates signed distances of each triangle * from the origin. */ public void calculateNormals(final boolean rotated) { double[][] verticesUsed = verticesEM; double[] originDistancesUsed = new double[verticesEM.length]; double[][] normalsUsed = new double[verticesEM.length][3]; normalsUsed = new double[indicesEM.length][3]; originDistancesUsed = new double[indicesEM.length]; for (int i = 0; i < indicesEM.length; i++) { // get the three vertices which this triangle corresponds to. double[] point1 = verticesUsed[indicesEM[i][0] - 1]; double[] point2 = verticesUsed[indicesEM[i][1] - 1]; double[] point3 = verticesUsed[indicesEM[i][2] - 1]; // get two vectors which can be used to define our plane. double[] vector1 = Vector.vectorBetweenPoints(point1, point2); double[] vector2 = Vector.vectorBetweenPoints(point1, point3); // get the normal vector between these two vectors. double[] normalVector = Vector.normalisedCrossProduct(vector1, vector2); // copy this vector into the normals array at the given point. 
System.arraycopy(normalVector, 0, normalsUsed[i], 0, 3); double distanceFromOrigin = -(normalVector[0] * point1[0] + normalVector[1] * point1[1] + normalVector[2] * point1[2]); originDistancesUsed[i] = distanceFromOrigin; } originDistances = new double[indicesEM.length]; normals = new double[indicesEM.length][3]; for (int i = 0; i < normalsUsed.length; i++) { System.arraycopy(normalsUsed[i], 0, normals[i], 0, 3); } System.arraycopy(originDistancesUsed, 0, originDistances, 0, indicesEM.length); } public boolean calculateCrystalOccupancy(final double x, final double y, final double z) { if (normals == null) { calculateNormals(false); } boolean inside = false; double[] directionVector = { 0, 0, 1 }; double[] origin = new double[3]; origin[0] = x/1000; origin[1] = y/1000; origin[2] = z/1000; //It doesn't work if x = y so need a fudge here... this is horrible. if (origin[0] == origin[1]) { origin[0] += 0.00001; } for (int l = 0; l < indicesEM.length; l++) { double intersectionDistance = Vector.rayTraceDistance(normals[l], directionVector, origin, originDistances[l]); Double distanceObject = Double.valueOf(intersectionDistance); if (intersectionDistance < 0 || distanceObject.isNaN() || distanceObject.isInfinite()) { continue; } double[] intersectionPoint = Vector.rayTraceToPointWithDistance( directionVector, origin, intersectionDistance); double[][] triangleVertices = new double[3][3]; // copy vertices referenced by indices into single array for // passing onto the polygon inclusion test. 
for (int m = 0; m < 3; m++) { System.arraycopy(verticesEM[indicesEM[l][m] - 1], 0, triangleVertices[m], 0, 3); } boolean crosses = Vector.polygonInclusionTest(triangleVertices, intersectionPoint); if (crosses) { inside = !inside; } } return inside; } //coulombs law function //I pass in a point from (electron) and to (centre of sample/pixel) //Calculate the vector and then the unit vector //Calculate coulomb law (the vector version) - and convert to acceleration with F = ma //after s is determined I need to work out how long it will take to get there based on the velocity vector (direction + magnitude as speed) //This time can be used to convert the acceleration vector to a velocity vector //Combine the two velocity vectors to get a new one //this is what I need to return //Use this velocity to work back to the kinetic energy. This is extra kinetic energy change by the end of the step. //This will happen in the main void private double[] adjustVelocityVectorByCharge(double[] electronPosition, double[] chargePosition, double s, double electronEnergy, double xNorm, double yNorm, double zNorm, CoefCalc coefCalc) { double Ke = 8.987551787E+27; // N nm^2 C^-2 //calculate time taken for electron to travel distance s double csquared = c*c; // (m/s)^2 double Vo = electronEnergy * Beam.KEVTOJOULES; double betaSquared = 1- Math.pow(m*csquared/(Vo + m*csquared), 2); double v = Math.pow(betaSquared*csquared, 0.5) *1E9; // nm/s double seconds = (1/v) * s; //seconds to move s nm //calculate the electron velocity vector (in nm/s) double[] electronVelocityVector = new double[3]; double[] unitVector = {xNorm, yNorm, zNorm}; for (int i = 0; i < 3; i++) { electronVelocityVector[i] = v * unitVector[i]; //nm/s } //calculate the force/acceleration/velocity vector to the charge double[] vectorToCharge = Vector.vectorBetweenPoints(electronPosition, chargePosition); double vectorToChargeMagnitude = Vector.vectorMagnitude(vectorToCharge); double[] normalisedVectorToCharge = 
Vector.normaliseVector(vectorToCharge); //estimate relative permittivity of the medium double solventFraction = coefCalc.getSolventFraction(); if (solventFraction == 0) { solventFraction = 0.5; } double relativeEpsilon = (80*solventFraction) + ((1-solventFraction)*4); //vacuum // relativeEpsilon = 1; double forceVectorConstant = Ke * ((MonteCarloCharge*Beam.ELEMENTARYCHARGE)/(Math.pow(vectorToChargeMagnitude, 2)*relativeEpsilon)); //N or J/m double[] forceVector = new double[3]; double[] accelerationVector = new double[3]; double[] chargeVelocityVector = new double[3]; double[] totalVelocityVector = new double[3]; int bins = 100; //need this to update mass double interval = seconds/bins; double relativisticMass = (electronEnergy * Beam.KEVTOJOULES + m*csquared) / csquared; totalVelocityVector = electronVelocityVector; for (int j = 0; j < 3; j++) { forceVector[j] = forceVectorConstant * normalisedVectorToCharge[j]; //N or J/m for (int i = 1; i <= bins; i++) { accelerationVector[j] = forceVector[j] / relativisticMass; // m/s^2 //i think I need an increase in mass here //yeah this mass should be (kinetic energy of electron + m0c^2)/c^2 chargeVelocityVector[j] = accelerationVector[j] * interval * 1E9; // nm/s totalVelocityVector[j] += chargeVelocityVector[j]; //update mass betaSquared = Math.pow(Vector.vectorMagnitude(totalVelocityVector)/1E9,2)/csquared; double gamma = 1/Math.pow((1-betaSquared), 0.5); electronEnergy = (gamma-1)*m*csquared; relativisticMass = (electronEnergy + m*csquared) / csquared; } } /* if (MonteCarloCharge > 1E-14 && vectorToChargeMagnitude < 150) { System.out.println("test"); } */ //error fixer double velocity = Vector.vectorMagnitude(totalVelocityVector)/1E9; if(velocity > c) { double multiple = velocity/c; for (int j = 0; j < 3; j++) { totalVelocityVector[j] = (totalVelocityVector[j]/multiple)-1; } } return totalVelocityVector; } /** * Vector class containing magical vector methods * like cross products and magnitudes. 
* * @author magd3052 */ private static class Vector { /** * Returns magnitude of 3D vector. * * @param vector 3d coordinates of vector * @return magnitude scalar. */ public static double vectorMagnitude(final double[] vector) { double squaredDistance = Math.pow(vector[0], 2) + Math.pow(vector[1], 2) + Math.pow(vector[2], 2); double distance = Math.sqrt(squaredDistance); return distance; } /** * returns 3D vector between FROM and TO points. * * @param from from point * @param to to point * @return vector between points. */ public static double[] vectorBetweenPoints(final double[] from, final double[] to) { double[] newVector = new double[3]; for (int i = 0; i < 3; i++) { newVector[i] = to[i] - from[i]; } return newVector; } public static double[] normaliseVector(final double[] vector) { double[] newVector = new double[3]; double magnitude = vectorMagnitude(vector); for (int i = 0; i < 3; i++) { newVector[i] = vector[i]/magnitude; } return newVector; } /** * returns 3D cross-product between two vectors. * * @param vector1 vector1 * @param vector2 vector2 * @return cross product */ public static double[] crossProduct(final double[] vector1, final double[] vector2) { double[] newVector = new double[3]; newVector[0] = vector1[1] * vector2[2] - vector1[2] * vector2[1]; newVector[1] = vector1[2] * vector2[0] - vector1[0] * vector2[2]; newVector[2] = vector1[0] * vector2[1] - vector1[1] * vector2[0]; return newVector; } /** * returns 3D cross product with magnitude set to 1 between * two vectors. * * @param vector1 vector1 * @param vector2 vector2 * @return normalised cross product */ public static double[] normalisedCrossProduct(final double[] vector1, final double[] vector2) { double[] newVector = crossProduct(vector1, vector2); double magnitude = vectorMagnitude(newVector); for (int i = 0; i < 3; i++) { newVector[i] /= magnitude; } return newVector; } /** * returns dot product between two 3D vectors. 
* * @param vector1 vector1 * @param vector2 vector2 * @return dot product */ public static double dotProduct(final double[] vector1, final double[] vector2) { double dotProduct = 0; for (int i = 0; i < 3; i++) { dotProduct += vector1[i] * vector2[i]; } return dotProduct; } /** * Ray trace from a point to a plane via a direction vector, * find the intersection between the direction vector and the * plane and return this point. * * @param normalUnitVector normal vector with magnitude 1 * @param directionVector direction vector of any magnitude * @param origin point from which ray is traced (i.e. voxel coordinate) * @param planeDistance distance of plane from true origin (0, 0, 0) * @return intersection point between plane and direction vector */ @SuppressWarnings("unused") public static double[] rayTraceToPoint(final double[] normalUnitVector, final double[] directionVector, final double[] origin, final double planeDistance) { double t = rayTraceDistance(normalUnitVector, directionVector, origin, planeDistance); double[] point = new double[3]; for (int i = 0; i < 3; i++) { point[i] = origin[i] + t * directionVector[i]; } return point; } /** * Ray trace - find intersection of direction vector from point * with plane from already-known distance t. * * @param directionVector direction vector * @param origin point from which ray is traced * @param t distance of origin to plane along direction vector * @return point of intersection */ public static double[] rayTraceToPointWithDistance( final double[] directionVector, final double[] origin, final double t) { double[] point = new double[3]; for (int i = 0; i < 3; i++) { point[i] = origin[i] + t * directionVector[i]; } return point; } /** * Ray trace from a point to a plane via a direction vector, * find the signed distance between the direction vector and * the plane and return this point. 
* * @param normalUnitVector normal vector with magnitude 1 * @param directionVector direction vector of any magnitude * @param origin point from which ray is traced (i.e. voxel coordinate) * @param planeDistance distance of plane from true origin (0, 0, 0) * @return signed distance between direction vector and plane */ public static double rayTraceDistance(final double[] normalUnitVector, final double[] directionVector, final double[] origin, final double planeDistance) { double originNormalDotProduct = dotProduct(origin, normalUnitVector); double directionNormalDotProduct = dotProduct(directionVector, normalUnitVector); double t = -(originNormalDotProduct + planeDistance) / directionNormalDotProduct; return t; } public static boolean polygonInclusionTest(final double[][] vertices, final double[] point) { boolean c = false; for (int i = 0, j = vertices.length - 1; i < vertices.length; j = i++) { if (((vertices[i][1] > point[1]) != (vertices[j][1] > point[1])) && (point[0] < (vertices[j][0] - vertices[i][0]) * (point[1] - vertices[i][1]) / (vertices[j][1] - vertices[i][1]) + vertices[i][0])) { c = !c; } } return c; } } private void testingXFELQuick(Beam beam, CoefCalc coefcalc) { double m = 9.10938356E-31; // in Kg double c = 299792458; double csquared = c*c; // (m/s)^2 double onefsTotDose = 193.7; //this would be 1E11 photons in 10fs double beamEnergy = beam.getPhotonEnergy(); double peBinding = 0.48; double electronEnergy = beamEnergy - peBinding; double photonDosePerfs = (peBinding/beamEnergy)*onefsTotDose; //just a test //pulse energy of 1.5mJ double pulseEn = 2.11E-3; double energyPerPhoton = beam.getPhotonEnergy()*Beam.KEVTOJOULES; double numberOfPhotons = pulseEn/energyPerPhoton; //so for 2fs int time = 20; double stoppingPower = 0; double photonDose = photonDosePerfs * time; double electronDose = 0; for (int i = 1; i < time; i++) { //so i is fs since first pe produced (time - 1) stoppingPower = coefcalc.getStoppingPower(electronEnergy, false); //get the 
speed double Vo = electronEnergy * Beam.KEVTOJOULES; double betaSquared = 1- Math.pow(m*csquared/(Vo + m*csquared), 2); double v = Math.pow(betaSquared*csquared, 0.5) * 1E9 / 1E15; //nm/fs double distanceMoved = (v*1); double energyDeposited = stoppingPower * distanceMoved; double doseDeposited = (energyDeposited/beamEnergy)*onefsTotDose; electronDose += doseDeposited * (time-i); electronEnergy -= energyDeposited; if (electronEnergy < 0.05) { break; } } double totDose = photonDose + electronDose; System.out.println(totDose); //so the photon dose is contentious because of outrunning Auger, need to consider this //The elctron dose is largely not inner shell but fraction that is and outrunning Auger should be considered //Need to consider photoelectron escape as well as this could be huge //I think a Monte Carlo simulation and multiplying up might work, need Monte Carlo because of the timescale I think //I should be able to predict pulse lengths at which damage is seen for a given beam energy, sample composition and stuff //Should also be able to use elastic scattering cross sections to advise on beam energy and stuff } private void MonteCarloXFEL() { //this will be where my Monte Carlo simulation for XFELs is set up and I can see what I can learn from this } }
import java.lang.reflect.*; import java.io.*; import java.net.*; import java.util.*; import org.xbill.DNS.*; import org.xbill.DNS.utils.*; /** @author Brian Wellington &lt;[email protected]&gt; */ public class jnamed { Hashtable caches; Hashtable znames; Hashtable TSIGs; public jnamed(String conffile) throws IOException { FileInputStream fs; boolean started = false; try { fs = new FileInputStream(conffile); } catch (Exception e) { System.out.println("Cannot open " + conffile); return; } caches = new Hashtable(); znames = new Hashtable(); TSIGs = new Hashtable(); BufferedReader br = new BufferedReader(new InputStreamReader(fs)); String line = null; while ((line = br.readLine()) != null) { StringTokenizer st = new StringTokenizer(line); if (!st.hasMoreTokens()) continue; String keyword = st.nextToken(); if (!st.hasMoreTokens()) { System.out.println("Invalid line: " + line); continue; } if (keyword.charAt(0) == ' continue; if (keyword.equals("primary")) addPrimaryZone(st.nextToken(), st.nextToken()); if (keyword.equals("secondary")) addSecondaryZone(st.nextToken(), st.nextToken()); else if (keyword.equals("cache")) { Cache cache = new Cache(st.nextToken()); caches.put(new Short(DClass.IN), cache); } else if (keyword.equals("key")) addTSIG(st.nextToken(), st.nextToken()); else if (keyword.equals("port")) { short port = Short.parseShort(st.nextToken()); addUDP(port); addTCP(port); started = true; } else { System.out.println("ignoring invalid keyword: " + keyword); } } if (!started) { addUDP((short) 53); addTCP((short) 53); } System.out.println("running"); } public void addPrimaryZone(String zname, String zonefile) throws IOException { Name origin = null; Cache cache = getCache(DClass.IN); if (zname != null) origin = new Name(zname, Name.root); Zone newzone = new Zone(zonefile, cache, origin); znames.put(newzone.getOrigin(), newzone); /*System.out.println("Adding zone named <" + newzone.getOrigin() + ">");*/ } public void addSecondaryZone(String zone, String remote) 
throws IOException { Cache cache = getCache(DClass.IN); Name zname = new Name(zone); Zone newzone = new Zone(zname, DClass.IN, remote, cache); znames.put(zname, newzone); /*System.out.println("Adding zone named <" + zname + ">");*/ } public void addTSIG(String name, String key) { TSIGs.put(new Name(name), base64.fromString(key)); } public Cache getCache(short dclass) { Cache c = (Cache) caches.get(new Short(dclass)); if (c == null) { c = new Cache(dclass); caches.put(new Short(dclass), c); } return c; } public Zone findBestZone(Name name) { Zone foundzone = null; foundzone = (Zone) znames.get(name); if (foundzone != null) return foundzone; int labels = name.labels(); for (int i = 1; i < labels; i++) { Name tname = new Name(name, i); foundzone = (Zone) znames.get(tname); if (foundzone != null) return foundzone; } return null; } public RRset findExactMatch(Name name, short type, short dclass, boolean glue) { Zone zone = findBestZone(name); if (zone != null) return zone.findExactMatch(name, type); else { RRset [] rrsets; Cache cache = getCache(dclass); if (glue) rrsets = cache.findAnyRecords(name, type); else rrsets = cache.findRecords(name, type); if (rrsets == null) return null; else return rrsets[0]; /* not quite right */ } } void addRRset(Name name, Message response, RRset rrset, byte section, boolean sigonly) { Enumeration e; for (byte s = 1; s <= section; s++) if (response.findRRset(name, rrset.getType(), s)) return; if (!sigonly) { e = rrset.rrs(); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); if (!name.isWild() && r.getName().isWild()) r = r.withName(name); response.addRecord(r, section); } } e = rrset.sigs(); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); if (!name.isWild() && r.getName().isWild()) r = r.withName(name); response.addRecord(r, section); } } private void addSOA(Message response, Zone zone) { response.addRecord(zone.getSOA(), Section.AUTHORITY); } private void addNS(Message response, Zone zone) { RRset 
nsRecords = zone.getNS(); addRRset(nsRecords.getName(), response, nsRecords, Section.AUTHORITY, false); } private void addCacheNS(Message response, Cache cache, Name name) { SetResponse sr = cache.lookupRecords(name, Type.NS, Credibility.HINT); if (!sr.isDelegation()) return; RRset nsRecords = sr.getNS(); Enumeration e = nsRecords.rrs(); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); response.addRecord(r, Section.AUTHORITY); } } private void addGlue(Message response, Name name) { RRset a = findExactMatch(name, Type.A, DClass.IN, true); if (a == null) return; if (response.findRRset(name, Type.A)) return; Enumeration e = a.rrs(); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); response.addRecord(r, Section.ADDITIONAL); } e = a.sigs(); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); response.addRecord(r, Section.ADDITIONAL); } } private void addAdditional2(Message response, int section) { Enumeration e = response.getSection(section); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); Name glueName = null; switch (r.getType()) { case Type.MX: glueName = ((MXRecord)r).getTarget(); break; case Type.NS: glueName = ((NSRecord)r).getTarget(); break; case Type.KX: glueName = ((KXRecord)r).getTarget(); break; case Type.NAPTR: glueName = ((NAPTRRecord)r).getReplacement(); break; case Type.SRV: glueName = ((SRVRecord)r).getTarget(); break; default: break; } if (glueName != null) addGlue(response, glueName); } } void addAdditional(Message response) { addAdditional2(response, Section.ANSWER); addAdditional2(response, Section.AUTHORITY); } byte addAnswer(Message response, Name name, short type, short dclass, int iterations) { SetResponse sr; boolean sigonly; byte rcode = Rcode.NOERROR; if (iterations > 6) return Rcode.NOERROR; if (type == Type.SIG) { type = Type.ANY; sigonly = true; } else sigonly = false; Zone zone = findBestZone(name); if (zone != null) sr = zone.findRecords(name, type); else { Cache cache 
= getCache(dclass); sr = cache.lookupRecords(name, type, Credibility.NONAUTH_ANSWER); } if (sr.isUnknown()) { addCacheNS(response, getCache(dclass), name); } if (sr.isNXDOMAIN()) { response.getHeader().setRcode(Rcode.NXDOMAIN); if (zone != null) { addSOA(response, zone); if (iterations == 0) response.getHeader().setFlag(Flags.AA); } rcode = Rcode.NXDOMAIN; } else if (sr.isNXRRSET()) { if (zone != null) { addSOA(response, zone); if (iterations == 0) response.getHeader().setFlag(Flags.AA); } } else if (sr.isDelegation()) { RRset nsRecords = sr.getNS(); addRRset(nsRecords.getName(), response, nsRecords, Section.AUTHORITY, false); } else if (sr.isCNAME()) { RRset rrset = new RRset(); CNAMERecord cname = sr.getCNAME(); rrset.addRR(cname); addRRset(name, response, rrset, Section.ANSWER, false); if (zone != null && iterations == 0) response.getHeader().setFlag(Flags.AA); rcode = addAnswer(response, cname.getTarget(), type, dclass, iterations + 1); } else if (sr.isDNAME()) { RRset rrset = new RRset(); DNAMERecord dname = sr.getDNAME(); rrset.addRR(dname); addRRset(name, response, rrset, Section.ANSWER, false); Name newname = name.fromDNAME(dname); if (newname == null) return Rcode.SERVFAIL; try { rrset = new RRset(); rrset.addRR(new CNAMERecord(name, dclass, 0, newname)); addRRset(name, response, rrset, Section.ANSWER, false); } catch (IOException e) {} if (zone != null && iterations == 0) response.getHeader().setFlag(Flags.AA); rcode = addAnswer(response, newname, type, dclass, iterations + 1); } else if (sr.isSuccessful()) { RRset [] rrsets = sr.answers(); for (int i = 0; i < rrsets.length; i++) addRRset(name, response, rrsets[i], Section.ANSWER, sigonly); if (zone != null) { addNS(response, zone); if (iterations == 0) response.getHeader().setFlag(Flags.AA); } else addCacheNS(response, getCache(dclass), name); } return rcode; } TSIG findTSIG(Name name) { byte [] key = (byte []) TSIGs.get(name); if (key != null) return new TSIG(name, key); else return null; } Message 
doAXFR(Name name, Message query, Socket s) { Zone zone = (Zone) znames.get(name); if (zone == null) { /* System.out.println("no zone " + name + " to AXFR");*/ return errorMessage(query, Rcode.REFUSED); } Enumeration e = zone.AXFR(); try { DataOutputStream dataOut; dataOut = new DataOutputStream(s.getOutputStream()); while (e.hasMoreElements()) { RRset rrset = (RRset) e.nextElement(); Message response = new Message(); addRRset(rrset.getName(), response, rrset, Section.ANSWER, false); byte [] out = response.toWire(); dataOut.writeShort(out.length); dataOut.write(out); } } catch (IOException ex) { System.out.println("AXFR failed"); } try { s.close(); } catch (IOException ex) { } return null; } /* * Note: a null return value means that the caller doesn't need to do * anything. Currently this only happens if this is an AXFR request over * TCP. */ Message generateReply(Message query, byte [] in, Socket s) { boolean badversion; int maxLength; boolean sigonly; SetResponse sr; if (query.getHeader().getOpcode() != Opcode.QUERY) return errorMessage(query, Rcode.NOTIMPL); Record queryRecord = query.getQuestion(); TSIGRecord queryTSIG = query.getTSIG(); TSIG tsig = null; if (queryTSIG != null) { tsig = findTSIG(queryTSIG.getName()); if (tsig.verify(query, in, null) != Rcode.NOERROR) return formerrMessage(in); } OPTRecord queryOPT = query.getOPT(); if (queryOPT != null && queryOPT.getVersion() > 0) badversion = true; if (s != null) maxLength = 65535; else if (queryOPT != null) maxLength = queryOPT.getPayloadSize(); else maxLength = 512; Message response = new Message(); response.getHeader().setID(query.getHeader().getID()); response.getHeader().setFlag(Flags.QR); if (query.getHeader().getFlag(Flags.RD)); response.getHeader().setFlag(Flags.RD); response.addRecord(queryRecord, Section.QUESTION); Name name = queryRecord.getName(); short type = queryRecord.getType(); short dclass = queryRecord.getDClass(); if (type == Type.AXFR && s != null) return doAXFR(name, query, s); if 
(!Type.isRR(type) && type != Type.ANY) return errorMessage(query, Rcode.NOTIMPL); byte rcode = addAnswer(response, name, type, dclass, 0); if (rcode != Rcode.NOERROR && rcode != Rcode.NXDOMAIN) return errorMessage(query, rcode); addAdditional(response); if (queryTSIG != null) { try { if (tsig != null) tsig.apply(response, queryTSIG); } catch (IOException e) { } } try { response.freeze(); byte [] out = response.toWire(); if (out.length > maxLength) { response.thaw(); truncate(response, out.length, maxLength); if (tsig != null) tsig.apply(response, queryTSIG); } } catch (IOException e) { } return response; } public int truncateSection(Message in, int maxLength, int length, int section) { int removed = 0; Record [] records = in.getSectionArray(section); for (int i = records.length - 1; i >= 0; i Record r = records[i]; removed += r.getWireLength(); length -= r.getWireLength(); in.removeRecord(r, section); if (length > maxLength) continue; else { for (int j = i - 1; j >= 0; j Record r2 = records[j]; if (!r.getName().equals(r2.getName()) || r.getType() != r2.getType() || r.getDClass() != r2.getDClass()) break; removed += r2.getWireLength(); length -= r2.getWireLength(); in.removeRecord(r2, section); } return removed; } } return removed; } public void truncate(Message in, int length, int maxLength) { TSIGRecord tsig = in.getTSIG(); if (tsig != null) maxLength -= tsig.getWireLength(); length -= truncateSection(in, maxLength, length, Section.ADDITIONAL); if (length < maxLength) return; in.getHeader().setFlag(Flags.TC); if (tsig != null) { in.removeAllRecords(Section.ANSWER); in.removeAllRecords(Section.AUTHORITY); return; } length -= truncateSection(in, maxLength, length, Section.AUTHORITY); if (length < maxLength) return; length -= truncateSection(in, maxLength, length, Section.ANSWER); } public Message formerrMessage(byte [] in) { Header header; try { header = new Header(new DataByteInputStream(in)); } catch (IOException e) { header = new Header(0); } Message response = 
new Message(); response.setHeader(header); for (int i = 0; i < 4; i++) response.removeAllRecords(i); header.setRcode(Rcode.FORMERR); return response; } public Message errorMessage(Message query, short rcode) { Header header = query.getHeader(); Message response = new Message(); response.setHeader(header); for (int i = 0; i < 4; i++) response.removeAllRecords(i); if (rcode == Rcode.SERVFAIL) response.addRecord(query.getQuestion(), Section.QUESTION); header.setRcode(rcode); return response; } public void serveTCP(short port) { try { ServerSocket sock = new ServerSocket(port); while (true) { Socket s = sock.accept(); int inLength; DataInputStream dataIn; DataOutputStream dataOut; byte [] in; try { InputStream is = s.getInputStream(); dataIn = new DataInputStream(is); inLength = dataIn.readUnsignedShort(); in = new byte[inLength]; dataIn.readFully(in); } catch (InterruptedIOException e) { s.close(); continue; } Message query, response; try { query = new Message(in); response = generateReply(query, in, s); if (response == null) continue; } catch (IOException e) { response = formerrMessage(in); } byte [] out = response.toWire(); dataOut = new DataOutputStream(s.getOutputStream()); dataOut.writeShort(out.length); dataOut.write(out); s.close(); } } catch (IOException e) { System.out.println("serveTCP: " + e); } } public void serveUDP(short port) { try { DatagramSocket sock = new DatagramSocket(port); while (true) { short udpLength = 512; byte [] in = new byte[udpLength]; DatagramPacket dp = new DatagramPacket(in, in.length); try { sock.receive(dp); } catch (InterruptedIOException e) { continue; } Message query, response; try { query = new Message(in); response = generateReply(query, in, null); if (response == null) continue; } catch (IOException e) { response = formerrMessage(in); } byte [] out = response.toWire(); dp = new DatagramPacket(out, out.length, dp.getAddress(), dp.getPort()); sock.send(dp); } } catch (IOException e) { System.out.println("serveUDP: " + e); } } 
public void addTCP(final short port) { Thread t; t = new Thread(new Runnable() {public void run() {serveTCP(port);}}); t.start(); } public void addUDP(final short port) { Thread t; t = new Thread(new Runnable() {public void run() {serveUDP(port);}}); t.start(); } public static void main(String [] args) { if (args.length > 1) { System.out.println("usage: jnamed [conf]"); System.exit(0); } jnamed s; try { String conf; if (args.length == 1) conf = args[0]; else conf = "jnamed.conf"; s = new jnamed(conf); } catch (IOException e) { System.out.println(e); } } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package jme3tools.savegame; import com.jme3.asset.AssetManager; import com.jme3.export.Savable; import com.jme3.export.binary.BinaryExporter; import com.jme3.export.binary.BinaryImporter; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.logging.Level; import java.util.logging.Logger; import java.util.prefs.Preferences; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import sun.misc.UUDecoder; import sun.misc.UUEncoder; /** * Tool for saving Savables as SaveGame entries in a system-dependent way. * @author normenhansen */ public class SaveGame { /** * Saves a savable in a system-dependent way. Note that only small amounts of data can be saved. * @param gamePath A unique path for this game, e.g. com/mycompany/mygame * @param dataName A unique name for this savegame, e.g. "save_001" * @param data The Savable to save */ public static void saveGame(String gamePath, String dataName, Savable data) { Preferences prefs = Preferences.userRoot().node(gamePath); BinaryExporter ex = BinaryExporter.getInstance(); ByteArrayOutputStream out = new ByteArrayOutputStream(); try { GZIPOutputStream zos = new GZIPOutputStream(out); ex.save(data, zos); zos.close(); } catch (IOException ex1) { Logger.getLogger(SaveGame.class.getName()).log(Level.SEVERE, "Error saving data: {0}", ex1); ex1.printStackTrace(); } UUEncoder enc = new UUEncoder(); String dataString = enc.encodeBuffer(out.toByteArray()); System.out.println(dataString); if (dataString.length() > Preferences.MAX_VALUE_LENGTH) { throw new IllegalStateException("SaveGame dataset too large"); } prefs.put(dataName, dataString); } /** * Loads a savable that has been saved on this system with saveGame() before. * @param gamePath A unique path for this game, e.g. 
com/mycompany/mygame * @param dataName A unique name for this savegame, e.g. "save_001" * @return The savable that was saved */ public static Savable loadGame(String gamePath, String dataName) { return loadGame(gamePath, dataName, null); } /** * Loads a savable that has been saved on this system with saveGame() before. * @param gamePath A unique path for this game, e.g. com/mycompany/mygame * @param dataName A unique name for this savegame, e.g. "save_001" * @param assetManager Link to an AssetManager if required for loading the data (e.g. models with textures) * @return The savable that was saved */ public static Savable loadGame(String gamePath, String dataName, AssetManager manager) { Preferences prefs = Preferences.userRoot().node(gamePath); String data = prefs.get(dataName, ""); InputStream is = null; Savable sav = null; UUDecoder dec = new UUDecoder(); try { is = new GZIPInputStream(new ByteArrayInputStream(dec.decodeBuffer(data))); BinaryImporter imp = BinaryImporter.getInstance(); if (manager != null) { imp.setAssetManager(manager); } sav = imp.load(is); } catch (IOException ex) { Logger.getLogger(SaveGame.class.getName()).log(Level.SEVERE, "Error loading data: {0}", ex); ex.printStackTrace(); } finally { if (is != null) { try { is.close(); } catch (IOException ex) { Logger.getLogger(SaveGame.class.getName()).log(Level.SEVERE, "Error loading data: {0}", ex); ex.printStackTrace(); } } } return sav; } }
package org.gluu.oxtrust.ldap.service; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.io.InputStream; import javax.activation.FileTypeMap; import javax.activation.MimetypesFileTypeMap; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.ArrayUtils; import org.jboss.seam.ScopeType; import org.jboss.seam.annotations.AutoCreate; import org.jboss.seam.annotations.Create; import org.jboss.seam.annotations.In; import org.jboss.seam.annotations.Logger; import org.jboss.seam.annotations.Name; import org.jboss.seam.annotations.Scope; import org.jboss.seam.annotations.Startup; import org.jboss.seam.core.ResourceLoader; import org.jboss.seam.log.Log; import org.jboss.seam.ui.graphicImage.Image.Type; import org.xdi.config.oxtrust.ApplicationConfiguration; import org.xdi.model.GluuImage; import org.xdi.util.StringHelper; import org.xdi.util.image.ImageTransformationUtility; import org.xdi.util.repository.RepositoryUtility; @Name("imageRepository") @Scope(ScopeType.APPLICATION) @AutoCreate @Startup public class ImageRepository { @Logger private Log log; @In(required = false) private ResourceLoader resourceLoader; @In(value = "#{oxTrustConfiguration.applicationConfiguration}") private ApplicationConfiguration applicationConfiguration; private static final String TEMP_FOLDER = "tmp"; private static final String REMOVED_FOLDER = "removed"; private static boolean createBackupDuringRemoval = true; private String sourceHome, thumbHome; private String tmpSourceHome, tmpThumbHome; private String removedSourceHome, removedThumbHome; private File photoRepositoryRootDirFile; private byte[] blankImage, blankPhoto, blankIcon; private int countLevels; private int countFoldersPerLevel; private FileTypeMap fileTypeMap; @Create public void init() throws Exception { countLevels = applicationConfiguration.getPhotoRepositoryCountLeveles(); countFoldersPerLevel = 
applicationConfiguration.getPhotoRepositoryCountFoldersPerLevel(); String photoRepositoryRootDir = applicationConfiguration.getPhotoRepositoryRootDir(); photoRepositoryRootDirFile = new File(photoRepositoryRootDir); // Create folders for persistent images sourceHome = photoRepositoryRootDir + File.separator + "source"; thumbHome = photoRepositoryRootDir + File.separator + "thumb"; createFoldersTree(new File(sourceHome)); createFoldersTree(new File(thumbHome)); // Create folders for temporary images tmpSourceHome = photoRepositoryRootDir + File.separator + TEMP_FOLDER + File.separator + "source"; tmpThumbHome = photoRepositoryRootDir + File.separator + TEMP_FOLDER + File.separator + "thumb"; createFoldersTree(new File(tmpSourceHome)); createFoldersTree(new File(tmpThumbHome)); // Create folders for removed images if (createBackupDuringRemoval) { removedSourceHome = photoRepositoryRootDir + File.separator + REMOVED_FOLDER + File.separator + "source"; removedThumbHome = photoRepositoryRootDir + File.separator + REMOVED_FOLDER + File.separator + "thumb"; createFoldersTree(new File(removedSourceHome)); createFoldersTree(new File(removedThumbHome)); } prepareBlankImage(); prepareBlankPhoto(); prepareBlankIcon(); initFileTypesMap(); } public void initFileTypesMap() throws Exception { fileTypeMap = MimetypesFileTypeMap.getDefaultFileTypeMap(); InputStream is = ImageRepository.class.getClassLoader().getResourceAsStream("META-INF/mimetypes-gluu.default"); try { if (is != null) { fileTypeMap = new MimetypesFileTypeMap(is); } } catch (Exception ex) { log.error("Failed to load file types map. 
Using default one.", ex); fileTypeMap = new MimetypesFileTypeMap(); } finally { IOUtils.closeQuietly(is); } } /** * Creates image in repository * * @param image * image file * @return true if image was added successfully, false otherwise * @throws Exception */ public boolean createRepositoryImageFiles(GluuImage image, int thumbWidth, int thumbHeight) throws Exception { if (image.getSourceContentType().equals("application/octet-stream")) { image.setSourceContentType(fileTypeMap.getContentType(image.getSourceName())); } if (!addThumbnail(image, thumbWidth, thumbHeight)) { return false; } // Generate paths setGeneratedImagePathes(image, Type.IMAGE_JPEG.getExtension()); // Create folders tree createImagePathes(image); // Save thumb image FileUtils.writeByteArrayToFile(getThumbFile(image), image.getThumbData()); // Save source image FileUtils.writeByteArrayToFile(getSourceFile(image), image.getData()); return true; } public boolean addThumbnail(GluuImage image, int thumbWidth, int thumbHeight) throws Exception { if (!image.getSourceContentType().matches("image/(gif|png|jpeg|jpg|bmp)")) { return false; } // Load source image org.jboss.seam.ui.graphicImage.Image graphicsImage = new org.jboss.seam.ui.graphicImage.Image(); graphicsImage.setInput(image.getData()); graphicsImage.setContentType(Type.IMAGE_PNG); if (graphicsImage.getBufferedImage() == null) { throw new IOException("The image data is empty"); } // Set source image size image.setWidth(graphicsImage.getWidth()); image.setHeight(graphicsImage.getHeight()); BufferedImage bi = ImageTransformationUtility.scaleImage(graphicsImage.getBufferedImage(), thumbWidth, thumbHeight); graphicsImage.setBufferedImage(bi); // Set thumb properties image.setThumbWidth(graphicsImage.getWidth()); image.setThumbHeight(graphicsImage.getHeight()); image.setThumbContentType(graphicsImage.getContentType().getMimeType()); // Store thumb image image.setThumbData(graphicsImage.getImage()); graphicsImage = null; return true; } private void 
moveImageToPersistentStore(GluuImage image, boolean saveThumb, String destSourceFilePath, String destThumbFilePath) throws IOException { if (!image.isStoreTemporary()) { return; } File tmpOrigFile = getSourceFile(image); File tmpThumbFile = getThumbFile(image); image.setStoreTemporary(false); if (!StringHelper.isEmpty(destSourceFilePath)) { image.setSourceFilePath(destSourceFilePath); } if (!StringHelper.isEmpty(destThumbFilePath)) { image.setThumbFilePath(destThumbFilePath); } FileUtils.copyFile(tmpOrigFile, getSourceFile(image)); if (saveThumb) { FileUtils.copyFile(tmpThumbFile, getThumbFile(image)); } deleteFile(tmpOrigFile, true); deleteFile(tmpThumbFile, true); } public void moveImageToPersistentStore(GluuImage image) throws Exception { moveImageToPersistentStore(image, true, null, null); } public void moveLogoImageToPersistentStore(GluuImage image) throws IOException { if (!image.isLogo()) { return; } String logoSourceFilePath = "logo" + RepositoryUtility.getFileNameExtension(image.getSourceFilePath()); String logoThumbFilePath = "logo_thumb" + RepositoryUtility.getFileNameExtension(image.getSourceFilePath()); moveImageToPersistentStore(image, true, logoSourceFilePath, logoThumbFilePath); } private void setGeneratedImagePathes(GluuImage image, String thumbExt) throws Exception { String uuid = RepositoryUtility.generateUUID(); String ext = RepositoryUtility.getFileNameExtension(image.getSourceName()); String sourceFileName = uuid + ext; String thumbFileName = uuid + (thumbExt == null ? 
ext : thumbExt); String sourceFilePath = RepositoryUtility.generateTreeFolderPath(countLevels, countFoldersPerLevel, sourceFileName); String thumbFilePath = RepositoryUtility.generateTreeFolderPath(countLevels, countFoldersPerLevel, thumbFileName); image.setUuid(uuid); image.setSourceFilePath(sourceFilePath); image.setThumbFilePath(thumbFilePath); } public File getThumbFile(GluuImage image) { if (image.isLogo() && !image.isStoreTemporary()) { return new File(applicationConfiguration.getLogoLocation() + File.separator + image.getThumbFilePath()); } String parentFolder = image.isStoreTemporary() ? tmpThumbHome : thumbHome; return new File(parentFolder + File.separator + image.getThumbFilePath()); } public File getSourceFile(GluuImage image) { if (image.isLogo() && !image.isStoreTemporary()) { return new File(applicationConfiguration.getLogoLocation() + File.separator + image.getSourceFilePath()); } String parentFolder = image.isStoreTemporary() ? tmpSourceHome : sourceHome; return new File(parentFolder + File.separator + image.getSourceFilePath()); } public byte[] getThumbImageData(GluuImage image) throws Exception { return FileUtils.readFileToByteArray(getThumbFile(image)); } public byte[] getSourceImageData(GluuImage image) throws Exception { return FileUtils.readFileToByteArray(getSourceFile(image)); } public void deleteImage(GluuImage image) { File thumbFile = getThumbFile(image); File sourceFile = getSourceFile(image); if (!image.isStoreTemporary() && createBackupDuringRemoval) { File reovedThumbFile = new File(removedThumbHome + File.separator + image.getThumbFilePath()); File removedSourceFile = new File(removedSourceHome + File.separator + image.getSourceFilePath()); try { FileUtils.copyFile(thumbFile, reovedThumbFile); FileUtils.copyFile(sourceFile, removedSourceFile); } catch (IOException ex) { log.error("Failed to create backup for photo {0} before removal", ex, image); } } // Delete thumb and source files deleteFile(thumbFile, true); 
deleteFile(sourceFile, true); } private boolean deleteFile(File file, boolean removeEmptyfoldersTree) { boolean result = true; if (file.exists()) { result = file.delete(); if (removeEmptyfoldersTree) { removeEmptyfoldersTree(file.getParentFile(), countLevels); } } return result; } private void removeEmptyfoldersTree(File folder, int remainLevels) { if (photoRepositoryRootDirFile.equals(folder) || (remainLevels == 0)) { return; } File[] files = folder.listFiles(); if (files == null) { // null if security restricted return; } if (files.length == 0) { File parent = folder.getParentFile(); deleteFile(folder, false); removeEmptyfoldersTree(parent, --remainLevels); } } private void createFoldersTree(File folder) { if (folder != null && folder.mkdirs()) { // findbugs: probably needs to do something here } } private void createImagePathes(GluuImage image) throws Exception { createFoldersTree(getSourceFile(image).getParentFile()); createFoldersTree(getThumbFile(image).getParentFile()); } public byte[] getBlankImage() { // findbugs: copy on return to not expose internal representation return ArrayUtils.clone(blankImage); } public byte[] getBlankPhoto() { // findbugs: copy on return to not expose internal representation return ArrayUtils.clone(blankPhoto); } public byte[] getBlankIcon() { return ArrayUtils.clone(blankIcon); } private void prepareBlankImage() { InputStream is = resourceLoader.getResourceAsStream("/WEB-INF/static/images/blank_image.gif"); if(is != null){ try { this.blankImage = IOUtils.toByteArray(is); } catch (Exception ex) { log.error("Failed to load blank image", ex); } finally { IOUtils.closeQuietly(is); } }else{ log.error("Failed to load blank image. 
ResourceLoader returned null stream."); } } private void prepareBlankPhoto() { InputStream is = resourceLoader.getResourceAsStream("/WEB-INF/static/images/anonymous.png"); if(is != null){ try { this.blankPhoto = IOUtils.toByteArray(is); } catch (Exception ex) { log.error("Failed to load blank photo", ex); } finally { IOUtils.closeQuietly(is); } }else{ log.error("Failed to load blank photo. ResourceLoader returned null stream."); } } private void prepareBlankIcon() { InputStream is = resourceLoader.getResourceAsStream("/WEB-INF/static/images/blank_icon.gif"); if(is != null){ try { this.blankIcon = IOUtils.toByteArray(is); } catch (Exception ex) { log.error("Failed to load blank icon", ex); } finally { IOUtils.closeQuietly(is); } }else{ log.error("Failed to load blank icon. ResourceLoader returned null stream."); } } public boolean createRepositoryFaviconImageFiles(GluuImage image) throws Exception { if (!isIconImage(image)) { return false; } // Generate paths setGeneratedImagePathes(image, null); // Create folders tree createImagePathes(image); // Set source image size image.setWidth(16); image.setHeight(16); byte[] data = image.getData(); FileUtils.writeByteArrayToFile(getThumbFile(image), data); // Save source image FileUtils.writeByteArrayToFile(getSourceFile(image), data); return true; } public boolean isIconImage(GluuImage image) { if (image.getSourceContentType().equals("application/octet-stream")) { image.setSourceContentType(fileTypeMap.getContentType(image.getSourceName())); } return image.getSourceContentType().matches("image/(x-icon|x-ico|jpeg|jpg)"); } }
package sorting.java; import java.util.Arrays; /** * Implementation of a min-heap */ public class Heap { private int SIZE = 1024; private int[] items = new int[SIZE]; private int count = 0; public void add(int val) { count++; if (count >= SIZE) { dynamicExpand(); } items[count] = val; bubbleUp(count); } private void dynamicExpand() { System.out.println(String.format("expansion from %d to %d [#items=%d]", SIZE, SIZE * 2, count)); items = Arrays.copyOf(items, SIZE * 2); SIZE *= 2; } public int min() { return items[1]; } public void pop() { if (count == 0) return; items[1] = items[count]; count bubbleDown(1); } private int parent(int index) { if (index == 1) return -1; return index / 2; } private int childLeft(int index) { return 2 * index; } private int childRight(int index) { return (2 * index) + 1; } private static void swap(int[] items, int x, int y) { int tmp; tmp = items[x]; items[x] = items[y]; items[y] = tmp; } private void bubbleUp(int index) { if (parent(index) == -1) return; // root of the heap if (items[parent(index)] > items[index]) { swap(items, parent(index), index); bubbleUp(parent(index)); } } private void bubbleDown(int index) { int left = childLeft(index); int right = childRight(index); int minIndex = index; // find out who is the smaller item, if the root, the left or the right child if (left <= count && items[minIndex] > items[left]) minIndex = left; if (right <= count && items[minIndex] > items[right]) minIndex = right; // stop bubbling down if the smallest item is the root if (minIndex != index) { swap(items, index, minIndex); bubbleDown(minIndex); } } public static void main(String[] args) { Heap heap; heap = new Heap(); heap.add(1); heap.add(2); heap.add(3); heap.add(4); heap.add(5); test(heap.min() == 1); heap.pop(); test(heap.min() == 2); heap.pop(); test(heap.min() == 3); heap.pop(); test(heap.min() == 4); heap.pop(); test(heap.min() == 5); heap.pop(); heap = new Heap(); heap.add(5); heap.add(4); heap.add(3); heap.add(2); heap.add(1); 
test(heap.min() == 1); heap.pop(); test(heap.min() == 2); heap.pop(); test(heap.min() == 3); heap.pop(); test(heap.min() == 4); heap.pop(); test(heap.min() == 5); heap.pop(); int i; heap = new Heap(); for (i = 10240; i >= 0; i heap.add(i); } for (i = 0; i < 10240; i++) { test(heap.min() == i); heap.pop(); } } public static void test(boolean condition) { // assertions are disabled by default in java, mimic their behaviour here if (!condition) { throw new AssertionError("invalid test"); } } }
package gov.nih.nci.gss.scheduler; import gov.nih.nci.gss.domain.DataService; import gov.nih.nci.gss.domain.DataServiceGroup; import gov.nih.nci.gss.domain.DomainClass; import gov.nih.nci.gss.domain.DomainModel; import gov.nih.nci.gss.domain.GridService; import gov.nih.nci.gss.domain.HostingCenter; import gov.nih.nci.gss.domain.PointOfContact; import gov.nih.nci.gss.grid.DataServiceObjectCounter; import gov.nih.nci.gss.grid.GSSCredentials; import gov.nih.nci.gss.grid.GridAutoDiscoveryException; import gov.nih.nci.gss.grid.GridIndexService; import gov.nih.nci.gss.grid.GridServiceVerifier; import gov.nih.nci.gss.support.LastRefresh; import gov.nih.nci.gss.util.Cab2bAPI; import gov.nih.nci.gss.util.Cab2bTranslator; import gov.nih.nci.gss.util.GSSUtil; import gov.nih.nci.gss.util.GridServiceDAO; import gov.nih.nci.gss.util.NamingUtil; import gov.nih.nci.gss.util.Cab2bAPI.Cab2bService; import gov.nih.nci.system.applicationservice.ApplicationException; import java.net.SocketException; import java.net.SocketTimeoutException; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.apache.log4j.Logger; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.Transaction; import org.hibernate.exception.ConstraintViolationException; public class GridDiscoveryServiceJob { private static Logger logger = Logger.getLogger(GridDiscoveryServiceJob.class); private static final int NUM_QUERY_THREADS = 20; private static final int MAX_COUNT_ERROR_LEN = 5000; private static final int MAX_COUNT_STACKTRACE_LEN = 50000; private static final String STATUS_CHANGE_ACTIVE = "ACTIVE"; private static final String STATUS_CHANGE_INACTIVE = "INACTIVE"; private Cab2bTranslator xlateUtil = null; private NamingUtil namingUtil = null; private 
Map<String,Cab2bService> cab2bServices = null; /** Cache for JSON responses */ private Map cache; private SessionFactory sessionFactory; private Session hibernateSession; public GridDiscoveryServiceJob() { logger.info("Creating GridDiscoveryServiceJob"); } public void setCache(Map cache) { this.cache = cache; } public void setSessionFactory(SessionFactory sessionFactory) { logger.info("Setting session factory: "+sessionFactory); this.sessionFactory = sessionFactory; } public void execute() throws Exception { // Initialize helper classes this.xlateUtil = new Cab2bTranslator(sessionFactory); this.namingUtil = new NamingUtil(sessionFactory); Cab2bAPI cab2bAPI = new Cab2bAPI(xlateUtil); this.cab2bServices = cab2bAPI.getServices(); Map<String,GridService> gridNodes = null; try { logger.info("Logged into Globus: "+GSSCredentials.getCredential()); // Get services from Grid Index Service gridNodes = populateRemoteServices(); } catch (GridAutoDiscoveryException e) { Throwable root = GSSUtil.getRootException(e); if (root instanceof SocketException || root instanceof SocketTimeoutException) { logger.warn("Could not connect to index service."); return; } else { throw e; } } try { hibernateSession = sessionFactory.openSession(); // Merge with our database to get a complete list of all services we know about Map<String,GridService> allServices = mergeWithGss(gridNodes); if (allServices != null) { // Verify accessibility verifyAccessibility(allServices); // Update counts updateCounts(allServices); // Update services as necessary or add new ones saveServices(allServices, gridNodes.size()); // Clear the JSON cache cache.clear(); } } finally { hibernateSession.close(); hibernateSession = null; } } /** * @return List<GridNodeBean> */ private Map<String,GridService> populateRemoteServices() throws GridAutoDiscoveryException { // Build a hash on URL for GridServices HashMap<String,GridService> serviceMap = new HashMap<String,GridService>(); logger.info("Discovering grid services"); // 
auto-discover grid nodes and save in session List<GridService> list = GridIndexService.discoverGridServices(); if (list != null) { for (GridService service : list) { if (serviceMap.containsKey(service.getUrl())) { logger.warn("Index Service returned duplicate service URL: "+ service.getUrl()); } serviceMap.put(service.getUrl(), service); } } return serviceMap; } /** * Merge the services reported by the Index Service with what is * currently in the GSS database. * @param gridNodes * @return */ private HashMap<String,GridService> mergeWithGss(Map<String,GridService> gridNodes) { HashMap<String,GridService> allServices = new HashMap<String,GridService>(); int countNew = 0; int countUpdated = 0; int countInactive = 0; logger.info("Merging service metadata..."); Collection<GridService> currentServices = null; Collection<HostingCenter> currentHosts = null; HashMap<String,GridService> serviceMap = null; HashMap<String,HostingCenter> hostMap = null; try { currentServices = GridServiceDAO.getServices(null,hibernateSession); // Build a hash on URL for GridServices serviceMap = new HashMap<String,GridService>(); for (GridService service : currentServices) { serviceMap.put(service.getUrl(), service); } currentHosts = GridServiceDAO.getHosts(null,hibernateSession); // Build a hash on hosting center long name for HostingCenters hostMap = new HashMap<String,HostingCenter>(); for (HostingCenter host : currentHosts) { hostMap.put(host.getLongName(), host); } } catch (ApplicationException e) { logger.error("Error getting service metadata from GSS database",e); return null; } // Walk the list of gridNodes and update the current services and hosting centers where necessary for (GridService service : gridNodes.values()) { logger.info(" logger.info("Name: "+service.getName()); logger.info("URL: "+service.getUrl()); // Standardize the host long name HostingCenter thisHC = service.getHostingCenter(); String hostLongName = null; if (thisHC != null) { hostLongName = 
namingUtil.getSimpleHostName(thisHC.getLongName()); // The trim is important because MySQL will consider two // strings equal if the only difference is trailing whitespace hostLongName = hostLongName.trim(); if (!thisHC.getLongName().equals(hostLongName)) { logger.info("Host name: "+hostLongName+" (was "+thisHC.getLongName()+")"); thisHC.setLongName(hostLongName); } else { logger.info("Host name: "+thisHC.getLongName()); } // Create persistent identifier based on the long name thisHC.setIdentifier(GSSUtil.generateHostIdentifier(thisHC)); // Hide this host? thisHC.setHiddenDefault(namingUtil.isHidden(thisHC.getLongName())); } // Check to see if the hosting center already exists. if (thisHC != null) { if (hostMap.containsKey(hostLongName)) { HostingCenter matchingHost = hostMap.get(hostLongName); matchingHost = updateHostData(matchingHost, thisHC); service.setHostingCenter(matchingHost); logger.info("Using existing host with id: "+matchingHost.getId()); } else { hostMap.put(hostLongName, thisHC); } } if (serviceMap.containsKey(service.getUrl())) { logger.info("Service already exists, updating..."); countUpdated++; // This service is already in the list of current services GridService matchingSvc = serviceMap.get(service.getUrl()); // Update any new data about this service matchingSvc = updateServiceData(matchingSvc, service); // Check to see if this service is active once again if (STATUS_CHANGE_INACTIVE.equals(matchingSvc.getLastStatus())) { // Service was marked as inactive, need to make it active now service.setLastStatus(createStatus(true)); } allServices.put(matchingSvc.getUrl(),matchingSvc); } else { logger.info("Creating new service..."); countNew++; // Mark this service as published/discovered now. Also, give it a default status change of "up". // TODO: Is there a better "publish date" in the service metadata? 
service.setPublishDate(new Date()); // Set up service simple name and linkage to correct caB2B model group service.setSimpleName(namingUtil.getSimpleServiceName(service.getName())); // Hide some core infrastructure services service.setHiddenDefault(namingUtil.isHidden(service.getName())); // Create a persistent identifier based on the URL service.setIdentifier(GSSUtil.generateServiceIdentifier(service)); if (service instanceof DataService) { DataService dataService = (DataService)service; dataService = updateCab2bData(dataService); } service.setLastStatus(createStatus(true)); allServices.put(service.getUrl(),service); } } // Mark the services we didn't see as inactive for (GridService service : currentServices) { if (!gridNodes.containsKey(service.getUrl())) { countInactive++; logger.info(" logger.info("Name: "+service.getName()); logger.info("URL: "+service.getUrl()); logger.info("Not found in index service metadata."); service.setLastStatus(createStatus(false)); allServices.put(service.getUrl(),service); } } logger.info("Database will be updated as follows:"); logger.info("New services found: "+countNew); logger.info("Existing services updated: "+countUpdated); logger.info("Existing services marked inactive: "+countInactive); return allServices; } private void updateCounts(Map<String,GridService> gridNodes) { ExecutorService parallelExecutor = Executors.newFixedThreadPool(NUM_QUERY_THREADS); List<DataServiceObjectCounter> counters = new ArrayList<DataServiceObjectCounter>(); logger.info("Updating counts..."); for (GridService service : gridNodes.values()) { if (service instanceof DataService) { DataService dataService = (DataService)service; DomainModel model = dataService.getDomainModel(); if (model == null) continue; // clear everything so that there's no stale data if we give up early for(DomainClass domainClass : model.getClasses()) { domainClass.setCount(null); domainClass.setCountDate(null); domainClass.setCountError(null); 
domainClass.setCountStacktrace(null); } // Avoid services which didn't respond to a WSDL query if (!service.getAccessible()) { logger.info("Not attempting to count for inaccessible service: "+ service.getUrl()); continue; } // Avoid caTissues because they don't support count queries // TODO: remove this in the future when caTissue supports counts // if (service.getSimpleName().startsWith("caTissue")) { // logger.info("Not attempting to count for caTissue: "+ // service.getUrl()); // continue; DataServiceObjectCounter counter = new DataServiceObjectCounter(dataService); counters.add(counter); parallelExecutor.submit(counter); } } try { parallelExecutor.shutdown(); logger.info("Awaiting completion of object counting..."); if (!parallelExecutor.awaitTermination(60*60, TimeUnit.SECONDS)) { logger.info("Timed out waiting for counts to finish, disregarding remaining counts."); // timed out, cancel the tasks for(DataServiceObjectCounter counter : counters) { synchronized (counter) { counter.disregard(); } } } logger.info("Object counting completed."); } catch (InterruptedException e) { logger.error("Could not update object counts",e); } } private void verifyAccessibility(Map<String,GridService> gridNodes) { ExecutorService parallelExecutor = Executors.newFixedThreadPool(NUM_QUERY_THREADS); List<GridServiceVerifier> verifiers = new ArrayList<GridServiceVerifier>(); logger.info("Verifying accessibility..."); for (GridService service : gridNodes.values()) { GridServiceVerifier verifier = new GridServiceVerifier(service); verifiers.add(verifier); parallelExecutor.submit(verifier); } try { parallelExecutor.shutdown(); logger.info("Awaiting completion of service verification..."); if (!parallelExecutor.awaitTermination(60*60, TimeUnit.SECONDS)) { logger.info("Timed out waiting for counts to finish, disregarding remaining counts."); // timed out, cancel the tasks for(GridServiceVerifier verifier : verifiers) { synchronized (verifier) { verifier.disregard(); } } } 
logger.info("Service verification completed."); } catch (InterruptedException e) { logger.error("Could not verify services",e); } } /** * Actually save all the changes made to the GSS object model. * @param services * @param numGridNodes */ private void saveServices(Map<String,GridService> services, int numGridNodes) { logger.info("Updating GSS database..."); Transaction tx = null; try { tx = hibernateSession.beginTransaction(); for(GridService service : services.values()) { saveService(service); } // Note that the update completed LastRefresh lastRefresh = GridServiceDAO.getLastRefreshObject(hibernateSession); lastRefresh.setCompletionDate(new Date()); lastRefresh.setNumServices(new Long(numGridNodes)); hibernateSession.save(lastRefresh); logger.info("Commiting changes to GSS database..."); tx.commit(); logger.info("Commit complete."); } catch (Exception e) { if (tx != null) { tx.rollback(); } logger.error("Error updating GSS database",e); } } private void saveService(GridService service) { try { // Domain classes are saved in reverse referencing order // 1) All POCs for (PointOfContact POC : service.getPointOfContacts()) { logger.debug("Saving Service POC "+POC.getName()); POC.setId((Long)hibernateSession.save(POC)); } HostingCenter hc = service.getHostingCenter(); if (hc != null) { for (PointOfContact POC : hc.getPointOfContacts()) { logger.debug("Saving Host POC "+POC.getName()); POC.setId((Long)hibernateSession.save(POC)); } // 2) Hosting Center if (hc.getId() == null) { logger.debug("Saving Host: "+hc.getLongName()); // Hosting center has not been saved yet hc.setId((Long)hibernateSession.save(hc)); } } // 3) Domain Model if (service instanceof DataService) { DomainModel model = ((DataService)service).getDomainModel(); if (model != null) { logger.debug("Saving Domain Model: "+model.getLongName()); model.setId((Long)hibernateSession.save(model)); // 4) Domain Classes logger.debug("Saving "+model.getClasses().size()+" Domain Classes"); for(DomainClass 
domainClass : model.getClasses()) { // truncate values that are too long to fit in the DB if (domainClass.getCountError() != null) { if (domainClass.getCountError().length() > MAX_COUNT_ERROR_LEN) { logger.warn("Truncating long count error for: "+service.getUrl()); domainClass.setCountError( domainClass.getCountError().substring( 0, MAX_COUNT_ERROR_LEN-3)+"..."); } } if (domainClass.getCountStacktrace() != null) { if (domainClass.getCountStacktrace().length() > MAX_COUNT_STACKTRACE_LEN) { logger.warn("Truncating long count stacktrace for: "+service.getUrl()); domainClass.setCountStacktrace( domainClass.getCountStacktrace().substring( 0, MAX_COUNT_STACKTRACE_LEN-3)+"..."); } } domainClass.setId((Long)hibernateSession.save(domainClass)); } } } // 5) Grid Service logger.debug("Saving Service: "+service.getName()); service.setId((Long)hibernateSession.save(service)); } catch (ConstraintViolationException e) { logger.warn("Duplicate object for: " + service.getUrl(),e); } catch (RuntimeException e) { logger.warn("Unable to save GridService",e); } } private String createStatus(Boolean isActive) { return isActive ? 
STATUS_CHANGE_ACTIVE : STATUS_CHANGE_INACTIVE; } private HostingCenter updateHostData(HostingCenter matchingHost, HostingCenter host) { // Copy over data from the new host data // - Do not overwrite: long name (unique key), id (db primary key) matchingHost.setHiddenDefault(host.getHiddenDefault()); matchingHost.setCountryCode(host.getCountryCode()); matchingHost.setLocality(host.getLocality()); matchingHost.setPostalCode(host.getPostalCode()); matchingHost.setShortName(host.getShortName()); matchingHost.setStateProvince(host.getStateProvince()); matchingHost.setStreet(host.getStreet()); return matchingHost; } private DataService updateCab2bData(DataService dataService) { Cab2bService cab2bService = cab2bServices.get(dataService.getUrl()); if (cab2bService != null) { // Translate the caB2B model group to a service group DataServiceGroup group = xlateUtil.getServiceGroupForModelGroup( cab2bService.getModelGroupName()); // Populate service attributes dataService.setGroup(group); dataService.setSearchDefault(cab2bService.isSearchDefault()); if (group == null) { logger.info("Found service in caB2B but could not " + "translate group "+cab2bService.getModelGroupName()); } else { logger.info("Found service in caB2B under group "+ group.getName()+" with searchDefault="+ cab2bService.isSearchDefault()); } } else { dataService.setSearchDefault(false); } return dataService; } private GridService updateServiceData(GridService matchingSvc, GridService service) { // Copy over data from the new service // - Do not overwrite: url (unique keys), id (db primary key), publish date (should stay the original value) matchingSvc.setName(service.getName()); matchingSvc.setSimpleName(namingUtil.getSimpleServiceName(service.getName())); // Hide this service? 
matchingSvc.setHiddenDefault(namingUtil.isHidden(service.getName())); matchingSvc.setVersion(service.getVersion()); matchingSvc.setDescription(service.getDescription()); matchingSvc.setHostingCenter(service.getHostingCenter()); if (matchingSvc instanceof DataService && service instanceof DataService) { DataService dataService = (DataService)service; DataService matchingDataSvc = (DataService)matchingSvc; // We are consciously overwriting things here that likely will not change, // since they are based on the URL, which is guaranteed to be the same if we // call this function. However, on the off chance that the DB lookup tables or // caB2B content has changed, we need to overwrite here to be sure. updateCab2bData(matchingDataSvc); // Update domain model DomainModel model = dataService.getDomainModel(); DomainModel matchingModel = matchingDataSvc.getDomainModel(); if (matchingModel == null) { logger.warn("Existing data service has no model: "+service.getUrl()); matchingDataSvc.setDomainModel(model); return matchingSvc; } if (model == null) { logger.warn("Data service has no model: "+service.getUrl()); return matchingSvc; } matchingModel.setDescription(model.getDescription()); matchingModel.setLongName(model.getLongName()); matchingModel.setVersion(model.getVersion()); Map<String,DomainClass> existingClasses = new HashMap<String,DomainClass>(); for(DomainClass domainClass : matchingModel.getClasses()) { String fullClass = domainClass.getDomainPackage()+"."+domainClass.getClassName(); existingClasses.put(fullClass,domainClass); logger.debug(" Existing class: "+fullClass); } for(DomainClass domainClass : model.getClasses()) { String fullClass = domainClass.getDomainPackage()+"."+domainClass.getClassName(); if (existingClasses.containsKey(fullClass)) { DomainClass matchingClass = existingClasses.get(fullClass); matchingClass.setDescription(domainClass.getDescription()); } else { logger.debug(" New class: "+fullClass); matchingModel.getClasses().add(domainClass); } } // 
TODO: handle domain class deletions } return matchingSvc; } }
package com.intellij.ide.favoritesTreeView;

import com.intellij.ide.projectView.BaseProjectTreeBuilder;
import com.intellij.ide.projectView.ProjectViewPsiTreeChangeListener;
import com.intellij.ide.projectView.impl.ModuleGroup;
import com.intellij.ide.projectView.impl.ProjectAbstractTreeStructureBase;
import com.intellij.ide.projectView.impl.nodes.Form;
import com.intellij.ide.projectView.impl.nodes.LibraryGroupElement;
import com.intellij.ide.projectView.impl.nodes.NamedLibraryElement;
import com.intellij.ide.projectView.impl.nodes.PackageElement;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.AbstractTreeUpdater;
import com.intellij.ide.util.treeView.NodeDescriptor;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.roots.ModuleRootListener;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vcs.FileStatusListener;
import com.intellij.openapi.vcs.FileStatusManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import java.awt.datatransfer.Transferable;
import java.util.Comparator;
import java.util.Set;

/**
 * Tree builder for the "Favorites" tool-window view. Sorts favorite nodes by a
 * fixed type-based weight, and subscribes to PSI, module-root, file-status and
 * copy/paste events so the tree is refreshed whenever the underlying model
 * changes. All listeners registered in the constructor are unregistered in
 * {@link #dispose()}.
 */
public class FavoritesViewTreeBuilder extends BaseProjectTreeBuilder {
  // Listeners registered in the constructor; kept in fields so dispose() can
  // remove exactly the instances that were added.
  private ProjectViewPsiTreeChangeListener myPsiTreeChangeListener;
  private final PsiTreeChangeListener myPsiTreeChangeAdapter;
  private ModuleRootListener myModuleRootListener;
  private FileStatusListener myFileStatusListener;
  private MyCopyPasteListener myCopyPasteListener;

  public FavoritesViewTreeBuilder(Project project, JTree tree, DefaultTreeModel treeModel, ProjectAbstractTreeStructureBase treeStructure) {
    super(project, tree, treeModel, treeStructure, null);
    // Order nodes by element kind first (modules before packages before files,
    // etc.), then by display text, then by location, both case-insensitively.
    setNodeDescriptorComparator(new Comparator<NodeDescriptor>(){
      /**
       * Maps a descriptor's value to a sort weight; smaller weights sort first.
       * Smart pointers are dereferenced so the pointed-to element decides the
       * weight. Note the value 3 is unused and unknown kinds get 12 (last).
       */
      private int getWeight(NodeDescriptor descriptor) {
        FavoritesTreeNodeDescriptor favoritesTreeNodeDescriptor = (FavoritesTreeNodeDescriptor)descriptor;
        Object value = favoritesTreeNodeDescriptor.getElement().getValue();
        if (value instanceof SmartPsiElementPointer){
          value = ((SmartPsiElementPointer)value).getElement();
        }
        if (value instanceof ModuleGroup){
          return 0;
        }
        if (value instanceof Module){
          return 1;
        }
        if (value instanceof PsiDirectory || value instanceof PackageElement){
          return 2;
        }
        if (value instanceof PsiFile){
          return 4;
        }
        if (value instanceof PsiClass){
          return 5;
        }
        if (value instanceof PsiField){
          return 6;
        }
        if (value instanceof PsiMethod){
          return 7;
        }
        // Must come after the more specific PsiClass/PsiField/PsiMethod checks.
        if (value instanceof PsiElement){
          return 8;
        }
        if (value instanceof Form){
          return 9;
        }
        if (value instanceof LibraryGroupElement){
          return 10;
        }
        if (value instanceof NamedLibraryElement){
          return 11;
        }
        return 12;
      }

      public int compare(NodeDescriptor nd1, NodeDescriptor nd2) {
        if (nd1 instanceof FavoritesTreeNodeDescriptor && nd2 instanceof FavoritesTreeNodeDescriptor){
          FavoritesTreeNodeDescriptor fd1 = (FavoritesTreeNodeDescriptor)nd1;
          FavoritesTreeNodeDescriptor fd2 = (FavoritesTreeNodeDescriptor)nd2;
          int weight1 = getWeight(fd1);
          int weight2 = getWeight(fd2);
          if (weight1 != weight2) {
            return weight1 - weight2;
          }
          // Same kind: compare display strings; nulls sort before non-nulls.
          String s1 = fd1.toString();
          String s2 = fd2.toString();
          if (s1 == null) return s2 == null ? 0 : -1;
          if (s2 == null) return +1;
          if (!s1.equals(s2)) {
            return s1.compareToIgnoreCase(s2);
          }
          else {
            // Identical display text: fall back to location string.
            s1 = fd1.getLocation();
            s2 = fd2.getLocation();
            if (s1 == null) return s2 == null ? 0 : -1;
            if (s2 == null) return +1;
            return s1.compareToIgnoreCase(s2);
          }
        }
        // Non-favorites descriptors are considered equal.
        return 0;
      }
    });
    myPsiTreeChangeListener = new ProjectViewPsiTreeChangeListener() {
      protected DefaultMutableTreeNode getRootNode() {
        return myRootNode;
      }

      protected AbstractTreeUpdater getUpdater() {
        return myUpdater;
      }

      protected boolean isFlattenPackages() {
        return ((FavoritesTreeStructure)myTreeStructure).getFavoritesConfiguration().IS_FLATTEN_PACKAGES;
      }
    };
    myModuleRootListener = new ModuleRootListener() {
      public void beforeRootsChange(ModuleRootEvent event) {
      }

      public void rootsChanged(ModuleRootEvent event) {
        // Project roots changed: rebuild the whole tree.
        myUpdater.addSubtreeToUpdate(myRootNode);
      }
    };
    // Register both PSI listeners: the smart project-view one and a blunt
    // adapter that refreshes the whole tree on any PSI event.
    PsiManager.getInstance(myProject).addPsiTreeChangeListener(myPsiTreeChangeListener);
    myPsiTreeChangeAdapter = new MyPsiTreeChangeListener();
    PsiManager.getInstance(myProject).addPsiTreeChangeListener(myPsiTreeChangeAdapter);
    ProjectRootManager.getInstance(myProject).addModuleRootListener(myModuleRootListener);
    myFileStatusListener = new MyFileStatusListener();
    FileStatusManager.getInstance(myProject).addFileStatusListener(myFileStatusListener);
    myCopyPasteListener = new MyCopyPasteListener();
    CopyPasteManager.getInstance().addContentChangedListener(myCopyPasteListener);
    initRootNode();
  }

  /** Cancels any queued updates before rebuilding from the root. */
  public void updateFromRoot() {
    myUpdater.cancelAllRequests();
    super.updateFromRoot();
  }

  /** Schedules an asynchronous refresh of the whole tree. */
  public void updateTree() {
    myUpdater.addSubtreeToUpdate(myRootNode);
  }

  /** Schedules a refresh and runs the given callback once it completes. */
  public void updateTree(Runnable runAferUpdate) {
    myUpdater.runAfterUpdate(runAferUpdate);
    updateTree();
  }

  /**
   * Tries to select the favorite matching {@code element} among the first-level
   * nodes; falls back to the generic project-tree selection if none matches.
   * Smart pointers are dereferenced, and for a Java file the first contained
   * class is used as the comparison value.
   */
  public void select(Object element, VirtualFile file, boolean requestFocus) {
    final Set<AbstractTreeNode> favorites = ((FavoritesTreeStructure)getTreeStructure()).getFavorites();
    for (AbstractTreeNode favorite : favorites) {
      Object currentValue = favorite.getValue();
      if (favorite.getValue() instanceof SmartPsiElementPointer){
        currentValue = ((SmartPsiElementPointer)favorite.getValue()).getElement();
      }
      else if (currentValue instanceof PsiJavaFile) {
        final PsiClass[] classes = ((PsiJavaFile)currentValue).getClasses();
        if (classes.length > 0) {
          currentValue = classes[0];
        }
      }
      if (Comparing.equal(element, currentValue)){
        final DefaultMutableTreeNode nodeWithObject = findFirstLevelNodeWithObject((DefaultMutableTreeNode)getTree().getModel().getRoot(), favorite);
        if (nodeWithObject != null){
          TreeUtil.selectInTree(nodeWithObject, requestFocus, getTree());
          return;
        }
      }
    }
    super.select(element, file, requestFocus);
  }

  /**
   * Returns the direct child of {@code aRoot} whose descriptor element equals
   * {@code aObject}, or null if there is none. Only the first level is scanned.
   */
  @Nullable
  private static DefaultMutableTreeNode findFirstLevelNodeWithObject(final DefaultMutableTreeNode aRoot, final Object aObject) {
    for (int i = 0; i < aRoot.getChildCount(); i++) {
      final DefaultMutableTreeNode child = (DefaultMutableTreeNode)aRoot.getChildAt(i);
      Object userObject = child.getUserObject();
      if (userObject instanceof FavoritesTreeNodeDescriptor) {
        if (Comparing.equal(((FavoritesTreeNodeDescriptor)userObject).getElement(), aObject)) {
          return child;
        }
      }
    }
    return null;
  }

  /** Unregisters every listener added in the constructor. */
  public final void dispose() {
    super.dispose();
    PsiManager.getInstance(myProject).removePsiTreeChangeListener(myPsiTreeChangeListener);
    PsiManager.getInstance(myProject).removePsiTreeChangeListener(myPsiTreeChangeAdapter);
    ProjectRootManager.getInstance(myProject).removeModuleRootListener(myModuleRootListener);
    FileStatusManager.getInstance(myProject).removeFileStatusListener(myFileStatusListener);
    CopyPasteManager.getInstance().removeContentChangedListener(myCopyPasteListener);
  }

  /** Shows the expand handle only for nodes that actually have children. */
  protected boolean isAlwaysShowPlus(NodeDescriptor nodeDescriptor) {
    final Object[] childElements = myTreeStructure.getChildElements(nodeDescriptor);
    return childElements != null ? childElements.length > 0 : false;
  }

  /** Auto-expands only the (invisible) root node. */
  protected boolean isAutoExpandNode(NodeDescriptor nodeDescriptor) {
    return nodeDescriptor.getParentDescriptor() == null;
  }

  /** Refreshes affected subtrees when VCS file statuses change. */
  private final class MyFileStatusListener implements FileStatusListener {
    public void fileStatusesChanged() {
      myUpdater.addSubtreeToUpdate(myRootNode);
    }

    public void fileStatusChanged(VirtualFile vFile) {
      PsiElement element;
      PsiManager psiManager = PsiManager.getInstance(myProject);
      if (vFile.isDirectory()) {
        element = psiManager.findDirectory(vFile);
      }
      else {
        element = psiManager.findFile(vFile);
      }
      // If the file itself has no node in this tree, fall back to updating its
      // containing directory (for Java files only).
      if (!myUpdater.addSubtreeToUpdateByElement(element) && element instanceof PsiJavaFile) {
        myUpdater.addSubtreeToUpdateByElement(((PsiJavaFile)element).getContainingDirectory());
      }
    }
  }

  /** Blunt PSI listener: any PSI change refreshes the whole tree. */
  private final class MyPsiTreeChangeListener extends PsiTreeChangeAdapter {
    public final void childAdded(final PsiTreeChangeEvent event) {
      myUpdater.addSubtreeToUpdate(myRootNode);
    }

    public final void childRemoved(final PsiTreeChangeEvent event) {
      myUpdater.addSubtreeToUpdate(myRootNode);
    }

    public final void childReplaced(final PsiTreeChangeEvent event) {
      myUpdater.addSubtreeToUpdate(myRootNode);
    }

    public final void childMoved(final PsiTreeChangeEvent event) {
      myUpdater.addSubtreeToUpdate(myRootNode);
    }

    public final void childrenChanged(final PsiTreeChangeEvent event) {
      myUpdater.addSubtreeToUpdate(myRootNode);
    }

    public final void propertyChanged(final PsiTreeChangeEvent event) {
      myUpdater.addSubtreeToUpdate(myRootNode);
    }
  }

  /** Refreshes the tree when clipboard content changes (cut/copy marks). */
  private final class MyCopyPasteListener implements CopyPasteManager.ContentChangedListener {
    public void contentChanged(final Transferable oldTransferable, final Transferable newTransferable) {
      myUpdater.addSubtreeToUpdate(myRootNode);
    }
  }
}
package com.intellij.refactoring.move.moveMembers;

import com.intellij.ide.util.PackageUtil;
import com.intellij.ide.util.TreeClassChooser;
import com.intellij.ide.util.TreeClassChooserFactory;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.RefactoringSettings;
import com.intellij.refactoring.move.MoveCallback;
import com.intellij.refactoring.ui.MemberSelectionTable;
import com.intellij.refactoring.ui.RefactoringDialog;
import com.intellij.refactoring.ui.VisibilityPanel;
import com.intellij.refactoring.util.RefactoringMessageUtil;
import com.intellij.refactoring.util.classMembers.MemberInfo;
import com.intellij.refactoring.util.classMembers.MemberInfoChange;
import com.intellij.refactoring.util.classMembers.UsesAndInterfacesDependencyMemberInfoModel;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.ReferenceEditorWithBrowseButton;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.util.IncorrectOperationException;

import javax.swing.*;
import javax.swing.border.Border;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Set;

/**
 * Dialog for the "Move Members" refactoring: lets the user pick STATIC members
 * of a source class, a destination class (created on demand if missing) and a
 * target visibility, then launches a {@link MoveMembersProcessor}.
 */
public class MoveMembersDialog extends RefactoringDialog implements MoveMembersOptions {
  private MyMemberInfoModel myMemberInfoModel;

  private Project myProject;
  private PsiClass mySourceClass;
  private String mySourceClassName;
  // One entry per movable (static) member of the source class.
  private MemberInfo[] myMemberInfos;
  private final ReferenceEditorWithBrowseButton myTfTargetClassName;
  private MemberSelectionTable myTable;
  private Set<PsiMember> myPreselectMembers;
  private final MoveCallback myMoveCallback;

  VisibilityPanel myVisibilityPanel;

  /**
   * @param initialTargetClass pre-fills the destination field; ignored when it
   *                           equals the source class
   * @param preselectMembers   members whose checkboxes start checked
   */
  public MoveMembersDialog(Project project, PsiClass sourceClass, final PsiClass initialTargetClass,
                           Set<PsiMember> preselectMembers, MoveCallback moveCallback) {
    super(project, true);
    myProject = project;
    mySourceClass = sourceClass;
    myPreselectMembers = preselectMembers;
    myMoveCallback = moveCallback;
    setTitle(MoveMembersImpl.REFACTORING_NAME);

    mySourceClassName = mySourceClass.getQualifiedName();

    // Collect movable members: static inner classes, static fields, static
    // methods — in that display order.
    PsiField[] fields = mySourceClass.getFields();
    PsiMethod[] methods = mySourceClass.getMethods();
    PsiClass[] innerClasses = mySourceClass.getInnerClasses();
    ArrayList<MemberInfo> memberList = new ArrayList<MemberInfo>(fields.length + methods.length);

    for (int idx = 0; idx < innerClasses.length; idx++) {
      PsiClass innerClass = innerClasses[idx];
      if (!innerClass.hasModifierProperty(PsiModifier.STATIC)) continue;
      MemberInfo info = new MemberInfo(innerClass);
      if (myPreselectMembers.contains(innerClass)) {
        info.setChecked(true);
      }
      memberList.add(info);
    }
    for (int idx = 0; idx < fields.length; idx++) {
      PsiField field = fields[idx];
      if (field.hasModifierProperty(PsiModifier.STATIC)) {
        MemberInfo info = new MemberInfo(field);
        if (myPreselectMembers.contains(field)) {
          info.setChecked(true);
        }
        memberList.add(info);
      }
    }
    for (int idx = 0; idx < methods.length; idx++) {
      PsiMethod method = methods[idx];
      if (method.hasModifierProperty(PsiModifier.STATIC)) {
        MemberInfo info = new MemberInfo(method);
        if (myPreselectMembers.contains(method)) {
          info.setChecked(true);
        }
        memberList.add(info);
      }
    }
    myMemberInfos = memberList.toArray(new MemberInfo[memberList.size()]);
    String fqName = initialTargetClass != null && !sourceClass.equals(initialTargetClass) ? initialTargetClass.getQualifiedName() : "";
    myTfTargetClassName = new ReferenceEditorWithBrowseButton(new ChooseClassAction(), fqName, PsiManager.getInstance(myProject), true);

    init();
  }

  /** Visibility chosen in the panel; null means "as is". */
  public String getMemberVisibility() {
    return myVisibilityPanel.getVisibility();
  }

  protected String getDimensionServiceKey() {
    return "#com.intellij.refactoring.move.moveMembers.MoveMembersDialog";
  }

  /** Builds the member-selection table and wires its info model. */
  private JTable createTable() {
    myMemberInfoModel = new MyMemberInfoModel();
    myTable = new MemberSelectionTable(myMemberInfos, null);
    myTable.setMemberInfoModel(myMemberInfoModel);
    myTable.addMemberInfoChangeListener(myMemberInfoModel);
    myMemberInfoModel.memberInfoChanged(new MemberInfoChange(myMemberInfos));
    return myTable;
  }

  /** Top area: read-only source class name plus editable destination field. */
  protected JComponent createNorthPanel() {
    JPanel panel = new JPanel(new BorderLayout());

    JPanel _panel;
    Box box = Box.createVerticalBox();

    _panel = new JPanel(new BorderLayout());
    JTextField sourceClassField = new JTextField();
    sourceClassField.setText(mySourceClassName);
    sourceClassField.setEditable(false);
    _panel.add(new JLabel("Move members from:"), BorderLayout.NORTH);
    _panel.add(sourceClassField, BorderLayout.CENTER);
    box.add(_panel);

    box.add(Box.createVerticalStrut(10));

    _panel = new JPanel(new BorderLayout());
    JLabel label = new JLabel("To (fully qualified name):");
    label.setLabelFor(myTfTargetClassName);
    _panel.add(label, BorderLayout.NORTH);
    _panel.add(myTfTargetClassName, BorderLayout.CENTER);
    box.add(_panel);

    // Re-resolve the target class on every keystroke so member enablement
    // (e.g. "no methods into interfaces") stays current.
    myTfTargetClassName.getEditorTextField().getDocument().addDocumentListener(new com.intellij.openapi.editor.event.DocumentAdapter() {
      public void documentChanged(com.intellij.openapi.editor.event.DocumentEvent e) {
        myMemberInfoModel.updateTargetClass();
      }
    });

    panel.add(box, BorderLayout.CENTER);
    panel.add(Box.createVerticalStrut(10), BorderLayout.SOUTH);
    return panel;
  }

  /** Center area: member table (first row preselected) plus visibility panel. */
  protected JComponent createCenterPanel() {
    JPanel panel = new JPanel(new BorderLayout());
    JTable table = createTable();
    if (table.getRowCount() > 0) {
      table.getSelectionModel().addSelectionInterval(0, 0);
    }
    JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(table);
    Border titledBorder = IdeBorderFactory.createTitledBorder("Members to be moved (static only)");
    Border emptyBorder = BorderFactory.createEmptyBorder(0, 5, 5, 5);
    Border border = BorderFactory.createCompoundBorder(titledBorder, emptyBorder);
    scrollPane.setBorder(border);
    panel.add(scrollPane, BorderLayout.CENTER);

    myVisibilityPanel = new VisibilityPanel(true);
    myVisibilityPanel.setVisibility(null);
    panel.add(myVisibilityPanel, BorderLayout.EAST);

    return panel;
  }

  public JComponent getPreferredFocusedComponent() {
    return myTfTargetClassName.getEditorTextField();
  }

  /** Members whose checkboxes are currently checked in the table. */
  public PsiMember[] getSelectedMembers() {
    final MemberInfo[] selectedMemberInfos = myTable.getSelectedMemberInfos();
    ArrayList<PsiMember> list = new ArrayList<PsiMember>();
    for (int i = 0; i < selectedMemberInfos.length; i++) {
      list.add(selectedMemberInfos[i].getMember());
    }
    return list.toArray(new PsiMember[list.size()]);
  }

  /** Trimmed destination class FQ-name as typed by the user. */
  public String getTargetClassName() {
    return myTfTargetClassName.getText().trim();
  }

  /**
   * Validates input, then runs the move. validateInputData() returns null on
   * success, "" for an already-reported problem, or an error message to show.
   */
  protected void doAction() {
    String message = validateInputData();

    if (message != null) {
      if (message.length() != 0) {
        RefactoringMessageUtil.showErrorMessage(
                MoveMembersImpl.REFACTORING_NAME,
                message,
                HelpID.MOVE_MEMBERS,
                myProject);
      }
      return;
    }

    // The processor pulls its options live from this dialog.
    invokeRefactoring(new MoveMembersProcessor(getProject(), myMoveCallback, new MoveMembersOptions() {
      public String getMemberVisibility() {
        return MoveMembersDialog.this.getMemberVisibility();
      }

      public PsiMember[] getSelectedMembers() {
        return MoveMembersDialog.this.getSelectedMembers();
      }

      public String getTargetClassName() {
        return MoveMembersDialog.this.getTargetClassName();
      }
    }));

    RefactoringSettings.getInstance().MOVE_PREVIEW_USAGES = isPreviewUsages();
  }

  protected boolean areButtonsValid() {
    return getTargetClassName().length() > 0;
  }

  /**
   * Checks the destination name, resolving or creating the target class.
   *
   * @return null when the move may proceed; "" when a problem was already
   *         reported (or the user cancelled); otherwise a message to display
   */
  private String validateInputData() {
    final PsiManager manager = PsiManager.getInstance(myProject);
    final String fqName = getTargetClassName();
    if ("".equals(fqName)) {
      return "No destination class specified";
    }
    else if (!manager.getNameHelper().isQualifiedName(fqName)) {
      return "'" + fqName + "' is not a legal FQ-name";
    }
    else {
      final PsiClass[] targetClass = new PsiClass[]{null};
      // Wrapped in a command so class creation is a single undoable step.
      CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
        public void run() {
          try {
            targetClass[0] = findOrCreateTargetClass(manager, fqName);
          }
          catch (IncorrectOperationException e) {
            RefactoringMessageUtil.showErrorMessage(
                    MoveMembersImpl.REFACTORING_NAME,
                    e.getMessage(),
                    HelpID.MOVE_MEMBERS,
                    myProject);
          }
        }
      }, "Create class " + fqName, null);

      if (targetClass[0] == null) {
        return "";
      }

      if (mySourceClass.equals(targetClass[0])) {
        return "Source and destination classes should be different";
      }
      else {
        // A checked inner class must not contain the destination class.
        for (int i = 0; i < myMemberInfos.length; i++) {
          MemberInfo info = myMemberInfos[i];
          if (!info.isChecked()) continue;
          if (PsiTreeUtil.isAncestor(info.getMember(), targetClass[0], false)) {
            return "Cannot move inner class " + info.getDisplayName() + " into itself.";
          }
        }

        if (!targetClass[0].isWritable()) {
          // checkReadOnlyStatus shows its own dialog; either way the problem
          // has been reported, so return "" (silent failure) in both branches.
          if (!RefactoringMessageUtil.checkReadOnlyStatus(myProject, targetClass[0])) return "";
          return "";
          // return "Cannot perform the refactoring.\nDestination class " + targetClass[0].getQualifiedName() + " is read-only.";
        }
        return null;
      }
    }
  }

  /**
   * Resolves {@code fqName} in the project scope; if absent, asks the user and
   * creates the class (and package directories) on confirmation.
   *
   * @return the class, or null when the user declined / directory unavailable
   * @throws IncorrectOperationException if class creation fails
   */
  private PsiClass findOrCreateTargetClass(final PsiManager manager, final String fqName) throws IncorrectOperationException {
    final String className;
    final String packageName;
    int dotIndex = fqName.lastIndexOf('.');
    if (dotIndex >= 0) {
      packageName = fqName.substring(0, dotIndex);
      className = (dotIndex + 1 < fqName.length())? fqName.substring(dotIndex + 1) : "";
    }
    else {
      packageName = "";
      className = fqName;
    }

    PsiClass aClass = manager.findClass(fqName, GlobalSearchScope.projectScope(myProject));
    if (aClass != null) return aClass;

    final PsiDirectory directory = PackageUtil.findOrCreateDirectoryForPackage(
                myProject,
                packageName,
                mySourceClass.getContainingFile().getContainingDirectory(),
                true);

    if (directory == null) {
      return null;
    }

    int answer = Messages.showYesNoDialog(
            myProject,
            "Class " + fqName + " does not exist.\nDo you want to create it?",
            MoveMembersImpl.REFACTORING_NAME,
            Messages.getQuestionIcon()
    );
    // 0 == "Yes"; anything else aborts.
    if (answer != 0) return null;

    // Capture any creation failure from inside the write action and rethrow it
    // on the caller's side.
    final Ref<IncorrectOperationException> eRef = new Ref<IncorrectOperationException>();
    final PsiClass newClass = ApplicationManager.getApplication().runWriteAction(new Computable<PsiClass>() {
      public PsiClass compute() {
        try {
          return directory.createClass(className);
        }
        catch (IncorrectOperationException e) {
          eRef.set(e);
          return null;
        }
      }
    });
    if (!eRef.isNull()) throw eRef.get();
    return newClass;
  }

  protected void doHelpAction() {
    HelpManager.getInstance().invokeHelp(HelpID.MOVE_MEMBERS);
  }

  /** Browse button: tree chooser restricted to top-level and static classes. */
  private class ChooseClassAction implements ActionListener {
    public void actionPerformed(ActionEvent e) {
      TreeClassChooser chooser = TreeClassChooserFactory.getInstance(myProject).createWithInnerClassesScopeChooser("Choose Destination Class", GlobalSearchScope.projectScope(myProject), new TreeClassChooser.ClassFilter() {
        public boolean isAccepted(PsiClass aClass) {
          return aClass.getParent() instanceof PsiJavaFile || aClass.hasModifierProperty(PsiModifier.STATIC);
        }
      }, null);
      chooser.selectDirectory(mySourceClass.getContainingFile().getContainingDirectory());
      chooser.showDialog();
      PsiClass aClass = chooser.getSelectedClass();
      if (aClass != null) {
        myTfTargetClassName.setText(aClass.getQualifiedName());
        myMemberInfoModel.updateTargetClass();
      }
    }
  }

  /** Member-info model that disables moving methods into an interface. */
  private class MyMemberInfoModel extends UsesAndInterfacesDependencyMemberInfoModel {
    // Currently resolved destination class; null while the field text does not
    // resolve. Refreshed by updateTargetClass().
    PsiClass myTargetClass = null;

    public MyMemberInfoModel() {
      super(mySourceClass, null, false, DEFAULT_CONTAINMENT_VERIFIER);
    }

    public Boolean isFixedAbstract(MemberInfo member) {
      return null;
    }

    public boolean isCheckedWhenDisabled(MemberInfo member) {
      return false;
    }

    public boolean isMemberEnabled(MemberInfo member) {
      if(myTargetClass != null && myTargetClass.isInterface()) {
        // Interfaces cannot receive (static) methods here.
        return !(member.getMember() instanceof PsiMethod);
      }
      return super.isMemberEnabled(member);
    }

    public void updateTargetClass() {
      final PsiManager manager = PsiManager.getInstance(myProject);
      myTargetClass = manager.findClass(getTargetClassName(), GlobalSearchScope.projectScope(myProject));
      myTable.fireExternalDataChange();
    }
  }
}
package ai.h2o.automl; import water.H2O; import water.Job; import water.Key; public class H2OJob { protected final H2ORunnable _target; protected final Key _key; protected Key<Job> _jobKey; public H2OJob(H2ORunnable runnable) { _target=runnable; _key=Key.make(); } public H2OJob(H2ORunnable runnable, Key k) { _target=runnable; _key=k; } public Job start() { Job j = new Job<>(_key,_target.getClass().getName(), _target.getClass().getName()); _jobKey=j._key; return j.start(new H2O.H2OCountedCompleter() { @Override public void compute2() { _target.run(); tryComplete(); } },1); } public void stop() { _jobKey.get().stop(); } }
package com.jme3.util;

import com.jme3.math.ColorRGBA;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;
import java.util.WeakHashMap;

/**
 * <code>BufferUtils</code> is a helper class for generating nio buffers from
 * jME data classes such as Vectors and ColorRGBA.
 *
 * @author Joshua Slack
 * @version $Id: BufferUtils.java,v 1.16 2007/10/29 16:56:18 nca Exp $
 */
public final class BufferUtils {

    // NOTE(review): a WeakHashMap is a poor fit here — Buffer.hashCode()/equals()
    // depend on buffer CONTENTS, so two empty buffers of the same size collide and
    // put() silently replaces the previous entry, making any tracked-memory numbers
    // unreliable. Hashing also walks the whole buffer on every put. A queue of weak
    // references (e.g. ConcurrentLinkedQueue<WeakReference<Buffer>>) would be both
    // correct and cheaper. Kept as-is to preserve existing behavior.
    private static final Map<Buffer, Object> trackingHash =
            Collections.synchronizedMap(new WeakHashMap<Buffer, Object>());
    private static final Object ref = new Object();

    // Compile-time switch for direct-memory tracking; disabled by default.
    private static final boolean trackDirectMemory = false;

    /**
     * Creates a clone of the given buffer. The clone's capacity is
     * equal to the given buffer's limit.
     *
     * @param buf the buffer to clone
     * @return the cloned buffer
     * @throws UnsupportedOperationException for unrecognized buffer subtypes
     */
    public static Buffer clone(Buffer buf) {
        if (buf instanceof FloatBuffer) {
            return clone((FloatBuffer) buf);
        } else if (buf instanceof ShortBuffer) {
            return clone((ShortBuffer) buf);
        } else if (buf instanceof ByteBuffer) {
            return clone((ByteBuffer) buf);
        } else if (buf instanceof IntBuffer) {
            return clone((IntBuffer) buf);
        } else if (buf instanceof DoubleBuffer) {
            return clone((DoubleBuffer) buf);
        } else {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Records a freshly allocated direct buffer in the tracking map when
     * {@link #trackDirectMemory} is enabled. (A large block of commented-out
     * stack-trace debug code was removed here; see VCS history if needed.)
     */
    private static void onBufferAllocated(Buffer buffer) {
        if (trackDirectMemory) {
            trackingHash.put(buffer, ref);
        }
    }

    /**
     * Generate a new FloatBuffer using the given array of Vector3f objects.
     * The FloatBuffer will be 3 * data.length long and contain the vector data
     * as data[0].x, data[0].y, data[0].z, data[1].x... etc. Null elements are
     * written as (0, 0, 0).
     *
     * @param data array of Vector3f objects to place into a new FloatBuffer
     * @return the new buffer, flipped and ready for reading, or null if data is null
     */
    public static FloatBuffer createFloatBuffer(Vector3f... data) {
        if (data == null) {
            return null;
        }
        FloatBuffer buff = createFloatBuffer(3 * data.length);
        for (int x = 0; x < data.length; x++) {
            if (data[x] != null) {
                buff.put(data[x].x).put(data[x].y).put(data[x].z);
            } else {
                buff.put(0).put(0).put(0);
            }
        }
        buff.flip();
        return buff;
    }

    /**
     * Generate a new FloatBuffer using the given array of Quaternion objects.
     * The FloatBuffer will be 4 * data.length long and contain the quaternion
     * data as x, y, z, w per element. Null elements are written as (0, 0, 0, 0).
     *
     * @param data array of Quaternion objects to place into a new FloatBuffer
     * @return the new buffer, flipped and ready for reading, or null if data is null
     */
    public static FloatBuffer createFloatBuffer(Quaternion... data) {
        if (data == null) {
            return null;
        }
        FloatBuffer buff = createFloatBuffer(4 * data.length);
        for (int x = 0; x < data.length; x++) {
            if (data[x] != null) {
                buff.put(data[x].getX()).put(data[x].getY()).put(data[x].getZ()).put(data[x].getW());
            } else {
                // BUG FIX: a null quaternion must still occupy 4 floats so that
                // subsequent elements stay aligned; the original wrote only 3 zeros,
                // shifting every following quaternion by one float.
                buff.put(0).put(0).put(0).put(0);
            }
        }
        buff.flip();
        return buff;
    }

    /**
     * Generate a new FloatBuffer using the given array of float primitives.
     *
     * @param data array of float primitives to place into a new FloatBuffer
     * @return the new buffer, flipped and ready for reading, or null if data is null
     */
    public static FloatBuffer createFloatBuffer(float... data) {
        if (data == null) {
            return null;
        }
        FloatBuffer buff = createFloatBuffer(data.length);
        buff.clear();
        buff.put(data);
        buff.flip();
        return buff;
    }

    /**
     * Create a new FloatBuffer of an appropriate size to hold the specified
     * number of Vector3f object data.
     *
     * @param vertices number of vertices that need to be held by the newly
     *                 created buffer
     * @return the requested new FloatBuffer
     */
    public static FloatBuffer createVector3Buffer(int vertices) {
        FloatBuffer vBuff = createFloatBuffer(3 * vertices);
        return vBuff;
    }

    /**
     * Create a new FloatBuffer of an appropriate size to hold the specified
     * number of Vector3f object data only if the given buffer is not already
     * the right size.
     *
     * @param buf      the buffer to first check and rewind
     * @param vertices number of vertices that need to be held by the newly
     *                 created buffer
     * @return the requested new FloatBuffer
     */
    public static FloatBuffer createVector3Buffer(FloatBuffer buf, int vertices) {
        if (buf != null && buf.limit() == 3 * vertices) {
            buf.rewind();
            return buf;
        }
        return createFloatBuffer(3 * vertices);
    }

    /**
     * Sets the data contained in the given color into the FloatBuffer at the
     * specified index.
     *
     * @param color the data to insert
     * @param buf   the buffer to insert into
     * @param index the position to place the data; in terms of colors not floats
     */
    public static void setInBuffer(ColorRGBA color, FloatBuffer buf, int index) {
        buf.position(index * 4);
        buf.put(color.r);
        buf.put(color.g);
        buf.put(color.b);
        buf.put(color.a);
    }

    /**
     * Sets the data contained in the given quaternion into the FloatBuffer at
     * the specified index.
     *
     * @param quat  the data to insert (doc fix: was misnamed {@code color})
     * @param buf   the buffer to insert into
     * @param index the position to place the data; in terms of quaternions not floats
     */
    public static void setInBuffer(Quaternion quat, FloatBuffer buf, int index) {
        buf.position(index * 4);
        buf.put(quat.getX());
        buf.put(quat.getY());
        buf.put(quat.getZ());
        buf.put(quat.getW());
    }

    /**
     * Sets the data contained in the given Vector3f into the FloatBuffer at the
     * specified index. A null vector writes (0, 0, 0); a null buffer is a no-op.
     *
     * @param vector the data to insert
     * @param buf    the buffer to insert into
     * @param index  the position to place the data; in terms of vectors not floats
     */
    public static void setInBuffer(Vector3f vector, FloatBuffer buf, int index) {
        if (buf == null) {
            return;
        }
        if (vector == null) {
            buf.put(index * 3, 0);
            buf.put((index * 3) + 1, 0);
            buf.put((index * 3) + 2, 0);
        } else {
            buf.put(index * 3, vector.x);
            buf.put((index * 3) + 1, vector.y);
            buf.put((index * 3) + 2, vector.z);
        }
    }

    /**
     * Updates the values of the given vector from the specified buffer at the
     * index provided.
     *
     * @param vector the vector to set data on
     * @param buf    the buffer to read from
     * @param index  the position (in terms of vectors, not floats) to read from
     */
    public static void populateFromBuffer(Vector3f vector, FloatBuffer buf, int index) {
        vector.x = buf.get(index * 3);
        vector.y = buf.get(index * 3 + 1);
        vector.z = buf.get(index * 3 + 2);
    }

    /**
     * Generates a Vector3f array from the given FloatBuffer.
     *
     * @param buff the FloatBuffer to read from (cleared, then read from position 0)
     * @return a newly generated array of Vector3f objects
     */
    public static Vector3f[] getVector3Array(FloatBuffer buff) {
        buff.clear();
        Vector3f[] verts = new Vector3f[buff.limit() / 3];
        for (int x = 0; x < verts.length; x++) {
            Vector3f v = new Vector3f(buff.get(), buff.get(), buff.get());
            verts[x] = v;
        }
        return verts;
    }

    /**
     * Copies a Vector3f from one position in the buffer to another. The index
     * values are in terms of vector number (eg, vector number 0 is positions
     * 0-2 in the FloatBuffer).
     *
     * @param buf     the buffer to copy from/to
     * @param fromPos the index of the vector to copy
     * @param toPos   the index to copy the vector to
     */
    public static void copyInternalVector3(FloatBuffer buf, int fromPos, int toPos) {
        copyInternal(buf, fromPos * 3, toPos * 3, 3);
    }

    /**
     * Normalize a Vector3f in-buffer.
     *
     * @param buf   the buffer to find the Vector3f within
     * @param index the position (in terms of vectors, not floats) of the vector
     *              to normalize
     */
    public static void normalizeVector3(FloatBuffer buf, int index) {
        TempVars vars = TempVars.get();
        Vector3f tempVec3 = vars.vect1;
        populateFromBuffer(tempVec3, buf, index);
        tempVec3.normalizeLocal();
        setInBuffer(tempVec3, buf, index);
        vars.release();
    }

    /**
     * Add to a Vector3f in-buffer.
     *
     * @param toAdd the vector to add from
     * @param buf   the buffer to find the Vector3f within
     * @param index the position (in terms of vectors, not floats) of the vector
     *              to add to
     */
    public static void addInBuffer(Vector3f toAdd, FloatBuffer buf, int index) {
        TempVars vars = TempVars.get();
        Vector3f tempVec3 = vars.vect1;
        populateFromBuffer(tempVec3, buf, index);
        tempVec3.addLocal(toAdd);
        setInBuffer(tempVec3, buf, index);
        vars.release();
    }

    /**
     * Multiply and store a Vector3f in-buffer.
     *
     * @param toMult the vector to multiply against
     * @param buf    the buffer to find the Vector3f within
     * @param index  the position (in terms of vectors, not floats) of the vector
     *               to multiply
     */
    public static void multInBuffer(Vector3f toMult, FloatBuffer buf, int index) {
        TempVars vars = TempVars.get();
        Vector3f tempVec3 = vars.vect1;
        populateFromBuffer(tempVec3, buf, index);
        tempVec3.multLocal(toMult);
        setInBuffer(tempVec3, buf, index);
        vars.release();
    }

    /**
     * Checks to see if the given Vector3f is equal to the data stored in the
     * buffer at the given data index.
     *
     * @param check the vector to check against - null will return false
     * @param buf   the buffer to compare data with
     * @param index the position (in terms of vectors, not floats) of the vector
     *              in the buffer to check against
     * @return true if the data matches, false otherwise
     */
    public static boolean equals(Vector3f check, FloatBuffer buf, int index) {
        TempVars vars = TempVars.get();
        Vector3f tempVec3 = vars.vect1;
        populateFromBuffer(tempVec3, buf, index);
        boolean eq = tempVec3.equals(check);
        vars.release();
        return eq;
    }

    //// -- VECTOR2F METHODS -- ////

    /**
     * Generate a new FloatBuffer using the given array of Vector2f objects.
     * The FloatBuffer will be 2 * data.length long and contain the vector data
     * as data[0].x, data[0].y, data[1].x... etc. Null elements are written as (0, 0).
     *
     * @param data array of Vector2f objects to place into a new FloatBuffer
     * @return the new buffer, flipped and ready for reading, or null if data is null
     */
    public static FloatBuffer createFloatBuffer(Vector2f... data) {
        if (data == null) {
            return null;
        }
        FloatBuffer buff = createFloatBuffer(2 * data.length);
        for (int x = 0; x < data.length; x++) {
            if (data[x] != null) {
                buff.put(data[x].x).put(data[x].y);
            } else {
                buff.put(0).put(0);
            }
        }
        buff.flip();
        return buff;
    }

    /**
     * Create a new FloatBuffer of an appropriate size to hold the specified
     * number of Vector2f object data.
     *
     * @param vertices number of vertices that need to be held by the newly
     *                 created buffer
     * @return the requested new FloatBuffer
     */
    public static FloatBuffer createVector2Buffer(int vertices) {
        FloatBuffer vBuff = createFloatBuffer(2 * vertices);
        return vBuff;
    }

    /**
     * Create a new FloatBuffer of an appropriate size to hold the specified
     * number of Vector2f object data only if the given buffer is not already
     * the right size.
     *
     * @param buf      the buffer to first check and rewind
     * @param vertices number of vertices that need to be held by the newly
     *                 created buffer
     * @return the requested new FloatBuffer
     */
    public static FloatBuffer createVector2Buffer(FloatBuffer buf, int vertices) {
        if (buf != null && buf.limit() == 2 * vertices) {
            buf.rewind();
            return buf;
        }
        return createFloatBuffer(2 * vertices);
    }

    /**
     * Sets the data contained in the given Vector2f into the FloatBuffer at the
     * specified index.
     *
     * @param vector the data to insert
     * @param buf    the buffer to insert into
     * @param index  the position to place the data; in terms of vectors not floats
     */
    public static void setInBuffer(Vector2f vector, FloatBuffer buf, int index) {
        buf.put(index * 2, vector.x);
        buf.put((index * 2) + 1, vector.y);
    }

    /**
     * Updates the values of the given vector from the specified buffer at the
     * index provided.
     *
     * @param vector the vector to set data on
     * @param buf    the buffer to read from
     * @param index  the position (in terms of vectors, not floats) to read from
     */
    public static void populateFromBuffer(Vector2f vector, FloatBuffer buf, int index) {
        vector.x = buf.get(index * 2);
        vector.y = buf.get(index * 2 + 1);
    }

    /**
     * Generates a Vector2f array from the given FloatBuffer.
     *
     * @param buff the FloatBuffer to read from (cleared, then read from position 0)
     * @return a newly generated array of Vector2f objects
     */
    public static Vector2f[] getVector2Array(FloatBuffer buff) {
        buff.clear();
        Vector2f[] verts = new Vector2f[buff.limit() / 2];
        for (int x = 0; x < verts.length; x++) {
            Vector2f v = new Vector2f(buff.get(), buff.get());
            verts[x] = v;
        }
        return verts;
    }

    /**
     * Copies a Vector2f from one position in the buffer to another. The index
     * values are in terms of vector number (eg, vector number 0 is positions
     * 0-1 in the FloatBuffer).
     *
     * @param buf     the buffer to copy from/to
     * @param fromPos the index of the vector to copy
     * @param toPos   the index to copy the vector to
     */
    public static void copyInternalVector2(FloatBuffer buf, int fromPos, int toPos) {
        copyInternal(buf, fromPos * 2, toPos * 2, 2);
    }

    /**
     * Normalize a Vector2f in-buffer.
     *
     * @param buf   the buffer to find the Vector2f within
     * @param index the position (in terms of vectors, not floats) of the vector
     *              to normalize
     */
    public static void normalizeVector2(FloatBuffer buf, int index) {
        TempVars vars = TempVars.get();
        Vector2f tempVec2 = vars.vect2d;
        populateFromBuffer(tempVec2, buf, index);
        tempVec2.normalizeLocal();
        setInBuffer(tempVec2, buf, index);
        vars.release();
    }

    /**
     * Add to a Vector2f in-buffer.
     *
     * @param toAdd the vector to add from
     * @param buf   the buffer to find the Vector2f within
     * @param index the position (in terms of vectors, not floats) of the vector
     *              to add to
     */
    public static void addInBuffer(Vector2f toAdd, FloatBuffer buf, int index) {
        TempVars vars = TempVars.get();
        Vector2f tempVec2 = vars.vect2d;
        populateFromBuffer(tempVec2, buf, index);
        tempVec2.addLocal(toAdd);
        setInBuffer(tempVec2, buf, index);
        vars.release();
    }

    /**
     * Multiply and store a Vector2f in-buffer.
     *
     * @param toMult the vector to multiply against
     * @param buf    the buffer to find the Vector2f within
     * @param index  the position (in terms of vectors, not floats) of the vector
     *               to multiply
     */
    public static void multInBuffer(Vector2f toMult, FloatBuffer buf, int index) {
        TempVars vars = TempVars.get();
        Vector2f tempVec2 = vars.vect2d;
        populateFromBuffer(tempVec2, buf, index);
        tempVec2.multLocal(toMult);
        setInBuffer(tempVec2, buf, index);
        vars.release();
    }

    /**
     * Checks to see if the given Vector2f is equal to the data stored in the
     * buffer at the given data index.
     *
     * @param check the vector to check against - null will return false
     * @param buf   the buffer to compare data with
     * @param index the position (in terms of vectors, not floats) of the vector
     *              in the buffer to check against
     * @return true if the data matches, false otherwise
     */
    public static boolean equals(Vector2f check, FloatBuffer buf, int index) {
        TempVars vars = TempVars.get();
        Vector2f tempVec2 = vars.vect2d;
        populateFromBuffer(tempVec2, buf, index);
        boolean eq = tempVec2.equals(check);
        vars.release();
        return eq;
    }

    //// -- INT METHODS -- ////

    /**
     * Generate a new IntBuffer using the given array of ints. The IntBuffer
     * will be data.length long and contain the int data as data[0], data[1]...
     * etc.
     *
     * @param data array of ints to place into a new IntBuffer
     * @return the new buffer, flipped and ready for reading, or null if data is null
     */
    public static IntBuffer createIntBuffer(int... data) {
        if (data == null) {
            return null;
        }
        IntBuffer buff = createIntBuffer(data.length);
        buff.clear();
        buff.put(data);
        buff.flip();
        return buff;
    }

    /**
     * Create a new int[] array and populate it with the given IntBuffer's
     * contents.
     *
     * @param buff the IntBuffer to read from
     * @return a new int array populated from the IntBuffer, or null if buff is null
     */
    public static int[] getIntArray(IntBuffer buff) {
        if (buff == null) {
            return null;
        }
        buff.clear();
        int[] inds = new int[buff.limit()];
        for (int x = 0; x < inds.length; x++) {
            inds[x] = buff.get();
        }
        return inds;
    }

    /**
     * Create a new float[] array and populate it with the given FloatBuffer's
     * contents.
     *
     * @param buff the FloatBuffer to read from
     * @return a new float array populated from the FloatBuffer, or null if buff is null
     */
    public static float[] getFloatArray(FloatBuffer buff) {
        if (buff == null) {
            return null;
        }
        buff.clear();
        float[] inds = new float[buff.limit()];
        for (int x = 0; x < inds.length; x++) {
            inds[x] = buff.get();
        }
        return inds;
    }

    //// -- GENERAL DOUBLE ROUTINES -- ////

    /**
     * Create a new DoubleBuffer of the specified size.
     *
     * @param size required number of doubles to store
     * @return the new DoubleBuffer
     */
    public static DoubleBuffer createDoubleBuffer(int size) {
        DoubleBuffer buf =
                ByteBuffer.allocateDirect(8 * size).order(ByteOrder.nativeOrder()).asDoubleBuffer();
        buf.clear();
        onBufferAllocated(buf);
        return buf;
    }

    /**
     * Create a new DoubleBuffer of an appropriate size to hold the specified
     * number of doubles only if the given buffer is not already the right size.
     *
     * @param buf  the buffer to first check and rewind
     * @param size number of doubles that need to be held by the newly created
     *             buffer
     * @return the requested new DoubleBuffer
     */
    public static DoubleBuffer createDoubleBuffer(DoubleBuffer buf, int size) {
        if (buf != null && buf.limit() == size) {
            buf.rewind();
            return buf;
        }
        buf = createDoubleBuffer(size);
        return buf;
    }

    /**
     * Creates a new DoubleBuffer with the same contents as the given
     * DoubleBuffer. The new DoubleBuffer is separate from the old one and
     * changes are not reflected across. If you want to reflect changes,
     * consider using Buffer.duplicate().
     *
     * @param buf the DoubleBuffer to copy
     * @return the copy, or null if buf is null
     */
    public static DoubleBuffer clone(DoubleBuffer buf) {
        if (buf == null) {
            return null;
        }
        buf.rewind();
        DoubleBuffer copy;
        if (buf.isDirect()) {
            copy = createDoubleBuffer(buf.limit());
        } else {
            copy = DoubleBuffer.allocate(buf.limit());
        }
        copy.put(buf);
        return copy;
    }

    //// -- GENERAL FLOAT ROUTINES -- ////

    /**
     * Create a new FloatBuffer of the specified size.
     *
     * @param size required number of floats to store
     * @return the new FloatBuffer
     */
    public static FloatBuffer createFloatBuffer(int size) {
        FloatBuffer buf =
                ByteBuffer.allocateDirect(4 * size).order(ByteOrder.nativeOrder()).asFloatBuffer();
        buf.clear();
        onBufferAllocated(buf);
        return buf;
    }

    /**
     * Copies floats from one position in the buffer to another.
     *
     * @param buf     the buffer to copy from/to
     * @param fromPos the starting point to copy from
     * @param toPos   the starting point to copy to
     * @param length  the number of floats to copy
     */
    public static void copyInternal(FloatBuffer buf, int fromPos, int toPos, int length) {
        float[] data = new float[length];
        buf.position(fromPos);
        buf.get(data);
        buf.position(toPos);
        buf.put(data);
    }

    /**
     * Creates a new FloatBuffer with the same contents as the given
     * FloatBuffer. The new FloatBuffer is separate from the old one and changes
     * are not reflected across. If you want to reflect changes, consider using
     * Buffer.duplicate().
     *
     * @param buf the FloatBuffer to copy
     * @return the copy, or null if buf is null
     */
    public static FloatBuffer clone(FloatBuffer buf) {
        if (buf == null) {
            return null;
        }
        buf.rewind();
        FloatBuffer copy;
        if (buf.isDirect()) {
            copy = createFloatBuffer(buf.limit());
        } else {
            copy = FloatBuffer.allocate(buf.limit());
        }
        copy.put(buf);
        return copy;
    }

    //// -- GENERAL INT ROUTINES -- ////

    /**
     * Create a new IntBuffer of the specified size.
     *
     * @param size required number of ints to store
     * @return the new IntBuffer
     */
    public static IntBuffer createIntBuffer(int size) {
        IntBuffer buf =
                ByteBuffer.allocateDirect(4 * size).order(ByteOrder.nativeOrder()).asIntBuffer();
        buf.clear();
        onBufferAllocated(buf);
        return buf;
    }

    /**
     * Create a new IntBuffer of an appropriate size to hold the specified
     * number of ints only if the given buffer is not already the right size.
     *
     * @param buf  the buffer to first check and rewind
     * @param size number of ints that need to be held by the newly created
     *             buffer
     * @return the requested new IntBuffer
     */
    public static IntBuffer createIntBuffer(IntBuffer buf, int size) {
        if (buf != null && buf.limit() == size) {
            buf.rewind();
            return buf;
        }
        buf = createIntBuffer(size);
        return buf;
    }

    /**
     * Creates a new IntBuffer with the same contents as the given IntBuffer.
     * The new IntBuffer is separate from the old one and changes are not
     * reflected across. If you want to reflect changes, consider using
     * Buffer.duplicate().
     *
     * @param buf the IntBuffer to copy
     * @return the copy, or null if buf is null
     */
    public static IntBuffer clone(IntBuffer buf) {
        if (buf == null) {
            return null;
        }
        buf.rewind();
        IntBuffer copy;
        if (buf.isDirect()) {
            copy = createIntBuffer(buf.limit());
        } else {
            copy = IntBuffer.allocate(buf.limit());
        }
        copy.put(buf);
        return copy;
    }

    //// -- GENERAL BYTE ROUTINES -- ////

    /**
     * Create a new ByteBuffer of the specified size.
     *
     * @param size required number of bytes to store
     * @return the new ByteBuffer
     */
    public static ByteBuffer createByteBuffer(int size) {
        ByteBuffer buf = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder());
        buf.clear();
        onBufferAllocated(buf);
        return buf;
    }

    /**
     * Create a new ByteBuffer of an appropriate size to hold the specified
     * number of bytes only if the given buffer is not already the right size.
     *
     * @param buf  the buffer to first check and rewind
     * @param size number of bytes that need to be held by the newly created
     *             buffer
     * @return the requested new ByteBuffer
     */
    public static ByteBuffer createByteBuffer(ByteBuffer buf, int size) {
        if (buf != null && buf.limit() == size) {
            buf.rewind();
            return buf;
        }
        buf = createByteBuffer(size);
        return buf;
    }

    /**
     * Generate a new ByteBuffer using the given array of byte primitives.
     *
     * @param data array of bytes to place into a new ByteBuffer
     * @return the new buffer, flipped and ready for reading
     */
    public static ByteBuffer createByteBuffer(byte... data) {
        ByteBuffer bb = createByteBuffer(data.length);
        bb.put(data);
        bb.flip();
        return bb;
    }

    /**
     * Generate a new ByteBuffer containing the bytes of the given string.
     *
     * NOTE(review): String.getBytes() uses the platform default charset, so the
     * result is platform-dependent; callers needing stable output should pass
     * bytes encoded with an explicit charset. Left unchanged to preserve behavior.
     *
     * @param data the string whose bytes to store
     * @return the new buffer, flipped and ready for reading
     */
    public static ByteBuffer createByteBuffer(String data) {
        byte[] bytes = data.getBytes();
        ByteBuffer bb = createByteBuffer(bytes.length);
        bb.put(bytes);
        bb.flip();
        return bb;
    }

    /**
     * Creates a new ByteBuffer with the same contents as the given ByteBuffer.
     * The new ByteBuffer is separate from the old one and changes are not
     * reflected across. If you want to reflect changes, consider using
     * Buffer.duplicate().
     *
     * @param buf the ByteBuffer to copy
     * @return the copy, or null if buf is null
     */
    public static ByteBuffer clone(ByteBuffer buf) {
        if (buf == null) {
            return null;
        }
        buf.rewind();
        ByteBuffer copy;
        if (buf.isDirect()) {
            copy = createByteBuffer(buf.limit());
        } else {
            copy = ByteBuffer.allocate(buf.limit());
        }
        copy.put(buf);
        return copy;
    }

    //// -- GENERAL SHORT ROUTINES -- ////

    /**
     * Create a new ShortBuffer of the specified size.
     *
     * @param size required number of shorts to store
     * @return the new ShortBuffer
     */
    public static ShortBuffer createShortBuffer(int size) {
        ShortBuffer buf =
                ByteBuffer.allocateDirect(2 * size).order(ByteOrder.nativeOrder()).asShortBuffer();
        buf.clear();
        onBufferAllocated(buf);
        return buf;
    }

    /**
     * Create a new ShortBuffer of an appropriate size to hold the specified
     * number of shorts only if the given buffer is not already the right size.
     *
     * @param buf  the buffer to first check and rewind
     * @param size number of shorts that need to be held by the newly created
     *             buffer
     * @return the requested new ShortBuffer
     */
    public static ShortBuffer createShortBuffer(ShortBuffer buf, int size) {
        if (buf != null && buf.limit() == size) {
            buf.rewind();
            return buf;
        }
        buf = createShortBuffer(size);
        return buf;
    }

    /**
     * Generate a new ShortBuffer using the given array of short primitives.
     *
     * @param data array of shorts to place into a new ShortBuffer
     * @return the new buffer, flipped and ready for reading, or null if data is null
     */
    public static ShortBuffer createShortBuffer(short... data) {
        if (data == null) {
            return null;
        }
        ShortBuffer buff = createShortBuffer(data.length);
        buff.clear();
        buff.put(data);
        buff.flip();
        return buff;
    }

    /**
     * Creates a new ShortBuffer with the same contents as the given
     * ShortBuffer. The new ShortBuffer is separate from the old one and changes
     * are not reflected across. If you want to reflect changes, consider using
     * Buffer.duplicate().
     *
     * @param buf the ShortBuffer to copy
     * @return the copy, or null if buf is null
     */
    public static ShortBuffer clone(ShortBuffer buf) {
        if (buf == null) {
            return null;
        }
        buf.rewind();
        ShortBuffer copy;
        if (buf.isDirect()) {
            copy = createShortBuffer(buf.limit());
        } else {
            copy = ShortBuffer.allocate(buf.limit());
        }
        copy.put(buf);
        return copy;
    }

    /**
     * Ensures there is at least the <code>required</code> number of entries left after
     * the current position of the buffer. If the buffer is too small a larger one is
     * created and the old one copied to the new buffer.
     *
     * @param buffer   buffer that should be checked/copied (may be null)
     * @param required minimum number of elements that should be remaining in the
     *                 returned buffer
     * @return a buffer large enough to receive at least the <code>required</code>
     *         number of entries, same position as the input buffer, not null
     */
    public static FloatBuffer ensureLargeEnough(FloatBuffer buffer, int required) {
        if (buffer == null || (buffer.remaining() < required)) {
            int position = (buffer != null ? buffer.position() : 0);
            FloatBuffer newVerts = createFloatBuffer(position + required);
            if (buffer != null) {
                buffer.rewind();
                newVerts.put(buffer);
                newVerts.position(position);
            }
            buffer = newVerts;
        }
        return buffer;
    }

    /**
     * Short variant of {@link #ensureLargeEnough(FloatBuffer, int)}.
     *
     * @param buffer   buffer that should be checked/copied (may be null)
     * @param required minimum number of elements remaining in the returned buffer
     * @return a sufficiently large buffer, same position as the input, not null
     */
    public static ShortBuffer ensureLargeEnough(ShortBuffer buffer, int required) {
        if (buffer == null || (buffer.remaining() < required)) {
            int position = (buffer != null ? buffer.position() : 0);
            ShortBuffer newVerts = createShortBuffer(position + required);
            if (buffer != null) {
                buffer.rewind();
                newVerts.put(buffer);
                newVerts.position(position);
            }
            buffer = newVerts;
        }
        return buffer;
    }

    /**
     * Byte variant of {@link #ensureLargeEnough(FloatBuffer, int)}.
     *
     * @param buffer   buffer that should be checked/copied (may be null)
     * @param required minimum number of elements remaining in the returned buffer
     * @return a sufficiently large buffer, same position as the input, not null
     */
    public static ByteBuffer ensureLargeEnough(ByteBuffer buffer, int required) {
        if (buffer == null || (buffer.remaining() < required)) {
            int position = (buffer != null ? buffer.position() : 0);
            ByteBuffer newVerts = createByteBuffer(position + required);
            if (buffer != null) {
                buffer.rewind();
                newVerts.put(buffer);
                newVerts.position(position);
            }
            buffer = newVerts;
        }
        return buffer;
    }

    /**
     * Appends (or prints, when {@code store} is null) a summary of all direct buffers
     * currently recorded in the tracking map, broken down by buffer type.
     * Only meaningful when {@link #trackDirectMemory} is enabled.
     *
     * @param store builder to append the report to; if null the report is printed
     *              to standard out instead
     */
    public static void printCurrentDirectMemory(StringBuilder store) {
        long totalHeld = 0;
        // Make a new list from the keys to prevent concurrency issues while iterating.
        ArrayList<Buffer> bufs = new ArrayList<Buffer>(trackingHash.keySet());
        int fBufs = 0, bBufs = 0, iBufs = 0, sBufs = 0, dBufs = 0;
        int fBufsM = 0, bBufsM = 0, iBufsM = 0, sBufsM = 0, dBufsM = 0;
        for (Buffer b : bufs) {
            if (b instanceof ByteBuffer) {
                totalHeld += b.capacity();
                bBufsM += b.capacity();
                bBufs++;
            } else if (b instanceof FloatBuffer) {
                totalHeld += b.capacity() * 4;
                fBufsM += b.capacity() * 4;
                fBufs++;
            } else if (b instanceof IntBuffer) {
                totalHeld += b.capacity() * 4;
                iBufsM += b.capacity() * 4;
                iBufs++;
            } else if (b instanceof ShortBuffer) {
                totalHeld += b.capacity() * 2;
                sBufsM += b.capacity() * 2;
                sBufs++;
            } else if (b instanceof DoubleBuffer) {
                totalHeld += b.capacity() * 8;
                dBufsM += b.capacity() * 8;
                dBufs++;
            }
        }
        long heapMem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        boolean printStout = store == null;
        if (store == null) {
            store = new StringBuilder();
        }
        store.append("Existing buffers: ").append(bufs.size()).append("\n");
        store.append("(b: ").append(bBufs).append(" f: ").append(fBufs).append(" i: ").append(iBufs)
                .append(" s: ").append(sBufs).append(" d: ").append(dBufs).append(")").append("\n");
        store.append("Total heap memory held: ").append(heapMem / 1024).append("kb\n");
        store.append("Total direct memory held: ").append(totalHeld / 1024).append("kb\n");
        store.append("(b: ").append(bBufsM / 1024).append("kb f: ").append(fBufsM / 1024)
                .append("kb i: ").append(iBufsM / 1024).append("kb s: ").append(sBufsM / 1024)
                .append("kb d: ").append(dBufsM / 1024).append("kb)").append("\n");
        if (printStout) {
            System.out.println(store.toString());
        }
    }
}
package nta.storage;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.SortedSet;
import java.util.TreeSet;

import nta.catalog.Column;
import nta.catalog.Schema;
import nta.catalog.TableMeta;
import nta.catalog.statistics.Stat;
import nta.catalog.statistics.StatSet;
import nta.datum.Datum;
import nta.datum.DatumFactory;
import nta.datum.DatumType;
import nta.engine.TCommonProtos.StatType;
import nta.engine.ipc.protocolrecords.Fragment;
import nta.storage.exception.AlreadyExistsStorageException;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Delimiter-separated-values storage format: an {@link Appender} that writes
 * one tuple per line and a {@link Scanner} that pages tuples back out of one
 * or more file fragments (tablets).
 *
 * @author Haemi Yang
 * @author Jimin Kim
 */
public class CSVFile2 extends Storage {
  // Table-option key selecting the field delimiter, and its default value.
  public static final String DELIMITER = "csvfile.delimiter";
  public static final String DELIMITER_DEFAULT = ",";

  private static final Log LOG = LogFactory.getLog(CSVFile2.class);

  public CSVFile2(Configuration conf) {
    super(conf);
  }

  /** Returns a writer for {@code meta}/{@code path} with statistics enabled. */
  @Override
  public Appender getAppender(TableMeta meta, Path path) throws IOException {
    return new CSVAppender(conf, meta, path, true);
  }

  /** Returns a reader over the given fragments using {@code schema}. */
  @Override
  public Scanner openScanner(Schema schema, Fragment[] tablets) throws IOException {
    return new CSVScanner(conf, schema, tablets);
  }

  /**
   * Writes tuples as delimiter-joined text lines. BYTE/BYTES columns are
   * Base64-encoded; NULL datums are emitted as empty fields. Optionally
   * accumulates row-count and byte-count statistics.
   */
  public static class CSVAppender extends FileAppender {
    private final Path path;
    private final TableMeta meta;
    private final Schema schema;
    private final FileSystem fs;
    private FSDataOutputStream fos;
    private String delimiter;
    private final boolean statsEnabled;
    // Statistics holders; remain null when statsEnabled is false.
    private StatSet statSet = null;
    private Stat numRowStat = null;
    private Stat outputBytesStat = null;

    /**
     * Opens {@code path} for writing.
     *
     * @param statsEnabled when true, row/byte statistics are collected
     * @throws FileNotFoundException if the parent directory does not exist
     * @throws AlreadyExistsStorageException if {@code path} already exists
     */
    public CSVAppender(Configuration conf, final TableMeta meta, final Path path,
        boolean statsEnabled) throws IOException {
      super(conf, meta, path);
      // NOTE(review): this.path points at a "data" child, but existence checks
      // and fs.create below operate on the *parameter* path — confirm which one
      // is intended.
      this.path = new Path(path, "data");
      this.fs = path.getFileSystem(conf);
      this.meta = meta;
      this.schema = meta.getSchema();

      if (!fs.exists(path.getParent())) {
        throw new FileNotFoundException(this.path.toString());
      }

      if (fs.exists(path)) {
        throw new AlreadyExistsStorageException(this.path);
      }

      fos = fs.create(path);

      // set delimiter.
      this.delimiter = this.meta.getOption(DELIMITER, DELIMITER_DEFAULT);

      this.statsEnabled = statsEnabled;
      if (statsEnabled) {
        this.statSet = new StatSet();
        this.numRowStat = new Stat(StatType.TABLE_NUM_ROWS);
        this.statSet.putStat(this.numRowStat);
        this.outputBytesStat = new Stat(StatType.TABLE_NUM_BYTES);
        this.statSet.putStat(this.outputBytesStat);
      }
    }

    /**
     * Serializes one tuple as a delimiter-joined line and writes it.
     * NULL datums produce an empty field (the empty if-branch below).
     */
    @Override
    public void addTuple(Tuple tuple) throws IOException {
      StringBuilder sb = new StringBuilder();
      Column col = null;
      Datum datum = null;
      for (int i = 0; i < schema.getColumnNum(); i++) {
        datum = tuple.get(i);
        if (datum.type() == DatumType.NULL) {
          // NULL -> empty field; the trailing delimiter is still appended below.
        } else {
          col = schema.getColumn(i);
          switch (col.getDataType()) {
          case BYTE:
            // Binary types are Base64-encoded so they survive as text.
            sb.append(new String(Base64.encodeBase64(tuple.getByte(i)
                .asByteArray(), false)));
            break;
          case BYTES:
            sb.append(new String(Base64.encodeBase64(tuple.getBytes(i)
                .asByteArray(), false)));
            break;
          case STRING:
            sb.append(tuple.getString(i));
            break;
          case SHORT:
            sb.append(tuple.getShort(i));
            break;
          case INT:
            sb.append(tuple.getInt(i));
            break;
          case LONG:
            sb.append(tuple.getLong(i));
            break;
          case FLOAT:
            sb.append(tuple.getFloat(i));
            break;
          case DOUBLE:
            sb.append(tuple.getDouble(i));
            break;
          case IPv4:
            sb.append(tuple.getIPv4(i));
            break;
          case IPv6:
            sb.append(tuple.getIPv6(i));
            break;
          default:
            break;
          }
        }
        sb.append(delimiter);
      }
      // Drop the trailing delimiter and terminate the record with a newline.
      // NOTE(review): throws StringIndexOutOfBoundsException when the schema has
      // zero columns (sb is empty) — confirm that case cannot occur.
      sb.deleteCharAt(sb.length() - 1);
      sb.append('\n');
      fos.writeBytes(sb.toString());

      // Statistical section
      if (statsEnabled) {
        numRowStat.increment();
      }
    }

    // No buffering of our own; nothing to flush.
    @Override
    public void flush() throws IOException {
    }

    /** Records final byte count (if stats enabled) and closes the stream. */
    @Override
    public void close() throws IOException {
      // Statistical section
      if (statsEnabled) {
        outputBytesStat.setValue(fos.getPos());
      }
      fos.close();
    }

    /** Returns the collected statistics, or null when stats were disabled. */
    @Override
    public StatSet getStats() {
      return this.statSet;
    }
  }

  /**
   * Reads tuples back from one or more fragments. Data is paged through a
   * fixed-size byte buffer; a partial last line of each page is carried over
   * in {@code piece} and prepended to the next page. Tuple start offsets are
   * tracked so {@link #seek(long)} can jump within the current page.
   */
  public static class CSVScanner extends FileScanner {
    private FileSystem fs;
    private FSDataInputStream fis;
    private SortedSet<Fragment> tabletSet;
    private Iterator<Fragment> tabletIter;
    private Fragment curTablet;
    // Byte range of the current tablet within its file.
    private long startOffset, length;
    // File position where scanning of the current tablet actually began
    // (after skipping a leading partial line).
    private long startPos;
    private static final byte LF = '\n';
    private String delimiter;
    // Current page of raw bytes, and the carried-over partial last line.
    private byte[] buffer = null;
    private byte[] piece = null;
    // Lines of the current page; indices < validIndex are complete tuples.
    private String[] tupleList;
    private int bufferSize, validIndex, curIndex = 0;
    private final static int DEFAULT_BUFFER_SIZE = 65536;
    // File offset where the current page starts.
    private long pageStart = -1;
    private long curTupleOffset = -1;
    // Maps absolute tuple offset -> index into tupleList for the current page.
    private HashMap<Long, Integer> offsetCurIndexMap;
    private long[] tupleOffsets;

    public CSVScanner(Configuration conf, final Schema schema,
        final Fragment[] tablets) throws IOException {
      super(conf, schema, tablets);
      init(conf, schema, tablets);
    }

    /** (Re)initializes scanner state and opens the first tablet. */
    public void init(Configuration conf, final Schema schema,
        final Fragment[] tablets) throws IOException {
      // set default page size.
      this.bufferSize = DEFAULT_BUFFER_SIZE;

      // set delimiter.
      this.delimiter = tablets[0].getMeta().getOption(DELIMITER, DELIMITER_DEFAULT);
      // "|" is a regex metacharacter; escape it for String.split below.
      if (this.delimiter.equals("|")) {
        this.delimiter = "\\|";
      }

      // set tablets iterator (TreeSet gives a deterministic sorted order).
      this.tabletSet = new TreeSet<Fragment>();
      this.offsetCurIndexMap = new HashMap<Long, Integer>();
      for (Fragment t : tablets)
        this.tabletSet.add(t);
      this.tabletIter = tabletSet.iterator();
      openNextTablet();
    }

    /**
     * Closes the current tablet (if any) and opens the next one, positioning
     * the stream at the first complete line at/after startOffset.
     *
     * @return true if a tablet was opened and its first page buffered
     */
    private boolean openNextTablet() throws IOException {
      if (this.fis != null) {
        this.fis.close();
      }

      // set tablet information.
      if (tabletIter.hasNext()) {
        curTablet = tabletIter.next();
        this.fs = curTablet.getPath().getFileSystem(this.conf);
        this.fis = this.fs.open(curTablet.getPath());
        this.startOffset = curTablet.getStartOffset();
        if (curTablet.getLength() == -1) { // unknown case
          this.length = fs.getFileStatus(curTablet.getPath()).getLen();
        } else {
          this.length = curTablet.getLength();
        }
        long available = tabletable();//(this.startOffset + this.length) - fis.getPos();

        // set correct start offset: a fragment may start mid-line, so skip
        // forward to the byte after the previous line's LF.
        if (startOffset != 0) {
          if (startOffset < available) {
            fis.seek(startOffset - 1);
            while ( (fis.readByte()) != LF) {
            }
            // fis.seek(fis.getPos());
          } else {
            fis.seek(available);
          }
        }
        startPos = fis.getPos();
        return pageBuffer();
      } else {
        return false;
      }
    }

    /**
     * Loads the next page of bytes (prepending any carried-over partial line),
     * splits it into tupleList and recomputes per-tuple offsets.
     *
     * @return false when the current tablet is exhausted
     */
    private boolean pageBuffer() throws IOException {
      this.offsetCurIndexMap.clear();
      if (tabletable() < 1) {
        // initialize.
        this.curIndex = 0;
        this.validIndex = 0;
        this.curTupleOffset = 0;
        this.bufferSize = DEFAULT_BUFFER_SIZE;;
        return false;
      }

      // set buffer size: shrink to exactly the remaining bytes near the end.
      if (tabletable() <= bufferSize) {
        bufferSize = (int) tabletable();
      } else {
        bufferSize = DEFAULT_BUFFER_SIZE;
      }

      // read.
      // NOTE(review): fis.read(...) return values are ignored throughout; a
      // short read would leave trailing zero bytes in the page — confirm the
      // FS guarantees full reads here.
      if (fis.getPos() == startPos) {
        // First page of this tablet: nothing carried over.
        buffer = new byte[bufferSize];
        this.pageStart = fis.getPos();
        fis.read(buffer);
        piece = new byte[0];
      } else {
        // Subsequent page: prepend the partial line kept from the last page.
        if (tabletable() <= bufferSize)
          bufferSize = piece.length + (int) tabletable();
        buffer = new byte[bufferSize];
        this.pageStart = fis.getPos() - piece.length;
        System.arraycopy(piece, 0, buffer, 0, piece.length);
        if (tabletable() != 0) {
          fis.read(buffer, piece.length, (buffer.length - piece.length));
        }
      }

      // NOTE(review): decodes with the platform default charset — presumably
      // data is ASCII/UTF-8; verify.
      tupleList = new String(buffer).split("\n");
      checkLineFeed();
      tupleOffset();

      return true;
    }

    /**
     * Decides how many entries of tupleList are complete tuples (validIndex)
     * and what partial tail (piece) must be carried to the next page.
     */
    private void checkLineFeed() throws IOException {
      if ((char) buffer[buffer.length - 1] != LF) {
        // Page does not end on a line boundary.
        if (tabletable() < 1) {
          // end of tablet: read past the tablet boundary to finish the line.
          long mark = fis.getPos();
          int i;
          for (i = 1; fis.readByte() != LF; i++)
            ;
          fis.seek(mark);
          byte[] extra = new byte[i - 1];
          fis.read(extra);
          if (i > 1) { // i=1 case : read line feed.
            tupleList[tupleList.length - 1] = new String(tupleList[tupleList.length - 1]
                + new String(extra));
          }
          validIndex = tupleList.length;
        } else {
          // keeping incorrect tuple: carry the partial last line to next page.
          piece = tupleList[tupleList.length - 1].getBytes();
          validIndex = tupleList.length - 1;
        }
      } else {
        // correct tuple: page ended exactly on a line boundary.
        if (tabletable() < bufferSize) {
          bufferSize = (int) tabletable();
        } else {
          bufferSize = DEFAULT_BUFFER_SIZE;
        }
        if (bufferSize > 0) {
          // piece = new byte[bufferSize - 1];
          // fis.read(piece);
          // fis.seek(fis.getPos()-piece.length);
          piece = new byte[0];
        }
        validIndex = tupleList.length;
      }
    }

    /** Computes the absolute file offset of every tuple in the current page. */
    private void tupleOffset() throws IOException {
      this.curIndex = 0;
      this.curTupleOffset = 0;
      this.tupleOffsets = new long[tupleList.length];
      for (int i = 0; i < this.tupleList.length; i++) {
        this.tupleOffsets[i] = this.curTupleOffset + this.pageStart;
        this.offsetCurIndexMap.put(this.curTupleOffset + this.pageStart, i);
        // +1 byte for the '\n' that split() removed.
        this.curTupleOffset += (tupleList[i] + "\n").getBytes().length;
      }
    }

    /**
     * Repositions the scanner at a tuple offset. Offsets inside the current
     * page resolve via the offset map; offsets outside it reset paging state
     * so the next read re-buffers from there.
     *
     * @throws IOException if the offset falls inside the page but on no
     *         tuple boundary
     */
    @Override
    public void seek(long offset) throws IOException {
      if (this.offsetCurIndexMap.containsKey(offset)) {
        curIndex = this.offsetCurIndexMap.get(offset);
      } else if (offset >= this.pageStart + this.bufferSize
          || offset < this.pageStart) {
        fis.seek(offset);
        piece = new byte[0];
        buffer = new byte[DEFAULT_BUFFER_SIZE];
        bufferSize = DEFAULT_BUFFER_SIZE;
        curIndex = 0;
        validIndex = 0;
        // pageBuffer();
      } else {
        throw new IOException("invalid offset");
      }
    }

    /** Returns the offset of the next tuple, paging in more data if needed. */
    @Override
    public long getNextOffset() throws IOException {
      if (curIndex == tupleList.length) {
        pageBuffer();
      }
      return this.tupleOffsets[curIndex];
    }

    /** Bytes remaining in the current tablet from the stream's position. */
    @Override
    public long tabletable() throws IOException {
      return (this.startOffset + this.length - fis.getPos());
    }

    /**
     * Returns the next tuple, advancing across pages and tablets as needed;
     * null at end of input (or, NOTE(review), on any internal error — errors
     * are logged and swallowed below).
     */
    @Override
    public Tuple next() throws IOException {
      try {
        if (curIndex == validIndex) {
          if (!pageBuffer()) {
            if (!openNextTablet()) {
              return null;
            }
          }
        }

        long nextOffset = getNextOffset();
        VTuple tuple = new VTuple(schema.getColumnNum());
        tuple.setOffset(nextOffset);
        String[] cells = tupleList[curIndex++].split(delimiter);
        Column field;

        for (int i = 0; i < schema.getColumnNum(); i++) {
          field = schema.getColumn(i);
          // Missing or empty cells become NULL datums.
          if (cells.length <= i) {
            tuple.put(i, DatumFactory.createNullDatum());
          } else {
            String cell = cells[i].trim();
            if (cell.equals("")) {
              tuple.put(i, DatumFactory.createNullDatum());
            } else {
              switch (field.getDataType()) {
              case BYTE:
                tuple.put(i, DatumFactory.createByte(Base64.decodeBase64(cell)[0]));
                break;
              case BYTES:
                tuple.put(i, DatumFactory.createBytes(Base64.decodeBase64(cell)));
                break;
              case SHORT:
                tuple.put(i, DatumFactory.createShort(cell));
                break;
              case INT:
                tuple.put(i, DatumFactory.createInt(cell));
                break;
              case LONG:
                tuple.put(i, DatumFactory.createLong(cell));
                break;
              case FLOAT:
                tuple.put(i, DatumFactory.createFloat(cell));
                break;
              case DOUBLE:
                tuple.put(i, DatumFactory.createDouble(cell));
                break;
              case STRING:
                tuple.put(i, DatumFactory.createString(cell));
                break;
              case IPv4:
                // Strip a leading '/' (e.g. from InetAddress.toString()).
                if (cells[i].charAt(0) == '/') {
                  tuple.put(i,
                      DatumFactory.createIPv4(cells[i].substring(1, cell.length())));
                }
                break;
              }
            }
          }
        }
        return tuple;
      } catch (Throwable t) {
        LOG.error("tupleList Length: " + tupleList.length, t);
        LOG.error("tupleList Current index: " + curIndex, t);
        LOG.error("tupleList Vaild index: " + validIndex, t);
      }
      return null;
    }

    /** Resets the scanner to the beginning of its fragments. */
    @Override
    public void reset() throws IOException {
      init(conf, schema, tablets);
    }

    @Override
    public void close() throws IOException {
      fis.close();
    }
  }
}
package org.umlg.sqlg.structure;

import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.core.util.VersionUtil;
import com.google.common.base.Preconditions;
import org.apache.commons.collections4.set.ListOrderedSet;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.umlg.sqlg.sql.dialect.SqlDialect;
import org.umlg.sqlg.structure.topology.*;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.sql.*;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

import static org.umlg.sqlg.structure.topology.Topology.*;

/**
 * Bootstraps a SqlgGraph at startup: creates the sqlg_schema (and gui/default
 * schemas) if missing, upgrades pre-sqlg_schema installations by reading the
 * database's information schema, applies version-based topology migrations,
 * and finally caches (and optionally validates) the topology.
 */
class SqlgStartupManager {

    // Keys for reading the build version from the packaged properties file.
    private static final String APPLICATION_VERSION = "application.version";
    private static final String SQLG_APPLICATION_PROPERTIES = "sqlg.application.properties";
    // NOTE(review): non-final static logger; conventionally this would be final.
    private static Logger logger = LoggerFactory.getLogger(SqlgStartupManager.class);

    private SqlgGraph sqlgGraph;
    private SqlDialect sqlDialect;
    // Lazily loaded by getBuildVersion().
    private String buildVersion;

    SqlgStartupManager(SqlgGraph sqlgGraph) {
        this.sqlgGraph = sqlgGraph;
        this.sqlDialect = sqlgGraph.getSqlDialect();
    }

    /**
     * Main entry point: ensures all sqlg bookkeeping schemas/tables exist,
     * performs upgrades, caches the topology and commits. Rolls back and
     * rethrows on any failure.
     */
    void loadSqlgSchema() {
        try {
            if (logger.isDebugEnabled()) {
                logger.debug("SchemaManager.loadSqlgSchema()...");
            }
            //check if the topology schema exists, if not createVertexLabel it
            boolean existSqlgSchema = existSqlgSchema();
            StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            if (!existSqlgSchema) {
                //This exist separately because Hsqldb and H2 do not support "if exist" in the schema creation sql.
                createSqlgSchema();
            }
            if (!existGuiSchema()) {
                createGuiSchema();
            }
            if (!existSqlgSchema) {
                createSqlgSchemaTablesAndIndexes();
            }
            //The default schema is generally called 'public' and is created upfront by the db.
            //But what if its been deleted, so check.
            if (!existDefaultSchema()) {
                createDefaultSchema();
            }
            //committing here will ensure that sqlg creates the tables.
            this.sqlgGraph.tx().commit();
            stopWatch.stop();
            logger.debug("Time to createVertexLabel sqlg topology: " + stopWatch.toString());
            if (!existSqlgSchema) {
                addPublicSchema();
                this.sqlgGraph.tx().commit();
            }
            if (!existSqlgSchema) {
                //old versions of sqlg needs the topology populated from the information_schema table.
                logger.debug("Upgrading sqlg from pre sqlg_schema version to sqlg_schema version");
                StopWatch stopWatch2 = new StopWatch();
                stopWatch2.start();
                loadSqlgSchemaFromInformationSchema();
                String version = getBuildVersion();
                TopologyManager.addGraph(this.sqlgGraph, version);
                stopWatch2.stop();
                logger.debug("Time to upgrade sqlg from pre sqlg_schema: " + stopWatch2.toString());
                logger.debug("Done upgrading sqlg from pre sqlg_schema version to sqlg_schema version");
            } else {
                // make sure the index edge index property exist, this if for upgrading from 1.3.4 to 1.4.0
                upgradeIndexEdgeSequenceToExist();
                //make sure the sqlg_schema.graph exists.
                String version = getBuildVersion();
                String oldVersion = createOrUpdateGraph(version);
                // Run migrations whenever the stored version differs from the
                // build version (or was never recorded).
                if (oldVersion == null || !oldVersion.equals(version)) {
                    updateTopology(oldVersion);
                }
            }
            cacheTopology();
            if (this.sqlgGraph.configuration().getBoolean("validate.topology", false)) {
                validateTopology();
            }
            this.sqlgGraph.tx().commit();
        } catch (Exception e) {
            this.sqlgGraph.tx().rollback();
            throw e;
        }
    }

    /**
     * Applies version-gated schema migrations. A null/unparseable old version
     * is treated as "unknown" and receives all migrations.
     */
    private void updateTopology(String oldVersion) {
        Version v = Version.unknownVersion();
        if (oldVersion != null) {
            v = VersionUtil.parseVersion(oldVersion, null, null);
        }
        // < 1.5.0: make foreign keys deferrable where the dialect supports it.
        if (v.isUnknownVersion() || v.compareTo(new Version(1, 5, 0, null, null, null)) < 0) {
            if (this.sqlDialect.supportsDeferrableForeignKey()) {
                upgradeForeignKeysToDeferrable();
            }
        }
        // < 1.6.0: add partition-related tables/columns to sqlg_schema.
        if (v.isUnknownVersion() || v.compareTo(new Version(1, 6, 0, null, null, null)) < 0) {
            addPartitionSupportToSqlgSchema();
        }
    }

    /** Executes the dialect's DDL that adds partition support tables. */
    private void addPartitionSupportToSqlgSchema() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        List<String> addPartitionTables = this.sqlDialect.addPartitionTables();
        for (String addPartitionTable : addPartitionTables) {
            try (Statement s = conn.createStatement()) {
                s.execute(addPartitionTable);
            } catch (SQLException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /** Alters every foreign key reported by the dialect to be deferrable. */
    private void upgradeForeignKeysToDeferrable() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try (PreparedStatement s = conn.prepareStatement(this.sqlDialect.sqlToGetAllForeignKeys())) {
            ResultSet rs = s.executeQuery();
            while (rs.next()) {
                String schema = rs.getString(1);
                String table = rs.getString(2);
                String fk = rs.getString(3);
                try (Statement statement = conn.createStatement()) {
                    statement.execute(this.sqlDialect.alterForeignKeyToDeferrable(schema, table, fk));
                }
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    private void cacheTopology() {
        this.sqlgGraph.getTopology().cacheTopology();
    }

    /** Validates the cached topology and logs each validation error. */
    private void validateTopology() {
        this.sqlgGraph.getTopology().validateTopology();
        if (!this.sqlgGraph.getTopology().getValidationErrors().isEmpty()) {
            for (Topology.TopologyValidationError topologyValidationError : this.sqlgGraph.getTopology().getValidationErrors()) {
                logger.warn(topologyValidationError.toString());
            }
        }
    }

    /**
     * create or update the graph metadata
     *
     * @param version the new version of the graph
     * @return the old version of the graph, or null if there was no graph
     */
    private String createOrUpdateGraph(String version) {
        String oldVersion = null;
        Connection conn = this.sqlgGraph.tx().getConnection();
        try {
            DatabaseMetaData metadata = conn.getMetaData();
            String[] types = new String[]{"TABLE"};
            // Does sqlg_schema.V_graph exist yet?
            try (ResultSet vertexRs = metadata.getTables(null, Schema.SQLG_SCHEMA, Topology.VERTEX_PREFIX + Topology.GRAPH, types)) {
                if (!vertexRs.next()) {
                    // No graph table: create it and record the current version.
                    try (Statement statement = conn.createStatement()) {
                        String sql = this.sqlDialect.sqlgCreateTopologyGraph();
                        statement.execute(sql);
                        TopologyManager.addGraph(this.sqlgGraph, version);
                    }
                } else {
                    //Need to check if dbVersion has been added
                    try (ResultSet columnRs = metadata.getColumns(null, Schema.SQLG_SCHEMA, Topology.VERTEX_PREFIX + Topology.GRAPH, Topology.SQLG_SCHEMA_GRAPH_DB_VERSION)) {
                        if (!columnRs.next()) {
                            try (Statement statement = conn.createStatement()) {
                                statement.execute(sqlDialect.addDbVersionToGraph(metadata));
                            }
                        }
                    }
                    oldVersion = TopologyManager.updateGraph(this.sqlgGraph, version);
                }
                return oldVersion;
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * 1.3.4 -> 1.4.0 upgrade: adds the sequence column to the index_property
     * edge table if absent. SQL errors are logged and deliberately swallowed.
     */
    private void upgradeIndexEdgeSequenceToExist() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try {
            DatabaseMetaData metadata = conn.getMetaData();
            String catalog = null;
            String schemaPattern = Topology.SQLG_SCHEMA;
            @SuppressWarnings("ConstantConditions")
            List<Triple<String, Integer, String>> columns = this.sqlDialect.getTableColumns(metadata, catalog, schemaPattern, Topology.EDGE_PREFIX + "index_property", SQLG_SCHEMA_INDEX_PROPERTY_EDGE_SEQUENCE);
            if (columns.isEmpty()) {
                try (Statement statement = conn.createStatement()) {
                    String sql = this.sqlDialect.sqlgAddIndexEdgeSequenceColumn();
                    statement.execute(sql);
                }
            }
        } catch (SQLException e) {
            logger.error("Error upgrading index edge property to include a sequence column. Error swallowed.", e);
        }
    }

    /**
     * Populates sqlg's topology from the database's own metadata (used when
     * upgrading a pre-sqlg_schema installation): registers schemas, vertex
     * labels with their properties/identifiers/indices, edge labels with
     * their in/out vertex labels, then edge properties, then partitions.
     */
    @SuppressWarnings("ConstantConditions")
    private void loadSqlgSchemaFromInformationSchema() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try {
            DatabaseMetaData metadata = conn.getMetaData();
            String catalog = null;
            String schemaPattern = null;
            // Register user schemas (skipping sqlg's own, public and GIS schemas).
            List<String> schemaNames = this.sqlDialect.getSchemaNames(metadata);
            for (String schemaName : schemaNames) {
                if (schemaName.equals(SQLG_SCHEMA) ||
                        schemaName.equals(this.sqlDialect.getPublicSchema()) ||
                        this.sqlDialect.getGisSchemas().contains(schemaName)) {
                    continue;
                }
                TopologyManager.addSchema(this.sqlgGraph, schemaName);
            }
            // May be null; when null, indices are discovered per-table below.
            Map<String, Set<IndexRef>> indices = this.sqlDialect.extractIndices(conn, catalog, schemaPattern);
            //load the vertices
            List<Triple<String, String, String>> vertexTables = this.sqlDialect.getVertexTables(metadata);
            for (Triple<String, String, String> vertexTable : vertexTables) {
                String tblCat = vertexTable.getLeft();
                String schema = vertexTable.getMiddle();
                String table = vertexTable.getRight();

                //check if is internal, if so ignore.
                Set<String> schemasToIgnore = new HashSet<>(this.sqlDialect.getInternalSchemas());
                if (schema.equals(SQLG_SCHEMA) ||
                        schemasToIgnore.contains(schema) ||
                        this.sqlDialect.getGisSchemas().contains(schema)) {
                    continue;
                }
                if (this.sqlDialect.getSpacialRefTable().contains(table)) {
                    continue;
                }
                Map<String, PropertyType> columns = new ConcurrentHashMap<>();
                List<Triple<String, Integer, String>> metaDatas = this.sqlDialect.getTableColumns(metadata, tblCat, schema, table, null);
                ListIterator<Triple<String, Integer, String>> metaDataIter = metaDatas.listIterator();
                while (metaDataIter.hasNext()) {
                    Triple<String, Integer, String> tripple = metaDataIter.next();
                    String columnName = tripple.getLeft();
                    int columnType = tripple.getMiddle();
                    String typeName = tripple.getRight();
                    if (!columnName.equals(Topology.ID)) {
                        // May consume extra iterator entries for multi-column types.
                        extractProperty(schema, table, columnName, columnType, typeName, columns, metaDataIter);
                    }
                }
                String label = table.substring(Topology.VERTEX_PREFIX.length());
                List<String> primaryKeys = this.sqlDialect.getPrimaryKeys(metadata, tblCat, schema, table);
                if (primaryKeys.size() == 1 && primaryKeys.get(0).equals(Topology.ID)) {
                    TopologyManager.addVertexLabel(this.sqlgGraph, schema, label, columns, new ListOrderedSet<>());
                } else {
                    //partitioned tables have no pk and must have identifiers.
                    //however we can not tell which columns are the identifiers so ahem???
                    //we do a little hardcoding. ID,uid and uuid are determined to be identifiers.
                    if (primaryKeys.isEmpty()) {
                        ListOrderedSet<String> identifiers = new ListOrderedSet<>();
                        for (String s : columns.keySet()) {
                            if (s.equalsIgnoreCase("ID") || s.equalsIgnoreCase("uid") || s.equalsIgnoreCase("uuid")) {
                                identifiers.add(s);
                            }
                        }
                        TopologyManager.addVertexLabel(this.sqlgGraph, schema, label, columns, identifiers);
                    } else {
                        TopologyManager.addVertexLabel(this.sqlgGraph, schema, label, columns, ListOrderedSet.listOrderedSet(primaryKeys));
                    }
                }
                if (indices != null) {
                    // Pre-extracted indices are keyed by "catalog.schema.table".
                    String key = tblCat + "." + schema + "." + table;
                    Set<IndexRef> idxs = indices.get(key);
                    if (idxs != null) {
                        for (IndexRef ir : idxs) {
                            TopologyManager.addIndex(sqlgGraph, schema, label, true, ir.getIndexName(), ir.getIndexType(), ir.getColumns());
                        }
                    }
                } else {
                    extractIndices(metadata, tblCat, schema, table, label, true);
                }
            }
            //load the edges without their properties
            List<Triple<String, String, String>> edgeTables = this.sqlDialect.getEdgeTables(metadata);
            for (Triple<String, String, String> edgeTable : edgeTables) {
                String edgCat = edgeTable.getLeft();
                String schema = edgeTable.getMiddle();
                String table = edgeTable.getRight();

                //check if is internal, if so ignore.
                Set<String> schemasToIgnore = new HashSet<>(this.sqlDialect.getInternalSchemas());
                if (schema.equals(SQLG_SCHEMA) ||
                        schemasToIgnore.contains(schema) ||
                        this.sqlDialect.getGisSchemas().contains(schema)) {
                    continue;
                }
                if (this.sqlDialect.getSpacialRefTable().contains(table)) {
                    continue;
                }
                List<Triple<String, Integer, String>> edgeColumns = this.sqlDialect.getTableColumns(metadata, edgCat, schema, table, null);
                List<String> primaryKeys = this.sqlDialect.getPrimaryKeys(metadata, edgCat, schema, table);
                Vertex edgeVertex;
                if (hasIDPrimaryKey(primaryKeys)) {
                    edgeVertex = TopologyManager.addEdgeLabel(this.sqlgGraph, table, Collections.emptyMap(), new ListOrderedSet<>(), PartitionType.NONE, null);
                } else {
                    //partitioned tables have no pk and must have identifiers.
                    //however we can not tell which columns are the identifiers so ahem???
                    //we do a little hardcoding. ID,uid and uuid are determined to be identifiers.
                    if (primaryKeys.isEmpty()) {
                        ListOrderedSet<String> identifiers = new ListOrderedSet<>();
                        for (Triple<String, Integer, String> s : edgeColumns) {
                            if (s.getLeft().equalsIgnoreCase("ID") || s.getLeft().equalsIgnoreCase("uid") || s.getLeft().equalsIgnoreCase("uuid")) {
                                identifiers.add(s.getLeft());
                            }
                        }
                        edgeVertex = TopologyManager.addEdgeLabel(this.sqlgGraph, table, Collections.emptyMap(), identifiers, PartitionType.NONE, null);
                    } else {
                        edgeVertex = TopologyManager.addEdgeLabel(this.sqlgGraph, table, Collections.emptyMap(), ListOrderedSet.listOrderedSet(primaryKeys), PartitionType.NONE, null);
                    }
                }
                // Wire the edge label to its in/out vertex labels, derived from
                // the foreign-key column naming convention.
                for (Triple<String, Integer, String> edgeColumn : edgeColumns) {
                    String column = edgeColumn.getLeft();
                    if (table.startsWith(EDGE_PREFIX) && (column.endsWith(Topology.IN_VERTEX_COLUMN_END) || column.endsWith(Topology.OUT_VERTEX_COLUMN_END))) {
                        String[] split = column.split("\\.");
                        SchemaTable foreignKey;
                        if (hasIDPrimaryKey(primaryKeys)) {
                            foreignKey = SchemaTable.of(split[0], split[1]);
                        } else {
                            //There could be no ID pk because of user defined pk or because partioned tables have no pk.
                            //This logic is because in TopologyManager.addLabelToEdge the '__I' or '__O' is assumed to be present and gets trimmed.
                            if (column.endsWith(Topology.IN_VERTEX_COLUMN_END)) {
                                if (split.length == 3) {
                                    //user defined pk
                                    foreignKey = SchemaTable.of(split[0], split[1] + Topology.IN_VERTEX_COLUMN_END);
                                } else {
                                    foreignKey = SchemaTable.of(split[0], split[1]);
                                }
                            } else {
                                if (split.length == 3) {
                                    //user defined pk
                                    foreignKey = SchemaTable.of(split[0], split[1] + Topology.OUT_VERTEX_COLUMN_END);
                                } else {
                                    foreignKey = SchemaTable.of(split[0], split[1]);
                                }
                            }
                        }
                        if (column.endsWith(Topology.IN_VERTEX_COLUMN_END)) {
                            TopologyManager.addLabelToEdge(this.sqlgGraph, edgeVertex, schema, table, true, foreignKey);
                        } else if (column.endsWith(Topology.OUT_VERTEX_COLUMN_END)) {
                            TopologyManager.addLabelToEdge(this.sqlgGraph, edgeVertex, schema, table, false, foreignKey);
                        }
                    }
                }
            }
            //load the edges without their in and out vertices
            for (Triple<String, String, String> edgeTable : edgeTables) {
                String edgCat = edgeTable.getLeft();
                String schema = edgeTable.getMiddle();
                String table = edgeTable.getRight();
                List<String> primaryKeys = this.sqlDialect.getPrimaryKeys(metadata, edgCat, schema, table);

                //check if is internal, if so ignore.
                Set<String> schemasToIgnore = new HashSet<>(this.sqlDialect.getInternalSchemas());
                if (schema.equals(SQLG_SCHEMA) ||
                        schemasToIgnore.contains(schema) ||
                        this.sqlDialect.getGisSchemas().contains(schema)) {
                    continue;
                }
                if (this.sqlDialect.getSpacialRefTable().contains(table)) {
                    continue;
                }
                Map<String, PropertyType> columns = new HashMap<>();
                //get the columns
                List<Triple<String, Integer, String>> metaDatas = this.sqlDialect.getTableColumns(metadata, edgCat, schema, table, null);
                ListIterator<Triple<String, Integer, String>> metaDataIter = metaDatas.listIterator();
                while (metaDataIter.hasNext()) {
                    Triple<String, Integer, String> tripple = metaDataIter.next();
                    String columnName = tripple.getLeft();
                    String typeName = tripple.getRight();
                    int columnType = tripple.getMiddle();
                    if (!columnName.equals(Topology.ID)) {
                        extractProperty(schema, table, columnName, columnType, typeName, columns, metaDataIter);
                    }
                }
                TopologyManager.addEdgeColumn(this.sqlgGraph, schema, table, columns, ListOrderedSet.listOrderedSet(primaryKeys));
                String label = table.substring(Topology.EDGE_PREFIX.length());
                if (indices != null) {
                    String key = edgCat + "." + schema + "." + table;
                    Set<IndexRef> idxs = indices.get(key);
                    if (idxs != null) {
                        for (IndexRef ir : idxs) {
                            TopologyManager.addIndex(sqlgGraph, schema, label, false, ir.getIndexName(), ir.getIndexType(), ir.getColumns());
                        }
                    }
                } else {
                    extractIndices(metadata, edgCat, schema, table, label, false);
                }
            }
            if (this.sqlDialect.supportsPartitioning()) {
                //load the partitions
                conn = this.sqlgGraph.tx().getConnection();
                List<Map<String, String>> partitions = this.sqlDialect.getPartitions(conn);
                List<PartitionTree> roots = PartitionTree.build(partitions);
                for (PartitionTree root : roots) {
                    root.createPartitions(this.sqlgGraph);
                }
            }
            this.sqlgGraph.tx().commit();
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Reads index metadata for one table and registers each non-system,
     * non-global-unique index. Index rows arrive grouped by index name; a
     * change of name flushes the previous group.
     */
    private void extractIndices(DatabaseMetaData metadata,
                                String catalog,
                                String schema,
                                String table,
                                String label,
                                boolean isVertex) throws SQLException {

        String lastIndexName = null;
        IndexType lastIndexType = null;
        List<String> lastColumns = new LinkedList<>();
        List<Triple<String, Boolean, String>> indexes = this.sqlDialect.getIndexInfo(metadata, catalog, schema, table, false, true);
        for (Triple<String, Boolean, String> index : indexes) {
            String indexName = index.getLeft();
            boolean nonUnique = index.getMiddle();
            String columnName = index.getRight();
            if (lastIndexName == null) {
                lastIndexName = indexName;
                lastIndexType = nonUnique ? IndexType.NON_UNIQUE : IndexType.UNIQUE;
            } else if (!lastIndexName.equals(indexName)) {
                // New index group: flush the previous one.
                if (!this.sqlDialect.isSystemIndex(lastIndexName)) {
                    if (!Schema.GLOBAL_UNIQUE_INDEX_SCHEMA.equals(schema)) {
                        TopologyManager.addIndex(sqlgGraph, schema, label, isVertex, lastIndexName, lastIndexType, lastColumns);
                    }
                }
                lastColumns.clear();
                lastIndexName = indexName;
                lastIndexType = nonUnique ? IndexType.NON_UNIQUE : IndexType.UNIQUE;
            }
            lastColumns.add(columnName);
        }
        // Flush the final group.
        // NOTE(review): when `indexes` is empty, lastIndexName is still null here
        // and isSystemIndex(null) is invoked — verify the dialect tolerates null.
        if (!this.sqlDialect.isSystemIndex(lastIndexName)) {
            if (!Schema.GLOBAL_UNIQUE_INDEX_SCHEMA.equals(schema)) {
                TopologyManager.addIndex(sqlgGraph, schema, label, isVertex, lastIndexName, lastIndexType, lastColumns);
            }
        }
    }

    /**
     * Maps one column (possibly spanning 2-3 physical columns for
     * ZONEDDATETIME / DURATION / PERIOD types, whose extra columns share the
     * "name~~~" prefix) to a PropertyType and records it in {@code columns}.
     * Peeks ahead on {@code metaDataIter} and rewinds with previous() when the
     * lookahead does not belong to this logical column.
     */
    private void extractProperty(String schema, String table, String columnName, Integer columnType, String typeName, Map<String, PropertyType> columns, ListIterator<Triple<String, Integer, String>> metaDataIter) throws SQLException {
        //check for ZONEDDATETIME, PERIOD, DURATION as they use more than one field to represent the type
        PropertyType propertyType = null;
        if (metaDataIter.hasNext()) {
            Triple<String, Integer, String> column2MetaData = metaDataIter.next();
            String column2Name = column2MetaData.getLeft();
            String typeName2 = column2MetaData.getRight();
            int column2Type = column2MetaData.getMiddle();
            if (column2Name.startsWith(columnName + "~~~")) {
                if (column2Type == Types.VARCHAR) {
                    // Second column is the zone id string.
                    propertyType = PropertyType.ZONEDDATETIME;
                } else if ((column2Type == Types.ARRAY && this.sqlDialect.sqlArrayTypeNameToPropertyType(typeName2, this.sqlgGraph, schema, table, column2Name, metaDataIter) == PropertyType.STRING_ARRAY)) {
                    propertyType = PropertyType.ZONEDDATETIME_ARRAY;
                } else {
                    // Second column is numeric: DURATION (2 cols) or PERIOD (3 cols).
                    if (metaDataIter.hasNext()) {
                        Triple<String, Integer, String> column3MetaData = metaDataIter.next();
                        String column3Name = column3MetaData.getLeft();
                        String typeName3 = column3MetaData.getRight();
                        int column3Type = column3MetaData.getMiddle();
                        if (column3Name.startsWith(columnName + "~~~")) {
                            if (column3Type == Types.ARRAY) {
                                Preconditions.checkState(sqlDialect.sqlArrayTypeNameToPropertyType(typeName3, this.sqlgGraph, schema, table, column3Name, metaDataIter) == PropertyType.INTEGER_ARRAY, "Only Period have a third column and it must be a Integer");
                                propertyType = PropertyType.PERIOD_ARRAY;
                            } else {
                                Preconditions.checkState(column3Type == Types.INTEGER, "Only Period have a third column and it must be a Integer");
                                propertyType = PropertyType.PERIOD;
                            }
                        } else {
                            // Third column belongs to a different logical column;
                            // back up and classify as DURATION.
                            metaDataIter.previous();
                            if (column2Type == Types.ARRAY) {
                                Preconditions.checkState(sqlDialect.sqlArrayTypeNameToPropertyType(typeName2, this.sqlgGraph, schema, table, column2Name, metaDataIter) == PropertyType.INTEGER_ARRAY, "Only Period have a third column and it must be a Integer");
                                propertyType = PropertyType.DURATION_ARRAY;
                            } else {
                                Preconditions.checkState(column2Type == Types.INTEGER, "Only Duration and Period have a second column and it must be a Integer");
                                propertyType = PropertyType.DURATION;
                            }
                        }
                    }
                }
            } else {
                // Lookahead is an unrelated column; rewind.
                metaDataIter.previous();
            }
        }
        if (propertyType == null) {
            // Plain single-column type: delegate to the dialect mapping.
            propertyType = this.sqlDialect.sqlTypeToPropertyType(this.sqlgGraph, schema, table, columnName, columnType, typeName, metaDataIter);
        }
        columns.put(columnName, propertyType);
    }

    /** Records the public schema as a vertex in sqlg_schema. */
    private void addPublicSchema() {
        this.sqlgGraph.addVertex(
                T.label, SQLG_SCHEMA + "." + SQLG_SCHEMA_SCHEMA,
                "name", this.sqlDialect.getPublicSchema(),
                Topology.CREATED_ON, LocalDateTime.now()
        );
    }

    /** Runs the dialect's DDL script that creates the gui schema. */
    private void createGuiSchema() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try (Statement statement = conn.createStatement()) {
            statement.execute(this.sqlDialect.sqlgGuiSchemaCreationScript());
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** Creates sqlg_schema's tables and indexes, one statement at a time. */
    private void createSqlgSchemaTablesAndIndexes() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try (Statement statement = conn.createStatement()) {
            List<String> creationScripts = this.sqlDialect.sqlgTopologyCreationScripts();
            //Hsqldb can not do this in one go
            for (String creationScript : creationScripts) {
                if (logger.isDebugEnabled()) {
                    logger.debug(creationScript);
                }
                statement.execute(creationScript);
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** @return true when the sqlg_schema schema already exists. */
    private boolean existSqlgSchema() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try {
            DatabaseMetaData metadata = conn.getMetaData();
            return this.sqlDialect.schemaExists(metadata, SQLG_SCHEMA);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** @return true when the dialect's default (public) schema exists. */
    private boolean existDefaultSchema() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try {
            DatabaseMetaData metadata = conn.getMetaData();
            return this.sqlDialect.schemaExists(metadata, this.sqlDialect.getPublicSchema());
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** @return true when the global-unique-index (gui) schema exists. */
    private boolean existGuiSchema() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try {
            DatabaseMetaData metadata = conn.getMetaData();
            return this.sqlDialect.schemaExists(metadata, Schema.GLOBAL_UNIQUE_INDEX_SCHEMA);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** Creates the bare sqlg_schema schema (tables are created separately). */
    private void createSqlgSchema() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try (Statement statement = conn.createStatement()) {
            String creationScript = this.sqlDialect.sqlgSqlgSchemaCreationScript();
            statement.execute(creationScript);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** Re-creates the default (public) schema if it was dropped. */
    private void createDefaultSchema() {
        Connection conn = this.sqlgGraph.tx().getConnection();
        try (Statement statement = conn.createStatement()) {
            statement.execute(this.sqlDialect.createSchemaStatement(this.sqlDialect.getPublicSchema()));
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * get the build version
     * <p>
     * Lazily loaded from sqlg.application.properties on the classpath.
     * NOTE(review): lazy init is not thread-safe — acceptable only if startup
     * is single-threaded; confirm.
     *
     * @return the build version, or null if unknown
     */
    String getBuildVersion() {
        if (buildVersion == null) {
            Properties prop = new Properties();
            try {
                // try system
                URL u = ClassLoader.getSystemResource(SQLG_APPLICATION_PROPERTIES);
                if (u == null) {
                    // try own class loader
                    u = getClass().getClassLoader().getResource(SQLG_APPLICATION_PROPERTIES);
                }
                if (u != null) {
                    try (InputStream is = u.openStream()) {
                        prop.load(is);
                    }
                    buildVersion = prop.getProperty(APPLICATION_VERSION);
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
        return buildVersion;
    }

    /** @return true when the table's sole primary key is sqlg's ID column. */
    private boolean hasIDPrimaryKey(List<String> primaryKeys) {
        return primaryKeys.size() == 1 && primaryKeys.get(0).equals(Topology.ID);
    }
}
package org.objectweb.proactive.core.remoteobject; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.lang.reflect.Method; import java.lang.reflect.TypeVariable; import java.net.URI; import java.rmi.dgc.VMID; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map.Entry; import java.util.Observable; import java.util.Observer; import org.apache.log4j.Logger; import org.objectweb.proactive.core.ProActiveException; import org.objectweb.proactive.core.ProActiveRuntimeException; import org.objectweb.proactive.core.body.reply.Reply; import org.objectweb.proactive.core.body.request.Request; import org.objectweb.proactive.core.mop.MethodCall; import org.objectweb.proactive.core.remoteobject.benchmark.RemoteObjectBenchmark; import org.objectweb.proactive.core.remoteobject.exception.UnknownProtocolException; import org.objectweb.proactive.core.runtime.ProActiveRuntimeImpl; import org.objectweb.proactive.core.security.exceptions.RenegotiateSessionException; import org.objectweb.proactive.core.util.URIBuilder; import org.objectweb.proactive.core.util.log.Loggers; import org.objectweb.proactive.core.util.log.ProActiveLogger; public class RemoteObjectSet implements Serializable, Observer { static final Logger LOGGER_RO = ProActiveLogger.getLogger(Loggers.REMOTEOBJECT); // *transient * Each RRO need a special marshalling processing // * Use LinkedHashMap for keeping the insertion-order private transient LinkedHashMap<URI, RemoteRemoteObject> rros; private HashSet<RemoteRemoteObject> unreliables; private transient RemoteRemoteObject defaultRO; private static Method getURI; private String[] order = new String[] {}; private String remoteRuntimeName; private RemoteRemoteObject forcedProtocol = 
null; private VMID vmid = null; private transient URI defaultURI = null; static { try { getURI = InternalRemoteRemoteObject.class.getDeclaredMethod("getURI", new Class<?>[0]); } catch (NoSuchMethodException e) { e.printStackTrace(); } } public RemoteObjectSet(RemoteRemoteObject defaultRRO, Collection<RemoteRemoteObject> rros) throws IOException { this.rros = new LinkedHashMap<URI, RemoteRemoteObject>(); this.unreliables = new HashSet<RemoteRemoteObject>(); for (RemoteRemoteObject rro : rros) { this.add(rro); } try { this.defaultRO = defaultRRO; this.remoteRuntimeName = getPARuntimeName(defaultRO); this.defaultURI = getURI(defaultRRO); } catch (RemoteRemoteObjectException e) { throw new IOException("Cannot access the remoteObject " + defaultRRO + " : " + e.getMessage()); } } /** * Select the best suited RemoteRemoteObject (protocol related), and send it the Request * Fallback to default (according to the PA_COMMUNICATION_PROTOCOL property) if necessary */ public Reply receiveMessage(Request message) throws ProActiveException, RenegotiateSessionException, IOException { if (forcedProtocol != null) { return forcedProtocol.receiveMessage(message); } RemoteRemoteObject rro = null; // For each protocol already selected and sorted for (String protocol : order) { // * Find the corresponding RemoteRemoteObject // * Selection order is store for runtime so, the uri could not be used // * Use Iterator for removing during iteration for (Iterator<Entry<URI, RemoteRemoteObject>> it = rros.entrySet().iterator(); it.hasNext();) { try { Entry<URI, RemoteRemoteObject> entry = it.next(); if (entry.getKey().getScheme().equalsIgnoreCase(protocol)) { rro = entry.getValue(); Reply rep = rro.receiveMessage(message); // The Exception is thrown on server side // So it is encapsulate to be delivered on client side Throwable t = rep.getResult().getException(); if (t != null) { it.remove(); this.unreliables.add(rro); continue; } return rep; } // These Exceptions happened on client side // RMI 
doesn't act as others protocols and Exceptions aren't // encapsulate, so they are catched here. } catch (ProActiveException pae) { it.remove(); this.unreliables.add(rro); continue; } catch (IOException io) { it.remove(); this.unreliables.add(rro); continue; } catch (RenegotiateSessionException rse) { it.remove(); this.unreliables.add(rro); continue; } } } // All RemoteRemoteObject lead to Exception, try with the default one return defaultRO.receiveMessage(message); } /** * Force the specified protocol to be used for all communication. Null value avoid the forcing. * * @throws UnknownProtocolException * The protocol specified isn't known * @throws NotYetExposedException * The Object isn't already exposed with this protocol */ public void forceProtocol(String protocol) throws UnknownProtocolException, NotYetExposedException { if (protocol == null || protocol.length() == 0) { this.forcedProtocol = null; return; } // Protocols factories can be added dynamically, so the only way to // check a protocol existence is to check if it have a factory if (RemoteObjectProtocolFactoryRegistry.get(protocol) != null) { boolean exposed = false; for (URI uri : rros.keySet()) { if (uri.getScheme().equalsIgnoreCase(protocol)) { this.forcedProtocol = rros.get(uri); exposed = true; } } if (!exposed) { throw new NotYetExposedException("The object isn't exposed on protocol " + protocol); } } else { throw new UnknownProtocolException("\"" + protocol + "\"" + " isn't a valid protocol."); } } /** * Return the default RemoteRemoteObject */ public RemoteRemoteObject getDefault() { return this.defaultRO; } /** * Return the URI of the default RemoteRemoteObject */ public URI getDefaultURI() throws ProActiveException { return this.defaultURI; } /** * Add a RemoteRemoteObject (protocol specific) to the RemoteObjectSet * If it is unreliable, keep it aside for later possible use */ public void add(RemoteRemoteObject rro) { try { this.rros.put(getURI(rro), rro); } catch (RemoteRemoteObjectException e) 
{ this.unreliables.add(rro); } } /** * @see org.objectweb.proactive.core.remoteobject.RemoteObjectSet#add(RemoteRemoteObject) */ public void add(Collection<RemoteRemoteObject> rros) { // If an older same rro is present, it will be updated for (RemoteRemoteObject rro : rros) { this.add(rro); } } /** * Send a non-functional internal request to get the URI of the RemoteRemoteObject */ private URI getURI(RemoteRemoteObject rro) throws RemoteRemoteObjectException { try { MethodCall mc = MethodCall.getMethodCall(getURI, new Object[0], new HashMap<TypeVariable<?>, Class<?>>()); Request r = new InternalRemoteRemoteObjectRequest(mc); Reply rep = rro.receiveMessage(r); return (URI) rep.getResult().getResult(); } catch (ProActiveException e) { throw new RemoteRemoteObjectException( "RemoteObjectSet: can't access RemoteObject through " + rro, e); } catch (IOException e) { throw new RemoteRemoteObjectException( "RemoteObjectSet: can't access RemoteObject through " + rro, e); } catch (ProActiveRuntimeException e) { throw new RemoteRemoteObjectException( "RemoteObjectSet: can't access RemoteObject through " + rro, e); } catch (RenegotiateSessionException e) { e.printStackTrace(); throw new RemoteRemoteObjectException(e); } } /** * Send a non-functional internal request to get the name of the remote ProActiveRuntime */ private String getPARuntimeName(RemoteRemoteObject rro) throws RemoteRemoteObjectException { try { Request r = new PARuntimeNameRequest(); Reply rep = rro.receiveMessage(r); return (String) rep.getResult().getResult(); } catch (ProActiveException e) { throw new RemoteRemoteObjectException("RemoteObjectSet: can't get ProActiveRuntime urls from " + rro, e); } catch (IOException e) { throw new RemoteRemoteObjectException("RemoteObjectSet: can't get ProActiveRuntime urls from " + rro, e); } catch (RenegotiateSessionException e) { e.printStackTrace(); throw new RemoteRemoteObjectException(e); } } /** * Exception thrown an communication error, internal use only */ public 
class RemoteRemoteObjectException extends Exception { RemoteRemoteObjectException(Exception e) { super(e); } RemoteRemoteObjectException(String m) { super(m); } RemoteRemoteObjectException(String m, Exception e) { super(m, e); } } /** * The Object isn't already exposed with this protocol */ public class NotYetExposedException extends Exception { public NotYetExposedException(Exception e) { super(e); } public NotYetExposedException(String m) { super(m); } public NotYetExposedException(String m, Exception e) { super(m, e); } } public int size() { return this.rros.size(); } /** * Update the protocol order from the new ProActive Runtime * when the remote remote object is reified */ private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); int size = in.readInt(); this.rros = new LinkedHashMap<URI, RemoteRemoteObject>(size); ObjectInputStream ois = null; byte[] buf = null; for (int i = 0; i < size; i++) { try { // Read the data before calling any method throwing an exception to avoid stream corruption URI uri = (URI) in.readObject(); buf = (byte[]) in.readObject(); RemoteObjectFactory rof = AbstractRemoteObjectFactory.getRemoteObjectFactory(uri.getScheme()); ois = rof.getProtocolObjectInputStream(new ByteArrayInputStream(buf)); RemoteRemoteObject rro = (RemoteRemoteObject) ois.readObject(); this.rros.put(uri, rro); } catch (UnknownProtocolException e) { LOGGER_RO.debug("Failed to instanciate a ROF when receiving a RemoteObjectset", e); } finally { if (ois != null) ois.close(); } } try { // Read the data before calling any method throwing an exception to avoid stream corruption this.defaultURI = (URI) in.readObject(); buf = (byte[]) in.readObject(); RemoteObjectFactory rof = AbstractRemoteObjectFactory.getRemoteObjectFactory(this.defaultURI .getScheme()); ois = rof.getProtocolObjectInputStream(new ByteArrayInputStream(buf)); RemoteRemoteObject rro = (RemoteRemoteObject) ois.readObject(); this.defaultRO = 
rro; } catch (UnknownProtocolException e) { LOGGER_RO.debug("Failed to instanciate a ROF when receiving a RemoteObjectset", e); } finally { if (ois != null) ois.close(); } VMID testLocal = ProActiveRuntimeImpl.getProActiveRuntime().getVMInformation().getVMID(); if (!vmid.equals(testLocal)) { this.vmid = testLocal; this.updateUnreliable(); this.updateOrder(); } } /** * Check if now some RemoteRemoteObject becomes accessible */ private void updateUnreliable() { if (unreliables.size() != 0) { HashSet<RemoteRemoteObject> copy = new HashSet<RemoteRemoteObject>(this.unreliables); for (RemoteRemoteObject rro : copy) { this.unreliables.remove(rro); this.add(rro); } } } /** * Network topology could have change, change the order */ private void updateOrder() { // The update of the order is done asynchronously, so we need to erase previous values // It's not a good idea to set order as transient, because of the local serialization case this.order = new String[0]; if (rros.size() > 1) RemoteObjectBenchmark.getInstance().subscribeAsObserver(this, rros, this.remoteRuntimeName); } private void writeObject(ObjectOutputStream out) throws IOException { // Almost same as in term of speed UniqueID.getCurrentVMID() but more readable this.vmid = ProActiveRuntimeImpl.getProActiveRuntime().getVMInformation().getVMID(); out.defaultWriteObject(); out.writeInt(rros.size()); ObjectOutputStream oos = null; for (URI uri : rros.keySet()) { try { out.writeObject(uri); RemoteObjectFactory rof = AbstractRemoteObjectFactory.getRemoteObjectFactory(uri.getScheme()); ByteArrayOutputStream baos = new ByteArrayOutputStream(); oos = rof.getProtocolObjectOutputStream(baos); oos.writeObject(rros.get(uri)); oos.flush(); out.writeObject(baos.toByteArray()); } catch (UnknownProtocolException e) { ProActiveLogger.logImpossibleException(LOGGER_RO, e); } finally { if (oos != null) oos.close(); } } try { out.writeObject(this.defaultURI); RemoteObjectFactory rof = 
AbstractRemoteObjectFactory.getRemoteObjectFactory(this.defaultURI .getScheme()); ByteArrayOutputStream baos = new ByteArrayOutputStream(); oos = rof.getProtocolObjectOutputStream(baos); oos.writeObject(this.defaultRO); oos.flush(); out.writeObject(baos.toByteArray()); } catch (UnknownProtocolException e) { ProActiveLogger.logImpossibleException(LOGGER_RO, e); } finally { if (oos != null) oos.close(); } } /** * Notification from a BenchmarkMonitorThread Object */ public void update(Observable o, Object arg) { order = (String[]) arg; if (LOGGER_RO.isDebugEnabled()) LOGGER_RO.debug("[Multi-Protocol] " + URIBuilder.getNameFromURI(defaultURI) + " received protocol order: " + Arrays.toString(order)); } public String toString() { StringBuilder sb = new StringBuilder(); sb.append("["); for (URI uri : rros.keySet()) { sb.append(uri.toString()); sb.append(", "); } sb.delete(sb.length() - 2, sb.length()); sb.append("]"); return sb.toString(); } }
package org.zalando.nakadi.webservice.hila;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.hamcrest.core.StringContains;
import org.junit.Before;
import org.junit.Test;
import org.zalando.nakadi.config.JsonConfig;
import org.zalando.nakadi.domain.EventType;
import org.zalando.nakadi.domain.ItemsWrapper;
import org.zalando.nakadi.domain.Subscription;
import org.zalando.nakadi.domain.SubscriptionBase;
import org.zalando.nakadi.domain.SubscriptionCursor;
import org.zalando.nakadi.domain.SubscriptionEventTypeStats;
import org.zalando.nakadi.utils.JsonTestHelper;
import org.zalando.nakadi.webservice.BaseAT;
import org.zalando.nakadi.webservice.utils.NakadiTestUtils;
import org.zalando.nakadi.webservice.utils.TestStreamingClient;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.stream.IntStream;

import static com.jayway.restassured.RestAssured.given;
import static java.text.MessageFormat.format;
import static java.util.stream.IntStream.range;
import static java.util.stream.IntStream.rangeClosed;
import static org.apache.http.HttpStatus.SC_CONFLICT;
import static org.apache.http.HttpStatus.SC_OK;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.zalando.nakadi.domain.SubscriptionBase.InitialPosition.BEGIN;
import static org.zalando.nakadi.utils.RandomSubscriptionBuilder.randomSubscription;
import static org.zalando.nakadi.utils.TestUtils.waitFor;
import static org.zalando.nakadi.webservice.hila.StreamBatch.MatcherIgnoringToken.equalToBatchIgnoringToken;
import static org.zalando.nakadi.webservice.hila.StreamBatch.singleEventBatch;
import static org.zalando.nakadi.webservice.utils.NakadiTestUtils.commitCursors;
import static org.zalando.nakadi.webservice.utils.NakadiTestUtils.createEventType;
import static org.zalando.nakadi.webservice.utils.NakadiTestUtils.createSubscription;
import static org.zalando.nakadi.webservice.utils.NakadiTestUtils.publishEvent;

/**
 * Acceptance tests for Nakadi's high-level ("hila") subscription streaming API:
 * event consumption via subscriptions, cursor commits, stream/commit timeouts,
 * batching parameters and subscription statistics.
 * <p>
 * Each test creates its own event type and a subscription that starts reading
 * from the beginning of the event type (see {@link #before()}).
 */
public class HilaAT extends BaseAT {

    private static final JsonTestHelper JSON_TEST_HELPER = new JsonTestHelper(new JsonConfig().jacksonObjectMapper());
    private EventType eventType;
    private Subscription subscription;

    /** Creates a fresh event type and a BEGIN-positioned subscription for every test. */
    @Before
    public void before() throws IOException {
        // create event-type and subscribe to it
        eventType = createEventType();
        final SubscriptionBase subscription = randomSubscription()
                .withEventType(eventType.getName())
                .withStartFrom(BEGIN)
                .buildSubscriptionBase();
        this.subscription = createSubscription(subscription);
    }

    /**
     * After committing offset 1 of 4 published events, a new streaming session must
     * resume from offset 2 rather than replaying from the beginning.
     */
    @Test(timeout = 30000)
    public void whenOffsetIsCommittedNextSessionStartsFromNextEventAfterCommitted() throws Exception {
        // write 4 events to event-type
        rangeClosed(0, 3)
                .forEach(x -> publishEvent(eventType.getName(), "{\"blah\":\"foo" + x + "\"}"));

        // create session, read from subscription and wait for events to be sent
        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "stream_limit=2")
                .start();
        waitFor(() -> assertThat(client.getBatches(), hasSize(2)));
        assertThat(client.getBatches().get(0),
                equalToBatchIgnoringToken(singleEventBatch("0", "0", eventType.getName(),
                        ImmutableMap.of("blah", "foo0"), "Stream started")));
        assertThat(client.getBatches().get(1),
                equalToBatchIgnoringToken(singleEventBatch("0", "1", eventType.getName(),
                        ImmutableMap.of("blah", "foo1"))));

        // commit offset that will also trigger session closing as we reached stream_limit and committed
        commitCursors(subscription.getId(), ImmutableList.of(client.getBatches().get(1).getCursor()));
        waitFor(() -> assertThat(client.isRunning(), is(false)));

        // create new session and read from subscription again
        client.start();
        waitFor(() -> assertThat(client.getBatches(), hasSize(2)));

        // check that we have read the next two events with correct offsets
        assertThat(client.getBatches().get(0),
                equalToBatchIgnoringToken(singleEventBatch("0", "2", eventType.getName(),
                        ImmutableMap.of("blah", "foo2"), "Stream started")));
        assertThat(client.getBatches().get(1),
                equalToBatchIgnoringToken(singleEventBatch("0", "3", eventType.getName(),
                        ImmutableMap.of("blah", "foo3"))));
    }

    /** Committing the very first offset ("0") of a partition must succeed with HTTP 200. */
    @Test(timeout = 5000)
    public void whenCommitVeryFirstEventThenOk() throws Exception {
        publishEvent(eventType.getName(), "{\"blah\":\"foo\"}");

        // create session, read from subscription and wait for events to be sent
        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "")
                .start();
        waitFor(() -> assertThat(client.getBatches(), not(empty())));

        // commit and check that status is 200
        final int commitResult = commitCursors(subscription.getId(),
                ImmutableList.of(new SubscriptionCursor("0", "0", eventType.getName(), "token")));
        assertThat(commitResult, equalTo(SC_OK));
    }

    /**
     * With max_uncommitted_size=5, streaming must pause after 5 uncommitted events and
     * only continue up to the new window after each commit (commit at 4 -> 10 total,
     * commit at 6 -> 12 total).
     */
    @Test(timeout = 15000)
    public void whenWindowSizeIsSetItIsConsidered() throws Exception {
        range(0, 15).forEach(x -> publishEvent(eventType.getName(), "{\"blah\":\"foo\"}"));

        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "max_uncommitted_size=5")
                .start();
        waitFor(() -> assertThat(client.getBatches(), hasSize(5)));

        SubscriptionCursor cursorToCommit = client.getBatches().get(4).getCursor();
        commitCursors(subscription.getId(), ImmutableList.of(cursorToCommit));
        waitFor(() -> assertThat(client.getBatches(), hasSize(10)));

        cursorToCommit = client.getBatches().get(6).getCursor();
        commitCursors(subscription.getId(), ImmutableList.of(cursorToCommit));
        waitFor(() -> assertThat(client.getBatches(), hasSize(12)));
    }

    /**
     * If the client never commits, the session must be closed by the server with a
     * "Commit timeout reached" info batch once the commit timeout expires.
     */
    @Test(timeout = 15000)
    public void whenCommitTimeoutReachedSessionIsClosed() throws Exception {
        publishEvent(eventType.getName(), "{\"blah\":\"foo\"}");

        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "") // commit_timeout is 5 seconds for test
                .start();

        waitFor(() -> assertThat(client.getBatches(), hasSize(2)), 10000);
        waitFor(() -> assertThat(client.isRunning(), is(false)), 10000);
        assertThat(client.getBatches().get(1),
                equalToBatchIgnoringToken(singleEventBatch("0", "0", eventType.getName(),
                        ImmutableMap.of(), "Commit timeout reached")));
    }

    /** With stream_timeout=3 and everything committed, the session must close on its own. */
    @Test(timeout = 15000)
    public void whenStreamTimeoutReachedSessionIsClosed() throws Exception {
        publishEvent(eventType.getName(), "{\"blah\":\"foo\"}");

        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "stream_timeout=3")
                .start();
        waitFor(() -> assertThat(client.getBatches(), hasSize(1)));

        // to check that stream_timeout works we need to commit everything we consumed, in other case
        // Nakadi will first wait till commit_timeout exceeds
        final SubscriptionCursor lastBatchCursor =
                client.getBatches().get(client.getBatches().size() - 1).getCursor();
        commitCursors(subscription.getId(), ImmutableList.of(lastBatchCursor));

        waitFor(() -> assertThat(client.isRunning(), is(false)), 5000);
    }

    /**
     * 12 events with batch_limit=5 must arrive as two full batches of 5 (offsets 4 and 9)
     * plus one flush-timeout batch of 2 (offset 11).
     */
    @Test(timeout = 10000)
    public void whenBatchLimitAndTimeoutAreSetTheyAreConsidered() throws Exception {
        range(0, 12).forEach(x -> publishEvent(eventType.getName(), "{\"blah\":\"foo\"}"));

        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "batch_limit=5&batch_flush_timeout=1&max_uncommitted_size=20")
                .start();
        waitFor(() -> assertThat(client.getBatches(), hasSize(3)));

        assertThat(client.getBatches().get(0).getEvents(), hasSize(5));
        assertThat(client.getBatches().get(0).getCursor().getOffset(), is("4"));

        assertThat(client.getBatches().get(1).getEvents(), hasSize(5));
        assertThat(client.getBatches().get(1).getCursor().getOffset(), is("9"));

        assertThat(client.getBatches().get(2).getEvents(), hasSize(2));
        assertThat(client.getBatches().get(2).getCursor().getOffset(), is("11"));
    }

    /** A second concurrent consumer on a fully-occupied subscription must get HTTP 409. */
    @Test(timeout = 10000)
    public void whenThereAreNoEmptySlotsThenConflict() throws Exception {
        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "batch_flush_timeout=1");
        client.start();
        waitFor(() -> assertThat(client.getBatches(), hasSize(1)));

        given()
                .get(format("/subscriptions/{0}/events", subscription.getId()))
                .then()
                .statusCode(SC_CONFLICT);
    }

    /** After a client disconnects, its slot must be freed so another client can stream. */
    @Test(timeout = 10000)
    public void whenConnectionIsClosedByClientNakadiRecognizesIt() throws Exception {
        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "batch_flush_timeout=1");
        client.start();
        waitFor(() -> assertThat(client.getBatches(), hasSize(1)));

        client.close();
        Thread.sleep(2000);

        final TestStreamingClient anotherClient = TestStreamingClient
                .create(URL, subscription.getId(), "batch_flush_timeout=1");
        anotherClient.start();
        // if we start to get data for another client it means that Nakadi recognized that first client closed
        // connection (in other case it would not allow second client to connect because of lack of slots)
        waitFor(() -> assertThat(anotherClient.getBatches(), hasSize(1)));
    }

    /**
     * Subscription stats must report the unconsumed-event count: 15 before any commit,
     * then 5 after committing offset 9 (events 0..9 consumed).
     */
    @Test
    public void testGetSubscriptionStat() throws Exception {
        IntStream.range(0, 15).forEach(x -> publishEvent(eventType.getName(), "{\"blah\":\"foo\"}"));

        final TestStreamingClient client = TestStreamingClient
                .create(URL, subscription.getId(), "max_uncommitted_size=20")
                .start();
        waitFor(() -> assertThat(client.getBatches(), hasSize(15)));

        List<SubscriptionEventTypeStats> subscriptionStats =
                Collections.singletonList(new SubscriptionEventTypeStats(
                        eventType.getName(),
                        Collections.singleton(
                                new SubscriptionEventTypeStats.Partition("0", "assigned", 15, client.getSessionId())))
                );
        NakadiTestUtils.getSubscriptionStat(subscription)
                .then()
                .content(new StringContains(JSON_TEST_HELPER.asJsonString(new ItemsWrapper<>(subscriptionStats))));

        final String partition = client.getBatches().get(0).getCursor().getPartition();
        final SubscriptionCursor cursor = new SubscriptionCursor(partition, "9", eventType.getName(), "token");
        commitCursors(subscription.getId(), ImmutableList.of(cursor));

        subscriptionStats =
                Collections.singletonList(new SubscriptionEventTypeStats(
                        eventType.getName(),
                        Collections.singleton(
                                new SubscriptionEventTypeStats.Partition("0", "assigned", 5, client.getSessionId())))
                );
        NakadiTestUtils.getSubscriptionStat(subscription)
                .then()
                .content(new StringContains(JSON_TEST_HELPER.asJsonString(new ItemsWrapper<>(subscriptionStats))));
    }
}
package com.couchbase.lite.replicator;

import com.couchbase.lite.Attachment;
import com.couchbase.lite.CouchbaseLiteException;
import com.couchbase.lite.Database;
import com.couchbase.lite.Document;
import com.couchbase.lite.Emitter;
import com.couchbase.lite.LiteTestCase;
import com.couchbase.lite.LiveQuery;
import com.couchbase.lite.Manager;
import com.couchbase.lite.Mapper;
import com.couchbase.lite.Query;
import com.couchbase.lite.QueryEnumerator;
import com.couchbase.lite.QueryRow;
import com.couchbase.lite.Revision;
import com.couchbase.lite.SavedRevision;
import com.couchbase.lite.Status;
import com.couchbase.lite.UnsavedRevision;
import com.couchbase.lite.ValidationContext;
import com.couchbase.lite.Validator;
import com.couchbase.lite.View;
import com.couchbase.lite.auth.FacebookAuthorizer;
import com.couchbase.lite.internal.Body;
import com.couchbase.lite.internal.RevisionInternal;
import com.couchbase.lite.support.Base64;
import com.couchbase.lite.support.HttpClientFactory;
import com.couchbase.lite.threading.BackgroundTask;
import com.couchbase.lite.util.Log;
import com.couchbase.lite.util.TextUtils;
import com.squareup.okhttp.mockwebserver.MockResponse;
import com.squareup.okhttp.mockwebserver.MockWebServer;
import com.squareup.okhttp.mockwebserver.RecordedRequest;
import junit.framework.Assert;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.CookieStore;
import org.apache.http.client.HttpClient;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.cookie.Cookie;
import org.apache.http.entity.StringEntity;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicHeader;
import org.json.JSONArray;
import org.json.JSONObject;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/**
 * Tests for push/pull replication, largely driven by a CustomizableMockHttpClient
 * installed via Manager.setDefaultHttpClientFactory() so no real server is needed
 * (except where a replication URL from the test config is used directly).
 */
public class ReplicationTest extends LiteTestCase {

    public static final String TAG = "Replicator";

    /**
     * Verify that running a one-shot push replication will complete when run against a
     * mock server that returns 500 Internal Server errors on every request.
     */
    public void testOneShotReplicationErrorNotification() throws Throwable {

        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderThrowExceptionAllRequests();

        URL remote = getReplicationURL();

        manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
        Replication pusher = database.createPushReplication(remote);

        runReplication(pusher);

        // every request failed, so the replication must surface an error
        assertTrue(pusher.getLastError() != null);
    }

    /**
     * Verify that running a continuous push replication will emit a change while
     * in an error state when run against a mock server that returns 500 Internal Server
     * errors on every request.
     */
    public void testContinuousReplicationErrorNotification() throws Throwable {

        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderThrowExceptionAllRequests();

        URL remote = getReplicationURL();

        manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
        Replication pusher = database.createPushReplication(remote);
        pusher.setContinuous(true);

        // add replication observer; the latch fires on the first error-state change event
        CountDownLatch countDownLatch = new CountDownLatch(1);
        ReplicationErrorObserver replicationErrorObserver = new ReplicationErrorObserver(countDownLatch);
        pusher.addChangeListener(replicationErrorObserver);

        // start replication
        pusher.start();

        boolean success = countDownLatch.await(30, TimeUnit.SECONDS);
        assertTrue(success);

        pusher.stop();
    }

    /**
     * Wraps the given mock HTTP client in an HttpClientFactory so it can be installed
     * as the manager's default client factory. Cookie-related methods are no-ops.
     */
    private HttpClientFactory mockFactoryFactory(final CustomizableMockHttpClient mockHttpClient) {
        return new HttpClientFactory() {
            @Override
            public HttpClient getHttpClient() {
                return mockHttpClient;
            }

            @Override
            public void addCookies(List<Cookie> cookies) {

            }

            @Override
            public void deleteCookie(String name) {

            }

            @Override
            public CookieStore getCookieStore() {
                return null;
            }
        };
    }

    // Reproduces issue #167
    // A document that is updated and then purged before the push catches up must NOT
    // be pushed: exactly one _bulk_docs request (for the original doc) is expected.
    public void testPushPurgedDoc() throws Throwable {

        int numBulkDocRequests = 0;
        HttpPost lastBulkDocsRequest = null;

        Map<String, Object> properties = new HashMap<String, Object>();
        properties.put("testName", "testPurgeDocument");

        Document doc = createDocumentWithProperties(database, properties);
        assertNotNull(doc);

        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderRevDiffsAllMissing();
        mockHttpClient.setResponseDelayMilliseconds(250);
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();

        HttpClientFactory mockHttpClientFactory = new HttpClientFactory() {
            @Override
            public HttpClient getHttpClient() {
                return mockHttpClient;
            }

            @Override
            public void addCookies(List<Cookie> cookies) {

            }

            @Override
            public void deleteCookie(String name) {

            }

            @Override
            public CookieStore getCookieStore() {
                return null;
            }
        };

        URL remote = getReplicationURL();

        manager.setDefaultHttpClientFactory(mockHttpClientFactory);
        Replication pusher = database.createPushReplication(remote);
        pusher.setContinuous(true);

        // latch released once changesCount == completedChangesCount (and non-zero),
        // i.e. the continuous pusher has caught up
        final CountDownLatch replicationCaughtUpSignal = new CountDownLatch(1);

        pusher.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                final int changesCount = event.getSource().getChangesCount();
                final int completedChangesCount = event.getSource().getCompletedChangesCount();
                String msg = String.format("changes: %d completed changes: %d", changesCount, completedChangesCount);
                Log.d(TAG, msg);
                if (changesCount == completedChangesCount && changesCount != 0) {
                    replicationCaughtUpSignal.countDown();
                }
            }
        });

        pusher.start();

        // wait until that doc is pushed
        boolean didNotTimeOut = replicationCaughtUpSignal.await(60, TimeUnit.SECONDS);
        assertTrue(didNotTimeOut);

        // at this point, we should have captured exactly 1 bulk docs request
        numBulkDocRequests = 0;
        for (HttpRequest capturedRequest : mockHttpClient.getCapturedRequests()) {
            if (capturedRequest instanceof HttpPost &&
                    ((HttpPost) capturedRequest).getURI().toString().endsWith("_bulk_docs")) {
                lastBulkDocsRequest = (HttpPost) capturedRequest;
                numBulkDocRequests += 1;
            }
        }
        assertEquals(1, numBulkDocRequests);

        // that bulk docs request should have the "start" key under its _revisions
        Map<String, Object> jsonMap = mockHttpClient.getJsonMapFromRequest((HttpPost) lastBulkDocsRequest);
        List docs = (List) jsonMap.get("docs");
        Map<String, Object> onlyDoc = (Map) docs.get(0);
        Map<String, Object> revisions = (Map) onlyDoc.get("_revisions");
        assertTrue(revisions.containsKey("start"));

        // now add a new revision, which will trigger the pusher to try to push it
        properties = new HashMap<String, Object>();
        properties.put("testName2", "update doc");
        UnsavedRevision unsavedRevision = doc.createRevision();
        unsavedRevision.setUserProperties(properties);
        unsavedRevision.save();

        // but then immediately purge it
        doc.purge();

        // wait for a while to give the replicator a chance to push it
        // (it should not actually push anything)
        Thread.sleep(5 * 1000);

        // we should not have gotten any more _bulk_docs requests, because
        // the replicator should not have pushed anything else.
        // (in the case of the bug, it was trying to push the purged revision)
        numBulkDocRequests = 0;
        for (HttpRequest capturedRequest : mockHttpClient.getCapturedRequests()) {
            if (capturedRequest instanceof HttpPost &&
                    ((HttpPost) capturedRequest).getURI().toString().endsWith("_bulk_docs")) {
                numBulkDocRequests += 1;
            }
        }
        assertEquals(1, numBulkDocRequests);

        pusher.stop();
    }

    /**
     * End-to-end push test against a live replication URL: pushes two documents,
     * verifies they exist remotely, pushes a third with a second replication, and
     * checks that both the local last-sequence and the remote _local checkpoint
     * document were updated to match.
     */
    public void testPusher() throws Throwable {

        CountDownLatch replicationDoneSignal = new CountDownLatch(1);
        String doc1Id;
        String docIdTimestamp = Long.toString(System.currentTimeMillis());
        URL remote = getReplicationURL();
        doc1Id = createDocumentsForPushReplication(docIdTimestamp);
        Map<String, Object> documentProperties;

        final boolean continuous = false;
        final Replication repl = database.createPushReplication(remote);
        repl.setContinuous(continuous);
        if (!isSyncGateway(remote)) {
            repl.setCreateTarget(true);
            Assert.assertTrue(repl.shouldCreateTarget());
        }

        // Check the replication's properties:
        Assert.assertEquals(database, repl.getLocalDatabase());
        Assert.assertEquals(remote, repl.getRemoteUrl());
        Assert.assertFalse(repl.isPull());
        Assert.assertFalse(repl.isContinuous());
        Assert.assertNull(repl.getFilter());
        Assert.assertNull(repl.getFilterParams());
        Assert.assertNull(repl.getDocIds());
        // TODO: CAssertNil(r1.headers); still not null!

        // Check that the replication hasn't started running:
        Assert.assertFalse(repl.isRunning());
        Assert.assertEquals(Replication.ReplicationStatus.REPLICATION_STOPPED, repl.getStatus());
        Assert.assertEquals(0, repl.getCompletedChangesCount());
        Assert.assertEquals(0, repl.getChangesCount());
        Assert.assertNull(repl.getLastError());

        runReplication(repl);

        // since we pushed two documents, should expect the changes count to be >= 2
        assertTrue(repl.getChangesCount() >= 2);
        assertTrue(repl.getCompletedChangesCount() >= 2);
        assertNull(repl.getLastError());

        // make sure doc1 is there
        verifyRemoteDocExists(remote, doc1Id);

        // add doc3
        documentProperties = new HashMap<String, Object>();
        String doc3Id = String.format("doc3-%s", docIdTimestamp);
        Document doc3 = database.getDocument(doc3Id);
        documentProperties.put("bat", 677);
        doc3.putProperties(documentProperties);

        // re-run push replication
        final Replication repl2 = database.createPushReplication(remote);
        repl2.setContinuous(continuous);
        if (!isSyncGateway(remote)) {
            repl2.setCreateTarget(true);
        }

        String repl2CheckpointId = repl2.remoteCheckpointDocID();

        runReplication(repl2);

        assertNull(repl2.getLastError());

        // make sure the doc has been added
        verifyRemoteDocExists(remote, doc3Id);

        // verify sequence stored in local db has been updated
        boolean isPush = true;
        assertEquals(repl2.getLastSequence(), database.getLastSequenceStored(repl2CheckpointId, isPush));

        // wait a few seconds in case request to server to update checkpoint still in flight
        Thread.sleep(2000);

        // verify that the _local doc remote checkpoint has been updated and it matches
        String pathToCheckpointDoc = String.format("%s/_local/%s", remote.toExternalForm(), repl2CheckpointId);
        HttpResponse response = getRemoteDoc(new URL(pathToCheckpointDoc));
        Map<String, Object> json = extractJsonFromResponse(response);
        String remoteLastSequence = (String) json.get("lastSequence");
        assertEquals(repl2.getLastSequence(), remoteLastSequence);

        Log.d(TAG, "testPusher() finished");
    }
/** Reads the response entity and parses it as a JSON map. */
private Map<String, Object> extractJsonFromResponse(HttpResponse response) throws IOException {
    InputStream is = response.getEntity().getContent();
    return Manager.getObjectMapper().readValue(is, Map.class);
}

/** Creates the two standard push-test documents, defaulting to a png attachment on doc2. */
private String createDocumentsForPushReplication(String docIdTimestamp) throws CouchbaseLiteException {
    return createDocumentsForPushReplication(docIdTimestamp, "png");
}

/**
 * Creates and saves a document with random properties and, optionally, one attachment.
 *
 * @param attachmentFileName asset name to attach, or null for no attachment
 */
private Document createDocumentForPushReplication(String docId, String attachmentFileName, String attachmentContentType) throws CouchbaseLiteException {
    Map<String, Object> docJsonMap = MockHelper.generateRandomJsonMap();
    Map<String, Object> docProperties = new HashMap<String, Object>();
    docProperties.put("_id", docId);
    docProperties.putAll(docJsonMap);
    Document document = database.getDocument(docId);
    UnsavedRevision revision = document.createRevision();
    revision.setProperties(docProperties);
    if (attachmentFileName != null) {
        revision.setAttachment(attachmentFileName, attachmentContentType, getAsset(attachmentFileName));
    }
    revision.save();
    return document;
}

/**
 * Creates doc1 (two revisions, no attachment) and doc2 (one revision plus an
 * attachment of the given type: "png" or "txt").
 *
 * @return the id of doc1
 */
private String createDocumentsForPushReplication(String docIdTimestamp, String attachmentType) throws CouchbaseLiteException {
    // Create some documents:
    Map<String, Object> doc1Properties = new HashMap<String, Object>();
    String doc1Id = String.format("doc1-%s", docIdTimestamp);
    doc1Properties.put("_id", doc1Id);
    doc1Properties.put("foo", 1);
    doc1Properties.put("bar", false);

    Body body = new Body(doc1Properties);
    RevisionInternal rev1 = new RevisionInternal(body, database);
    Status status = new Status();
    rev1 = database.putRevision(rev1, null, false, status);
    assertEquals(Status.CREATED, status.getCode());

    // Second revision of doc1.
    doc1Properties.put("_rev", rev1.getRevId());
    doc1Properties.put("UPDATED", true);
    @SuppressWarnings("unused")
    RevisionInternal rev2 = database.putRevision(new RevisionInternal(doc1Properties, database), rev1.getRevId(), false, status);
    assertEquals(Status.CREATED, status.getCode());

    Map<String, Object> doc2Properties = new HashMap<String, Object>();
    String doc2Id = String.format("doc2-%s", docIdTimestamp);
    doc2Properties.put("_id", doc2Id);
    doc2Properties.put("baz", 666);
    doc2Properties.put("fnord", true);
    database.putRevision(new RevisionInternal(doc2Properties, database), null, false, status);
    assertEquals(Status.CREATED, status.getCode());

    Document doc2 = database.getDocument(doc2Id);
    UnsavedRevision doc2UnsavedRev = doc2.createRevision();
    if (attachmentType.equals("png")) {
        InputStream attachmentStream = getAsset("attachment.png");
        doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream);
    } else if (attachmentType.equals("txt")) {
        // Build a large (~27KB) text attachment.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 1000; i++) {
            sb.append("This is a large attachemnt.");
        }
        ByteArrayInputStream attachmentStream = new ByteArrayInputStream(sb.toString().getBytes());
        doc2UnsavedRev.setAttachment("attachment.txt", "text/plain", attachmentStream);
    } else {
        throw new RuntimeException("invalid attachment type: " + attachmentType);
    }
    SavedRevision doc2Rev = doc2UnsavedRev.save();
    assertNotNull(doc2Rev);
    return doc1Id;
}

/** True if the remote port looks like a Sync Gateway (4984) or something proxied on 80. */
private boolean isSyncGateway(URL remote) {
    return (remote.getPort() == 4984 || remote.getPort() == 80);
}

/**
 * Synchronously GETs the given URL.
 *
 * @throws RuntimeException if the response status is not 200
 */
private HttpResponse getRemoteDoc(URL pathToDoc) throws MalformedURLException, IOException {
    HttpClient httpclient = new DefaultHttpClient();
    HttpResponse response = httpclient.execute(new HttpGet(pathToDoc.toExternalForm()));
    StatusLine statusLine = response.getStatusLine();
    if (statusLine.getStatusCode() != HttpStatus.SC_OK) {
        throw new RuntimeException("Did not get 200 status doing GET to URL: " + pathToDoc);
    }
    return response;
}

/**
 * Asserts that the given doc id is fetchable from the remote and that the
 * response body mentions it.
 *
 * TODO: 1. refactor to use getRemoteDoc
 * TODO: 2. can just make synchronous http call, no need for background task
 *
 * @param remote base replication URL
 * @param doc1Id id of the document expected to exist remotely
 * @throws MalformedURLException
 */
private void verifyRemoteDocExists(URL remote, final String doc1Id) throws MalformedURLException {
    URL replicationUrlTrailing = new URL(String.format("%s/", remote.toExternalForm()));
    final URL pathToDoc = new URL(replicationUrlTrailing, doc1Id);
    Log.d(TAG, "Send http request to " + pathToDoc);
    final CountDownLatch httpRequestDoneSignal = new CountDownLatch(1);
    BackgroundTask getDocTask = new BackgroundTask() {
        @Override
        public void run() {
            HttpClient httpclient = new DefaultHttpClient();
            HttpResponse response;
            String responseString = null;
            try {
                response = httpclient.execute(new HttpGet(pathToDoc.toExternalForm()));
                StatusLine statusLine = response.getStatusLine();
                assertTrue(statusLine.getStatusCode() == HttpStatus.SC_OK);
                // NOTE(review): the assert above makes the else branch below unreachable.
                if (statusLine.getStatusCode() == HttpStatus.SC_OK) {
                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    response.getEntity().writeTo(out);
                    out.close();
                    responseString = out.toString();
                    assertTrue(responseString.contains(doc1Id));
                    Log.d(TAG, "result: " + responseString);
                } else {
                    // Closes the connection.
response.getEntity().getContent().close(); throw new IOException(statusLine.getReasonPhrase()); } } catch (ClientProtocolException e) { assertNull("Got ClientProtocolException: " + e.getLocalizedMessage(), e); } catch (IOException e) { assertNull("Got IOException: " + e.getLocalizedMessage(), e); } httpRequestDoneSignal.countDown(); } }; getDocTask.execute(); Log.d(TAG, "Waiting for http request to finish"); try { httpRequestDoneSignal.await(300, TimeUnit.SECONDS); Log.d(TAG, "http request finished"); } catch (InterruptedException e) { e.printStackTrace(); } } public void testPusherBatching() throws Throwable { // create a bunch (INBOX_CAPACITY * 2) local documents int numDocsToSend = Replication.INBOX_CAPACITY * 2; for (int i=0; i < numDocsToSend; i++) { Map<String,Object> properties = new HashMap<String, Object>(); properties.put("testPusherBatching", i); createDocumentWithProperties(database, properties); } // kick off a one time push replication to a mock final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); mockHttpClient.addResponderFakeLocalDocumentUpdate404(); HttpClientFactory mockHttpClientFactory = mockFactoryFactory(mockHttpClient); URL remote = getReplicationURL(); manager.setDefaultHttpClientFactory(mockHttpClientFactory); Replication pusher = database.createPushReplication(remote); runReplication(pusher); assertNull(pusher.getLastError()); int numDocsSent = 0; // verify that only INBOX_SIZE documents are included in any given bulk post request List<HttpRequest> capturedRequests = mockHttpClient.getCapturedRequests(); for (HttpRequest capturedRequest : capturedRequests) { if (capturedRequest instanceof HttpPost) { HttpPost capturedPostRequest = (HttpPost) capturedRequest; if (capturedPostRequest.getURI().getPath().endsWith("_bulk_docs")) { ArrayList docs = CustomizableMockHttpClient.extractDocsFromBulkDocsPost(capturedRequest); String msg = "# of bulk docs pushed should be <= INBOX_CAPACITY"; assertTrue(msg, docs.size() 
<= Replication.INBOX_CAPACITY); numDocsSent += docs.size(); } } } assertEquals(numDocsToSend, numDocsSent); } public void testPusherDeletedDoc() throws Throwable { CountDownLatch replicationDoneSignal = new CountDownLatch(1); URL remote = getReplicationURL(); String docIdTimestamp = Long.toString(System.currentTimeMillis()); // Create some documents: Map<String, Object> documentProperties = new HashMap<String, Object>(); final String doc1Id = String.format("doc1-%s", docIdTimestamp); documentProperties.put("_id", doc1Id); documentProperties.put("foo", 1); documentProperties.put("bar", false); Body body = new Body(documentProperties); RevisionInternal rev1 = new RevisionInternal(body, database); Status status = new Status(); rev1 = database.putRevision(rev1, null, false, status); assertEquals(Status.CREATED, status.getCode()); documentProperties.put("_rev", rev1.getRevId()); documentProperties.put("UPDATED", true); documentProperties.put("_deleted", true); @SuppressWarnings("unused") RevisionInternal rev2 = database.putRevision(new RevisionInternal(documentProperties, database), rev1.getRevId(), false, status); assertTrue(status.getCode() >= 200 && status.getCode() < 300); final Replication repl = database.createPushReplication(remote); if (!isSyncGateway(remote)) { repl.setCreateTarget(true); } runReplication(repl); assertNull(repl.getLastError()); // make sure doc1 is deleted URL replicationUrlTrailing = new URL(String.format("%s/", remote.toExternalForm())); final URL pathToDoc = new URL(replicationUrlTrailing, doc1Id); Log.d(TAG, "Send http request to " + pathToDoc); final CountDownLatch httpRequestDoneSignal = new CountDownLatch(1); BackgroundTask getDocTask = new BackgroundTask() { @Override public void run() { org.apache.http.client.HttpClient httpclient = new DefaultHttpClient(); HttpResponse response; String responseString = null; try { response = httpclient.execute(new HttpGet(pathToDoc.toExternalForm())); StatusLine statusLine = response.getStatusLine(); 
Log.d(TAG, "statusLine " + statusLine); assertEquals(HttpStatus.SC_NOT_FOUND, statusLine.getStatusCode()); } catch (ClientProtocolException e) { assertNull("Got ClientProtocolException: " + e.getLocalizedMessage(), e); } catch (IOException e) { assertNull("Got IOException: " + e.getLocalizedMessage(), e); } finally { httpRequestDoneSignal.countDown(); } } }; getDocTask.execute(); Log.d(TAG, "Waiting for http request to finish"); try { httpRequestDoneSignal.await(300, TimeUnit.SECONDS); Log.d(TAG, "http request finished"); } catch (InterruptedException e) { e.printStackTrace(); } Log.d(TAG, "testPusherDeletedDoc() finished"); } public void failingTestPullerGzipped() throws Throwable { String docIdTimestamp = Long.toString(System.currentTimeMillis()); final String doc1Id = String.format("doc1-%s", docIdTimestamp); String attachmentName = "attachment.png"; addDocWithId(doc1Id, attachmentName, true); doPullReplication(); Log.d(TAG, "Fetching doc1 via id: " + doc1Id); Document doc1 = database.getDocument(doc1Id); assertNotNull(doc1); assertTrue(doc1.getCurrentRevisionId().startsWith("1-")); assertEquals(1, doc1.getProperties().get("foo")); Attachment attachment = doc1.getCurrentRevision().getAttachment(attachmentName); assertTrue(attachment.getLength() > 0); assertTrue(attachment.getGZipped()); byte[] receivedBytes = TextUtils.read(attachment.getContent()); InputStream attachmentStream = getAsset(attachmentName); byte[] actualBytes = TextUtils.read(attachmentStream); Assert.assertEquals(actualBytes.length, receivedBytes.length); Assert.assertEquals(actualBytes, receivedBytes); } public void testValidationBlockCalled() throws Throwable { String docIdTimestamp = Long.toString(System.currentTimeMillis()); final String doc1Id = String.format("doc1-%s", docIdTimestamp); Log.d(TAG, "Adding " + doc1Id + " directly to sync gateway"); addDocWithId(doc1Id, null, false); doPullReplication(); assertNotNull(database); Log.d(TAG, "Fetching doc1 via id: " + doc1Id); Document doc1 = 
database.getDocument(doc1Id); Log.d(TAG, "doc1" + doc1); assertNotNull(doc1); assertNotNull(doc1.getCurrentRevisionId()); assertTrue(doc1.getCurrentRevisionId().startsWith("1-")); assertNotNull(doc1.getProperties()); assertEquals(1, doc1.getProperties().get("foo")); // Add Validation block to reject documents with foo:1 database.setValidation("foo_not_1", new Validator() { @Override public void validate(Revision newRevision, ValidationContext context) { if (new Integer(1).equals(newRevision.getProperty("foo"))) { context.reject("Reject because foo is 1"); } } }); final String doc2Id = String.format("doc2-%s", docIdTimestamp); Log.d(TAG, "Adding " + doc2Id + " directly to sync gateway"); addDocWithId(doc2Id, null, false); doPullReplication(); Log.d(TAG, "Fetching doc2 via id: " + doc2Id); Document doc2 = database.getDocument(doc2Id); Log.d(TAG, "doc2" + doc2); assertNotNull(doc2); assertNull(doc2.getCurrentRevision()); // doc2 should have been rejected by validation, and therefore not present } public void underConstructionTestPullerRestart() throws Exception { MockWebServer server = new MockWebServer(); MockResponse fakeCheckpointResponse = new MockResponse(); fakeCheckpointResponse.setStatus("HTTP/1.1 404 NOT FOUND").setHeader("Content-Type", "application/json"); server.enqueue(fakeCheckpointResponse); MockResponse fakeChangesResponse = new MockResponse(); fakeChangesResponse.setStatus("HTTP/1.1 200 OK").setHeader("Content-Type", "application/json"); String changesBody = "{\"results\":[{\"seq\":2,\"id\":\"doc2\",\"changes\":[{\"rev\":\"1-5e38\"}]},{\"seq\":3,\"id\":\"doc3\",\"changes\":[{\"rev\":\"1-563b\"}]}],\"last_seq\":3}"; fakeChangesResponse.setBody(changesBody); server.enqueue(fakeChangesResponse); MockResponse fakeDoc2 = new MockResponse(); fakeDoc2.setStatus("HTTP/1.1 200 OK").setHeader("Content-Type", "application/json"); String doc2Body = 
"{\"_id\":\"doc2\",\"_rev\":\"1-5e38\",\"_revisions\":{\"ids\":[\"5e38\"],\"start\":1},\"fakefield1\":false,\"fakefield2\":1, \"fakefield3\":\"blah\"}"; fakeDoc2.setBody(doc2Body); server.enqueue(fakeDoc2); server.enqueue(fakeCheckpointResponse); server.enqueue(fakeChangesResponse); // will query changes again MockResponse fakeDoc3 = new MockResponse(); fakeDoc3.setStatus("HTTP/1.1 200 OK").setHeader("Content-Type", "application/json"); String doc3Body = "{\"_id\":\"doc3\",\"_rev\":\"1-5e48\",\"_revisions\":{\"ids\":[\"5e48\"],\"start\":1},\"fakefield1\":false,\"fakefield2\":1, \"fakefield3\":\"blah\"}"; fakeDoc3.setBody(doc3Body); server.enqueue(fakeDoc3); server.play(); URL baseUrl = server.getUrl("/db"); Log.d(TAG, "baseUrl: " + baseUrl); final Replication repl = (Replication) database.createPullReplication(baseUrl); repl.setContinuous(false); database.addChangeListener(new Database.ChangeListener() { @Override public void changed(Database.ChangeEvent event) { if (event.getChanges().size() > 0) { new Thread(new Runnable() { @Override public void run() { try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); } repl.restart(); } }).start(); } } }); Log.d(TAG, "Doing pull replication with: " + repl); repl.start(); waitForReplicationFinishedXTimes(repl, 2); assertNull(repl.getLastError()); Log.d(TAG, "Finished pull replication with: " + repl); Document doc2Fetched = database.getDocument("doc2"); assertNotNull(doc2Fetched); assertTrue(doc2Fetched.getCurrentRevisionId().startsWith("1-5e38")); Document doc3Fetched = database.getDocument("doc3"); assertNotNull(doc3Fetched); assertTrue(doc3Fetched.getCurrentRevisionId().startsWith("1-5e48")); } /** * Pull replication test: * * - Single one-shot pull replication * - Against simulated sync gateway * - Remote docs have attachments */ public void testMockSinglePullSyncGwAttachments() throws Exception { boolean shutdownMockWebserver = true; boolean addAttachments = true; 
mockSinglePull(shutdownMockWebserver, MockDispatcher.ServerType.SYNC_GW, addAttachments); } /** * Pull replication test: * * - Single one-shot pull replication * - Against simulated sync gateway * - Remote docs do not have attachments */ public void testMockSinglePullSyncGw() throws Exception { boolean shutdownMockWebserver = true; boolean addAttachments = false; mockSinglePull(shutdownMockWebserver, MockDispatcher.ServerType.SYNC_GW, addAttachments); } /** * Pull replication test: * * - Single one-shot pull replication * - Against simulated couchdb * - Remote docs have attachments */ public void testMockSinglePullCouchDbAttachments() throws Exception { boolean shutdownMockWebserver = true; boolean addAttachments = true; mockSinglePull(shutdownMockWebserver, MockDispatcher.ServerType.COUCHDB, addAttachments); } /** * Pull replication test: * * - Single one-shot pull replication * - Against simulated couchdb * - Remote docs do not have attachments */ public void testMockSinglePullCouchDb() throws Exception { boolean shutdownMockWebserver = true; boolean addAttachments = false; mockSinglePull(shutdownMockWebserver, MockDispatcher.ServerType.COUCHDB, addAttachments); } /** * Do a pull replication * * TODO - instead calling server.takeRequest, call dispatcher.takeRequest and pass a path regex * * @param shutdownMockWebserver - should this test shutdown the mockwebserver * when done? if another test wants to pick up * where this left off, you should pass false. * @param serverType - should the mock return the Sync Gateway server type in * the "Server" HTTP Header? this changes the behavior of the * replicator to use bulk_get and POST reqeusts for _changes feeds. * @param addAttachments - should the mock sync gateway return docs with attachments? 
* @return a map that contains the mockwebserver (key="server") and the mock dispatcher * (key="dispatcher") */ public Map<String, Object> mockSinglePull(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType, boolean addAttachments) throws Exception { String doc1Id = "doc1"; String doc1Rev = "1-5e38"; String doc2Id = "doc2"; String doc2Rev = "1-563b"; String doc1AttachName = "attachment.png"; String doc2AttachName = "attachment2.png"; int doc1Seq = 1; int doc2Seq = 2; // create mockwebserver and custom dispatcher MockWebServer server = MockHelper.getMockWebServer(); MockDispatcher dispatcher = new MockDispatcher(); dispatcher.setServerType(serverType); server.setDispatcher(dispatcher); // checkpoint GET response w/ 404 MockResponse fakeCheckpointResponse = new MockResponse(); MockHelper.set404NotFoundJson(fakeCheckpointResponse); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse); // _changes response MockChangesFeed mockChangesFeed = new MockChangesFeed(); MockChangedDoc mockChangedDoc1 = new MockChangedDoc() .setSeq(doc1Seq) .setDocId(doc1Id) .setChangedRevIds(Arrays.asList(doc1Rev)); mockChangesFeed.add(mockChangedDoc1); MockChangedDoc mockChangedDoc2 = new MockChangedDoc() .setSeq(doc2Seq) .setDocId(doc2Id) .setChangedRevIds(Arrays.asList(doc2Rev)); mockChangesFeed.add(mockChangedDoc2); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse()); // doc1 response Map<String, Object> doc1JsonMap = MockHelper.generateRandomJsonMap(); MockDocumentGet mockDocumentGet = new MockDocumentGet() .setDocId(doc1Id) .setRev(doc1Rev) .setJsonMap(doc1JsonMap); if (addAttachments) { mockDocumentGet.addAttachmentFilename(doc1AttachName); } String doc1PathRegex = "/db/doc1.*"; dispatcher.enqueueResponse(doc1PathRegex, mockDocumentGet.generateMockResponse()); // doc2 response Map<String, Object> doc2JsonMap = MockHelper.generateRandomJsonMap(); mockDocumentGet = new MockDocumentGet() 
.setDocId(doc2Id) .setRev(doc2Rev) .setJsonMap(doc2JsonMap); if (addAttachments) { mockDocumentGet.addAttachmentFilename(doc2AttachName); } String doc2PathRegex = "/db/doc2.*"; dispatcher.enqueueResponse(doc2PathRegex, mockDocumentGet.generateMockResponse()); // checkpoint PUT responses // it currently sends two checkpoint PUT responses back to back, MockCheckpointPut mockCheckpointPut = new MockCheckpointPut(); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut); // start mock server server.play(); // run pull replication Replication pullReplication = doPullReplication(server.getUrl("/db")); // assert that we now have both docs in local db assertNotNull(database); Document doc1 = database.getDocument(doc1Id); assertNotNull(doc1); assertNotNull(doc1.getCurrentRevisionId()); assertTrue(doc1.getCurrentRevisionId().startsWith("1-")); assertNotNull(doc1.getProperties()); assertEquals(doc1JsonMap, doc1.getUserProperties()); Document doc2 = database.getDocument(doc2Id); assertNotNull(doc2); assertNotNull(doc2.getCurrentRevisionId()); assertNotNull(doc2.getProperties()); assertTrue(doc2.getCurrentRevisionId().startsWith("1-")); assertEquals(doc2JsonMap, doc2.getUserProperties()); // assert that docs have attachments (if applicable) if (addAttachments) { attachmentAsserts(doc1AttachName, doc1); attachmentAsserts(doc2AttachName, doc2); } // make assertions about outgoing requests from replicator -> mock RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT); assertTrue(getCheckpointRequest.getMethod().equals("GET")); assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT)); RecordedRequest getChangesFeedRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES); if (serverType == MockDispatcher.ServerType.SYNC_GW) { assertTrue(getChangesFeedRequest.getMethod().equals("POST")); } else { 
assertTrue(getChangesFeedRequest.getMethod().equals("GET")); } assertTrue(getChangesFeedRequest.getPath().matches(MockHelper.PATH_REGEX_CHANGES)); RecordedRequest doc1Request = dispatcher.takeRequest(doc1PathRegex); assertTrue(doc1Request.getMethod().equals("GET")); assertTrue(doc1Request.getPath().matches(doc1PathRegex)); RecordedRequest doc2Request = dispatcher.takeRequest(doc2PathRegex); assertTrue(doc2Request.getMethod().equals("GET")); assertTrue(doc2Request.getPath().matches(doc2PathRegex)); // workaround attempt for putCheckpointRequest being null // Thread.sleep(2000); // assertions regarding PUT checkpoint request. // these should be updated once the confusion in https://github.com/couchbase/couchbase-lite-java-core/issues/231#issuecomment-46199630 // is resolved. also, there should be assertions added regarding the _rev field // passed in the PUT checkpoint body. RecordedRequest putCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT); assertNotNull(putCheckpointRequest); assertTrue(putCheckpointRequest.getMethod().equals("PUT")); assertTrue(putCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT)); // TODO: re-enable this assertion when 231 is fixed!! // make assertion about outgoing PUT checkpoint request. // make assertion about our local sequence // String lastSequence = database.lastSequenceWithCheckpointId(pullReplication.remoteCheckpointDocID()); // assertEquals(Integer.toString(doc2Seq), lastSequence); // dispatcher.verifyAllRecordedRequestsTaken(); // Shut down the server. Instances cannot be reused. 
if (shutdownMockWebserver) { server.shutdown(); } Map<String, Object> returnVal = new HashMap<String, Object>(); returnVal.put("server", server); returnVal.put("dispatcher", dispatcher); return returnVal; } public void testMockSinglePush() throws Exception { boolean shutdownMockWebserver = true; mockSinglePush(shutdownMockWebserver, MockDispatcher.ServerType.SYNC_GW); } /** * Do a push replication * * - Create docs in local db * - One with no attachment * - One with small attachment * - One with large attachment * */ public Map<String, Object> mockSinglePush(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType) throws Exception { String doc1Id = "doc1"; String doc2Id = "doc2"; String doc3Id = "doc3"; String doc2PathRegex = String.format("/db/%s.*", doc2Id); String doc3PathRegex = String.format("/db/%s.*", doc3Id); String doc2AttachName = "attachment.png"; String doc3AttachName = "attachment5.png"; String contentType = "image/png"; // create mockwebserver and custom dispatcher MockWebServer server = MockHelper.getMockWebServer(); MockDispatcher dispatcher = new MockDispatcher(); dispatcher.setServerType(serverType); server.setDispatcher(dispatcher); server.play(); // add some documents Document doc1 = createDocumentForPushReplication(doc1Id, null, null); Document doc2 = createDocumentForPushReplication(doc2Id, doc2AttachName, contentType); Document doc3 = createDocumentForPushReplication(doc3Id, doc3AttachName, contentType); // checkpoint GET response w/ 404 MockResponse fakeCheckpointResponse = new MockResponse(); MockHelper.set404NotFoundJson(fakeCheckpointResponse); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse); // _revs_diff response -- everything missing MockRevsDiff mockRevsDiff = new MockRevsDiff(); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff); // _bulk_docs response -- everything stored MockBulkDocs mockBulkDocs = new MockBulkDocs(); 
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs); // doc PUT responses MockDocumentPut mockDoc2Put = new MockDocumentPut() .setDocId(doc2Id) .setRev(doc2.getCurrentRevisionId()); dispatcher.enqueueResponse(doc2PathRegex, mockDoc2Put.generateMockResponse()); MockDocumentPut mockDoc3Put = new MockDocumentPut() .setDocId(doc3Id) .setRev(doc3.getCurrentRevisionId()); dispatcher.enqueueResponse(doc3PathRegex, mockDoc3Put.generateMockResponse()); // run replication Replication replication = database.createPushReplication(server.getUrl("/db")); replication.setContinuous(false); if (serverType != MockDispatcher.ServerType.SYNC_GW) { replication.setCreateTarget(true); Assert.assertTrue(replication.shouldCreateTarget()); } runReplication(replication); // make assertions about outgoing requests from replicator -> mock RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT); assertTrue(getCheckpointRequest.getMethod().equals("GET")); assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT)); RecordedRequest revsDiffRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_REVS_DIFF); assertTrue(revsDiffRequest.getUtf8Body().contains(doc1Id)); RecordedRequest bulkDocsRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS); assertTrue(bulkDocsRequest.getUtf8Body().contains(doc1Id)); assertFalse(bulkDocsRequest.getUtf8Body().contains(doc2Id)); RecordedRequest doc2putRequest = dispatcher.takeRequest(doc2PathRegex); assertTrue(doc2putRequest.getUtf8Body().contains(doc2Id)); assertFalse(doc2putRequest.getUtf8Body().contains(doc3Id)); RecordedRequest doc3putRequest = dispatcher.takeRequest(doc3PathRegex); assertTrue(doc3putRequest.getUtf8Body().contains(doc3Id)); assertFalse(doc3putRequest.getUtf8Body().contains(doc2Id)); RecordedRequest putCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT); assertTrue(putCheckpointRequest.getMethod().equals("PUT")); String 
utf8Body = putCheckpointRequest.getUtf8Body(); Map <String, Object> checkpointJson = Manager.getObjectMapper().readValue(utf8Body, Map.class); assertEquals("3", checkpointJson.get("lastSequence")); dispatcher.verifyAllRecordedRequestsTaken(); // Shut down the server. Instances cannot be reused. if (shutdownMockWebserver) { server.shutdown(); } Map<String, Object> returnVal = new HashMap<String, Object>(); returnVal.put("server", server); returnVal.put("dispatcher", dispatcher); return returnVal; } private void attachmentAsserts(String docAttachName, Document doc) throws IOException, CouchbaseLiteException { Attachment attachment = doc.getCurrentRevision().getAttachment(docAttachName); assertNotNull(attachment); byte[] testAttachBytes = MockDocumentGet.getAssetByteArray(docAttachName); int attachLength = testAttachBytes.length; assertEquals(attachLength, attachment.getLength()); ByteArrayOutputStream baos = new ByteArrayOutputStream(); baos.write(attachment.getContent()); byte[] actualAttachBytes = baos.toByteArray(); assertEquals(testAttachBytes.length, actualAttachBytes.length); for (int i=0; i<actualAttachBytes.length; i++) { boolean ithByteEqual = actualAttachBytes[i] == testAttachBytes[i]; if (!ithByteEqual) { Log.d(Log.TAG, "mismatch"); } assertTrue(ithByteEqual); } } public void testMockMultiplePullSyncGw() throws Exception { boolean shutdownMockWebserver = true; mockMultiplePull(shutdownMockWebserver, MockDispatcher.ServerType.SYNC_GW); } public void testMockMultiplePullCouchDb() throws Exception { boolean shutdownMockWebserver = true; mockMultiplePull(shutdownMockWebserver, MockDispatcher.ServerType.COUCHDB); } /** * * Simulate the following: * * - Add a few docs and do a pull replication * - One doc on sync gateway is now updated * - Do a second pull replication * - Assert we get the updated doc and save it locally * */ public Map<String, Object> mockMultiplePull(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType) throws Exception { String 
doc1Id = "doc1"; // create mockwebserver and custom dispatcher boolean addAttachments = false; Map<String, Object> serverAndDispatcher = mockSinglePull(false, serverType, addAttachments); MockWebServer server = (MockWebServer) serverAndDispatcher.get("server"); MockDispatcher dispatcher = (MockDispatcher) serverAndDispatcher.get("dispatcher"); // this is needed because currently the upstream test does not assert that the // dispatcher recorded requests queues are empty, so we need to clear all residue first. // when the upstream test is changed to call dispatcher.verifyAllRecordedRequestsTaken(), // this call should no longer be necessary. dispatcher.reset(); String doc1Rev = "2-2e38"; int doc1Seq = 3; String checkpointRev = "0-1"; String checkpointLastSequence = "2"; // checkpoint GET response w/ seq = 2 MockCheckpointGet mockCheckpointGet = new MockCheckpointGet(); mockCheckpointGet.setOk("true"); mockCheckpointGet.setRev(checkpointRev); mockCheckpointGet.setLastSequence(checkpointLastSequence); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointGet); // _changes response MockChangesFeed mockChangesFeed = new MockChangesFeed(); MockChangedDoc mockChangedDoc1 = new MockChangedDoc() .setSeq(doc1Seq) .setDocId(doc1Id) .setChangedRevIds(Arrays.asList(doc1Rev)); mockChangesFeed.add(mockChangedDoc1); MockResponse fakeChangesResponse = mockChangesFeed.generateMockResponse(); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, fakeChangesResponse); // doc1 response Map<String, Object> doc1JsonMap = MockHelper.generateRandomJsonMap(); MockDocumentGet mockDocumentGet = new MockDocumentGet() .setDocId(doc1Id) .setRev(doc1Rev) .setJsonMap(doc1JsonMap); String doc1PathRegex = "/db/doc1.*"; dispatcher.enqueueResponse(doc1PathRegex, mockDocumentGet.generateMockResponse()); // checkpoint PUT response MockCheckpointPut mockCheckpointPut = new MockCheckpointPut(); dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut); // run 
pull replication doPullReplication(server.getUrl("/db")); // assert that we now have both docs in local db assertNotNull(database); Document doc1 = database.getDocument(doc1Id); assertNotNull(doc1); assertNotNull(doc1.getCurrentRevisionId()); assertTrue(doc1.getCurrentRevisionId().startsWith("2-")); assertEquals(doc1JsonMap, doc1.getUserProperties()); // make assertions about outgoing requests from replicator -> mock RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT); assertNotNull(getCheckpointRequest); assertTrue(getCheckpointRequest.getMethod().equals("GET")); assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT)); RecordedRequest getChangesFeedRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES); if (serverType == MockDispatcher.ServerType.SYNC_GW) { assertTrue(getChangesFeedRequest.getMethod().equals("POST")); } else { assertTrue(getChangesFeedRequest.getMethod().equals("GET")); } assertTrue(getChangesFeedRequest.getPath().matches(MockHelper.PATH_REGEX_CHANGES)); Log.d(TAG, "changes feed request: %s", getChangesFeedRequest.getPath()); RecordedRequest doc1Request = dispatcher.takeRequest(doc1PathRegex); assertTrue(doc1Request.getMethod().equals("GET")); assertTrue(doc1Request.getPath().matches("/db/doc1\\?rev=2-2e38.*")); // workaround for putCheckpointRequest being null .. 
Thread.sleep(2000); RecordedRequest putCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT); assertNotNull(putCheckpointRequest); assertTrue(putCheckpointRequest.getMethod().equals("PUT")); assertTrue(putCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT)); String utf8Body = putCheckpointRequest.getUtf8Body(); Map <String, Object> checkpointJson = Manager.getObjectMapper().readValue(utf8Body, Map.class); assertEquals("3", checkpointJson.get("lastSequence")); assertEquals("0-1", checkpointJson.get("_rev")); if (shutdownMockWebserver) { server.shutdown(); } Map<String, Object> returnVal = new HashMap<String, Object>(); returnVal.put("server", server); returnVal.put("dispatcher", dispatcher); return returnVal; } public void testPuller() throws Throwable { String docIdTimestamp = Long.toString(System.currentTimeMillis()); final String doc1Id = String.format("doc1-%s", docIdTimestamp); final String doc2Id = String.format("doc2-%s", docIdTimestamp); Log.d(TAG, "Adding " + doc1Id + " directly to sync gateway"); addDocWithId(doc1Id, "attachment.png", false); Log.d(TAG, "Adding " + doc2Id + " directly to sync gateway"); addDocWithId(doc2Id, "attachment2.png", false); Replication pullReplication = doPullReplication(); // TODO: re-enable this assertion when 231 is fixed!! 
// String lastSequence = database.lastSequenceWithCheckpointId(pullReplication.remoteCheckpointDocID()); // assertEquals("2", lastSequence); assertNotNull(database); Log.d(TAG, "Fetching doc1 via id: " + doc1Id); Document doc1 = database.getDocument(doc1Id); Log.d(TAG, "doc1" + doc1); assertNotNull(doc1); assertNotNull(doc1.getCurrentRevisionId()); assertTrue(doc1.getCurrentRevisionId().startsWith("1-")); assertNotNull(doc1.getProperties()); assertEquals(1, doc1.getProperties().get("foo")); Log.d(TAG, "Fetching doc2 via id: " + doc2Id); Document doc2 = database.getDocument(doc2Id); assertNotNull(doc2); assertNotNull(doc2.getCurrentRevisionId()); assertNotNull(doc2.getProperties()); assertTrue(doc2.getCurrentRevisionId().startsWith("1-")); assertEquals(1, doc2.getProperties().get("foo")); // update doc1 on sync gateway String docJson = String.format("{\"foo\":2,\"bar\":true,\"_rev\":\"%s\",\"_id\":\"%s\"}", doc1.getCurrentRevisionId(), doc1.getId()); pushDocumentToSyncGateway(doc1.getId(), docJson); // do another pull Log.d(TAG, "Doing 2nd pull replication"); doPullReplication(); Log.d(TAG, "Finished 2nd pull replication"); // make sure it has the latest properties Document doc1Fetched = database.getDocument(doc1Id); assertNotNull(doc1Fetched); assertTrue(doc1Fetched.getCurrentRevisionId().startsWith("2-")); assertEquals(2, doc1Fetched.getProperties().get("foo")); Log.d(TAG, "testPuller() finished"); } /** * This is essentially a regression test for a deadlock * that was happening when the LiveQuery#onDatabaseChanged() * was calling waitForUpdateThread(), but that thread was * waiting on connection to be released by the thread calling * waitForUpdateThread(). When the deadlock bug was present, * this test would trigger the deadlock and never finish. 
*/
public void testPullerWithLiveQuery() throws Throwable {

    // Index every document by _id so the live query's row count tracks the doc count.
    View view = database.getView("testPullerWithLiveQueryView");
    view.setMapReduce(new Mapper() {
        @Override
        public void map(Map<String, Object> document, Emitter emitter) {
            if (document.get("_id") != null) {
                emitter.emit(document.get("_id"), null);
            }
        }
    }, null, "1");

    final CountDownLatch countDownLatch = new CountDownLatch(1);
    LiveQuery allDocsLiveQuery = view.createQuery().toLiveQuery();
    allDocsLiveQuery.addChangeListener(new LiveQuery.ChangeListener() {
        @Override
        public void changed(LiveQuery.ChangeEvent event) {
            if (event.getError() != null) {
                throw new RuntimeException(event.getError());
            }
            // Both pulled docs have shown up in the view -- release the test thread.
            if (event.getRows().getCount() == 2) {
                countDownLatch.countDown();
            }
        }
    });

    // kick off live query
    allDocsLiveQuery.start();

    // do pull replication against mock
    mockSinglePull(true, MockDispatcher.ServerType.SYNC_GW, true);

    // make sure we were called back with both docs
    boolean success = countDownLatch.await(30, TimeUnit.SECONDS);
    assertTrue(success);

    // clean up
    allDocsLiveQuery.stop();
}

/**
 * Pull-replicates from the default remote URL and returns the finished replication.
 */
private Replication doPullReplication() {
    URL remote = getReplicationURL();
    return doPullReplication(remote);
}

/**
 * Runs a single-shot (non-continuous) pull replication against {@code url},
 * blocking until it completes and asserting that it finished without error.
 */
private Replication doPullReplication(URL url) {
    final Replication repl = (Replication) database.createPullReplication(url);
    repl.setContinuous(false);
    Log.d(TAG, "Doing pull replication with: " + repl);
    runReplication(repl);  // blocks until the replication finishes
    assertNull(repl.getLastError());
    Log.d(TAG, "Finished pull replication with: " + repl);
    return repl;
}

/**
 * Creates a document with the given id directly on the sync gateway.  When
 * {@code attachmentName} is non-null, the named test asset is embedded as an
 * inline base64 attachment; when {@code gzipped} is also true it is gzip-encoded
 * and declared with "encoding":"gzip".
 */
private void addDocWithId(String docId, String attachmentName, boolean gzipped) throws IOException {

    final String docJson;

    if (attachmentName != null) {
        // add attachment to document
        InputStream attachmentStream = getAsset(attachmentName);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        IOUtils.copy(attachmentStream, baos);
        if (!gzipped) {
            String attachmentBase64 =
Base64.encodeBytes(baos.toByteArray()); docJson = String.format("{\"foo\":1,\"bar\":false, \"_attachments\": { \"%s\": { \"content_type\": \"image/png\", \"data\": \"%s\" } } }", attachmentName, attachmentBase64); } else { byte[] bytes = baos.toByteArray(); String attachmentBase64 = Base64.encodeBytes(bytes, Base64.GZIP); docJson = String.format("{\"foo\":1,\"bar\":false, \"_attachments\": { \"%s\": { \"content_type\": \"image/png\", \"data\": \"%s\", \"encoding\": \"gzip\", \"length\":%d } } }", attachmentName, attachmentBase64, bytes.length); } } else { docJson = "{\"foo\":1,\"bar\":false}"; } pushDocumentToSyncGateway(docId, docJson); workaroundSyncGatewayRaceCondition(); } private void pushDocumentToSyncGateway(String docId, final String docJson) throws MalformedURLException { // push a document to server URL replicationUrlTrailingDoc1 = new URL(String.format("%s/%s", getReplicationURL().toExternalForm(), docId)); final URL pathToDoc1 = new URL(replicationUrlTrailingDoc1, docId); Log.d(TAG, "Send http request to " + pathToDoc1); final CountDownLatch httpRequestDoneSignal = new CountDownLatch(1); BackgroundTask getDocTask = new BackgroundTask() { @Override public void run() { HttpClient httpclient = new DefaultHttpClient(); HttpResponse response; String responseString = null; try { HttpPut post = new HttpPut(pathToDoc1.toExternalForm()); StringEntity se = new StringEntity( docJson.toString() ); se.setContentType(new BasicHeader("content_type", "application/json")); post.setEntity(se); response = httpclient.execute(post); StatusLine statusLine = response.getStatusLine(); Log.d(TAG, "Got response: " + statusLine); assertTrue(statusLine.getStatusCode() == HttpStatus.SC_CREATED); } catch (ClientProtocolException e) { assertNull("Got ClientProtocolException: " + e.getLocalizedMessage(), e); } catch (IOException e) { assertNull("Got IOException: " + e.getLocalizedMessage(), e); } httpRequestDoneSignal.countDown(); } }; getDocTask.execute(); Log.d(TAG, "Waiting for 
http request to finish"); try { httpRequestDoneSignal.await(300, TimeUnit.SECONDS); Log.d(TAG, "http request finished"); } catch (InterruptedException e) { e.printStackTrace(); } } public void testGetReplicator() throws Throwable { Map<String,Object> properties = new HashMap<String,Object>(); properties.put("source", DEFAULT_TEST_DB); properties.put("target", getReplicationURL().toExternalForm()); Map<String,Object> headers = new HashMap<String,Object>(); String coolieVal = "SyncGatewaySession=c38687c2696688a"; headers.put("Cookie", coolieVal); properties.put("headers", headers); Replication replicator = manager.getReplicator(properties); assertNotNull(replicator); assertEquals(getReplicationURL().toExternalForm(), replicator.getRemoteUrl().toExternalForm()); assertTrue(!replicator.isPull()); assertFalse(replicator.isContinuous()); assertFalse(replicator.isRunning()); assertTrue(replicator.getHeaders().containsKey("Cookie")); assertEquals(replicator.getHeaders().get("Cookie"), coolieVal); // add replication observer CountDownLatch replicationDoneSignal = new CountDownLatch(1); ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal); replicator.addChangeListener(replicationFinishedObserver); // start the replicator Log.d(TAG, "Starting replicator " + replicator); replicator.start(); // now lets lookup existing replicator and stop it Log.d(TAG, "Looking up replicator"); properties.put("cancel", true); Replication activeReplicator = manager.getReplicator(properties); Log.d(TAG, "Found replicator " + activeReplicator + " and calling stop()"); activeReplicator.stop(); Log.d(TAG, "called stop(), waiting for it to finish"); // wait for replication to finish boolean didNotTimeOut = replicationDoneSignal.await(180, TimeUnit.SECONDS); Log.d(TAG, "replicationDoneSignal.await done, didNotTimeOut: " + didNotTimeOut); assertTrue(didNotTimeOut); assertFalse(activeReplicator.isRunning()); } public void 
testGetReplicatorWithAuth() throws Throwable {

    // Replication properties parsed from JSON carry an auth block; the manager
    // should translate it into a FacebookAuthorizer on the replicator.
    Map<String, Object> properties = getPushReplicationParsedJson();

    Replication replicator = manager.getReplicator(properties);
    assertNotNull(replicator);
    assertNotNull(replicator.getAuthenticator());
    assertTrue(replicator.getAuthenticator() instanceof FacebookAuthorizer);
}

/**
 * Runs a pull replication with an http client factory whose client fails every
 * request with a 500, and verifies the replication stops with an error recorded
 * and zero changes processed.
 */
public void testRunReplicationWithError() throws Exception {

    HttpClientFactory mockHttpClientFactory = new HttpClientFactory() {
        @Override
        public HttpClient getHttpClient() {
            CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
            int statusCode = 500;
            mockHttpClient.addResponderFailAllRequests(statusCode);
            return mockHttpClient;
        }

        @Override
        public void addCookies(List<Cookie> cookies) {
        }

        @Override
        public void deleteCookie(String name) {
        }

        @Override
        public CookieStore getCookieStore() {
            return null;
        }
    };

    String dbUrlString = "http://fake.test-url.com:4984/fake/";
    URL remote = new URL(dbUrlString);
    final boolean continuous = false;
    Replication r1 = new Puller(database, remote, continuous, mockHttpClientFactory, manager.getWorkExecutor());
    Assert.assertFalse(r1.isContinuous());
    runReplication(r1);

    // It should have failed with a 500 (the mock client rejects every request):
    Assert.assertEquals(Replication.ReplicationStatus.REPLICATION_STOPPED, r1.getStatus());
    Assert.assertEquals(0, r1.getCompletedChangesCount());
    Assert.assertEquals(0, r1.getChangesCount());
    Assert.assertNotNull(r1.getLastError());
}

/**
 * This test simulates a condition in which the replication will fail due to an authentication
 * error by using a FacebookAuthorizer with an invalid token.
 *
 * When the sync gateway tries to contact the facebook API, it will see that the token is invalid.
 *
 * The replicator should then stop and the getLastError() should contain a HttpResponseException
 * with a 401 error code.
*/
public void testReplicatorErrorStatus() throws Exception {

    // Only meaningful against a real sync gateway; silently skipped otherwise.
    if (isTestingAgainstSyncGateway()) {

        // register bogus fb token
        FacebookAuthorizer.registerAccessToken("fake_access_token", "[email protected]", getReplicationURL().toExternalForm());

        // run replicator and make sure it has an error
        Map<String, Object> properties = getPullReplicationParsedJson();
        Replication replicator = manager.getReplicator(properties);
        runReplication(replicator);
        assertNotNull(replicator.getLastError());
        assertTrue(replicator.getLastError() instanceof HttpResponseException);
        assertEquals(401 /* unauthorized */, ((HttpResponseException) replicator.getLastError()).getStatusCode());
    }
}

/**
 * Verifies that taking a continuous replication offline moves its status to
 * REPLICATION_OFFLINE, and that it can still be stopped cleanly afterwards.
 */
public void testGoOffline() throws Exception {

    URL remote = getReplicationURL();

    Replication replicator = database.createPullReplication(remote);
    replicator.setContinuous(true);

    // add replication "idle" observer - exploit the fact that during observation,
    // the replication will go into an "idle" state after starting the change listener.
CountDownLatch countDownLatch = new CountDownLatch(1);
ReplicationIdleObserver replicationObserver = new ReplicationIdleObserver(countDownLatch);
replicator.addChangeListener(replicationObserver);

// add replication observer
CountDownLatch countDownLatch2 = new CountDownLatch(1);
ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(countDownLatch2);
replicator.addChangeListener(replicationFinishedObserver);

replicator.start();

// wait until the replication goes idle before taking it offline
boolean success = countDownLatch.await(30, TimeUnit.SECONDS);
assertTrue(success);

putReplicationOffline(replicator);
Assert.assertTrue(replicator.getStatus() == Replication.ReplicationStatus.REPLICATION_OFFLINE);

replicator.stop();

// wait for the replication to finish after stop()
boolean success2 = countDownLatch2.await(30, TimeUnit.SECONDS);
assertTrue(success2);
}

/**
 * buildRelativeURLString should append the given path to the remote db URL.
 */
public void testBuildRelativeURLString() throws Exception {
    String dbUrlString = "http://10.0.0.3:4984/todos/";
    Replication replicator = new Pusher(database, new URL(dbUrlString), false, null);
    String relativeUrlString = replicator.buildRelativeURLString("foo");
    String expected = "http://10.0.0.3:4984/todos/foo";
    Assert.assertEquals(expected, relativeUrlString);
}

/**
 * A leading slash on the relative path must not produce a double slash in the result.
 */
public void testBuildRelativeURLStringWithLeadingSlash() throws Exception {
    String dbUrlString = "http://10.0.0.3:4984/todos/";
    Replication replicator = new Pusher(database, new URL(dbUrlString), false, null);
    String relativeUrlString = replicator.buildRelativeURLString("/foo");
    String expected = "http://10.0.0.3:4984/todos/foo";
    Assert.assertEquals(expected, relativeUrlString);
}

/**
 * setChannels/getChannels round trip; setting null clears the channel list.
 */
public void testChannels() throws Exception {
    URL remote = getReplicationURL();
    Replication replicator = database.createPullReplication(remote);
    List<String> channels = new ArrayList<String>();
    channels.add("chan1");
    channels.add("chan2");
    replicator.setChannels(channels);
    Assert.assertEquals(channels, replicator.getChannels());
    replicator.setChannels(null);
    Assert.assertTrue(replicator.getChannels().isEmpty());
}

public void testChannelsMore() throws
MalformedURLException, CouchbaseLiteException { Database db = startDatabase(); URL fakeRemoteURL = new URL("http://couchbase.com/no_such_db"); Replication r1 = db.createPullReplication(fakeRemoteURL); assertTrue(r1.getChannels().isEmpty()); r1.setFilter("foo/bar"); assertTrue(r1.getChannels().isEmpty()); Map<String, Object> filterParams= new HashMap<String, Object>(); filterParams.put("a", "b"); r1.setFilterParams(filterParams); assertTrue(r1.getChannels().isEmpty()); r1.setChannels(null); assertEquals("foo/bar", r1.getFilter()); assertEquals(filterParams, r1.getFilterParams()); List<String> channels = new ArrayList<String>(); channels.add("NBC"); channels.add("MTV"); r1.setChannels(channels); assertEquals(channels, r1.getChannels()); assertEquals("sync_gateway/bychannel", r1.getFilter()); filterParams= new HashMap<String, Object>(); filterParams.put("channels", "NBC,MTV"); assertEquals(filterParams, r1.getFilterParams()); r1.setChannels(null); assertEquals(r1.getFilter(), null); assertEquals(null ,r1.getFilterParams()); } public void testHeaders() throws Exception { final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); mockHttpClient.addResponderThrowExceptionAllRequests(); HttpClientFactory mockHttpClientFactory = new HttpClientFactory() { @Override public HttpClient getHttpClient() { return mockHttpClient; } @Override public void addCookies(List<Cookie> cookies) { } @Override public void deleteCookie(String name) { } @Override public CookieStore getCookieStore() { return null; } }; URL remote = getReplicationURL(); manager.setDefaultHttpClientFactory(mockHttpClientFactory); Replication puller = database.createPullReplication(remote); Map<String, Object> headers = new HashMap<String, Object>(); headers.put("foo", "bar"); puller.setHeaders(headers); runReplication(puller); assertNotNull(puller.getLastError()); boolean foundFooHeader = false; List<HttpRequest> requests = mockHttpClient.getCapturedRequests(); for (HttpRequest request : 
requests) { Header[] requestHeaders = request.getHeaders("foo"); for (Header requestHeader : requestHeaders) { foundFooHeader = true; Assert.assertEquals("bar", requestHeader.getValue()); } } Assert.assertTrue(foundFooHeader); manager.setDefaultHttpClientFactory(null); } /** * Regression test for issue couchbase/couchbase-lite-android#174 */ public void testAllLeafRevisionsArePushed() throws Exception { final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); mockHttpClient.addResponderRevDiffsAllMissing(); mockHttpClient.setResponseDelayMilliseconds(250); mockHttpClient.addResponderFakeLocalDocumentUpdate404(); HttpClientFactory mockHttpClientFactory = new HttpClientFactory() { @Override public HttpClient getHttpClient() { return mockHttpClient; } @Override public void addCookies(List<Cookie> cookies) { } @Override public void deleteCookie(String name) { } @Override public CookieStore getCookieStore() { return null; } }; manager.setDefaultHttpClientFactory(mockHttpClientFactory); Document doc = database.createDocument(); SavedRevision rev1a = doc.createRevision().save(); SavedRevision rev2a = createRevisionWithRandomProps(rev1a, false); SavedRevision rev3a = createRevisionWithRandomProps(rev2a, false); // delete the branch we've been using, then create a new one to replace it SavedRevision rev4a = rev3a.deleteDocument(); SavedRevision rev2b = createRevisionWithRandomProps(rev1a, true); assertEquals(rev2b.getId(), doc.getCurrentRevisionId()); // sync with remote DB -- should push both leaf revisions Replication push = database.createPushReplication(getReplicationURL()); runReplication(push); assertNull(push.getLastError()); // find the _revs_diff captured request and decode into json boolean foundRevsDiff = false; List<HttpRequest> captured = mockHttpClient.getCapturedRequests(); for (HttpRequest httpRequest : captured) { if (httpRequest instanceof HttpPost) { HttpPost httpPost = (HttpPost) httpRequest; if 
(httpPost.getURI().toString().endsWith("_revs_diff")) { foundRevsDiff = true; Map<String, Object> jsonMap = CustomizableMockHttpClient.getJsonMapFromRequest(httpPost); // assert that it contains the expected revisions List<String> revisionIds = (List) jsonMap.get(doc.getId()); assertEquals(2, revisionIds.size()); assertTrue(revisionIds.contains(rev4a.getId())); assertTrue(revisionIds.contains(rev2b.getId())); } } } assertTrue(foundRevsDiff); } public void testRemoteConflictResolution() throws Exception { // Create a document with two conflicting edits. Document doc = database.createDocument(); SavedRevision rev1 = doc.createRevision().save(); SavedRevision rev2a = createRevisionWithRandomProps(rev1, false); SavedRevision rev2b = createRevisionWithRandomProps(rev1, true); // make sure we can query the db to get the conflict Query allDocsQuery = database.createAllDocumentsQuery(); allDocsQuery.setAllDocsMode(Query.AllDocsMode.ONLY_CONFLICTS); QueryEnumerator rows = allDocsQuery.run(); boolean foundDoc = false; assertEquals(1, rows.getCount()); for (Iterator<QueryRow> it = rows; it.hasNext();) { QueryRow row = it.next(); if (row.getDocument().getId().equals(doc.getId())) { foundDoc = true; } } assertTrue(foundDoc); // Push the conflicts to the remote DB. Replication push = database.createPushReplication(getReplicationURL()); runReplication(push); assertNull(push.getLastError()); // Prepare a bulk docs request to resolve the conflict remotely. First, advance rev 2a. JSONObject rev3aBody = new JSONObject(); rev3aBody.put("_id", doc.getId()); rev3aBody.put("_rev", rev2a.getId()); // Then, delete rev 2b. JSONObject rev3bBody = new JSONObject(); rev3bBody.put("_id", doc.getId()); rev3bBody.put("_rev", rev2b.getId()); rev3bBody.put("_deleted", true); // Combine into one _bulk_docs request. JSONObject requestBody = new JSONObject(); requestBody.put("docs", new JSONArray(Arrays.asList(rev3aBody, rev3bBody))); // Make the _bulk_docs request. 
HttpClient client = new DefaultHttpClient(); String bulkDocsUrl = getReplicationURL().toExternalForm() + "/_bulk_docs"; HttpPost request = new HttpPost(bulkDocsUrl); request.setHeader("Content-Type", "application/json"); String json = requestBody.toString(); request.setEntity(new StringEntity(json)); HttpResponse response = client.execute(request); // Check the response to make sure everything worked as it should. assertEquals(201, response.getStatusLine().getStatusCode()); String rawResponse = IOUtils.toString(response.getEntity().getContent()); JSONArray resultArray = new JSONArray(rawResponse); assertEquals(2, resultArray.length()); for (int i = 0; i < resultArray.length(); i++) { assertTrue(((JSONObject) resultArray.get(i)).isNull("error")); } workaroundSyncGatewayRaceCondition(); // Pull the remote changes. Replication pull = database.createPullReplication(getReplicationURL()); runReplication(pull); assertNull(pull.getLastError()); // Make sure the conflict was resolved locally. assertEquals(1, doc.getConflictingRevisions().size()); } public void testOnlineOfflinePusher() throws Exception { URL remote = getReplicationURL(); // mock sync gateway final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); mockHttpClient.addResponderFakeLocalDocumentUpdate404(); mockHttpClient.addResponderRevDiffsSmartResponder(); HttpClientFactory mockHttpClientFactory = mockFactoryFactory(mockHttpClient); manager.setDefaultHttpClientFactory(mockHttpClientFactory); // create a replication observer CountDownLatch replicationDoneSignal = new CountDownLatch(1); ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal); // create a push replication Replication pusher = database.createPushReplication(remote); Log.d(Database.TAG, "created pusher: " + pusher); pusher.addChangeListener(replicationFinishedObserver); pusher.setContinuous(true); pusher.start(); for (int i=0; i<5; i++) { Log.d(Database.TAG, 
"testOnlineOfflinePusher, i: " + i); final String docFieldName = "testOnlineOfflinePusher" + i; // put the replication offline putReplicationOffline(pusher); // add a response listener to wait for a bulk_docs request from the pusher final CountDownLatch gotBulkDocsRequest = new CountDownLatch(1); CustomizableMockHttpClient.ResponseListener bulkDocsListener = new CustomizableMockHttpClient.ResponseListener() { @Override public void responseSent(HttpUriRequest httpUriRequest, HttpResponse response) { if (httpUriRequest.getURI().getPath().endsWith("_bulk_docs")) { Log.d(TAG, "testOnlineOfflinePusher responselistener called with _bulk_docs"); ArrayList docs = CustomizableMockHttpClient.extractDocsFromBulkDocsPost(httpUriRequest); Log.d(TAG, "docs: " + docs); for (Object docObject : docs) { Map<String, Object> doc = (Map) docObject; if (doc.containsKey(docFieldName)) { Log.d(TAG, "Found expected doc in _bulk_docs: " + doc); gotBulkDocsRequest.countDown(); } else { Log.d(TAG, "Ignore doc in _bulk_docs: " + doc); } } } } }; mockHttpClient.addResponseListener(bulkDocsListener); // add a document String docFieldVal = "foo" + i; Map<String,Object> properties = new HashMap<String, Object>(); properties.put(docFieldName, docFieldVal); createDocumentWithProperties(database, properties); // put the replication online, which should trigger it to send outgoing bulk_docs request putReplicationOnline(pusher); // wait until we get a bulk docs request Log.d(Database.TAG, "waiting for bulk docs request with " + docFieldName); boolean succeeded = gotBulkDocsRequest.await(90, TimeUnit.SECONDS); assertTrue(succeeded); Log.d(Database.TAG, "got bulk docs request with " + docFieldName); mockHttpClient.removeResponseListener(bulkDocsListener); mockHttpClient.clearCapturedRequests(); } Log.d(Database.TAG, "calling pusher.stop()"); pusher.stop(); Log.d(Database.TAG, "called pusher.stop()"); // wait for replication to finish Log.d(Database.TAG, "waiting for replicationDoneSignal"); boolean 
didNotTimeOut = replicationDoneSignal.await(90, TimeUnit.SECONDS);
Log.d(Database.TAG, "done waiting for replicationDoneSignal. didNotTimeOut: " + didNotTimeOut);
assertTrue(didNotTimeOut);
assertFalse(pusher.isRunning());
}

/**
 * A transient 503 from _bulk_docs should be retried and must not surface as a
 * replication error.
 */
public void testPushReplicationRecoverableError() throws Exception {
    int statusCode = 503;
    String statusMsg = "Transient Error";
    boolean expectReplicatorError = false;
    runPushReplicationWithTransientError(statusCode, statusMsg, expectReplicatorError);
}

/**
 * A transient IOException (simulated by statusCode == -1) should also be retried
 * without surfacing as a replication error.
 */
public void testPushReplicationRecoverableIOException() throws Exception {
    int statusCode = -1;  // code to tell it to throw an IOException
    String statusMsg = null;
    boolean expectReplicatorError = false;
    runPushReplicationWithTransientError(statusCode, statusMsg, expectReplicatorError);
}

/**
 * A 404 is not recoverable: the replication is expected to record an error.
 */
public void testPushReplicationNonRecoverableError() throws Exception {
    int statusCode = 404;
    String statusMsg = "NOT FOUND";
    boolean expectReplicatorError = true;
    runPushReplicationWithTransientError(statusCode, statusMsg, expectReplicatorError);
}

/**
 * Pushes one document through a mock _bulk_docs endpoint that first answers with
 * the given transient error and afterwards succeeds, then checks the replicator's
 * error state and whether a local checkpoint was recorded.
 */
public void runPushReplicationWithTransientError(int statusCode, String statusMsg, boolean expectReplicatorError) throws Exception {

    Map<String, Object> properties1 = new HashMap<String, Object>();
    properties1.put("doc1", "testPushReplicationTransientError");
    createDocWithProperties(properties1);

    final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
    mockHttpClient.addResponderFakeLocalDocumentUpdate404();

    // First responder in the chain returns the transient error; the sentinel
    // responder then serves every later _bulk_docs call successfully.
    CustomizableMockHttpClient.Responder sentinal = CustomizableMockHttpClient.fakeBulkDocsResponder();
    Queue<CustomizableMockHttpClient.Responder> responders = new LinkedList<CustomizableMockHttpClient.Responder>();
    responders.add(CustomizableMockHttpClient.transientErrorResponder(statusCode, statusMsg));
    ResponderChain responderChain = new ResponderChain(responders, sentinal);
    mockHttpClient.setResponder("_bulk_docs", responderChain);

    // create a replication observer to wait until replication finishes
    CountDownLatch replicationDoneSignal =
new CountDownLatch(1); ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal); // create replication and add observer manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient)); Replication pusher = database.createPushReplication(getReplicationURL()); pusher.addChangeListener(replicationFinishedObserver); // save the checkpoint id for later usage String checkpointId = pusher.remoteCheckpointDocID(); // kick off the replication pusher.start(); // wait for it to finish boolean success = replicationDoneSignal.await(60, TimeUnit.SECONDS); assertTrue(success); Log.d(TAG, "replicationDoneSignal finished"); if (expectReplicatorError == true) { assertNotNull(pusher.getLastError()); } else { assertNull(pusher.getLastError()); } // workaround for the fact that the replicationDoneSignal.wait() call will unblock before all // the statements in Replication.stopped() have even had a chance to execute. // (specifically the ones that come after the call to notifyChangeListeners()) Thread.sleep(500); String localLastSequence = database.lastSequenceWithCheckpointId(checkpointId); if (expectReplicatorError == true) { assertNull(localLastSequence); } else { assertNotNull(localLastSequence); } } public void testPushReplicationCanMissDocs() throws Exception { assertEquals(0, database.getLastSequenceNumber()); Map<String,Object> properties1 = new HashMap<String,Object>(); properties1.put("doc1", "testPushReplicationCanMissDocs"); final Document doc1 = createDocWithProperties(properties1); Map<String,Object> properties2 = new HashMap<String,Object>(); properties1.put("doc2", "testPushReplicationCanMissDocs"); final Document doc2 = createDocWithProperties(properties2); UnsavedRevision doc2UnsavedRev = doc2.createRevision(); InputStream attachmentStream = getAsset("attachment.png"); doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream); SavedRevision doc2Rev = doc2UnsavedRev.save(); 
assertNotNull(doc2Rev); final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); mockHttpClient.addResponderFakeLocalDocumentUpdate404(); mockHttpClient.setResponder("_bulk_docs", new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { String json = "{\"error\":\"not_found\",\"reason\":\"missing\"}"; return CustomizableMockHttpClient.generateHttpResponseObject(404, "NOT FOUND", json); } }); mockHttpClient.setResponder(doc2.getId(), new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { Map<String, Object> responseObject = new HashMap<String, Object>(); responseObject.put("id", doc2.getId()); responseObject.put("ok", true); responseObject.put("rev", doc2.getCurrentRevisionId()); return CustomizableMockHttpClient.generateHttpResponseObject(responseObject); } }); // create a replication obeserver to wait until replication finishes CountDownLatch replicationDoneSignal = new CountDownLatch(1); ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal); // create replication and add observer manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient)); Replication pusher = database.createPushReplication(getReplicationURL()); pusher.addChangeListener(replicationFinishedObserver); // save the checkpoint id for later usage String checkpointId = pusher.remoteCheckpointDocID(); // kick off the replication pusher.start(); // wait for it to finish boolean success = replicationDoneSignal.await(60, TimeUnit.SECONDS); assertTrue(success); Log.d(TAG, "replicationDoneSignal finished"); // we would expect it to have recorded an error because one of the docs (the one without the attachment) // will have failed. 
assertNotNull(pusher.getLastError()); // workaround for the fact that the replicationDoneSignal.wait() call will unblock before all // the statements in Replication.stopped() have even had a chance to execute. // (specifically the ones that come after the call to notifyChangeListeners()) Thread.sleep(500); String localLastSequence = database.lastSequenceWithCheckpointId(checkpointId); Log.d(TAG, "database.lastSequenceWithCheckpointId(): " + localLastSequence); Log.d(TAG, "doc2.getCurrentRevision().getSequence(): " + doc2.getCurrentRevision().getSequence()); String msg = "Since doc1 failed, the database should _not_ have had its lastSequence bumped" + " to doc2's sequence number. If it did, it's bug: github.com/couchbase/couchbase-lite-java-core/issues/95"; assertFalse(msg, Long.toString(doc2.getCurrentRevision().getSequence()).equals(localLastSequence)); assertNull(localLastSequence); assertTrue(doc2.getCurrentRevision().getSequence() > 0); } public void testPushUpdatedDocWithoutReSendingAttachments() throws Exception { assertEquals(0, database.getLastSequenceNumber()); Map<String,Object> properties1 = new HashMap<String,Object>(); properties1.put("dynamic", 1); final Document doc = createDocWithProperties(properties1); SavedRevision doc1Rev = doc.getCurrentRevision(); // Add attachment to document UnsavedRevision doc2UnsavedRev = doc.createRevision(); InputStream attachmentStream = getAsset("attachment.png"); doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream); SavedRevision doc2Rev = doc2UnsavedRev.save(); assertNotNull(doc2Rev); final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); mockHttpClient.addResponderFakeLocalDocumentUpdate404(); mockHttpClient.setResponder(doc.getId(), new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { Map<String, Object> responseObject = new HashMap<String, Object>(); responseObject.put("id", 
doc.getId()); responseObject.put("ok", true); responseObject.put("rev", doc.getCurrentRevisionId()); return CustomizableMockHttpClient.generateHttpResponseObject(responseObject); } }); // create replication and add observer manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient)); Replication pusher = database.createPushReplication(getReplicationURL()); runReplication(pusher); List<HttpRequest> captured = mockHttpClient.getCapturedRequests(); for (HttpRequest httpRequest : captured) { // verify that there are no PUT requests with attachments if (httpRequest instanceof HttpPut) { HttpPut httpPut = (HttpPut) httpRequest; HttpEntity entity=httpPut.getEntity(); //assertFalse("PUT request with updated doc properties contains attachment", entity instanceof MultipartEntity); } } mockHttpClient.clearCapturedRequests(); Document oldDoc =database.getDocument(doc.getId()); UnsavedRevision aUnsavedRev = oldDoc.createRevision(); Map<String,Object> prop = new HashMap<String,Object>(); prop.putAll(oldDoc.getProperties()); prop.put("dynamic", (Integer) oldDoc.getProperty("dynamic") +1); aUnsavedRev.setProperties(prop); final SavedRevision savedRev=aUnsavedRev.save(); mockHttpClient.setResponder(doc.getId(), new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { Map<String, Object> responseObject = new HashMap<String, Object>(); responseObject.put("id", doc.getId()); responseObject.put("ok", true); responseObject.put("rev", savedRev.getId()); return CustomizableMockHttpClient.generateHttpResponseObject(responseObject); } }); final String json = String.format("{\"%s\":{\"missing\":[\"%s\"],\"possible_ancestors\":[\"%s\",\"%s\"]}}",doc.getId(),savedRev.getId(),doc1Rev.getId(), doc2Rev.getId()); mockHttpClient.setResponder("_revs_diff", new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { return 
mockHttpClient.generateHttpResponseObject(json); } }); pusher = database.createPushReplication(getReplicationURL()); runReplication(pusher); captured = mockHttpClient.getCapturedRequests(); for (HttpRequest httpRequest : captured) { // verify that there are no PUT requests with attachments if (httpRequest instanceof HttpPut) { HttpPut httpPut = (HttpPut) httpRequest; HttpEntity entity=httpPut.getEntity(); assertFalse("PUT request with updated doc properties contains attachment", entity instanceof MultipartEntity); } } } public void testServerDoesNotSupportMultipart() throws Exception { assertEquals(0, database.getLastSequenceNumber()); Map<String,Object> properties1 = new HashMap<String,Object>(); properties1.put("dynamic", 1); final Document doc = createDocWithProperties(properties1); SavedRevision doc1Rev = doc.getCurrentRevision(); // Add attachment to document UnsavedRevision doc2UnsavedRev = doc.createRevision(); InputStream attachmentStream = getAsset("attachment.png"); doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream); SavedRevision doc2Rev = doc2UnsavedRev.save(); assertNotNull(doc2Rev); final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); mockHttpClient.addResponderFakeLocalDocumentUpdate404(); Queue<CustomizableMockHttpClient.Responder> responders = new LinkedList<CustomizableMockHttpClient.Responder>(); //Reject multipart PUT with response code 415 responders.add(new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { String json = "{\"error\":\"Unsupported Media Type\",\"reason\":\"missing\"}"; return CustomizableMockHttpClient.generateHttpResponseObject(415, "Unsupported Media Type", json); } }); // second call should be plain json, return good response responders.add(new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { Map<String, 
Object> responseObject = new HashMap<String, Object>(); responseObject.put("id", doc.getId()); responseObject.put("ok", true); responseObject.put("rev", doc.getCurrentRevisionId()); return CustomizableMockHttpClient.generateHttpResponseObject(responseObject); } }); ResponderChain responderChain = new ResponderChain(responders); mockHttpClient.setResponder(doc.getId(), responderChain); // create replication and add observer manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient)); Replication pusher = database.createPushReplication(getReplicationURL()); runReplication(pusher); List<HttpRequest> captured = mockHttpClient.getCapturedRequests(); int entityIndex =0; for (HttpRequest httpRequest : captured) { // verify that there are no PUT requests with attachments if (httpRequest instanceof HttpPut) { HttpPut httpPut = (HttpPut) httpRequest; HttpEntity entity=httpPut.getEntity(); if(entityIndex++ == 0) { assertTrue("PUT request with attachment is not multipart", entity instanceof MultipartEntity); } else { assertFalse("PUT request with attachment is multipart", entity instanceof MultipartEntity); } } } } public void testContinuousPushReplicationGoesIdle() throws Exception { // make sure we are starting empty assertEquals(0, database.getLastSequenceNumber()); // add docs Map<String,Object> properties1 = new HashMap<String,Object>(); properties1.put("doc1", "testContinuousPushReplicationGoesIdle"); final Document doc1 = createDocWithProperties(properties1); // create a mock http client that serves as a mocked out sync gateway final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); // replication to do initial sync up - has to be continuous replication so the checkpoint id // matches the next continuous replication we're gonna do later. 
manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient)); Replication firstPusher = database.createPushReplication(getReplicationURL()); firstPusher.setContinuous(true); final String checkpointId = firstPusher.remoteCheckpointDocID(); // save the checkpoint id for later usage // intercept checkpoint PUT request and return a 201 response with expected json mockHttpClient.setResponder("_local", new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { String id = String.format("_local/%s", checkpointId); String json = String.format("{\"id\":\"%s\",\"ok\":true,\"rev\":\"0-2\"}", id); return CustomizableMockHttpClient.generateHttpResponseObject(201, "OK", json); } }); // start the continuous replication CountDownLatch replicationIdleSignal = new CountDownLatch(1); ReplicationIdleObserver replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal); firstPusher.addChangeListener(replicationIdleObserver); firstPusher.start(); // wait until we get an IDLE event boolean successful = replicationIdleSignal.await(30, TimeUnit.SECONDS); assertTrue(successful); stopReplication(firstPusher); // the last sequence should be "1" at this point. 
we will use this later final String lastSequence = database.lastSequenceWithCheckpointId(checkpointId); assertEquals("1", lastSequence); // start a second continuous replication Replication secondPusher = database.createPushReplication(getReplicationURL()); secondPusher.setContinuous(true); final String secondPusherCheckpointId = secondPusher.remoteCheckpointDocID(); assertEquals(checkpointId, secondPusherCheckpointId); // when this goes to fetch the checkpoint, return the last sequence from the previous replication mockHttpClient.setResponder("_local", new CustomizableMockHttpClient.Responder() { @Override public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException { String id = String.format("_local/%s", secondPusherCheckpointId); String json = String.format("{\"id\":\"%s\",\"ok\":true,\"rev\":\"0-2\",\"lastSequence\":\"%s\"}", id, lastSequence); return CustomizableMockHttpClient.generateHttpResponseObject(200, "OK", json); } }); // start second replication replicationIdleSignal = new CountDownLatch(1); replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal); secondPusher.addChangeListener(replicationIdleObserver); secondPusher.start(); // wait until we get an IDLE event successful = replicationIdleSignal.await(30, TimeUnit.SECONDS); assertTrue(successful); stopReplication(secondPusher); } private Document createDocWithProperties(Map<String, Object> properties1) throws CouchbaseLiteException { Document doc1 = database.createDocument(); UnsavedRevision revUnsaved = doc1.createRevision(); revUnsaved.setUserProperties(properties1); SavedRevision rev = revUnsaved.save(); assertNotNull(rev); return doc1; } public void disabledTestCheckpointingWithServerError() throws Exception { String remoteCheckpointDocId; String lastSequenceWithCheckpointIdInitial; String lastSequenceWithCheckpointIdFinal; URL remote = getReplicationURL(); // add docs String docIdTimestamp = Long.toString(System.currentTimeMillis()); 
createDocumentsForPushReplication(docIdTimestamp); // do push replication against mock replicator that fails to save remote checkpoint final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient(); mockHttpClient.addResponderFakeLocalDocumentUpdate404(); manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient)); Replication pusher = database.createPushReplication(remote); remoteCheckpointDocId = pusher.remoteCheckpointDocID(); lastSequenceWithCheckpointIdInitial = database.lastSequenceWithCheckpointId(remoteCheckpointDocId); runReplication(pusher); List<HttpRequest> capturedRequests = mockHttpClient.getCapturedRequests(); for (HttpRequest capturedRequest : capturedRequests) { if (capturedRequest instanceof HttpPost) { HttpPost capturedPostRequest = (HttpPost) capturedRequest; } } // sleep to allow for any "post-finished" activities on the replicator related to checkpointing Thread.sleep(2000); // make sure local checkpoint is not updated lastSequenceWithCheckpointIdFinal = database.lastSequenceWithCheckpointId(remoteCheckpointDocId); String msg = "since the mock replicator rejected the PUT to _local/remoteCheckpointDocId, we " + "would expect lastSequenceWithCheckpointIdInitial == lastSequenceWithCheckpointIdFinal"; assertEquals(msg, lastSequenceWithCheckpointIdFinal, lastSequenceWithCheckpointIdInitial); Log.d(TAG, "replication done"); } public void testServerIsSyncGatewayVersion() { Replication pusher = database.createPushReplication(getReplicationURL()); assertFalse(pusher.serverIsSyncGatewayVersion("0.01")); pusher.setServerType("Couchbase Sync Gateway/0.93"); assertTrue(pusher.serverIsSyncGatewayVersion("0.92")); assertFalse(pusher.serverIsSyncGatewayVersion("0.94")); } private void putReplicationOffline(Replication replication) throws InterruptedException { final CountDownLatch wentOffline = new CountDownLatch(1); Replication.ChangeListener offlineChangeListener = new Replication.ChangeListener() { @Override public void 
changed(Replication.ChangeEvent event) { if (!event.getSource().online) { wentOffline.countDown(); } } }; replication.addChangeListener(offlineChangeListener); replication.goOffline(); boolean succeeded = wentOffline.await(30, TimeUnit.SECONDS); assertTrue(succeeded); replication.removeChangeListener(offlineChangeListener); } private void putReplicationOnline(Replication replication) throws InterruptedException { final CountDownLatch wentOnline = new CountDownLatch(1); Replication.ChangeListener onlineChangeListener = new Replication.ChangeListener() { @Override public void changed(Replication.ChangeEvent event) { if (event.getSource().online) { wentOnline.countDown(); } } }; replication.addChangeListener(onlineChangeListener); replication.goOnline(); boolean succeeded = wentOnline.await(30, TimeUnit.SECONDS); assertTrue(succeeded); replication.removeChangeListener(onlineChangeListener); } public void testDifferentCheckpointsFilteredReplication() throws Exception { Replication pullerNoFilter = database.createPullReplication(getReplicationURL()); String noFilterCheckpointDocId = pullerNoFilter.remoteCheckpointDocID(); Replication pullerWithFilter1 = database.createPullReplication(getReplicationURL()); pullerWithFilter1.setFilter("foo/bar"); Map<String, Object> filterParams= new HashMap<String, Object>(); filterParams.put("a", "aval"); filterParams.put("b", "bval"); pullerWithFilter1.setDocIds(Arrays.asList("doc3", "doc1", "doc2")); pullerWithFilter1.setFilterParams(filterParams); String withFilterCheckpointDocId = pullerWithFilter1.remoteCheckpointDocID(); assertFalse(withFilterCheckpointDocId.equals(noFilterCheckpointDocId)); Replication pullerWithFilter2 = database.createPullReplication(getReplicationURL()); pullerWithFilter2.setFilter("foo/bar"); filterParams= new HashMap<String, Object>(); filterParams.put("b", "bval"); filterParams.put("a", "aval"); pullerWithFilter2.setDocIds(Arrays.asList("doc2", "doc3", "doc1")); 
pullerWithFilter2.setFilterParams(filterParams); String withFilterCheckpointDocId2 = pullerWithFilter2.remoteCheckpointDocID(); assertTrue(withFilterCheckpointDocId.equals(withFilterCheckpointDocId2)); } public void testSetReplicationCookie() throws Exception { URL replicationUrl = getReplicationURL(); Replication puller = database.createPullReplication(replicationUrl); String cookieName = "foo"; String cookieVal = "bar"; boolean isSecure = false; boolean httpOnly = false; // expiration date - 1 day from now Calendar cal = Calendar.getInstance(); cal.setTime(new Date()); int numDaysToAdd = 1; cal.add(Calendar.DATE, numDaysToAdd); Date expirationDate = cal.getTime(); // set the cookie puller.setCookie(cookieName, cookieVal, "", expirationDate, isSecure, httpOnly); // make sure it made it into cookie store and has expected params CookieStore cookieStore = puller.getClientFactory().getCookieStore(); List<Cookie> cookies = cookieStore.getCookies(); assertEquals(1, cookies.size()); Cookie cookie = cookies.get(0); assertEquals(cookieName, cookie.getName()); assertEquals(cookieVal, cookie.getValue()); assertEquals(replicationUrl.getHost(), cookie.getDomain()); assertEquals(replicationUrl.getPath(), cookie.getPath()); assertEquals(expirationDate, cookie.getExpiryDate()); assertEquals(isSecure, cookie.isSecure()); // add a second cookie String cookieName2 = "foo2"; puller.setCookie(cookieName2, cookieVal, "", expirationDate, isSecure, false); assertEquals(2, cookieStore.getCookies().size()); // delete cookie puller.deleteCookie(cookieName2); // should only have the original cookie left assertEquals(1, cookieStore.getCookies().size()); assertEquals(cookieName, cookieStore.getCookies().get(0).getName()); } private void workaroundSyncGatewayRaceCondition() { try { Thread.sleep(5 * 1000); } catch (InterruptedException e) { e.printStackTrace(); } } }
package com.dmdirc.addons.ui_swing.wizard.firstrun;

import com.dmdirc.Main;
import com.dmdirc.actions.ActionManager;
import com.dmdirc.actions.CoreActionType;
import com.dmdirc.actions.interfaces.ActionType;
import com.dmdirc.addons.ui_swing.MainFrame;
import com.dmdirc.config.IdentityManager;
import com.dmdirc.interfaces.ActionListener;
import com.dmdirc.logger.ErrorLevel;
import com.dmdirc.logger.Logger;
import com.dmdirc.ui.interfaces.FirstRunWizard;
import com.dmdirc.addons.ui_swing.Apple;
import com.dmdirc.addons.ui_swing.dialogs.profiles.ProfileManagerDialog;
import com.dmdirc.addons.ui_swing.wizard.Step;
import com.dmdirc.addons.ui_swing.wizard.WizardDialog;
import com.dmdirc.addons.ui_swing.wizard.WizardListener;
import com.dmdirc.ui.IconManager;
import com.dmdirc.util.resourcemanager.ResourceManager;

import java.awt.Dimension;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import java.util.Map.Entry;

/** First run wizard, used to initially setup the client for the user. */
public final class SwingFirstRunWizard implements WizardListener, FirstRunWizard {

    /** Wizard dialog. */
    private WizardDialog wizardDialog;
    /** First run or update. */
    private boolean firstRun = true;

    /** Instatiate the wizard. */
    public SwingFirstRunWizard() {
        this(true);
    }

    /**
     * Instantiate the wizard.
     *
     * @param firstRun is this the first run or an update?
     */
    public SwingFirstRunWizard(final boolean firstRun) {
        this.firstRun = firstRun;

        wizardDialog = new WizardDialog("DMDirc: " + (firstRun
                ? "Setup wizard" : "Migration wizard"),
                new ArrayList<Step>(), this, null);
        wizardDialog.setIconImage(IconManager.getIconManager().getImage("icon"));
        wizardDialog.addWizardListener(this);
        // The Apple look-and-feel needs slightly more vertical space.
        if (Apple.isAppleUI()) {
            wizardDialog.setMinimumSize(new Dimension(400, 425));
        } else {
            wizardDialog.setMinimumSize(new Dimension(400, 400));
        }
    }

    /** {@inheritDoc} */
    @Override
    public void wizardFinished() {
        // Nothing can be extracted (and no settings saved) without resources.
        if (ResourceManager.getResourceManager() == null) {
            return;
        }

        if (((ExtractionStep) wizardDialog.getStep(0)).getPluginsState()) {
            extractPlugins();
        }
        if (((ExtractionStep) wizardDialog.getStep(0)).getActionsState()) {
            extractActions();
        }

        if (firstRun) {
            IdentityManager.getConfigIdentity().setOption("updater",
                    "enable", ((CommunicationStep) wizardDialog.getStep(1)).checkUpdates());
            IdentityManager.getConfigIdentity().setOption("general",
                    "submitErrors", ((CommunicationStep) wizardDialog.getStep(1)).checkErrors());
        }

        if (firstRun && ((ProfileStep) wizardDialog.getStep(2)).getProfileManagerState()) {
            // Defer showing the profile manager until the client has opened,
            // since the main window does not exist yet.
            ActionManager.addListener(new ActionListener() {

                /** {@inheritDoc} */
                @Override
                public void processEvent(final ActionType type,
                        final StringBuffer format, final Object... arguments) {
                    ProfileManagerDialog.showProfileManagerDialog((MainFrame) Main.getUI().getMainWindow());
                }
            }, CoreActionType.CLIENT_OPENED);
        }
        wizardDialog.dispose();
    }

    /** {@inheritDoc} */
    @Override
    public void wizardCancelled() {
        wizardDialog.dispose();
    }

    /** {@inheritDoc} */
    @Override
    public void extractPlugins() {
        extractCorePlugins();
    }

    /** Extracts the core plugins. */
    public static void extractCorePlugins() {
        extractResources("plugins", "plugins", "Failed to extract plugins");
    }

    /** {@inheritDoc} */
    @Override
    public void extractActions() {
        extractCoreActions();
    }

    /** Extracts the core actions. */
    public static void extractCoreActions() {
        extractResources("com/dmdirc/actions/defaults", "actions",
                "Failed to extract actions");
    }

    /**
     * Copies all bundled resources under the given prefix into a directory
     * beneath the user's config dir, preserving the relative path layout.
     *
     * @param resourcePrefix resource path prefix to enumerate (and strip)
     * @param targetDir directory name under the config dir to extract into
     * @param errorMessage user error message reported if an extraction fails
     */
    private static void extractResources(final String resourcePrefix,
            final String targetDir, final String errorMessage) {
        final Map<String, byte[]> resources = ResourceManager.getResourceManager().
                getResourcesStartingWithAsBytes(resourcePrefix);
        for (Entry<String, byte[]> resource : resources.entrySet()) {
            try {
                // Strip the bundle prefix so only the relative path remains,
                // then re-root it under <configDir>/<targetDir>.
                final String resourceName = Main.getConfigDir() + targetDir
                        + resource.getKey().substring(resourcePrefix.length());
                final File newDir = new File(resourceName.substring(0,
                        resourceName.lastIndexOf('/')) + "/");

                if (!newDir.exists()) {
                    newDir.mkdirs();
                }

                final File newFile = new File(newDir,
                        resourceName.substring(resourceName.lastIndexOf('/') + 1));

                // Directory entries enumerate as resources too; only write files.
                if (!newFile.isDirectory()) {
                    ResourceManager.getResourceManager().
                            resourceToFile(resource.getValue(), newFile);
                }
            } catch (IOException ex) {
                Logger.userError(ErrorLevel.LOW, errorMessage);
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void display() {
        if (firstRun) {
            wizardDialog.addStep(new FirstRunExtractionStep());
            wizardDialog.addStep(new CommunicationStep());
            wizardDialog.addStep(new ProfileStep());
        } else {
            wizardDialog.addStep(new MigrationExtrationStep());
        }
        wizardDialog.display();
    }

    /**
     * Returns the dialog associated with this wizard.
     *
     * @return Associated wizard dialog
     */
    public WizardDialog getWizardDialog() {
        return wizardDialog;
    }
}
package com.phoebushighschool.phoebusrobotics.ultimateascent; import edu.wpi.first.wpilibj.PIDOutput; import edu.wpi.first.wpilibj.PIDSource; import edu.wpi.first.wpilibj.can.CANTimeoutException; /** * TankDrive * * This class controls the turning speed and angle side of the drive system * * @author Anna */ public class TankDrive implements PIDOutput, PIDSource { protected UltimateAscentBot robot; protected Tread rightTread; protected Tread leftTread; public GyroSensor gyro = null; double speed; /** * TankDrive * * constructor * * @throws CANTimeoutException */ public TankDrive() throws CANTimeoutException { rightTread = new Tread(this, Parameters.rightTreadCanID, Parameters.rightGearLowSolenoidChannel, Parameters.rightGearHighSolenoidChannel); leftTread = new Tread(this, Parameters.leftTreadCanID, Parameters.leftGearLowSolenoidChannel, Parameters.leftGearHighSolenoidChannel); rightTread.setGear(Tread.Gear.kLow); leftTread.setGear(Tread.Gear.kLow); // gyro = new GyroSensor(Parameters.gyroAnalogChannel); } /** * drive * * This method takes a percent value, and turns the robot according to * the value where positive values are to the right and negative values * are to the left. * * when robot is stationary: one tread moves forward while other tread moves * in reverse * * when robot is moving: ability to turn is greatly limited by speed of robot; * the faster the robot is traveling, the ability to turn * is reduced * * @param drivePercentPower - number in the range of -1.0 .. 0.0 .. 1.0 where * 0.0 is not moving and 1.0 is moving full speed * {left/right} and -1.0 is moving full speed {right/ * left} * @param turnPercentPower - number in the range of -1.0 .. 0.0 .. 
1.0 where * 0.0 is not turning and 1.0 is turning full speed * {left/right} and -1.0 is turning full speed {right/ * left} */ public void drive(double drivePercentPower, double turnPercentPower, double kDamp) throws CANTimeoutException { double leftSpeed; double rightSpeed; //when in high gear, adjust drivePercentPower for higher gear ratio double adjustedDrivePercentPower = drivePercentPower; if (leftTread.isHighGear()) { adjustedDrivePercentPower = drivePercentPower * 2.27; } turnPercentPower = decayTurnPower(adjustedDrivePercentPower, turnPercentPower, kDamp); if (Math.abs(turnPercentPower) + Math.abs(drivePercentPower) > 1.0 ) { drivePercentPower = drivePercentPower / (drivePercentPower + turnPercentPower); turnPercentPower = turnPercentPower / (turnPercentPower + drivePercentPower); } leftSpeed = drivePercentPower + turnPercentPower; rightSpeed = drivePercentPower - turnPercentPower; leftTread.drive (leftSpeed); rightTread.drive(rightSpeed); } /** * decayTurnPower * * Mathematical logic/equation to determine value of power used to turn at any *given point while robot is moving * * @param forwardPercentPower - number in the range of -1.0 .. 0.0 .. 1.0 where * 0.0 is not moving and 1.0 is moving full speed * {left/right} and -1.0 is moving full speed {right/ * left} * @param turnPercentPower - number in the range of -1.0 .. 0.0 .. 
1.0 where * 0.0 is not turning and 1.0 is turning full speed * {left/right} and -1.0 is turning full speed {right/ * left} * @return */ public double decayTurnPower(double forwardPercentPower, double turnPercentPower, double kDamp) { double decayValue; decayValue = (-1.0/(kDamp * FRCMath.pow(forwardPercentPower, 2) + 1.0)) + 1.0; if (turnPercentPower > 0.0) { turnPercentPower -= decayValue; if (turnPercentPower < 0.0) { turnPercentPower = 0.0; } } if (turnPercentPower < 0.0) { turnPercentPower += decayValue; if (turnPercentPower > 0.0) { turnPercentPower = 0.0; } } return turnPercentPower; } /** * This method takes a joystick value, and turns the robot according to * the value * * @param speedToTurn - number in the range of -1.0 .. 0.0 .. 1.0 where * 0.0 is not turning and 1.0 is turning full speed * {left/right} and -1.0 is turning full speed {right/ * left} */ public void pidWrite(double speedToTurn) { try { drive (0.0, speedToTurn, 0.0); } catch (CANTimeoutException e) { throw new RuntimeException(e.getMessage()); } } /** * * @return */ public double pidGet() { return gyro.pidGet(); } /** * * @param gear */ public void setGear(Tread.Gear gear) { leftTread.setGear( gear); rightTread.setGear(gear); } public Tread.Gear getGear() { return leftTread.getGear(); } public boolean isGyroPresent() { if (gyro != null) { return true; } return false; } }
package com.redhat.ceylon.compiler.typechecker.analyzer; import static com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer.ASTRING_LITERAL; import static com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer.AVERBATIM_STRING; import static com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer.STRING_END; import static com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer.STRING_MID; import static com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer.STRING_START; import static com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer.VERBATIM_STRING; import static java.lang.Character.isWhitespace; import static java.lang.Character.toChars; import static java.lang.Integer.parseInt; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.antlr.runtime.CommonToken; import org.antlr.runtime.Token; import com.redhat.ceylon.compiler.typechecker.tree.Node; import com.redhat.ceylon.compiler.typechecker.tree.Tree; import com.redhat.ceylon.compiler.typechecker.tree.Tree.CharLiteral; import com.redhat.ceylon.compiler.typechecker.tree.Tree.CompilationUnit; import com.redhat.ceylon.compiler.typechecker.tree.Tree.Literal; import com.redhat.ceylon.compiler.typechecker.tree.Tree.QuotedLiteral; import com.redhat.ceylon.compiler.typechecker.tree.Tree.StringLiteral; import com.redhat.ceylon.compiler.typechecker.tree.Tree.StringTemplate; import com.redhat.ceylon.compiler.typechecker.tree.Visitor; public class LiteralVisitor extends Visitor { private int indent; static final Pattern DOC_LINK_PATTERN = Pattern.compile("\\[\\[(([^\"`|\\[\\]]*\\|)?((module )|(package )|(class )|(interface )|(function )|(value )|(alias ))?(((\\w|\\.)+)::)?(\\w*)(\\.(\\w*))*(\\(\\))?)\\]\\]"); private static Pattern CHARACTER_ESCAPE_PATTERN = Pattern.compile("\\\\(\\{ @Override public void visit(CompilationUnit that) { if (!that.getLiteralsProcessed()) { super.visit(that); that.setLiteralsProcessed(true); } } @Override public void visit(StringLiteral that) { if 
(that.getToken()==null) return; int type = that.getToken().getType(); String text = that.getText(); boolean verbatim = type==AVERBATIM_STRING || type==ASTRING_LITERAL; if (verbatim) { Matcher m = DOC_LINK_PATTERN.matcher(text); while (m.find()) { String group = m.group(1); int start = that.getStartIndex()+m.start(1); int end = that.getStartIndex()+m.end(1); String[] linesUpTo = text.substring(0, m.start(1)).split("\n"); CommonToken token = new CommonToken(ASTRING_LITERAL, group); token.setStartIndex(start); token.setStopIndex(end-1); token.setTokenIndex(that.getToken().getTokenIndex()); int line = that.getToken().getLine() + linesUpTo.length-1; int charInLine = linesUpTo.length==0 ? 0 : linesUpTo[linesUpTo.length-1].length(); token.setLine(line); token.setCharPositionInLine(charInLine); that.addDocLink(new Tree.DocLink(token)); } } if (type!=STRING_MID && type!=STRING_END) { indent = getIndentPosition(that); } if (type==VERBATIM_STRING || type==AVERBATIM_STRING) { text = text.substring(3, text.length()-(text.endsWith("\"\"\"")?3:0)); } else if (type==STRING_MID) { text = text.substring(2, text.length()-2); } else if (type==STRING_END) { text = text.substring(2, text.length()-(text.endsWith("\"")?1:0)); } else if (type==STRING_START) { text = text.substring(1, text.length()-2); } else { text = text.substring(1, text.length()-(text.endsWith("\"")?1:0)); } StringBuilder result = new StringBuilder(); boolean allTrimmed = stripIndent(text, indent, result, verbatim); if (!allTrimmed) { that.addError("multiline string content should align with start of string: string begins at character position " + indent, 6000); } if (type!=VERBATIM_STRING && type!=AVERBATIM_STRING) { interpolateEscapes(result, that); } that.setText(result.toString()); if (type!=STRING_MID && type!=STRING_START) { indent = 0; } } @Override public void visit(StringTemplate that) { int oi = indent; indent = 0; super.visit(that); indent = oi; } @Override public void visit(QuotedLiteral that) { 
StringBuilder result = new StringBuilder(); stripIndent(that.getText(), getIndentPosition(that), result, true); //interpolateEscapes(result, that); that.setText(result.toString()); } private int getIndentPosition(Literal that) { Token token = that.getToken(); return token==null ? 0 : token.getCharPositionInLine() + getQuoteLength(token); } private int getQuoteLength(Token token) { int type = token.getType(); return type==VERBATIM_STRING || type==AVERBATIM_STRING ? 3 : 1; } @Override public void visit(CharLiteral that) { StringBuilder result = new StringBuilder(that.getText()); interpolateEscapes(result, that); that.setText(result.toString()); } static final String digits = "\\d+"; static final String groups = "\\d{1,3}(_\\d{3})+"; static final String fractionalGroups = "(\\d{3}_)+\\d{1,3}"; static final String magnitude = "k|M|G|T|P"; static final String fractionalMagnitude = "m|u|n|p|f"; static final String exponent = "(e|E)(\\+|-)?" + digits; static final String hexDigits = "(\\d|[a-f]|[A-F])+"; static final String hexGroups = "(\\d|[a-f]|[A-F]){1,4}(_(\\d|[a-f]|[A-F]){4})+|(\\d|[a-f]|[A-F]){1,2}(_(\\d|[a-f]|[A-F]){2})+"; static final String binDigits = "(0|1)+"; static final String binGroups = "(0|1){1,4}(_(0|1){4})+"; @Override public void visit(Tree.NaturalLiteral that) { super.visit(that); String text = that.getToken().getText(); if (!text.matches("^(" + digits + "|" + groups + ")(" + magnitude + ")?$") && !text.matches("#(" + hexDigits + "|" + hexGroups + ")") && !text.matches("\\$(" + binDigits + "|" + binGroups + ")")) { that.addError("illegal integer literal format"); } that.setText(that.getText() .replace("_", "") .replace("k", "000") .replace("M", "000000") .replace("G", "000000000") .replace("T", "000000000000") .replace("P", "000000000000000")); } @Override public void visit(Tree.FloatLiteral that) { super.visit(that); String text = that.getToken().getText(); if (!text.matches("^(" + digits + "|" + groups + ")(\\.(" + digits + "|" + fractionalGroups + 
")(" + magnitude + "|" + fractionalMagnitude + "|" + exponent + ")?|" + fractionalMagnitude + ")$")) { that.addError("illegal floating literal format"); } that.setText(that.getText() .replace("_", "") .replace("k", "e+3") .replace("M", "e+6") .replace("G", "e+9") .replace("T", "e+12") .replace("P", "e+15") .replace("m", "e-3") .replace("u", "e-6") .replace("n", "e-9") .replace("p", "e-12") .replace("f", "e-15")); } private static boolean stripIndent(final String text, final int indentation, final StringBuilder result, boolean verbatim) { boolean correctlyIndented = true; int num = 0; for (String line: text.split("\n|\r\n?")) { if (num++==0) { result.append(line); } else { for (int i = 0; i < line.length(); i++) { if (i < indentation) { if (!isWhitespace(line.charAt(i))) { correctlyIndented = false; result.append(line.substring(i)); break; } } else { result.append(line.substring(indentation)); break; } } } if (!verbatim && line.endsWith("\\")) { result.setLength(result.length()-1); } else { result.append("\n"); } } result.setLength(result.length()-1); return correctlyIndented; } private static void interpolateEscapes(StringBuilder result, Node node) { Matcher matcher; int start=0; while ((matcher = CHARACTER_ESCAPE_PATTERN.matcher(result)) .find(start)) { String hex = matcher.group(2); String name = matcher.group(3); if (name!=null) { boolean found=false; for (int codePoint=0; codePoint<=0xE01EF; codePoint++) { String cn = Character.getName(codePoint); if (cn!=null && cn.equals(name)) { char[] chars = toChars(codePoint); result.replace(matcher.start(), matcher.end(), new String(chars)); found = true; break; } } if (!found) { int emoji = -1; switch (name) { case ":)": case ":-)": case "=)": emoji = 0x1f603; break; case "O:)": case "O:-)": case "O=)": emoji = 0x1f607; break; case "}:)": case "}:-)": case "}=)": emoji = 0x1f608; break; case ":-(": case ":(": case "=(": emoji = 0x1f61e; break; case ":-|": case ":|": case "=|": emoji = 0x1f610; break; case ";-)": case 
";)": emoji = 0x1f609; break; case "B-)": case "B)": emoji = 0x1f60e; break; case ":-D": case ":D": emoji = 0x1f600; break; case "=D": emoji = 0x1f604; break; case "-_-": emoji = 0x1f611; break; case "o_o": emoji = 0x1f613; break; case "u_u": emoji = 0x1f614; break; case ">_<": emoji = 0x1f623; break; case "^_^": emoji = 0x1f601; break; case "^_^;;": emoji = 0x1f605; break; case "<3": emoji = 0x1f49c; break; case "<\\3": case "</3": emoji = 0x1f494; break; case "~@~": emoji = 0x1f4a9; break; case "(]:{": emoji = 0x1f473; break; case "-<@%": emoji = 0x1f41d; break; case ":(|)": emoji = 0x1f435; break; case ":(:)": emoji = 0x1f437; break; case ":*": case ":-*": emoji = 0x1f617; break; case ";*": case ";-*": emoji = 0x1f618; break; case ":\\": case ":-\\": case "=\\": case ":/": case ":-/": case "=/": emoji = 0x1f615; break; case ":S": case ":-S": case ":s": case ":-s": emoji = 0x1f616; break; case ":P": case ":-P": case "=P": case ":p": case ":-p": case "=p": emoji = 0x1f61b; break; case ";P": case ";-P": case ";p": case ";-p": emoji = 0x1f61c; break; case ">.<": case ">:(": case ">:-(": case ">=(": emoji = 0x1f621; break; case "T_T": case ":'(": case ";_;": case "='(": emoji = 0x1f622; break; case "D:": emoji = 0x1f626; break; case "o.o": case ":o": case ":-o": case "=o": emoji = 0x1f62e; break; case "O.O": case ":O": case ":-O": case "=O": emoji = 0x1f632; break; case "x_x": case "X-O": case "x-o": case "X(": case "X-(": emoji = 0x1f635; break; case ":X)": case ":3": case "(=^..^=)": case "(=^.^=)": case "=^_^=": emoji = 0x1f638; break; default: node.addError("illegal unicode escape sequence: " + name + " is not a Unicode character"); } if (emoji>0) { result.replace(matcher.start(), matcher.end(), new String(Character.toChars(emoji))); } } } else if (hex!=null) { if (hex.length()!=2 && hex.length()!=4 && hex.length()!=6 && hex.length()!=8) { //tolerate 8 digits for backward compatibility only! 
node.addError("illegal unicode escape sequence: must consist of 2, 4, or 6 digits"); } else { int codePoint=0; try { codePoint = parseInt(hex, 16); } catch (NumberFormatException nfe) { node.addError("illegal unicode escape sequence: '" + hex + "' is not a hexadecimal number"); } char[] chars; try { chars = toChars(codePoint); } catch (IllegalArgumentException iae) { node.addError("illegal unicode escape sequence: '" + hex + "' is not a valid Unicode code point"); chars = toChars(0); } result.replace(matcher.start(), matcher.end(), new String(chars)); } } else { char escape = matcher.group(5).charAt(0); char ch; switch (escape) { case 'b': ch = '\b'; break; case 't': ch = '\t'; break; case 'n': ch = '\n'; break; case 'f': ch = '\f'; break; case 'r': ch = '\r'; break; case 'e': ch = 0x1b; break; case '0': ch = 0; break; case '"': case '\'': case '`': case '\\': ch = escape; break; default: node.addError("illegal escape sequence: \\" + escape); ch='?'; } result.replace(matcher.start(), matcher.end(), Character.toString(ch)); } start = matcher.start()+1; } } @Override public void visit(Tree.Identifier that) { super.visit(that); String text = that.getText(); int index = 0; while (index<text.length()) { int cp = text.codePointAt(index); index += Character.charCount(cp); int type = Character.getType(cp); boolean num = type==Character.LETTER_NUMBER || type==Character.DECIMAL_DIGIT_NUMBER || type==Character.OTHER_NUMBER; boolean letter = type==Character.LOWERCASE_LETTER || type==Character.UPPERCASE_LETTER || type==Character.TITLECASE_LETTER || type==Character.OTHER_LETTER|| type==Character.MODIFIER_LETTER; boolean us = cp=='_'; if (index==0 && num) { that.addError("identifier may not begin with a digit"); break; } else if (!num && !letter && !us) { that.addError("identifier must be composed of letters, digits, and underscores"); break; } } } }
package dr.inference.operators.factorAnalysis; import dr.inference.model.Likelihood; import dr.inference.model.MatrixParameterInterface; import dr.inference.operators.AbstractAdaptableOperator; import dr.inference.operators.GeneralOperator; import dr.inference.operators.PathDependent; import dr.xml.*; public class LoadingsRotationOperator extends AbstractAdaptableOperator implements GeneralOperator, PathDependent { private final AbstractAdaptableOperator baseOperator; private final MatrixParameterInterface parameter; public LoadingsRotationOperator(AbstractAdaptableOperator baseOperator, MatrixParameterInterface parameter) { this.baseOperator = baseOperator; this.parameter = parameter; } @Override public String getOperatorName() { return null; } @Override public double doOperation(Likelihood joint) { syncBaseOperator(); double hastingsRatio = baseOperator.doOperation(joint); double oldLikelihood = joint.getLogLikelihood(); reflect(); double newLikelihood = joint.getLogLikelihood(); if (oldLikelihood != newLikelihood) { throw new RuntimeException("Fix this"); } return hastingsRatio; } private void reflect() { boolean changed = false; int dim = Math.min(parameter.getRowDimension(), parameter.getColumnDimension()); for (int i = 0; i < dim; i++) { if (parameter.getParameterValue(i, i) < 0) { changed = true; for (int j = i; j < parameter.getRowDimension(); j++) { parameter.setParameterValueQuietly(j, i, -parameter.getParameterValue(j, i)); } } } if (changed) { parameter.fireParameterChangedEvent(); } } @Override public double doOperation() { syncBaseOperator(); double hastingsRatio = baseOperator.doOperation(); reflect(); return hastingsRatio; } private void syncBaseOperator() { baseOperator.setAcceptCount(getAcceptCount()); baseOperator.setRejectCount(getRejectCount()); baseOperator.setSumDeviation(getSumDeviation()); } @Override protected void setAdaptableParameterValue(double value) { baseOperator.setAdaptableParameter(value); } @Override protected double 
getAdaptableParameterValue() { return baseOperator.getAdaptableParameter(); } @Override public double getRawParameter() { return baseOperator.getRawParameter(); } @Override public String getAdaptableParameterName() { return baseOperator.getAdaptableParameterName(); } public static final String PARSER_NAME = "loadingsRotationOperator"; public static AbstractXMLObjectParser PARSER = new AbstractXMLObjectParser() { @Override public Object parseXMLObject(XMLObject xo) { AbstractAdaptableOperator baseOperator = (AbstractAdaptableOperator) xo.getChild(AbstractAdaptableOperator.class); MatrixParameterInterface parameter = (MatrixParameterInterface) xo.getChild(MatrixParameterInterface.class); return new LoadingsRotationOperator(baseOperator, parameter); } @Override public XMLSyntaxRule[] getSyntaxRules() { return new XMLSyntaxRule[]{ new ElementRule(AbstractAdaptableOperator.class), new ElementRule(MatrixParameterInterface.class), }; } @Override public String getParserDescription() { return null; } @Override public Class getReturnType() { return LoadingsRotationOperator.class; } @Override public String getParserName() { return PARSER_NAME; } }; }
package gov.nih.nci.cananolab.service.common.impl; import gov.nih.nci.cananolab.domain.common.File; import gov.nih.nci.cananolab.domain.common.Keyword; import gov.nih.nci.cananolab.dto.common.FileBean; import gov.nih.nci.cananolab.dto.common.UserBean; import gov.nih.nci.cananolab.exception.FileException; import gov.nih.nci.cananolab.exception.NoAccessException; import gov.nih.nci.cananolab.service.common.FileService; import gov.nih.nci.cananolab.service.common.helper.FileServiceHelper; import gov.nih.nci.cananolab.service.security.AuthorizationService; import gov.nih.nci.cananolab.system.applicationservice.CustomizedApplicationService; import gov.nih.nci.cananolab.util.Constants; import gov.nih.nci.cananolab.util.PropertyUtils; import gov.nih.nci.system.client.ApplicationServiceProvider; import java.io.FileOutputStream; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import org.apache.log4j.Logger; /** * Local implementation of FileService * * @author pansu * */ public class FileServiceLocalImpl implements FileService { private static Logger logger = Logger.getLogger(FileServiceLocalImpl.class); private FileServiceHelper helper = new FileServiceHelper(); public FileServiceLocalImpl() { } /** * Load the file for the given fileId from the database * * @param fileId * @return */ public FileBean findFileById(String fileId, UserBean user) throws FileException, NoAccessException { FileBean fileBean = null; try { File file = helper.findFileById(fileId, user); if (file != null) { fileBean = new FileBean(file); if (user != null) { helper.retrieveVisibility(fileBean, user); } return fileBean; } return fileBean; } catch (NoAccessException e) { throw e; } catch (Exception e) { logger.error("Problem finding the file by id: " + fileId, e); throw new FileException(); } } private void writeFile(byte[] fileContent, String fullFileName) throws IOException { String path = fullFileName.substring(0, fullFileName.lastIndexOf("/")); java.io.File pathDir 
= new java.io.File(path); if (!pathDir.exists()) pathDir.mkdirs(); java.io.File file = new java.io.File(fullFileName); if (file.exists()) { return; // don't save again } FileOutputStream oStream = new FileOutputStream(new java.io.File( fullFileName)); oStream.write(fileContent); } // save to the file system if fileData is not empty public void writeFile(FileBean fileBean, UserBean user) throws FileException, NoAccessException { if (user == null || !user.isCurator()) { throw new NoAccessException(); } try { if (fileBean.getNewFileData() != null) { String rootPath = PropertyUtils.getProperty( Constants.CANANOLAB_PROPERTY, "fileRepositoryDir"); String fullFileName = rootPath + "/" + fileBean.getDomainFile().getUri(); writeFile(fileBean.getNewFileData(), fullFileName); } assignVisibility(fileBean); } catch (Exception e) { logger.error("Problem writing file " + fileBean.getDomainFile().getUri() + " to the file system."); throw new FileException(); } } /** * Preparing keywords and other information prior to saving a file * * @param file * @throws FileException */ public void prepareSaveFile(File file, UserBean user) throws FileException, NoAccessException { if (user == null || !user.isCurator()) { throw new NoAccessException(); } try { CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider .getApplicationService(); if (file.getId() != null) { File dbFile = (File) appService.get(File.class, file.getId()); if (dbFile != null) { // don't change createdBy and createdDate if it is already // persisted file.setCreatedBy(dbFile.getCreatedBy()); file.setCreatedDate(dbFile.getCreatedDate()); } else { String err = "Object doesn't exist in the database anymore. 
Please log in again."; logger.error(err); throw new FileException(err); } } if (file.getKeywordCollection() != null) { Collection<Keyword> keywords = new HashSet<Keyword>(file .getKeywordCollection()); file.getKeywordCollection().clear(); for (Keyword keyword : keywords) { Keyword dbKeyword = (Keyword) appService.getObject( Keyword.class, "name", keyword.getName()); if (dbKeyword != null) { keyword.setId(dbKeyword.getId()); } appService.saveOrUpdate(keyword); file.getKeywordCollection().add(keyword); } } } catch (Exception e) { logger.error("Problem in preparing saving a file: ", e); throw new FileException(); } } private void assignVisibility(FileBean fileBean) throws FileException { try { AuthorizationService authService = new AuthorizationService( Constants.CSM_APP_NAME); authService.assignVisibility(fileBean.getDomainFile().getId() .toString(), fileBean.getVisibilityGroups(), null); } catch (Exception e) { String err = "Error in setting file visibility for " + fileBean.getDisplayName(); logger.error(err, e); throw new FileException(err, e); } } }
package gov.nih.nci.cadsr.cadsrpasswordchange.core; import gov.nih.nci.cadsr.cadsrpasswordchange.domain.User; import java.io.FileInputStream; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.Properties; import oracle.jdbc.pool.OracleDataSource; import org.apache.log4j.Logger; import org.joda.time.DateTime; import org.joda.time.DateTimeUtils; import org.joda.time.Interval; import org.joda.time.LocalTime; public class NotifyPassword { private static org.apache.log4j.Logger _logger = org.apache.log4j.Logger.getLogger(NotifyPassword.class); private static PasswordNotify dao; public static String emailSubject; public static String emailBody; private Properties _propList; private Connection _conn; private String _dsurl; private String _user; private String _pswd; private String _processingNotificationDays; public NotifyPassword(Connection conn) { this._conn = conn; } /** * Open a single simple connection to the database. No pooling is necessary. * * @param _dsurl * The Oracle TNSNAME entry describing the database location. * @param user_ * The ORACLE user id. * @param pswd_ * The password which must match 'user_'. * @return The database error code. */ private int open() throws Exception { // If we already have a connection, don't bother. 
if (_conn != null) { //return 0; _conn.close(); //CADSRPASSW-56 } try { // OracleDataSource ods = new OracleDataSource(); // String parts[] = _dsurl.split("[:]"); // ods.setDriverType("thin"); _logger.info("NotifyPassword v1.0 build 20.2"); // String connString=_dsurl; // ods.setURL(connString); // ods.setUser(_user); // ods.setPassword(_pswd); // _logger.info("NotifyPassword:open _dsurl[" + _dsurl + "] via _user["+ _user + "]"); // _conn = ods.getConnection(_user, _pswd); _logger.debug("got connection using direct jdbc url [" + _dsurl + "]"); Properties info = new Properties(); info.put( "user", _user ); _logger.debug("with user id [" + _user + "]"); info.put( "password", _pswd ); Class.forName("oracle.jdbc.driver.OracleDriver"); _conn = DriverManager.getConnection(_dsurl, info); _logger.info("connected to the database"); _conn.setAutoCommit(true); return 0; } catch (SQLException ex) { throw ex; } } /** * Load the properties from the XML file specified. * * @param propFile_ the properties file. 
*/ private void loadProp(String propFile_) throws Exception { _propList = new Properties(); _logger.debug("\n\nLoading properties...\n\n"); try { FileInputStream in = new FileInputStream(propFile_); _propList.loadFromXML(in); in.close(); } catch (Exception ex) { throw ex; } _dsurl = _propList.getProperty(Constants._DSURL); if (_dsurl == null) _logger.error("Missing " + Constants._DSURL + " connection string in " + propFile_); _user = _propList.getProperty(Constants._DSUSER); if (_user == null) _logger.error("Missing " + Constants._DSUSER + " in " + propFile_); _pswd = _propList.getProperty(Constants._DSPSWD); if (_pswd == null) _logger.error("Missing " + Constants._DSPSWD + " in " + propFile_); } public void doAll(String propFile_) throws Exception { _logger.debug("NotifyPassword.doAll entered ..."); loadProp(propFile_); open(); dao = new PasswordNotifyDAO(_conn); _processingNotificationDays = dao.getProcessTypes(); if(_processingNotificationDays != null) { try { List<String> types = new ArrayList<String>(Arrays.asList(_processingNotificationDays.split(","))); //note: no space in between the , separator int size = types.size(); int index = 1; for (String t : types) { _logger.info("Notification type " + t + " started ..."); process(Integer.valueOf(t).intValue(), size, index, types); ++index; _logger.debug("Notification type " + t + " processed."); } _logger.debug(".doAll."); } catch (Exception e) { //e.printStackTrace(); _logger.error(CommonUtil.toString(e)); } } else { _logger.error("Missing processing types. 
Please check EMAIL.NOTIFY_TYPE property value in the table sbrext.tool_options_view_ext."); } if (_conn != null) { _conn.close(); _conn = null; } _logger.debug("NotifyPassword.doAll done."); } private void process(int days, int size, int index, List<String>types) throws Exception { _logger.debug("\nNotifyPassword.process entered ..."); List<User> recipients = null; open(); dao = new PasswordNotifyDAO(_conn); recipients = dao.getPasswordExpiringList(days, size, index, types); if (recipients != null && recipients.size() > 0) { for (User u : recipients) { if(u != null) { _logger.info("Processing user [" + u.getUsername() + "] attempted [" + u.getAttemptedCount() + "] type [" + u.getProcessingType() + "] password updated [" + u.getPasswordChangedDate() + "] email [" + u.getElectronicMailAddress() + "] expiry date [" + u.getExpiryDate() + "]"); if(isNotificationValid(u, days, size, index)) { _logger.info("NotifyPassword.process saving into queue for user: " + u.getUsername()); saveIntoQueue(u, days); _logger.debug("NotifyPassword.process queued email for user: " + u.getUsername() + " under type " + days); _logger.info("NotifyPassword.process sending email for user: " + u.getUsername() + " under type " + days); try { if(sendEmail(u, days)) { _logger.debug("NotifyPassword.sendEmail *** DONE ***"); _logger.info("NotifyPassword.process updating success for user: " + u.getUsername() + " under type " + days); updateStatus(u, Constants.SUCCESS + String.valueOf(days), days); _logger.debug("NotifyPassword.process updated success for user: " + u.getUsername() + " under type " + days); } else { _logger.info("NotifyPassword.process updating failure for user: " + u.getUsername() + " under type " + days); updateStatus(u, Constants.FAILED + String.valueOf(days), days); _logger.debug("NotifyPassword.process updated failure for user: " + u.getUsername() + " under type " + days); } } catch (Exception e) { e.printStackTrace(); _logger.error(e); _logger.info("NotifyPassword.process 
updating failure for user: " + u.getUsername() + " under type " + days); updateStatus(u, Constants.UNKNOWN + String.valueOf(days), days); _logger.debug("NotifyPassword.process updated failure for user: " + u.getUsername() + " under type " + days); } } else { _logger.info("isNotificationValid is not valid, notification aborted for user: " + u.getUsername()); updateStatus(u, Constants.INVALID + String.valueOf(days), days); _logger.debug("status date updated for user " + u); } } } } else { _logger.info(" } _logger.debug("NotifyPassword.process done.\n\n"); } /** * Add or update the queue with the outgoing email. */ private void saveIntoQueue(User user, int daysLeft) throws Exception { _logger.debug("saveIntoQueue entered"); _logger.info("saveIntoQueue:user [" + user + "] type " + daysLeft); open(); dao = new PasswordNotifyDAO(_conn); user.setProcessingType(String.valueOf(daysLeft)); _logger.info("saveIntoQueue:type " + daysLeft + " set"); open(); dao = new PasswordNotifyDAO(_conn); dao.updateQueue(user); _logger.debug("saveIntoQueue done"); } private boolean sendEmail(User user, int daysLeft) throws Exception { boolean retVal = false; _logger.debug("NotifyPassword.sendEmail entered ..."); open(); dao = new PasswordNotifyDAO(_conn); String adminEmailAddress = dao.getAdminEmailAddress(); _logger.debug("NotifyPassword.sendEmail adminEmailAddress [" + adminEmailAddress + "]"); open(); dao = new PasswordNotifyDAO(_conn); // String emailSubject = EmailHelper.handleDaysToken(dao.getEmailSubject(), daysLeft); String emailSubject = dao.getEmailSubject(); _logger.debug("NotifyPassword.sendEmail emailSubject [" + emailSubject + "]"); open(); dao = new PasswordNotifyDAO(_conn); String emailBody = EmailHelper.handleExpiryDateToken(dao.getEmailBody(), user.getExpiryDate()); _logger.debug("NotifyPassword.sendEmail emailBody [" + emailBody + "]"); emailBody = EmailHelper.handleUserIDToken(emailBody, user); //CADSRPASSW-62 _logger.info("sendEmail:user id = [" + user.getUsername() + "] 
body processed = [" + emailBody + "]"); String emailAddress = user.getElectronicMailAddress(); _logger.debug("NotifyPassword.sendEmail emailAddress [" + emailAddress + "]"); open(); dao = new PasswordNotifyDAO(_conn); String host = dao.getHostName(); _logger.debug("NotifyPassword.sendEmail host [" + host + "]"); open(); dao = new PasswordNotifyDAO(_conn); String port = dao.getHostPort(); _logger.debug("NotifyPassword.sendEmail port [" + port + "]"); EmailSending ms = new EmailSending(adminEmailAddress, "dummy", host, port, emailAddress, emailSubject, emailBody); _logger.debug("NotifyPassword.sendEmail sending email ..."); //retVal = true; //open this just for test retVal = ms.send(); //uncomment this!!! _logger.debug("NotifyPassword.ms.send() is " + retVal); return retVal; } /** * Method to make sure the latest processing details is reflected with the passed user. * @param user * @return * @throws Exception */ private User refresh(User user) throws Exception { open(); dao = new PasswordNotifyDAO(_conn); return dao.loadQueue(user); } /** * Update the status of the delivery, sent or not sent. 
* * @param users list of users affected * @throws Exception */ private void updateStatus(User user, String status, int daysLeft) throws Exception { if(user == null) { throw new Exception("User is NULL or empty."); } user = refresh(user); int currentCount = user.getAttemptedCount(); String dStatus = user.getDeliveryStatus(); if(dStatus == null) { dStatus = ""; } open(); dao = new PasswordNotifyDAO(_conn); user.setProcessingType(String.valueOf(daysLeft)); if(status != null && status.equals(Constants.SUCCESS + String.valueOf(daysLeft))) { user.setAttemptedCount(++currentCount); _logger.info("user id [" + user.getUsername() + "] status = [" + status + "] attempted count [" + user.getAttemptedCount() + "]"); int index = dStatus.indexOf(Constants.SUCCESS + String.valueOf(daysLeft)); if(index == -1) { if(dStatus.length() > 0) { user.setDeliveryStatus(dStatus + " " + status); } else { user.setDeliveryStatus(status); } } } else { int indexF = dStatus.indexOf(Constants.FAILED + String.valueOf(daysLeft)); int indexI = dStatus.indexOf(Constants.INVALID + String.valueOf(daysLeft)); int indexU = dStatus.indexOf(Constants.UNKNOWN + String.valueOf(daysLeft)); //uncomment the following just for test if(indexF == -1 && indexI == -1 && indexU == -1) { if(dStatus.length() > 0) { user.setDeliveryStatus(dStatus + " " + status); } else { user.setDeliveryStatus(status); } } _logger.debug("user id [" + user.getUsername() + "] status = [" + status + "]"); } user.setDateModified(new Timestamp(DateTimeUtils.currentTimeMillis())); dao = new PasswordNotifyDAO(_conn); dao.updateQueue(user); } public boolean isNotificationValid(User user, int daysLeft, int totalNotificationTypes, int currentNotificationIndex) throws Exception { _logger.debug("isNotificationValid entered"); boolean ret = false; boolean daysCondition = false; boolean deliveryStatus = false; String processedType = null; int attempted = -1; String status = null; long daysSincePasswordChange = -1; _logger.info("isNotificationValid: 
calculating last password change time (to see if the password has been changed) ..."); java.sql.Date passwordChangedDate = user.getPasswordChangedDate(); if(passwordChangedDate == null) { throw new Exception("Not able to determine what is the password changed date or password change date is empty (from sys.cadsr_users view)."); } daysSincePasswordChange = CommonUtil.calculateDays(passwordChangedDate, new Date(DateTimeUtils.currentTimeMillis())); //daysSincePasswordChange = 1; //open this just for test _logger.info("isNotificationValid: last password change time was " + daysSincePasswordChange); if(daysSincePasswordChange != 0 && !isChangedRecently(daysLeft, daysSincePasswordChange)) { //not recently changed (today) _logger.info("isNotificationValid: password was not recently changed"); if(totalNotificationTypes != currentNotificationIndex && !isAlreadySent(user, daysLeft)) { _logger.info("isNotificationValid: type " + daysLeft + " is not the last notification type"); if(user != null) { _logger.debug("isNotificationValid: checking user ..."); //not the last type - send only once if(user.getDeliveryStatus() == null && user.getProcessingType() == null) { //has not been processed at all ret = true; _logger.debug("isNotificationValid is true: has not been processed before"); } else if(user.getDeliveryStatus() != null && user.getDeliveryStatus().indexOf(Constants.SUCCESS + String.valueOf(daysLeft)) == -1) { //processed but was not successful for whatever reason ret = true; _logger.debug("isNotificationValid is true: processed but was not successful (thus should retry)"); } /*else if(user.getDeliveryStatus() != null && user.getDeliveryStatus().equals(Constants.FAILED)) { //processed but failed ret = true; _logger.debug("isNotificationValid is true: processed but failed"); } else if(user.getProcessingType() != null && !user.getProcessingType().equals(String.valueOf(daysLeft))) { //it is different type of notification ret = true; _logger.debug("isNotificationValid is true: 
it is of different processing type, current type is " + daysLeft + " but the user's last processed type was " + user.getProcessingType()); } else { _logger.info("isNotificationValid is false: none of the condition(s) met"); }*/ _logger.debug("isNotificationValid: check user done"); } else { throw new Exception("User is NULL or empty."); } } else { _logger.info("isNotificationValid: type " + daysLeft + " is the last notification type"); if(daysLeft != Constants.DEACTIVATED_VALUE) { _logger.info("isNotificationValid: type " + daysLeft + " (the last notification type) is active"); //the last notification type Calendar start = Calendar.getInstance(); start.setTime(passwordChangedDate); _logger.info("isNotificationValid: checking for day(s) since password change and if the password was recently changed within the days of the type"); // if(daysSincePasswordChange >= 1 && !isChangedRecently(daysLeft, daysSincePasswordChange)) { if(isOverADaySinceLastSent(user)) { ret = true; _logger.debug("isNotificationValid is true: current type is " + daysLeft + "(daily notification) and it has been over a day since the last notice"); } else { _logger.debug("isNotificationValid is false: current type is " + daysLeft + "(daily notification) and it has not been over a day since the last notice sent"); } _logger.info("isNotificationValid is " + ret + ": it has been " + daysSincePasswordChange + " day(s) since the password change"); } else { _logger.debug("daily notification is disabled (types = '"+ _processingNotificationDays + "')."); } } } else if(daysSincePasswordChange == 0 || isChangedRecently(daysLeft, daysSincePasswordChange)) { //reset everything if changed today OR if changed after the last check point _logger.debug("isNotificationValid is false, removing the user from the queue ..."); open(); dao = new PasswordNotifyDAO(_conn); _logger.debug("isNotificationValid: removing the user [" + user + "] removed from the queue ..."); dao.removeQueue(user); 
_logger.info("isNotificationValid is false: user [" + user + "] removed from the queue."); } _logger.debug("isNotificationValid exiting with ret " + ret + " ..."); return ret; } private boolean isOverADaySinceLastSent(User user) throws Exception { boolean retVal = false; _logger.debug("isOverADaySinceLastSent entered"); LocalTime currentTime = new LocalTime(); // LocalTime start = new LocalTime(11, 30); // LocalTime end = new LocalTime(12, 30); // LocalInterval interval = new LocalInterval(start, end); // DateTime test = new DateTime(2010, 5, 25, 16, 0, 0, 0); // System.out.println(interval.contains(test)); int currentHour = currentTime.getHourOfDay(); if(currentHour == 12) { retVal = true; } _logger.debug("isOverADaySinceLastSent: exiting with ret " + retVal + " ..."); return retVal; } /** * Method to check if the password is changed within the days of the type, e.g. if the type is 7 and the changed happened within 7 days, * then the return is true, otherwise it is false. * * @param daysLeft the type e.g. 14, 7 or 4 * @param daysSincePasswordChange the password changed date/time since now * @return */ private boolean isChangedRecently(int daysLeft, long daysSincePasswordChange) { boolean ret = false; _logger.debug("isChangedRecently entered"); if(daysSincePasswordChange <= daysLeft) { ret = true; _logger.info("isChangedRecently:daysSincePasswordChange is " + daysSincePasswordChange + " which is <= " + daysLeft + ", thus set to " + ret); } //ret = false; //open this just for test _logger.debug("isChangedRecently is " + ret); return ret; } /** * Method to check if notification type of the user has been sent or otherwise. If sent, return true otherwise false; * * @param user * @param daysLeft the type e.g. 
14, 7 or 4 * @return * @throws Exception */ private boolean isAlreadySent(User user, int daysLeft) throws Exception { _logger.info("isAlreadySent user " + user ); if(user == null || user.getUsername() == null) { throw new Exception("User/ID is NULL or empty."); } PreparedStatement stmt = null; ResultSet rs = null; boolean retVal = false; String s = null; String t = null; try { open(); _logger.debug("connected"); if(_conn == null) { throw new Exception("Connection is NULL or empty."); } String sql = "select delivery_status, processing_type from SBREXT.PASSWORD_NOTIFICATION where upper(ua_name) = ?"; stmt = _conn.prepareStatement(sql); stmt.setString(1, user.getUsername().toUpperCase()); _logger.debug("isAlreadySent:check user [" + user + "] sent status"); rs = stmt.executeQuery(); if(rs.next()) { s = rs.getString("delivery_status"); t = rs.getString("processing_type"); } _logger.debug("isAlreadySent: user [" + user + "] sent status [" + s + "]"); } catch (Exception ex) { _logger.debug(ex.getMessage()); } finally { if (rs != null) { try { rs.close(); } catch (SQLException e) { _logger.error(e.getMessage()); } } if (stmt != null) { try { stmt.close(); } catch (SQLException e) { _logger.error(e.getMessage()); } } if (_conn != null) { try { _conn.close(); _conn = null; } catch (SQLException e) { _logger.error(e.getMessage()); } } } if(s != null && s.indexOf(Constants.SUCCESS + String.valueOf(daysLeft)) > -1) { retVal = true; } _logger.info("returning isAlreadySent [" + retVal + "]"); return retVal; } /** * To run this in Eclipse - * * 1. Copy log4j.properties from bin/ into java/ folder * 2. Add java/ folder into the Run classpath * 3. 
Add program arguments "[full path]\config.xml" in the Run */ public static void main(String[] args) { if (args.length != 1) { _logger.fatal(NotifyPassword.class.getName() + " config.xml"); return; } NotifyPassword np = new NotifyPassword(null); try { _logger.info(""); _logger.info(NotifyPassword.class.getClass().getName() + " begins"); np.doAll(args[0]); } catch (Exception ex) { _logger.error(ex.toString(), ex); } } }
package org.orbeon.oxf.xforms.processor.handlers;

import org.apache.commons.lang.StringUtils;
import org.dom4j.Element;
import org.dom4j.QName;
import org.orbeon.oxf.pipeline.api.ExternalContext;
import org.orbeon.oxf.resources.ResourceManagerWrapper;
import org.orbeon.oxf.xforms.*;
import org.orbeon.oxf.xml.ContentHandlerHelper;
import org.orbeon.oxf.xml.ElementHandlerController;
import org.orbeon.oxf.xml.XMLConstants;
import org.orbeon.oxf.xml.XMLUtils;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;

import java.util.Map;

/**
 * Handle xhtml:body.
 *
 * Emits the xhtml:body content for an XForms page: registers all element
 * handlers on the controller, opens the xhtml:form element with the hidden
 * state fields JavaScript needs ($static-state, $dynamic-state, ...), and
 * outputs auxiliary panels/templates. {@link #end} closes the form and body.
 * NOTE(review): the emission order of the hidden fields appears to be relied
 * upon by the client-side code — do not reorder.
 */
public class XHTMLBodyHandler extends XFormsBaseHandler {

    // Wraps the controller's output ContentHandler; created in start().
    private ContentHandlerHelper helper;

//    private String formattingPrefix;

    public XHTMLBodyHandler() {
        // super(forwarding=false, repeating=true) — presumably "do not forward
        // content, handle once"; semantics defined by XFormsBaseHandler.
        super(false, true);
    }

    /**
     * Open xhtml:body and the nested xhtml:form, and emit all hidden
     * state/bookkeeping fields plus optional panels and templates.
     */
    public void start(String uri, String localname, String qName, Attributes attributes) throws SAXException {

        final XFormsStaticState staticState = containingDocument.getStaticState();

        // Register control handlers on controller
        registerHandlers(handlerContext.getController(), staticState);

        // Add class for YUI skin
        // TODO: should be configurable somehow
        attributes = XMLUtils.appendToClassAttribute(attributes, "yui-skin-sam");

        // Start xhtml:body
        final ContentHandler contentHandler = handlerContext.getController().getOutput();
        contentHandler.startElement(uri, localname, qName, attributes);
        helper = new ContentHandlerHelper(contentHandler);

        final XFormsControls xformsControls = containingDocument.getControls();
        final String htmlPrefix = XMLUtils.prefixFromQName(qName);

        // Get formatting prefix and declare it if needed
        // TODO: would be nice to do this here, but then we need to make sure this prefix is available to other handlers
        // formattingPrefix = handlerContext.findFormattingPrefixDeclare();

        // Decide where the xhtml:form posts to, depending on deployment mode.
        final String requestPath;
        final String xformsSubmissionPath;
        {
            final ExternalContext.Request request = handlerContext.getExternalContext().getRequest();
            requestPath = request.getRequestPath();
            if (staticState.getDeploymentType() != XFormsConstants.DeploymentType.plain) {
                // Integrated or separate deployment mode
                xformsSubmissionPath = "/xforms-server-submit";// TODO: read property?
            } else {
                // Plain deployment mode: submission posts to URL of the current page and xforms-xml-submission.xpl intercepts that
                xformsSubmissionPath = requestPath;
            }
        }

        // Noscript panel is included before the xhtml:form element, in case the form is hidden through CSS
        if (!handlerContext.isNoScript()) {
            // TODO: must send startPrefixMapping()/endPrefixMapping()?
            helper.element("", XMLConstants.XINCLUDE_URI, "include", new String[] {
                    "href", getIncludedResourcePath(requestPath, "noscript-panel.xml")
            });
        }

        // Create xhtml:form element
        final boolean hasUpload = staticState.hasControlByName("upload");
        helper.startElement(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "form", new String[] {
                // Add id so that things work in portals
                "id", XFormsUtils.getFormId(containingDocument),
                // Regular classes
                "class", "xforms-form" + (handlerContext.isNoScript() ? " xforms-noscript" : " xforms-initially-hidden")
                        + (handlerContext.isSpanHTMLLayout() ? " xforms-layout-span" : " xforms-layout-nospan"),
                // Submission parameters
                "action", xformsSubmissionPath, "method", "POST",
                // In noscript mode, don't add event handler
                "onsubmit", handlerContext.isNoScript() ? null : "return false",
                // enctype only needed when the form contains an upload control
                hasUpload ? "enctype" : null, hasUpload ? "multipart/form-data" : null});

        {
            // Output encoded static and dynamic state
            helper.element(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "input", new String[] {
                    "type", "hidden", "name", "$static-state", "value", handlerContext.getEncodedClientState().getStaticState()
            });
            helper.element(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "input", new String[]{
                    "type", "hidden", "name", "$dynamic-state", "value", handlerContext.getEncodedClientState().getDynamicState()
            });
        }

        if (!handlerContext.isNoScript()) {
            // Other fields used by JavaScript
            helper.element(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "input", new String[]{
                    "type", "hidden", "name", "$server-events", "value", ""
            });
            helper.element(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "input", new String[]{
                    "type", "hidden", "name", "$client-state", "value", ""
            });

            // Store information about nested repeats hierarchy
            {
                helper.element(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "input", new String[]{
                        "type", "hidden", "name", "$repeat-tree", "value", staticState.getRepeatHierarchyString()
                });
            }

            // Store information about the initial index of each repeat
            {
                // Serialized as "id1 idx1,id2 idx2,..." (comma between entries,
                // space between id and index).
                final StringBuilder repeatIndexesBuilder = new StringBuilder();
                final Map<String, Integer> repeatIdToIndex = xformsControls.getCurrentControlTree().getMinimalRepeatIdToIndex(staticState);
                if (repeatIdToIndex.size() != 0) {
                    for (final Map.Entry<String, Integer> currentEntry: repeatIdToIndex.entrySet()) {
                        final String repeatId = currentEntry.getKey();
                        final Integer index = currentEntry.getValue();

                        if (repeatIndexesBuilder.length() > 0)
                            repeatIndexesBuilder.append(',');

                        repeatIndexesBuilder.append(repeatId);
                        repeatIndexesBuilder.append(' ');
                        repeatIndexesBuilder.append(index);
                    }
                }

                helper.element(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "input", new String[]{
                        "type", "hidden", "name", "$repeat-indexes", "value", repeatIndexesBuilder.toString()
                });
            }

            // Ajax loading indicator
            if (XFormsProperties.isAjaxShowLoadingIcon(containingDocument)) {
                helper.startElement(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "span", new String[]{ "class", "xforms-loading-loading" });
                helper.text("Loading..."); // text is hardcoded, but you can rewrite it in the theme if needed
                helper.endElement();
                helper.element(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "span", new String[]{ "class", "xforms-loading-none" });
            }

            // Ajax error panel
            if (XFormsProperties.isAjaxShowErrors(containingDocument)) {
                // XInclude dialog so users can configure it
                // TODO: must send startPrefixMapping()/endPrefixMapping()?
                helper.element("", XMLConstants.XINCLUDE_URI, "include", new String[] {
                        "href", getIncludedResourcePath(requestPath, "error-dialog.xml")
                });
            }

            // Help panel
            // TODO: must send startPrefixMapping()/endPrefixMapping()?
            helper.element("", XMLConstants.XINCLUDE_URI, "include", new String[] {
                    "href", getIncludedResourcePath(requestPath, "help-panel.xml")
            });

            // Templates
            {
                final String spanQName = XMLUtils.buildQName(htmlPrefix, "span");
                final String TEMPLATE_ID = "$xforms-effective-id$";

                // HACK: We would be ok with just one template, but IE 6 doesn't allow setting the input/@type attribute properly

                // xforms:select[@appearance = 'full'], xforms:input[@type = 'xs:boolean']
                XFormsSelect1Handler.outputItemFullTemplate(pipelineContext, handlerContext, contentHandler, htmlPrefix, spanQName,
                        containingDocument, reusableAttributes, attributes,
                        "xforms-select-full-template", TEMPLATE_ID, true, "checkbox");

                // xforms:select1[@appearance = 'full']
                XFormsSelect1Handler.outputItemFullTemplate(pipelineContext, handlerContext, contentHandler, htmlPrefix, spanQName,
                        containingDocument, reusableAttributes, attributes,
                        "xforms-select1-full-template", TEMPLATE_ID, true, "radio");
            }
        } else {
            // Noscript mode: single marker field so the server knows scripting is off
            helper.element(htmlPrefix, XMLConstants.XHTML_NAMESPACE_URI, "input", new String[]{
                    "type", "hidden", "name", "$noscript", "value", "true"
            });
        }
    }

    /**
     * Register one handler per XForms control element on the controller.
     * Matchers discriminate between variants of the same element name
     * (e.g. the several xforms:output flavors) based on attributes.
     */
    public static void registerHandlers(final ElementHandlerController controller, final XFormsStaticState staticState) {

        // xforms:input
        controller.registerHandler(XFormsInputHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "input");

        // xforms:output — most specific matchers first; default handler registered last
        controller.registerHandler(XFormsOutputTextHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "output", controller.new Matcher() {
            public boolean match(Attributes attributes) {
                return XFormsConstants.XXFORMS_TEXT_APPEARANCE_QNAME.equals(controller.getAttributeQNameValue(attributes.getValue(XFormsConstants.APPEARANCE_QNAME.getName())));
            }
        });
        controller.registerHandler(XFormsOutputDownloadHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "output", controller.new Matcher() {
            public boolean match(Attributes attributes) {
                return XFormsConstants.XXFORMS_DOWNLOAD_APPEARANCE_QNAME.equals(getAppearance(attributes));
            }
        });
        controller.registerHandler(XFormsOutputImageHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "output", controller.new Matcher() {
            public boolean match(Attributes attributes) {
                final String mediatypeValue = attributes.getValue("mediatype");
                return mediatypeValue != null && mediatypeValue.startsWith("image/");
            }
        });
        controller.registerHandler(XFormsOutputHTMLHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "output", controller.new Matcher() {
            public boolean match(Attributes attributes) {
                final String mediatypeValue = attributes.getValue("mediatype");
                return mediatypeValue != null && mediatypeValue.equals("text/html");
            }
        });
        controller.registerHandler(XFormsOutputDefaultHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "output");

        // xforms:trigger — shared matcher for trigger and submit
        final ElementHandlerController.Matcher triggerSubmitMinimalMatcher = controller.new Matcher() {
            public boolean match(Attributes attributes) {
                final QName appearance = getAppearance(attributes);
                // in noscript mode, use the full appearance
                return appearance != null && !staticState.isNoScript()
                        && (XFormsConstants.XFORMS_MINIMAL_APPEARANCE_QNAME.equals(appearance) // minimal appearance
                            || XFormsConstants.XXFORMS_LINK_APPEARANCE_QNAME.equals(appearance)); // legacy appearance
            }
        };
        controller.registerHandler(XFormsTriggerMinimalHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "trigger", triggerSubmitMinimalMatcher);
        controller.registerHandler(XFormsTriggerFullHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "trigger");

        // xforms:submit
        controller.registerHandler(XFormsTriggerMinimalHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "submit", triggerSubmitMinimalMatcher);
        controller.registerHandler(XFormsTriggerFullHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "submit");

        // xforms:group
        controller.registerHandler(XFormsGroupInternalHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "group", controller.new Matcher() {
            public boolean match(Attributes attributes) {
                return XFormsConstants.XXFORMS_INTERNAL_APPEARANCE_QNAME.equals(getAppearance(attributes));
            }
        });
        controller.registerHandler(XFormsGroupFieldsetHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "group", controller.new Matcher() {
            public boolean match(Attributes attributes) {
                return XFormsConstants.XXFORMS_FIELDSET_APPEARANCE_QNAME.equals(getAppearance(attributes));
            }
        });
        controller.registerHandler(XFormsGroupSeparatorHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "group", controller.new Matcher() {
            public boolean match(Attributes attributes) {
                // XFormsAnnotatorContentHandler adds this appearance if needed
                // NOTE: we just check on the attribute value instead of resolving the QName, so that XFormsAnnotatorContentHandler
                // doesn't have to declare the xxforms:* prefix.
                final String appearanceAttributeValue = attributes.getValue(XFormsConstants.APPEARANCE_QNAME.getName());
                return XFormsConstants.XXFORMS_SEPARATOR_APPEARANCE_QNAME.getQualifiedName().equals(appearanceAttributeValue);
            }
        });
        controller.registerHandler(XFormsGroupDefaultHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "group");

        // xforms:switch
        controller.registerHandler(XFormsSwitchHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "switch");
        controller.registerHandler(XFormsCaseHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "case");

        // xforms:repeat
        controller.registerHandler(XFormsRepeatHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "repeat");

        // xforms:secret
        controller.registerHandler(XFormsSecretHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "secret");

        // xforms:upload
        controller.registerHandler(XFormsUploadHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "upload");

        // xforms:range
        controller.registerHandler(XFormsRangeHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "range");

        // Other controls
        controller.registerHandler(XFormsTextareaHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "textarea");
        controller.registerHandler(XFormsSelectHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "select");
        controller.registerHandler(XFormsSelect1Handler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "select1");
        controller.registerHandler(XXFormsDialogHandler.class.getName(), XFormsConstants.XXFORMS_NAMESPACE_URI, "dialog");

        // Add handlers for LHHA elements
        // TODO: check w/ XFStaticState if there are any standalone LHHA elements
        controller.registerHandler(XFormsLabelHintHelpAlertHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "label");
        controller.registerHandler(XFormsLabelHintHelpAlertHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "help");
        controller.registerHandler(XFormsLabelHintHelpAlertHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "hint");
        controller.registerHandler(XFormsLabelHintHelpAlertHandler.class.getName(), XFormsConstants.XFORMS_NAMESPACE_URI, "alert");

        // Add handlers for custom components (one per XBL binding QName)
        final Map<QName, Element> componentBindings = staticState.getXBLBindings().getComponentBindings();
        if (componentBindings != null) {
            for (final QName currentQName: componentBindings.keySet()) {
                controller.registerHandler(XXFormsComponentHandler.class.getName(), currentQName.getNamespaceURI(), currentQName.getName());
            }
        }
    }

    /**
     * Close the xhtml:form opened in start(), then xhtml:body itself.
     */
    public void end(String uri, String localname, String qName) throws SAXException {

        // Close xhtml:form
        helper.endElement();

        // Close xhtml:body
        final ContentHandler contentHandler = handlerContext.getController().getOutput();
        contentHandler.endElement(uri, localname, qName);
    }

    /**
     * Resolve an app-specific resource under /apps/[app-name]/, falling back
     * to the global /config/ copy when the app does not provide one.
     */
    private String getIncludedResourcePath(String requestPath, String fileName) {
        // Path will look like "/app-name/whatever"
        final String[] pathElements = StringUtils.split(requestPath, '/');
        if (pathElements.length >= 2) {
            final String appName = pathElements[0];// it seems that split() does not return first blank match
            final String path = "/apps/" + appName + "/" + fileName;
            if (ResourceManagerWrapper.instance().exists(path)) {
                return "oxf:" + path;
            }
        }
        // Default
        return "oxf:/config/" + fileName;
    }
}
package org.openlmis.stockmanagement.service;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.when;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JasperCompileManager;
import net.sf.jasperreports.engine.JasperReport;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Spy;
import org.openlmis.stockmanagement.domain.JasperTemplate;
import org.openlmis.stockmanagement.exception.JasperReportViewException;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

/**
 * Integration tests for {@code JasperReportService#generateReport}.
 *
 * <p>Each test serializes an empty compiled report into the template's data,
 * then calls the service with various parameter maps.</p>
 *
 * <p>NOTE(review): {@code @InjectMocks}/{@code @Spy} are Mockito annotations
 * and are not processed by SpringRunner alone — confirm how these fields are
 * actually initialized in this project's test setup.</p>
 */
@SpringBootTest
@RunWith(SpringRunner.class)
public class JasperReportServiceIntegrationTest {

  private static final String EMPTY_REPORT_RESOURCE = "/empty-report.jrxml";
  private static final int DOUBLE_HIKARI_DEFAULT_POOL_SIZE = 20;
  private static final String DATE_FORMAT = "dd/MM/yyyy";
  private static final String DATE_TIME_FORMAT = "dd/MM/yyyy HH:mm:ss";
  private static final String FORMAT = "format";
  private static final String PDF = "pdf";

  @InjectMocks
  private JasperReportService service;

  @Spy
  private DataSource dataSource;

  private ByteArrayOutputStream bos = new ByteArrayOutputStream();
  private ObjectOutputStream out;
  private JasperTemplate template = new JasperTemplate();
  private Map<String, Object> params = new HashMap<>();

  @Before
  public void setUp() throws IOException {
    out = new ObjectOutputStream(bos);
  }

  /**
   * Regression test: generating more reports than the connection pool size
   * must not exhaust connections (i.e. connections are returned to the pool).
   */
  @Test
  public void generateReportShouldNotThrowErrorAfterPrintingReport20Times()
      throws JRException, IOException, JasperReportViewException {
    out.writeObject(getEmptyReport());
    out.flush();
    template.setData(bos.toByteArray());
    params.put(FORMAT, PDF);

    for (int i = 0; i <= DOUBLE_HIKARI_DEFAULT_POOL_SIZE; i++) {
      service.generateReport(template, params);
    }
  }

  @Test
  public void shouldGenerateReportForDatasourceParam() throws JRException, IOException {
    out.writeObject(getEmptyReport());
    out.flush();
    template.setData(bos.toByteArray());
    params.put("datasource", new ArrayList<>());

    service.generateReport(template, params);
  }

  @Test
  public void shouldGenerateReportWithProperParams() throws JRException, IOException {
    out.writeObject(getEmptyReport());
    out.flush();
    template.setData(bos.toByteArray());
    params.put("dateTimeFormat", DATE_TIME_FORMAT);
    params.put("dateFormat", DATE_FORMAT);
    params.put(FORMAT, PDF);

    service.generateReport(template, params);
  }

  @Test
  public void shouldCatchJasperReportViewExceptionWhenDatasourceReturnsNull()
      throws JRException, IOException, SQLException {
    out.writeObject(getEmptyReport());
    out.flush();
    template.setData(bos.toByteArray());
    params.put(FORMAT, PDF);
    when(dataSource.getConnection()).thenThrow(NullPointerException.class);

    try {
      service.generateReport(template, params);
      // FIX: without this the test silently passed when no exception was thrown.
      fail("Expected JasperReportViewException");
    } catch (JasperReportViewException e) {
      assertTrue(e.getMessage().contains("stockmanagement.error.generateReport.failed"));
    }
  }

  @Test
  public void shouldCatchJasperReportViewExceptionWhenDatasourceReturnsSqlException()
      throws JRException, IOException, SQLException {
    out.writeObject(getEmptyReport());
    out.flush();
    template.setData(bos.toByteArray());
    // FIX: use the PDF constant instead of a duplicated "pdf" literal.
    params.put(FORMAT, PDF);
    when(dataSource.getConnection()).thenThrow(SQLException.class);

    try {
      service.generateReport(template, params);
      // FIX: without this the test silently passed when no exception was thrown.
      fail("Expected JasperReportViewException");
    } catch (JasperReportViewException e) {
      assertTrue(e.getMessage().contains("stockmanagement.error.generateReport.failed"));
    }
  }

  @Test(expected = JasperReportViewException.class)
  public void shouldThrowJasperReportViewExceptionIfReportIsNotSavedAsObjectOutputStream() {
    // template data is not a serialized JasperReport (only the stream header)
    template.setData(bos.toByteArray());
    params.put(FORMAT, PDF);

    service.generateReport(template, params);
  }

  @Test(expected = JasperReportViewException.class)
  public void shouldThrowJasperReportViewExceptionWhenNoParamsPassed()
      throws JRException, IOException {
    out.writeObject(getEmptyReport());
    out.flush();
    template.setData(bos.toByteArray());

    service.generateReport(template, null);
  }

  /** Compiles the empty .jrxml test fixture from the classpath. */
  private JasperReport getEmptyReport() throws JRException {
    return JasperCompileManager
        .compileReport(getClass().getResourceAsStream(EMPTY_REPORT_RESOURCE));
  }
}
package jsettlers.logic.map.newGrid.partition;

import java.awt.Color;
import java.awt.EventQueue;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.border.EmptyBorder;

import jsettlers.common.material.EMaterialType;
import jsettlers.common.position.ISPosition2D;
import jsettlers.common.position.ShortPoint2D;
import jsettlers.logic.algorithms.path.IPathCalculateable;
import jsettlers.logic.algorithms.path.test.DummyEmptyAStarMap;
import jsettlers.logic.map.newGrid.partition.manager.manageables.interfaces.IMaterialRequester;
import synchronic.timer.NetworkTimer;

/**
 * Manual Swing test harness for {@link PartitionsGrid}: displays a tiny grid,
 * runs a scripted sequence of player/material changes against it, and paints
 * the resulting partitions color-coded.
 */
public class PartitionsGridTestingWnd extends JFrame {

	// One color per partition index; NOTE(review): paint() indexes this array
	// directly with the partition id — ids >= 10 would throw; confirm the test
	// grid never produces more partitions than colors.
	private static final Color[] partitionColors = { Color.ORANGE, Color.RED, Color.BLUE, Color.CYAN, Color.GREEN, Color.LIGHT_GRAY,
			Color.DARK_GRAY, Color.MAGENTA, Color.PINK, Color.YELLOW };

	private static final int X_OFFSET = 30;
	private static final int Y_OFFSET = 170;
	private final short BLOCK_SIZE = 60;
	private static final short HEIGHT = 3;
	private static final short WIDTH = 3;

	private static final long serialVersionUID = 1L;

	private JPanel contentPane;
	private JTextField textField;
	private JButton btnRepaint;

	private PartitionsGrid partitionsGrid;
	private DummyEmptyAStarMap aStarMap;

	/**
	 * Launch the application.
	 */
	public static void main(String[] args) {
		NetworkTimer.get().schedule();

		EventQueue.invokeLater(new Runnable() {
			@Override
			public void run() {
				final PartitionsGridTestingWnd frame = new PartitionsGridTestingWnd();
				frame.setVisible(true);

				// Run the scripted test off the EDT so the UI stays responsive.
				new Thread(new Runnable() {
					@Override
					public void run() {
						initiateTests(frame);
					}
				}).start();
			}
		});
	}

	/**
	 * Scripted sequence of grid mutations exercised against the frame's grid.
	 */
	private static void initiateTests(PartitionsGridTestingWnd frame) {
		PartitionsGrid partitionsGrid = frame.partitionsGrid;

		partitionsGrid.changePlayerAt((short) 1, (short) 0, (byte) 0);
		partitionsGrid.changePlayerAt((short) 2, (short) 2, (byte) 0);
		partitionsGrid.changePlayerAt((short) 1, (short) 2, (byte) 0);
		partitionsGrid.changePlayerAt((short) 2, (short) 0, (byte) 0);

		partitionsGrid.pushMaterial(new ShortPoint2D(2, 0), EMaterialType.PLANK);
		partitionsGrid.pushMaterial(new ShortPoint2D(2, 0), EMaterialType.PLANK);
		partitionsGrid.pushMaterial(new ShortPoint2D(2, 0), EMaterialType.PLANK);

		partitionsGrid.request(new TestMaterialRequester(2, 0), EMaterialType.STONE, (byte) 1);
		partitionsGrid.request(new TestMaterialRequester(2, 0), EMaterialType.STONE, (byte) 1);
		partitionsGrid.request(new TestMaterialRequester(2, 0), EMaterialType.STONE, (byte) 1);
		partitionsGrid.request(new TestMaterialRequester(2, 0), EMaterialType.STONE, (byte) 1);

		frame.aStarMap.setBlocked(1, 1, true);

		// FIX: the original source was corrupted here (unbalanced string
		// literals spliced across the changePlayerAt calls, which did not
		// compile). Reconstructed as the statement sequence with separator
		// prints between the steps; the original println text is lost.
		partitionsGrid.changePlayerAt((short) 1, (short) 1, (byte) 0);
		System.out.println();

		partitionsGrid.changePlayerAt((short) 0, (short) 1, (byte) 0);
		System.out.println();

		partitionsGrid.changePlayerAt((short) 0, (short) 0, (byte) 0);
		System.out.println();
	}

	/**
	 * Create the frame.
	 */
	public PartitionsGridTestingWnd() {
		aStarMap = new DummyEmptyAStarMap(WIDTH, HEIGHT) {
			@Override
			public boolean isBlocked(IPathCalculateable requester, short x, short y) {
				// A tile is also blocked for a requester standing on foreign territory.
				return super.isBlocked(requester, x, y) || requester != null && partitionsGrid.getPlayerAt(x, y) != requester.getPlayer();
			}
		};

		IPartitionableGrid partitionableGrid = new IPartitionableGrid() {
			@Override
			public boolean isBlocked(short x, short y) {
				return aStarMap.isBlocked(null, x, y);
			}

			@Override
			public void changedPartitionAt(short x, short y) {
				// not needed for this manual test
			}

			@Override
			public void setDebugColor(short x, short y, jsettlers.common.Color color) {
				// not needed for this manual test
			}
		};

		partitionsGrid = new PartitionsGrid(WIDTH, HEIGHT, partitionableGrid, aStarMap);

		setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		setBounds(100, 100, 700, 500);
		contentPane = new JPanel();
		contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
		setContentPane(contentPane);
		contentPane.setLayout(null);

		textField = new JTextField();
		textField.setText("0");
		textField.setBounds(628, 25, 46, 20);
		contentPane.add(textField);
		textField.setColumns(10);

		JLabel lblTeam = new JLabel("player");
		lblTeam.setBounds(628, 11, 46, 14);
		contentPane.add(lblTeam);

		btnRepaint = new JButton("repaint");
		btnRepaint.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent arg0) {
				repaint();
			}
		});
		btnRepaint.setBounds(585, 56, 89, 23);
		contentPane.add(btnRepaint);
	}

	@Override
	public void paint(Graphics graphics) {
		super.paint(graphics);
		Graphics2D g = (Graphics2D) graphics;
		g.translate(0, 300);

		for (short x = 0; x < WIDTH; x++) {
			for (short y = 0; y < HEIGHT; y++) {
				short partition = partitionsGrid.getPartitionAt(x, y);
				int drawX = x * BLOCK_SIZE + X_OFFSET + (HEIGHT - y) * BLOCK_SIZE / 2;
				int drawY = -(HEIGHT - y) * BLOCK_SIZE + Y_OFFSET;

				// NOTE(review): fillRect/drawRect with a negative height draw
				// nothing in AWT — confirm whether these were meant to use
				// (drawY - BLOCK_SIZE, BLOCK_SIZE) instead. Left as-is.
				if (partition >= 0) {
					g.setColor(partitionColors[partition]);
					g.fillRect(drawX, drawY, BLOCK_SIZE, -BLOCK_SIZE);
				} else {
					g.setColor(Color.BLACK);
					g.drawRect(drawX, drawY, BLOCK_SIZE, -BLOCK_SIZE);
				}
				g.setColor(Color.WHITE);
				g.drawString(x + "|" + y, drawX + BLOCK_SIZE / 2, drawY - BLOCK_SIZE / 2);
			}
		}
	}

	/** Minimal {@link IMaterialRequester} stub anchored at a fixed position. */
	private static class TestMaterialRequester implements IMaterialRequester {
		private final ISPosition2D pos;

		TestMaterialRequester(int x, int y) {
			this.pos = new ShortPoint2D(x, y);
		}

		@Override
		public ISPosition2D getPos() {
			return pos;
		}

		@Override
		public boolean isActive() {
			return true;
		}

		@Override
		public void requestFailed() {
			// nothing to do in this manual test
		}
	}
}
package app.hongs;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Base class for objects cached as a serialized ".ser" file on disk.
 *
 * On load, if the cache file exists and is not expired it is deserialized;
 * otherwise {@link #imports()} is invoked to rebuild the data and the result
 * is written back. Persistence is reflective: all public fields plus all
 * non-public declared fields are stored, except transient/static/final ones.
 * Per-file read/write locks serialize access within this JVM.
 */
public abstract class CoreSerial implements Serializable {

    /**
     * Load (or rebuild) from {@code path/name.ser} with a freshness window.
     * @param path directory holding the cache file (null = default)
     * @param name cache file base name
     * @param time freshness window in milliseconds (0 = never expires)
     * @throws app.hongs.HongsException on load/save failure
     */
    public CoreSerial(String path, String name, long time)
            throws HongsException {
        this.init(path, name, time);
    }

    /**
     * Load (or rebuild) from {@code path/name.ser} with an absolute expiry date.
     * @param path directory holding the cache file (null = default)
     * @param name cache file base name
     * @param date absolute expiry instant (null = never expires)
     * @throws app.hongs.HongsException on load/save failure
     */
    public CoreSerial(String path, String name, Date date)
            throws HongsException {
        this.init(path, name, date);
    }

    /**
     * Same as {@link #CoreSerial(String, String, long)} with the default path.
     */
    public CoreSerial(String name, long time)
            throws HongsException {
        this.init(name, time);
    }

    /**
     * Same as {@link #CoreSerial(String, String, Date)} with the default path.
     */
    public CoreSerial(String name, Date date)
            throws HongsException {
        this.init(name, date);
    }

    /**
     * Load (or rebuild) from the default path, never expiring.
     */
    public CoreSerial(String name)
            throws HongsException {
        this.init(name);
    }

    /**
     * No-op constructor; the subclass must call one of the init methods itself.
     */
    public CoreSerial() {
        // subclass drives initialization explicitly
    }

    /**
     * Rebuild the cached data from its original source; called when the cache
     * file is missing or expired.
     * @throws app.hongs.HongsException on rebuild failure
     */
    abstract protected void imports() throws HongsException;

    /**
     * @param time expiry instant in epoch millis; 0 means "never expires"
     * @return true when the cache is past its expiry instant
     */
    protected boolean expired(long time) {
        return time != 0 && time < System.currentTimeMillis();
    }

    /**
     * Resolve the cache file and load it, treating {@code time} as a TTL
     * relative to the file's last modification time.
     */
    protected final void init(String path, String name, long time)
            throws HongsException {
        if (path == null) {
            path = Core.DATA_PATH + File.separator + "serial";
        }
        File file = new File(path + File.separator + name + ".ser");
        // expiry instant = lastModified + ttl; for a missing file
        // lastModified() is 0, so the instant is just the ttl (normally past).
        this.load(file, time + file.lastModified());
    }

    /**
     * Resolve the cache file and load it with an absolute expiry date.
     */
    protected final void init(String path, String name, Date date)
            throws HongsException {
        if (path == null) {
            path = Core.DATA_PATH + File.separator + "serial";
        }
        File file = new File(path + File.separator + name + ".ser");
        this.load(file, date != null ? date.getTime() : 0);
    }

    protected final void init(String name, long time)
            throws HongsException {
        this.init(null, name, time);
    }

    protected final void init(String name, Date date)
            throws HongsException {
        this.init(null, name, date);
    }

    protected final void init(String name)
            throws HongsException {
        // null Date selects the Date overload: never expires
        this.init(null, name, null);
    }

    /**
     * Load the file if fresh, otherwise rebuild via {@link #imports()} and
     * persist. Guarded by the per-file read/write lock.
     */
    protected void load(File file, long time)
            throws HongsException {
        ReadWriteLock rwlock = lock(file.getAbsolutePath());
        Lock lock;

        lock = rwlock.readLock();
        lock.lock();
        try {
            if (file.exists() && !expired(time)) {
                load(file);
                return;
            }
        } finally {
            lock.unlock();
        }

        // NOTE: under contention two threads may both reach this point and
        // each run imports()+save(); harmless but redundant work.
        lock = rwlock.writeLock();
        lock.lock();
        try {
            imports();
            save(file);
        } finally {
            lock.unlock();
        }
    }

    /**
     * Deserialize the field map from {@code file} and apply it to this object.
     */
    protected final void load(File file)
            throws HongsException {
        FileInputStream fis = null;
        ObjectInputStream ois = null;
        try {
            fis = new FileInputStream(file);
            ois = new ObjectInputStream(fis);
            Map map = (Map) ois.readObject();
            load(map);
        } catch (ClassNotFoundException ex) {
            throw new HongsException(0x10d8, ex);
        } catch (FileNotFoundException ex) {
            throw new HongsException(0x10d6, ex);
        } catch (IOException ex) {
            throw new HongsException(0x10d4, ex);
        } finally {
            // FIX: streams were only closed on the success path before,
            // leaking the file handle whenever readObject failed.
            try {
                if (ois != null) {
                    ois.close();
                } else if (fis != null) {
                    fis.close();
                }
            } catch (IOException ex) {
                // best effort on close
            }
        }
    }

    /**
     * Serialize this object's field map to {@code file}, creating the file
     * and its parent directories as needed.
     */
    protected final void save(File file)
            throws HongsException {
        if (!file.exists()) {
            File dn = file.getParentFile();
            if (!dn.exists()) {
                dn.mkdirs();
            }
            try {
                file.createNewFile();
            } catch (IOException ex) {
                throw new HongsException(0x10d0, ex);
            }
        }

        FileOutputStream fos = null;
        ObjectOutputStream oos = null;
        try {
            fos = new FileOutputStream(file);
            oos = new ObjectOutputStream(fos);
            Map map = new HashMap();
            save(map);
            oos.writeObject(map);
            oos.flush();
        } catch (FileNotFoundException ex) {
            throw new HongsException(0x10d6, ex);
        } catch (IOException ex) {
            throw new HongsException(0x10d2, ex);
        } finally {
            // FIX: streams were only closed on the success path before,
            // leaking the file handle whenever writeObject failed.
            try {
                if (oos != null) {
                    oos.close();
                } else if (fos != null) {
                    fos.close();
                }
            } catch (IOException ex) {
                // best effort on close
            }
        }
    }

    /**
     * Populate this object's fields from the deserialized map. Skips
     * transient/static/final fields; the second pass over declared fields
     * skips public ones already handled by the first pass.
     */
    private void load(Map<String, Object> map)
            throws HongsException {
        Field[] fields;

        fields = this.getClass().getFields();
        for (Field field : fields) {
            int ms = field.getModifiers();
            if (Modifier.isTransient(ms)
            ||  Modifier.isStatic(ms)
            ||  Modifier.isFinal(ms)) {
                continue;
            }
            String name = field.getName();
            try {
                field.set(this, map.get(name));
            } catch (IllegalAccessException e) {
                throw new HongsException(0x10da, e);
            } catch (IllegalArgumentException e) {
                throw new HongsException(0x10da, e);
            }
        }

        fields = this.getClass().getDeclaredFields();
        for (Field field : fields) {
            int ms = field.getModifiers();
            if (Modifier.isTransient(ms)
            ||  Modifier.isPublic(ms)
            ||  Modifier.isStatic(ms)
            ||  Modifier.isFinal(ms)) {
                continue;
            }
            String name = field.getName();
            try {
                field.setAccessible(true);
                field.set(this, map.get(name));
            } catch (IllegalAccessException e) {
                throw new HongsException(0x10da, e);
            } catch (IllegalArgumentException e) {
                throw new HongsException(0x10da, e);
            }
        }
    }

    /**
     * Collect this object's persistable fields into the map; mirror image of
     * {@link #load(Map)}.
     */
    private void save(Map<String, Object> map)
            throws HongsException {
        Field[] fields;

        fields = this.getClass().getFields();
        for (Field field : fields) {
            int ms = field.getModifiers();
            if (Modifier.isTransient(ms)
            ||  Modifier.isStatic(ms)
            ||  Modifier.isFinal(ms)) {
                continue;
            }
            String name = field.getName();
            try {
                map.put(name, field.get(this));
            } catch (IllegalAccessException e) {
                throw new HongsException(0x10da, e);
            } catch (IllegalArgumentException e) {
                throw new HongsException(0x10da, e);
            }
        }

        fields = this.getClass().getDeclaredFields();
        for (Field field : fields) {
            int ms = field.getModifiers();
            if (Modifier.isTransient(ms)
            ||  Modifier.isPublic(ms)
            ||  Modifier.isStatic(ms)
            ||  Modifier.isFinal(ms)) {
                continue;
            }
            String name = field.getName();
            try {
                field.setAccessible(true);
                map.put(name, field.get(this));
            } catch (IllegalAccessException e) {
                throw new HongsException(0x10da, e);
            } catch (IllegalArgumentException e) {
                throw new HongsException(0x10da, e);
            }
        }
    }

    /**
     * Get (or lazily create) the per-file lock shared by all CoreSerial
     * instances in this JVM.
     */
    private ReadWriteLock lock(String flag) {
        ReadWriteLock rwlock;
        Lock lock;

        lock = lockr.readLock();
        lock.lock();
        try {
            rwlock = locks.get(flag);
            if (rwlock != null) {
                return rwlock;
            }
        } finally {
            lock.unlock();
        }

        lock = lockr.writeLock();
        lock.lock();
        try {
            // FIX: re-check under the write lock. Previously a new lock was
            // unconditionally put into the map, so two racing threads could
            // each obtain a DIFFERENT lock for the same file, defeating the
            // mutual exclusion in load(File, long).
            rwlock = locks.get(flag);
            if (rwlock == null) {
                rwlock = new ReentrantReadWriteLock();
                locks.put(flag, rwlock);
            }
            return rwlock;
        } finally {
            lock.unlock();
        }
    }

    // Per-file locks, guarded by lockr (HashMap itself is not thread-safe).
    private static Map<String, ReadWriteLock> locks = new HashMap<String, ReadWriteLock>();
    private static ReadWriteLock lockr = new ReentrantReadWriteLock();
}
package net.openhft.chronicle.network.internal.lookuptable;

import net.openhft.chronicle.bytes.MappedBytes;
import net.openhft.chronicle.core.Jvm;
import net.openhft.chronicle.core.OS;
import net.openhft.chronicle.core.io.Closeable;
import net.openhft.chronicle.core.io.IORuntimeException;
import net.openhft.chronicle.core.io.ReferenceOwner;
import net.openhft.chronicle.network.HostnamePortLookupTable;
import net.openhft.chronicle.wire.*;
import org.jetbrains.annotations.NotNull;

import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.function.BiConsumer;
import java.util.function.Supplier;

import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static net.openhft.chronicle.core.util.Time.sleep;

/**
 * Stores the mappings in a shared file, will work across processes.
 *
 * Every public operation takes an exclusive (or shared, for reads) OS file
 * lock on the memory-mapped table file, re-reads the table into the local
 * {@code allMappings} cache, applies the operation, and (for writes) writes
 * the whole table back as a JSON map. Methods are also {@code synchronized}
 * to serialise access within this process.
 */
public class FileBasedHostnamePortLookupTable implements HostnamePortLookupTable, java.io.Closeable, ReferenceOwner {

    // We want to prevent resizing: the file is sized up front so the mapped
    // region never needs to grow (resizing under a FileLock would fail, see
    // lockFileAndDo).
    private static final long MINIMUM_INITIAL_FILE_SIZE_BYTES = 1_024 * 512;
    // Give up acquiring the file lock after this long.
    private static final long LOCK_TIMEOUT_MS = 10_000;
    // Give up deleting the table file on close() after this long.
    private static final int DELETE_TABLE_FILE_TIMEOUT_MS = 1_000;
    // This process's PID; stored with each mapping so clear() only removes our own.
    private static final int PID = Jvm.getProcessId();
    private static final String DEFAULT_FILE_NAME = "shared_hostname_mappings";

    private final JSONWire sharedTableWire;
    private final MappedBytes sharedTableBytes;
    private final File sharedTableFile;
    private final long actualBytesSize;
    // In-process cache of the table contents, refreshed from the file under lock.
    private final ConcurrentSkipListMap<String, ProcessScopedMapping> allMappings = new ConcurrentSkipListMap<>();

    public FileBasedHostnamePortLookupTable() {
        this(DEFAULT_FILE_NAME);
    }

    /**
     * Opens (creating if necessary) the shared table file and memory-maps it.
     *
     * @param fileName path of the shared mapping file
     * @throws RuntimeException wrapping any IOException from file creation/mapping
     */
    public FileBasedHostnamePortLookupTable(String fileName) {
        sharedTableFile = new File(fileName);
        try {
            // NOTE(review): this only throws when the file was *just created*
            // and is not writable, yet the message says "existing" -- the
            // intended condition may have been !createNewFile() && !canWrite().
            // TODO confirm against the project's history/tests.
            if (sharedTableFile.createNewFile() && !sharedTableFile.canWrite()) {
                throw new IllegalStateException("Cannot write to existing shared mapping file " + sharedTableFile);
            }
            // Round the minimum size up to a whole number of safe pages.
            long pagesForMinimum = (long) Math.ceil(((float) MINIMUM_INITIAL_FILE_SIZE_BYTES) / OS.SAFE_PAGE_SIZE);
            actualBytesSize = pagesForMinimum * OS.SAFE_PAGE_SIZE;
            sharedTableBytes = MappedBytes.mappedBytes(sharedTableFile, actualBytesSize, OS.SAFE_PAGE_SIZE, false);
            // Cross-thread access is serialised by the synchronized methods +
            // file lock, so Chronicle's single-thread check is disabled.
            sharedTableBytes.disableThreadSafetyCheck(true);
            sharedTableWire = new JSONWire(sharedTableBytes);
            sharedTableWire.consumePadding();
        } catch (IOException e) {
            throw new RuntimeException("Error creating shared mapping file", e);
        }
    }

    /**
     * Looks up the address registered under the given description.
     *
     * @return the registered address, or null if none
     */
    @Override
    public synchronized InetSocketAddress lookup(String description) {
        return lockFileAndDo(() -> {
            readFromTable();
            final ProcessScopedMapping mapping = allMappings.get(description);
            return mapping != null ? mapping.inetSocketAddress() : null;
        }, true);
    }

    /**
     * Removes all mappings created by THIS process (matching PID) and writes
     * the table back; other processes' entries are left intact.
     */
    @Override
    public synchronized void clear() {
        lockFileAndDo(() -> {
            readFromTable();
            allMappings.keySet().forEach(key -> {
                if (allMappings.get(key).pid == PID) {
                    allMappings.remove(key);
                }
            });
            writeToTable();
        }, false);
    }

    /**
     * Returns the set of all registered descriptions.
     *
     * NOTE(review): this returns a live view of the internal map's keySet,
     * which can change after the lock is released -- confirm callers expect that.
     */
    @Override
    public synchronized Set<String> aliases() {
        return lockFileAndDo(() -> {
            readFromTable();
            return allMappings.keySet();
        }, true);
    }

    /**
     * Registers (or replaces) the mapping for the given description, logging
     * an error if an existing entry was overwritten.
     */
    @Override
    public synchronized void put(String description, InetSocketAddress address) {
        lockFileAndDo(() -> {
            readFromTable();
            final ProcessScopedMapping newMapping = new ProcessScopedMapping(PID, address);
            ProcessScopedMapping oldValue = allMappings.put(description, newMapping);
            if (oldValue != null) {
                Jvm.error().on(FileBasedHostnamePortLookupTable.class,
                        format("Over-wrote hostname mapping for %s, old value=%s, new value=%s",
                                description, oldValue, newMapping));
            }
            writeToTable();
        }, false);
    }

    /** Applies the consumer to every (description, address) pair under a shared lock. */
    @Override
    public synchronized void forEach(BiConsumer<String, InetSocketAddress> consumer) {
        lockFileAndDo(() -> {
            readFromTable();
            allMappings.forEach((description, mapping) -> consumer.accept(description, mapping.inetSocketAddress()));
        }, true);
    }

    /**
     * Serialises the whole in-memory map into the shared file as a JSON map,
     * then zeroes out any stale bytes left over from a previously larger table.
     * Caller must hold the exclusive file lock.
     */
    private void writeToTable() {
        assert sharedTableWire.startUse();
        sharedTableBytes.reserve(this);
        try {
            sharedTableWire.clear();
            sharedTableWire.writeAllAsMap(String.class, ProcessScopedMapping.class, allMappings);
            zeroOutRemainingBytes((int) sharedTableBytes.writePosition());
        } finally {
            sharedTableBytes.release(this);
            assert sharedTableWire.endUse();
        }
    }

    /**
     * Zeroes bytes from {@code fromIndex} up to the first byte that is already
     * zero (the rest of the region is assumed to still be zero beyond that point).
     */
    private void zeroOutRemainingBytes(int fromIndex) {
        sharedTableBytes.readLimit(sharedTableBytes.realCapacity());
        for (int i = fromIndex; i < actualBytesSize; i++) {
            sharedTableBytes.readPosition(i);
            if (sharedTableBytes.readByte() == 0) {
                break; // remainder already clean
            }
            sharedTableBytes.writeByte(i, (byte) 0);
        }
    }

    /**
     * Re-reads the whole table from the shared file into {@code allMappings},
     * updating changed entries and dropping entries no longer present in the
     * file. Caller must hold the file lock (shared is sufficient).
     */
    private void readFromTable() {
        final StringBuilder sb = Wires.acquireStringBuilder();
        final ProcessScopedMapping reusableMapping = new ProcessScopedMapping();
        assert sharedTableWire.startUse();
        sharedTableBytes.reserve(this);
        try {
            sharedTableBytes.readPosition(0);
            sharedTableBytes.readLimit(sharedTableBytes.realCapacity());
            Set<String> readMappings = new HashSet<>();
            while (true) {
                final ValueIn valueIn = sharedTableWire.readEventName(sb);
                if (sb.length() == 0) {
                    break; // no more entries
                }
                valueIn.object(reusableMapping, ProcessScopedMapping.class);
                final String name = sb.toString();
                readMappings.add(name);
                insertOrUpdateEntry(name, reusableMapping);
            }
            // Remove removed
            Set<String> existingKeys = new HashSet<>(allMappings.keySet());
            for (String key : existingKeys) {
                if (!readMappings.contains(key)) {
                    allMappings.remove(key);
                }
            }
        } finally {
            sharedTableBytes.release(this);
            assert sharedTableWire.endUse();
        }
    }

    /**
     * Stores a defensive copy of {@code mapping} (which is a reused buffer)
     * only when the cached entry is missing or different.
     */
    private void insertOrUpdateEntry(String name, ProcessScopedMapping mapping) {
        final ProcessScopedMapping existingMapping = allMappings.get(name);
        if (existingMapping == null || !existingMapping.equals(mapping)) {
            allMappings.put(name, new ProcessScopedMapping(mapping.pid, mapping.hostname, mapping.port));
        }
    }

    /** Runnable convenience overload of {@link #lockFileAndDo(Supplier, boolean)}. */
    private void lockFileAndDo(Runnable runnable, boolean shared) {
        this.lockFileAndDo(() -> {
            runnable.run();
            return null;
        }, shared);
    }

    /**
     * Acquires an OS file lock on the whole mapped file (shared or exclusive),
     * runs the supplier, and returns its result. Retries with a quadratic
     * backoff capped at 250ms until LOCK_TIMEOUT_MS elapses, then throws a
     * RuntimeException whose cause is the last lock failure (if any).
     */
    private <T> T lockFileAndDo(Supplier<T> supplier, boolean shared) {
        final long timeoutAt = System.currentTimeMillis() + LOCK_TIMEOUT_MS;
        final long startMs = System.currentTimeMillis();
        Throwable lastThrown = null;
        int count;
        for (count = 1; System.currentTimeMillis() < timeoutAt; count++) {
            try (FileLock fileLock = sharedTableBytes.mappedFile().tryLock(0, Long.MAX_VALUE, shared)) {
                if (fileLock != null) {
                    try {
                        T t = supplier.get();
                        // NOTE(review): elapsedMs includes the supplier's own
                        // runtime, not just lock acquisition.
                        long elapsedMs = System.currentTimeMillis() - startMs;
                        if (elapsedMs > 100)
                            Jvm.perf().on(getClass(), "Took " + elapsedMs / 1000.0 + " seconds to obtain the lock on " + sharedTableFile, lastThrown);
                        return t;
                    } catch (OverlappingFileLockException e) {
                        throw new RuntimeException("Attempted to resize the underlying bytes, increase the MINIMUM_INITIAL_FILE_SIZE_BYTES or make this work with resizing!", e);
                    }
                }
            } catch (IOException | OverlappingFileLockException e) {
                // failed to acquire the lock, wait until other operation completes
                lastThrown = e;
            }
            int delay = Math.min(250, count * count);
            sleep(delay, MILLISECONDS);
        }
        if (Jvm.isDebugEnabled(FileBasedHostnamePortLookupTable.class)) {
            final long elapsedMs = System.currentTimeMillis() - startMs;
            final String message = "Failed to acquire lock on the shared mappings file. Retrying, file=" + sharedTableFile + ", count=" + count + ", elapsed=" + elapsedMs + " ms";
            Jvm.debug().on(FileBasedHostnamePortLookupTable.class, message, lastThrown);
        }
        RuntimeException re = new RuntimeException("Couldn't acquire lock on shared mapping file " + sharedTableFile);
        re.initCause(lastThrown);
        throw re;
    }

    /**
     * Releases the wire/bytes and then spins (bounded by
     * DELETE_TABLE_FILE_TIMEOUT_MS) trying to delete the shared file, warning
     * if it could not be removed.
     */
    @Override
    public synchronized void close() throws IOException {
        Closeable.closeQuietly(sharedTableWire, sharedTableBytes);
        long endTime = System.currentTimeMillis() + DELETE_TABLE_FILE_TIMEOUT_MS;
        while (sharedTableFile.exists()) {
            sharedTableFile.delete();
            if (System.currentTimeMillis() > endTime) {
                Jvm.warn().on(FileBasedHostnamePortLookupTable.class, "Error deleting the shared lookup table");
                break;
            }
        }
    }

    /**
     * One table entry: the owning process's PID plus hostname/port.
     * The InetSocketAddress is resolved lazily and cached.
     */
    static class ProcessScopedMapping implements ReadMarshallable, WriteMarshallable {
        private int pid;
        private String hostname;
        private int port;
        private InetSocketAddress address; // lazily created, not marshalled

        public ProcessScopedMapping() {
        }

        public ProcessScopedMapping(int pid, InetSocketAddress address) {
            if (address == null) {
                throw new IllegalArgumentException("Address must not be null");
            }
            this.pid = pid;
            this.hostname = address.getHostName();
            this.port = address.getPort();
            this.address = address;
        }

        public ProcessScopedMapping(int pid, String hostname, int port) {
            this.pid = pid;
            this.hostname = hostname;
            this.port = port;
        }

        public InetSocketAddress inetSocketAddress() {
            if (address == null) {
                address = new InetSocketAddress(hostname, port);
            }
            return address;
        }

        @Override
        public void readMarshallable(@NotNull WireIn wire) throws IORuntimeException {
            pid = wire.read("pid").int32();
            hostname = wire.read("hostname").text();
            // NOTE(review): writeMarshallable uses int32() for "port" but this
            // reads with readInt() -- confirm the two are symmetric in JSONWire.
            port = wire.read("port").readInt();
        }

        @Override
        public void writeMarshallable(@NotNull WireOut wire) {
            wire.write("pid").int32(pid)
                    .write("hostname").text(hostname)
                    .write("port").int32(port);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            ProcessScopedMapping that = (ProcessScopedMapping) o;
            // address is derived state and deliberately excluded.
            return pid == that.pid && port == that.port && hostname.equals(that.hostname);
        }

        @Override
        public int hashCode() {
            return Objects.hash(pid, hostname, port);
        }

        @Override
        public String toString() {
            return "ProcessScopedMapping{" +
                    "pid=" + pid +
                    ", hostname='" + hostname + '\'' +
                    ", port=" + port +
                    ", hasInetSocketAddress=" + (address != null) +
                    '}';
        }
    }
}
package org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.get;

import java.io.StringReader;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import java.util.concurrent.BlockingQueue;

import org.apache.log4j.Logger;
import org.buddycloud.channelserver.channel.ChannelManager;
import org.buddycloud.channelserver.channel.Conf;
import org.buddycloud.channelserver.db.CloseableIterator;
import org.buddycloud.channelserver.db.exception.NodeStoreException;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.JabberPubsub;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.PubSubElementProcessorAbstract;
import org.buddycloud.channelserver.pubsub.model.GlobalItemID;
import org.buddycloud.channelserver.pubsub.model.NodeItem;
import org.buddycloud.channelserver.pubsub.model.impl.GlobalItemIDImpl;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
import org.xmpp.packet.Packet;
import org.xmpp.packet.PacketError;
import org.xmpp.packet.PacketError.Condition;
import org.xmpp.packet.PacketError.Type;

/**
 * Handles pubsub &lt;recent-items/&gt; queries: returns recent items from the
 * requesting user's "/posts" nodes, honouring XEP-0059 Result Set Management
 * (RSM) paging via the optional &lt;set/&gt; element.
 */
public class RecentItemsGet extends PubSubElementProcessorAbstract {

    private static final Logger LOGGER = Logger.getLogger(RecentItemsGet.class);
    private static final String NODE_SUFIX = "/posts";

    // NOTE(review): SimpleDateFormat is not thread-safe; sharing a static
    // instance is only safe if each processor is confined to one thread --
    // TODO confirm the threading model of the packet processors.
    private static final SimpleDateFormat SDF = new SimpleDateFormat(Conf.DATE_FORMAT);
    static {
        SDF.setTimeZone(TimeZone.getTimeZone("UTC"));
    }

    // Query parameters parsed from the request.
    private Date maxAge;
    private Integer maxItems;
    private Element pubsub;
    private SAXReader xmlReader;

    // RSM details collected while building the reply.
    private String firstItemId = null;
    private String lastItemId = null;
    private GlobalItemID afterItemId = null;
    private int maxResults = -1;

    public RecentItemsGet(BlockingQueue<Packet> outQueue, ChannelManager channelManager) {
        setChannelManager(channelManager);
        setOutQueue(outQueue);
        xmlReader = new SAXReader();
    }

    /**
     * Processes a &lt;recent-items/&gt; request and places exactly one reply
     * (result or error) on the outgoing queue.
     *
     * @param elm      the &lt;recent-items/&gt; element
     * @param actorJID the requesting actor (falls back to the stanza's from)
     * @param reqIQ    the incoming IQ request
     * @param rsm      the optional RSM &lt;set/&gt; element, may be null
     */
    @Override
    public void process(Element elm, JID actorJID, IQ reqIQ, Element rsm) throws Exception {
        response = IQ.createResultIQ(reqIQ);
        request = reqIQ;
        actor = actorJID;
        node = elm.attributeValue("node");
        resultSetManagement = rsm;

        if (null == actor) {
            actor = request.getFrom();
        }
        if (!isValidStanza()) {
            outQueue.put(response);
            return;
        }
        if (!channelManager.isLocalJID(request.getFrom())) {
            response.getElement().addAttribute("remote-server-discover", "false");
        }
        pubsub = response.getElement().addElement("pubsub", JabberPubsub.NAMESPACE_URI);
        try {
            if (!parseRsmElement()) {
                // An error reply has already been prepared on the response.
                outQueue.put(response);
                return;
            }
            addRecentItems();
            addRsmElement();
        } catch (NodeStoreException e) {
            LOGGER.error(e);
            response.getElement().remove(pubsub);
            setErrorCondition(PacketError.Type.wait, PacketError.Condition.internal_server_error);
        }
        // BUG FIX: the response was previously also queued inside the try
        // block, so a successful request was sent twice. Send exactly once.
        outQueue.put(response);
    }

    /**
     * Parses the RSM &lt;set/&gt; element (max / after), if present.
     *
     * @return false if the 'after' id could not be parsed (a bad-request
     *         error reply has been set up on the response), true otherwise.
     *         NOTE(review): a non-numeric 'max' still throws an uncaught
     *         NumberFormatException, as in the original code.
     */
    private boolean parseRsmElement() {
        if (null == resultSetManagement) {
            return true;
        }
        Element max = null;
        Element after = null;
        if (null != (max = resultSetManagement.element("max"))) {
            maxResults = Integer.parseInt(max.getTextTrim());
        }
        if (null != (after = resultSetManagement.element("after"))) {
            try {
                afterItemId = GlobalItemIDImpl.fromString(after.getTextTrim());
            } catch (IllegalArgumentException e) {
                LOGGER.error(e);
                createExtendedErrorReply(Type.modify, Condition.bad_request,
                        "Could not parse the 'after' id: " + after);
                // BUG FIX: previously processing continued after this error,
                // so the query still ran and the error reply could be clobbered.
                return false;
            }
        }
        return true;
    }

    /**
     * Adds the RSM &lt;set/&gt; element (first/last/count) to the reply.
     * No-op when no items were added (firstItemId is null).
     */
    private void addRsmElement() throws NodeStoreException {
        if (null == firstItemId) {
            return;
        }
        Element rsm = pubsub.addElement("set");
        rsm.addNamespace("", NS_RSM);
        rsm.addElement("first").setText(firstItemId);
        rsm.addElement("last").setText(lastItemId);
        rsm.addElement("count").setText(
                String.valueOf(channelManager.getCountRecentItems(actor, maxAge, maxItems, NODE_SUFIX)));
    }

    /**
     * Streams recent items into the reply, grouping consecutive items of the
     * same node under one &lt;items/&gt; element. Unparseable payloads are
     * logged and skipped.
     */
    private void addRecentItems() throws NodeStoreException {
        CloseableIterator<NodeItem> items = channelManager.getRecentItems(
                actor, maxAge, maxItems, maxResults, afterItemId, NODE_SUFIX);
        String lastNodeId = "";
        Element itemsElement = null;
        while (items.hasNext()) {
            NodeItem item = items.next();
            if (!item.getNodeId().equals(lastNodeId)) {
                itemsElement = pubsub.addElement("items");
                itemsElement.addAttribute("node", item.getNodeId());
                lastNodeId = item.getNodeId();
            }
            try {
                Element entry = xmlReader.read(new StringReader(item.getPayload())).getRootElement();
                // Prefer the entry's own <id/> for RSM paging ids when present.
                Element entryIdEl = entry.element("id");
                String itemId = item.getId();
                if (entryIdEl != null) {
                    itemId = entryIdEl.getTextTrim();
                }
                Element itemElement = itemsElement.addElement("item");
                // NOTE(review): the item@id attribute intentionally(?) uses the
                // store id while RSM first/last use the entry-derived id --
                // confirm this asymmetry is wanted.
                itemElement.addAttribute("id", item.getId());
                if (null == firstItemId) {
                    firstItemId = itemId;
                }
                lastItemId = itemId;
                itemElement.add(entry);
            } catch (DocumentException e) {
                LOGGER.error("Error parsing a node entry, ignoring. " + item.getId());
            }
        }
    }

    /**
     * Validates the mandatory 'max' and 'since' attributes, preparing a
     * bad-request error reply when validation fails.
     *
     * @return true when the stanza is valid
     */
    private boolean isValidStanza() {
        Element recentItems = request.getChildElement().element("recent-items");
        try {
            String max = recentItems.attributeValue("max");
            if (null == max) {
                createExtendedErrorReply(PacketError.Type.modify, PacketError.Condition.bad_request,
                        "max-required");
                return false;
            }
            maxItems = Integer.parseInt(max);
            String since = recentItems.attributeValue("since");
            if (null == since) {
                createExtendedErrorReply(PacketError.Type.modify, PacketError.Condition.bad_request,
                        "since-required");
                return false;
            }
            maxAge = SDF.parse(since);
        } catch (NumberFormatException e) {
            LOGGER.error(e);
            createExtendedErrorReply(PacketError.Type.modify, PacketError.Condition.bad_request,
                    "invalid-max-value-provided");
            return false;
        } catch (ParseException e) {
            createExtendedErrorReply(PacketError.Type.modify, PacketError.Condition.bad_request,
                    "invalid-since-value-provided");
            LOGGER.error(e);
            return false;
        }
        return true;
    }

    /** Accepts only &lt;recent-items/&gt; elements. */
    @Override
    public boolean accept(Element elm) {
        return elm.getName().equals("recent-items");
    }
}
package xyz.brassgoggledcoders.opentransport.modules.immersiveengineering; import blusunrize.immersiveengineering.common.IEContent; import blusunrize.immersiveengineering.common.blocks.BlockIEBase; import blusunrize.immersiveengineering.common.blocks.wooden.BlockTypes_WoodenDevice0; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import xyz.brassgoggledcoders.opentransport.api.blockcontainers.IBlockContainer; import xyz.brassgoggledcoders.opentransport.blocks.BlockContainerBase; import xyz.brassgoggledcoders.opentransport.interactions.BlockActivationInteraction; import xyz.brassgoggledcoders.opentransport.registries.BlockContainerRegistry; public class IEBlockContainers { public static void preInit(FMLPreInitializationEvent event) { BlockIEBase blockWoodenDevice0 = IEContent.blockWoodenDevice0; IBlockContainer woodenStorageCrate = new BlockContainerBase(blockWoodenDevice0).setUnlocalizedName( blockWoodenDevice0.getUnlocalizedName() + "." + BlockTypes_WoodenDevice0.CRATE.getName()).setBlockState( blockWoodenDevice0.getDefaultState() .withProperty(blockWoodenDevice0.property, BlockTypes_WoodenDevice0.CRATE)) .setClickInteraction(new BlockActivationInteraction()).setGuiInterface(new CrateInterface()); BlockContainerRegistry.registerContainer(woodenStorageCrate); IBlockContainer reinforcedWoodenStorageCrate = new BlockContainerBase(blockWoodenDevice0).setUnlocalizedName( blockWoodenDevice0.getUnlocalizedName() + "." + BlockTypes_WoodenDevice0.REINFORCED_CRATE.getName()) .setBlockState(blockWoodenDevice0.getDefaultState() .withProperty(blockWoodenDevice0.property, BlockTypes_WoodenDevice0.REINFORCED_CRATE)) .setClickInteraction(new BlockActivationInteraction()).setGuiInterface(new CrateInterface()); BlockContainerRegistry.registerContainer(reinforcedWoodenStorageCrate); } }
package jade.domain;

import jade.util.leap.*;
import jade.domain.FIPAAgentManagement.*;
import jade.core.Agent;
import jade.core.AID;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.MessageTemplate;
import jade.content.*;
import jade.content.lang.*;
import jade.content.lang.sl.*;
import jade.content.lang.Codec.*;
import jade.content.onto.Ontology;
import jade.content.onto.BasicOntology;
import jade.content.onto.basic.Action;
import jade.content.onto.basic.Result;
import jade.content.abs.*;
import jade.content.onto.OntologyException;
import jade.content.onto.basic.Done;

import java.util.Date;

/**
 * This class provides a set of static methods to communicate with
 * a DF Service that complies with FIPA specifications.
 * It includes methods to register, deregister, modify and search with a DF.
 * Each of these methods has a version with all the needed parameters, or with a
 * subset of them where those parameters that can be omitted have been
 * defaulted to the default DF of the platform, the AID of the sending agent,
 * the default Search Constraints.
 * Notice that all these methods block every activity of the agent until
 * the action (i.e. register/deregister/modify/search) has been successfully
 * executed or a jade.domain.FIPAException exception has been thrown
 * (e.g. because a FAILURE message has been received by the DF).
 * In some cases, instead, it is more convenient to execute this task in a
 * non-blocking way. The method getNonBlockingBehaviour() returns a
 * non-blocking behaviour of type RequestFIPAServiceBehaviour that can be
 * added to the queue of the agent behaviours, as usual, by using
 * <code>Agent.addBehaviour()</code>.
 * @author Fabio Bellifemine (CSELT S.p.A.)
 * @author Elisabetta Cortese (TiLab S.p.A.)
 * @version $Date$ $Revision$
 **/
public class DFService extends FIPAServiceCommunicator {

    // Codec/ontology/content-manager shared by every static method; all
    // fillContent/extractContent calls are serialised via synchronized (cm)
    // because ContentManager is not thread-safe.
    private static Codec c = new SLCodec();
    private static Ontology o = FIPAManagementOntology.getInstance();
    private static ContentManager cm = new ContentManager();
    static {
        cm.registerLanguage(c, "FIPA-SL0");
        cm.registerLanguage(c, "FIPA-SL"); // The subscription message uses full SL
        cm.registerOntology(o);
    }

    /**
     * Checks that the <code>DFAgentDescription</code> contains the mandatory
     * slots, i.e. the agent name and, for each ServiceDescription, the
     * service name and the service type.
     * @throws MissingParameter if the description is not valid
     */
    static void checkIsValid(DFAgentDescription dfd) throws MissingParameter {
        // FIXME: use FIPAManagementOntology constants instead of Strings
        if (dfd.getName() == null)
            throw new MissingParameter(FIPAManagementOntology.DFAGENTDESCRIPTION, "name");
        Iterator i = dfd.getAllServices();
        ServiceDescription sd;
        while (i.hasNext()) {
            sd = (ServiceDescription) i.next();
            if (sd.getName() == null)
                throw new MissingParameter(FIPAManagementOntology.SERVICEDESCRIPTION, "name");
            if (sd.getType() == null)
                throw new MissingParameter(FIPAManagementOntology.SERVICEDESCRIPTION, "type");
        }
    }

    /**
     * Register a DFDescription with a <b>DF</b> agent. The lease duration
     * request is not exact; the returned lease is allowed to have a shorter
     * (but not longer) duration than what was requested by setting the
     * corresponding lease-time field in the <code>DFAgentDescription</code>.
     * @param a is the Agent performing the registration (it is needed in order
     * to send/receive messages)
     * @param dfName The AID of the <b>DF</b> agent to register with.
     * @param dfd A <code>DFAgentDescription</code> object containing all data
     * necessary to the registration. If the Agent name is empty, then it is
     * set according to the <code>a</code> parameter.
     * @return the effective lease time assigned to the
     * <code>DFAgentDescription</code>; null if the reply could not be decoded
     * into a Done (NOTE(review): the javadoc above historically said "0").
     * @exception FIPAException A suitable exception can be thrown when a
     * <code>refuse</code> or <code>failure</code> message is received from the
     * DF to indicate some error condition or when the method locally discovers
     * that the DFDescription is not valid.
     */
    public static Date register(Agent a, AID dfName, DFAgentDescription dfd) throws FIPAException {
        ACLMessage request = createRequestMessage(a, dfName);
        if (dfd.getName() == null)
            dfd.setName(a.getAID());
        checkIsValid(dfd);
        // Build a DF action object for the request
        Register r = new Register();
        r.setDescription(dfd);
        Action act = new Action();
        act.setActor(dfName);
        act.setAction(r);
        synchronized (cm) {
            try {
                cm.fillContent(request, act);
            } catch (Exception e) {
                throw new FIPAException("Error encoding REQUEST content. " + e);
            }
        }
        // Send message and collect reply
        ACLMessage reply = doFipaRequestClient(a, request);
        // Extract the effective lease time assigned to the current request.
        Date retLeaseTime = null;
        Done doneRegister = null;
        try {
            synchronized (cm) {
                doneRegister = (Done) cm.extractContent(reply);
            }
        } catch (Exception e) {
            throw new FIPAException("Error decoding REQUEST content. " + e);
        }
        if (doneRegister != null) {
            Action replyAction = (Action) doneRegister.getAction();
            Register replyRegister = (Register) replyAction.getAction();
            DFAgentDescription replyDFA = (DFAgentDescription) replyRegister.getDescription();
            retLeaseTime = replyDFA.getLeaseTime();
        }
        return retLeaseTime;
    }

    /**
     * Registers a <code>DFAgentDescription</code> with the default DF.
     * @see #register(Agent,AID,DFAgentDescription)
     **/
    public static Date register(Agent a, DFAgentDescription dfd) throws FIPAException {
        return register(a, a.getDefaultDF(), dfd);
    }

    /**
     * Deregister a DFAgentDescription from a <b>DF</b> agent.
     * @param dfName The AID of the <b>DF</b> agent to deregister from.
     * @param dfd A <code>DFAgentDescription</code> object containing all data
     * necessary to the deregistration.
     * @exception FIPAException A suitable exception can be thrown when a
     * <code>refuse</code> or <code>failure</code> message is received from the
     * DF to indicate some error condition.
     */
    public static void deregister(Agent a, AID dfName, DFAgentDescription dfd) throws FIPAException {
        ACLMessage request = createRequestMessage(a, dfName);
        if (dfd.getName() == null)
            dfd.setName(a.getAID());
        // Build a DF action object for the request
        Deregister d = new Deregister();
        d.setDescription(dfd);
        Action act = new Action();
        act.setActor(dfName);
        act.setAction(d);
        synchronized (cm) {
            try {
                cm.fillContent(request, act);
            } catch (Exception e) {
                throw new FIPAException("Error encoding REQUEST content. " + e);
            }
        }
        // Send message and collect reply
        doFipaRequestClient(a, request);
    }

    /**
     * The default DF of the platform is used.
     * @see #deregister(Agent a, AID dfName, DFAgentDescription dfd)
     **/
    public static void deregister(Agent a, DFAgentDescription dfd) throws FIPAException {
        deregister(a, a.getDefaultDF(), dfd);
    }

    /**
     * A default Agent Description is used which contains only the AID
     * of this agent.
     * @see #deregister(Agent a, AID dfName, DFAgentDescription dfd)
     **/
    public static void deregister(Agent a, AID dfName) throws FIPAException {
        DFAgentDescription dfd = new DFAgentDescription();
        dfd.setName(a.getAID());
        deregister(a, dfName, dfd);
    }

    /**
     * The default DF of the platform is used.
     * A default Agent Description is used which contains only the AID
     * of this agent.
     * @see #deregister(Agent a, AID dfName, DFAgentDescription dfd)
     **/
    public static void deregister(Agent a) throws FIPAException {
        DFAgentDescription dfd = new DFAgentDescription();
        dfd.setName(a.getAID());
        deregister(a, dfd);
    }

    /**
     * Modifies data contained within a <b>DF</b> agent.
     * @param a is the Agent performing the request of modification
     * @param dfName The AID of the <b>DF</b> agent holding the data to be changed.
     * @param dfd A <code>DFAgentDescription</code> object containing all new
     * data values.
     * @return the effective lease time assigned to the
     * <code>DFAgentDescription</code>; null if the reply could not be decoded.
     * @exception FIPAException A suitable exception can be thrown when a
     * <code>refuse</code> or <code>failure</code> message is received from the
     * DF to indicate some error condition.
     */
    public static Date modify(Agent a, AID dfName, DFAgentDescription dfd) throws FIPAException {
        ACLMessage request = createRequestMessage(a, dfName);
        if (dfd.getName() == null)
            dfd.setName(a.getAID());
        checkIsValid(dfd);
        // Build a DF action object for the request
        Modify m = new Modify();
        m.setDescription(dfd);
        Action act = new Action();
        act.setActor(dfName);
        act.setAction(m);
        synchronized (cm) {
            try {
                cm.fillContent(request, act);
            } catch (Exception e) {
                throw new FIPAException("Error encoding REQUEST content. " + e);
            }
        }
        // Send message and collect reply
        ACLMessage reply = doFipaRequestClient(a, request);
        // Extract the effective lease time assigned to the current request.
        Date retLeaseTime = null;
        Done doneModify = null;
        try {
            synchronized (cm) {
                doneModify = (Done) cm.extractContent(reply);
            }
        } catch (Exception e) {
            throw new FIPAException("Error dencoding INFORM content. " + e);
        }
        if (doneModify != null) {
            Action replyAction = (Action) doneModify.getAction();
            Modify replyModify = (Modify) replyAction.getAction();
            DFAgentDescription replyDFA = (DFAgentDescription) replyModify.getDescription();
            retLeaseTime = replyDFA.getLeaseTime();
        }
        return retLeaseTime;
    }

    /**
     * The default DF of the platform is used.
     * @see #modify(Agent a, AID dfName, DFAgentDescription dfd)
     **/
    public static Date modify(Agent a, DFAgentDescription dfd) throws FIPAException {
        return modify(a, a.getDefaultDF(), dfd);
    }

    /**
     * Searches for data contained within a <b>DF</b> agent.
     * @param a is the Agent performing the request of search
     * @param dfName The AID of the <b>DF</b> agent to start search from.
     * @param dfd A <code>DFAgentDescription</code> object containing data to
     * search for; this parameter is used as a template to match data against.
     * @param constraints of the search
     * @return An array of <code>DFAgentDescription</code> containing all found
     * items matching the given descriptor, subject to given search constraints
     * for search depth and result size.
     * @exception FIPAException A suitable exception can be thrown when a
     * <code>refuse</code> or <code>failure</code> message is received from the
     * DF to indicate some error condition.
     */
    public static DFAgentDescription[] search(Agent a, AID dfName, DFAgentDescription dfd, SearchConstraints constraints) throws FIPAException {
        ACLMessage request = createRequestMessage(a, dfName);
        // Build a DF action object for the request
        Search s = new Search();
        s.setDescription(dfd);
        s.setConstraints(constraints);
        Action act = new Action();
        act.setActor(dfName);
        act.setAction(s);
        synchronized (cm) {
            try {
                cm.fillContent(request, act);
            } catch (Exception e) {
                throw new FIPAException("Error encoding REQUEST content. " + e);
            }
        }
        // Send message and collect reply
        ACLMessage inform = doFipaRequestClient(a, request);
        // Parse the content and return the items found as an array
        Result r = null;
        synchronized (cm) {
            try {
                r = (Result) cm.extractContent(inform);
            } catch (Exception e) {
                throw new FIPAException("Error decoding INFORM content. " + e);
            }
        }
        return toArray(r.getItems());
    }

    /** Converts a result List into a DFAgentDescription array. */
    private static DFAgentDescription[] toArray(List l) throws FIPAException {
        try {
            DFAgentDescription[] items = new DFAgentDescription[l.size()];
            for (int i = 0; i < l.size(); i++) {
                items[i] = (DFAgentDescription) l.get(i);
            }
            return items;
        } catch (ClassCastException cce) {
            throw new FIPAException("Found items are not DFAgentDescriptions. " + cce);
        }
    }

    /**
     * The default DF is used.
     * @see #search(Agent a, AID dfName, DFAgentDescription dfd, SearchConstraints constraints)
     **/
    public static DFAgentDescription[] search(Agent a, DFAgentDescription dfd, SearchConstraints constraints) throws FIPAException {
        return search(a, a.getDefaultDF(), dfd, constraints);
    }

    /**
     * The default DF is used.
     * The default SearchConstraints are used. According to FIPA they are
     * defaulted to null value for all slots.
     * @see #search(Agent a, AID dfName, DFAgentDescription dfd, SearchConstraints constraints)
     **/
    public static DFAgentDescription[] search(Agent a, DFAgentDescription dfd) throws FIPAException {
        SearchConstraints constraints = new SearchConstraints();
        return search(a, a.getDefaultDF(), dfd, constraints);
    }

    /**
     * The default SearchConstraints are used. According to FIPA they are
     * defaulted to null value for all slots.
     * @see #search(Agent a, AID dfName, DFAgentDescription dfd, SearchConstraints constraints)
     **/
    public static DFAgentDescription[] search(Agent a, AID dfName, DFAgentDescription dfd) throws FIPAException {
        SearchConstraints constraints = new SearchConstraints();
        return search(a, dfName, dfd, constraints);
    }

    // SUBSCRIPTION related methods

    /**
     * Utility method that allows easily creating the message that has to be
     * sent to the DF to subscribe to receive notifications when a new DF agent
     * description matching the indicated template is registered with the DF.
     * This method can be fruitfully used in combination with the
     * <code>SubscriptionInitiator</code> protocol.
     * @param a The agent that is subscribing to the DF
     * @param dfName The AID of the <b>DF</b> agent to subscribe to.
     * @param template A <code>DFAgentDescription</code> object that is used as
     * a template to identify DF descriptions that will be notified
     * @param constraints The constraints to limit the number of results to be
     * notified.
     * @return the subscription message.
     * @see jade.proto.SubscriptionInitiator
     */
    public static ACLMessage getSubscriptionMessage(Agent a, AID dfName, DFAgentDescription template, SearchConstraints constraints) throws FIPAException {
        ACLMessage subscribe = new ACLMessage(ACLMessage.SUBSCRIBE);
        subscribe.setSender(a.getAID());
        subscribe.addReceiver(dfName);
        subscribe.setProtocol("fipa-subscribe");
        subscribe.setLanguage(c.getName());
        subscribe.setOntology(o.getName());
        // Content is the IRE "iota ?x (result (action df (search ...)) ?x)":
        // ask the DF to notify the (unique) search result bound to ?x.
        AbsVariable x = new AbsVariable("x", FIPAManagementVocabulary.DFAGENTDESCRIPTION);
        // Build a DF action object for the request
        Search s = new Search();
        s.setDescription(template);
        s.setConstraints(constraints);
        Action actSearch = new Action();
        actSearch.setActor(dfName);
        actSearch.setAction(s);
        AbsPredicate results = new AbsPredicate(BasicOntology.RESULT);
        results.set(BasicOntology.RESULT_VALUE, x);
        synchronized (cm) {
            try {
                results.set(BasicOntology.RESULT_ACTION, o.fromObject(actSearch));
                AbsIRE iota = new AbsIRE(SLVocabulary.IOTA);
                iota.setVariable(x);
                iota.setProposition(results);
                cm.fillContent(subscribe, iota);
            } catch (Exception e) {
                throw new FIPAException("Error creating subscription message. " + e);
            }
        }
        return subscribe;
    }

    /**
     * Searches the DF and remains blocked until a result is found or the
     * specified timeout has expired.
     * @param a The agent that is performing the search
     * @param dfName The AID of the <b>DF</b> agent where to search into.
     * @param dfd A <code>DFAgentDescription</code> object that is used as a
     * template to identify the DF descriptions to search for.
     * @param constraints The constraints to limit the number of results to be
     * sent back.
     * @param timeout The maximum amount of time that we want to remain blocked
     * waiting for results.
     * @return The DF agent descriptions matching the specified template or
     * <code>null</code> if the timeout expires.
     */
    public static DFAgentDescription[] searchUntilFound(Agent a, AID dfName, DFAgentDescription dfd, SearchConstraints constraints, long timeout) throws FIPAException {
        ACLMessage subscribe = getSubscriptionMessage(a, dfName, dfd, constraints);
        // Set conv-id and reply-with fields so the reply can be matched.
        String replyWith = "rw" + a.getName() + (new Date()).getTime();
        String convId = "conv" + a.getName() + (new Date()).getTime();
        subscribe.setReplyWith(replyWith);
        subscribe.setConversationId(convId);
        a.send(subscribe);
        DFAgentDescription[] result = waitForResults(a, timeout, replyWith, convId);
        // SEND the CANCEL message (the subscription must be cancelled whether
        // or not a result arrived).
        ACLMessage cancel = new ACLMessage(ACLMessage.CANCEL);
        cancel.addReceiver(dfName);
        cancel.setLanguage(c.getName());
        cancel.setOntology(o.getName());
        cancel.setConversationId(convId);
        Action act = new Action(dfName, OntoACLMessage.wrap(subscribe));
        synchronized (cm) {
            try {
                cm.fillContent(cancel, act);
            } catch (Exception e) {
                // NOTE(review): encoding failure of the CANCEL is swallowed
                // (only printed); the subscription may then stay active.
                e.printStackTrace();
            }
        }
        a.send(cancel);
        return result;
    }

    /**
     * Waits (up to timeout ms) for the INFORM answering a subscription,
     * skipping over one optional AGREE, and decodes the bound search results.
     * @return the decoded descriptions, or null if the timeout expires
     * @throws FIPAException on a decode failure or a non-INFORM final reply
     */
    private static DFAgentDescription[] waitForResults(Agent a, long timeout, String replyWith, String convId) throws FIPAException {
        MessageTemplate mt = MessageTemplate.and(MessageTemplate.MatchConversationId(convId), MessageTemplate.MatchInReplyTo(replyWith));
        long sendTime = System.currentTimeMillis();
        ACLMessage reply = a.blockingReceive(mt, timeout);
        if (reply != null) {
            if (reply.getPerformative() == ACLMessage.AGREE) {
                // We received an AGREE --> Go back waiting for the INFORM
                // unless the time is over.
                long agreeTime = System.currentTimeMillis();
                timeout -= (agreeTime - sendTime);
                if (timeout <= 0) {
                    return null;
                }
                reply = a.blockingReceive(mt, timeout);
            }
            if (reply != null) {
                if (reply.getPerformative() == ACLMessage.INFORM) {
                    // We received the INFORM --> Parse it and return the result
                    List items = null;
                    try {
                        synchronized (cm) {
                            AbsPredicate absEquals = (AbsPredicate) cm.extractAbsContent(reply);
                            items = (List) o.toObject(absEquals.getAbsTerm(SLVocabulary.EQUALS_RIGHT));
                        }
                    } catch (Exception e) {
                        throw new FIPAException("Error decoding INFORM content. " + e);
                    }
                    return toArray(items);
                } else {
                    // We received a REFUSE, NOT_UNDERSTOOD, FAILURE or OUT_OF_SEQUENCE --> ERROR
                    throw new FIPAException(reply.getContent());
                }
            }
        }
        // The timeout has expired
        return null;
    }

    /**
     * In some cases it is more convenient to execute these tasks in a
     * non-blocking way. This method returns a non-blocking behaviour that can
     * be added to the queue of the agent behaviours, as usual, by using
     * <code>Agent.addBehaviour()</code>.
     * <p>
     * Several ways are available to get the result of this behaviour and the
     * programmer can select one according to his preferred programming style:
     * <ul>
     * <li> call getLastMsg() and getSearchResults() where both throw a
     * NotYetReadyException if the task has not yet finished;
     * <li>create a SequentialBehaviour composed of two sub-behaviours: the
     * first subbehaviour is the returned RequestFIPAServiceBehaviour, while
     * the second one is application-dependent and is executed only when the
     * first is terminated;
     * <li>use directly the class RequestFIPAServiceBehaviour by extending it
     * and overriding all the handleXXX methods that handle the states of the
     * fipa-request interaction protocol.
     * </ul>
     * @param a is the agent performing the task
     * @param dfName is the AID of the DF that should perform the requested action
     * @param actionName is the name of the action (one of the constants defined
     * in FIPAAgentManagementOntology: REGISTER / DEREGISTER / MODIFY / SEARCH).
* @param dfd is the agent description * @param constraints are the search constraints (can be null if this is * not a search operation) * @return the behaviour to be added to the agent @exception FIPAException A suitable exception can be thrown to indicate some error condition locally discovered (e.g.the agentdescription is not valid.) @see jade.domain.FIPAAgentManagement.FIPAAgentManagementOntology **/ public static RequestFIPAServiceBehaviour getNonBlockingBehaviour(Agent a, AID dfName, String actionName, DFAgentDescription dfd, SearchConstraints constraints) throws FIPAException { return new RequestFIPAServiceBehaviour(a,dfName,actionName,dfd,constraints); } /** * The default DF is used. * @see #getNonBlockingBehaviour(Agent a, AID dfName, String actionName, DFAgentDescription dfd, SearchConstraints constraints) **/ public static RequestFIPAServiceBehaviour getNonBlockingBehaviour(Agent a, String actionName, DFAgentDescription dfd, SearchConstraints constraints) throws FIPAException { return getNonBlockingBehaviour(a,a.getDefaultDF(),actionName,dfd,constraints); } /** * The default DF is used. the default SearchContraints are used. a default AgentDescription is used, where only the agent AID is set. * @see #getNonBlockingBehaviour(Agent a, AID dfName, String actionName, DFAgentDescription dfd, SearchConstraints constraints) **/ public static RequestFIPAServiceBehaviour getNonBlockingBehaviour(Agent a, String actionName) throws FIPAException { DFAgentDescription dfd = new DFAgentDescription(); dfd.setName(a.getAID()); SearchConstraints constraints = new SearchConstraints(); return getNonBlockingBehaviour(a,a.getDefaultDF(),actionName,dfd,constraints); } /** the default SearchContraints are used. a default AgentDescription is used, where only the agent AID is set. 
* @see #getNonBlockingBehaviour(Agent a, AID dfName, String actionName, DFAgentDescription dfd, SearchConstraints constraints) **/ public static RequestFIPAServiceBehaviour getNonBlockingBehaviour(Agent a, AID dfName, String actionName) throws FIPAException { DFAgentDescription dfd = new DFAgentDescription(); dfd.setName(a.getAID()); SearchConstraints constraints = new SearchConstraints(); return getNonBlockingBehaviour(a,dfName,actionName,dfd,constraints); } /** * The defautl DF is used. the default SearchContraints are used. * @see #getNonBlockingBehaviour(Agent a, AID dfName, String actionName, DFAgentDescription dfd, SearchConstraints constraints) **/ public static RequestFIPAServiceBehaviour getNonBlockingBehaviour(Agent a, String actionName, DFAgentDescription dfd) throws FIPAException { SearchConstraints constraints = new SearchConstraints(); return getNonBlockingBehaviour(a,a.getDefaultDF(),actionName,dfd,constraints); } /** * the default SearchContraints are used. * @see #getNonBlockingBehaviour(Agent a, AID dfName, String actionName, DFAgentDescription dfd, SearchConstraints constraints) **/ public static RequestFIPAServiceBehaviour getNonBlockingBehaviour(Agent a, AID dfName, String actionName, DFAgentDescription dfd) throws FIPAException { SearchConstraints constraints = new SearchConstraints(); return getNonBlockingBehaviour(a,dfName,actionName,dfd,constraints); } }
package ca.corefacility.bioinformatics.irida.model;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.Objects;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.PostLoad;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import javax.persistence.Table;
import javax.persistence.Transient;

/**
 * Description of a remote API that this installation can connect to.
 * <p>
 * The service location is exposed to callers as a {@link URI}, but it is
 * persisted as its {@code String} form; the JPA lifecycle callbacks
 * {@link #postLoad()} and {@link #prePersist()} keep the two representations
 * in sync.
 * <p>
 * Identity (equality, hash code, natural ordering) is based solely on the
 * service URI.
 */
@Entity
@Table(name = "remoteApi")
public class RemoteAPI implements Comparable<RemoteAPI> {
	@Id
	@GeneratedValue(strategy = GenerationType.AUTO)
	private Long id;

	// Not a persistent field; reconstructed from stringServiceURI after load.
	@Transient
	private URI serviceURI;

	// keeping a string representation of the service URI so it's stored nicer
	// in the database
	@Column(name = "serviceURI")
	private String stringServiceURI;

	private String description;

	public RemoteAPI() {
	}

	/**
	 * Create a new {@link RemoteAPI}.
	 *
	 * @param serviceURI
	 *            the location of the remote service
	 * @param description
	 *            a human-readable description of the service
	 */
	public RemoteAPI(URI serviceURI, String description) {
		this.serviceURI = serviceURI;
		this.description = description;
	}

	/**
	 * Rebuild the {@link URI} form of the service location after the entity is
	 * loaded from the database.
	 *
	 * @throws URISyntaxException
	 *             if the stored string is not a valid URI
	 */
	@PostLoad
	public void postLoad() throws URISyntaxException {
		serviceURI = new URI(stringServiceURI);
	}

	/**
	 * Capture the string form of the service URI before the entity is written
	 * to the database.
	 */
	@PrePersist
	@PreUpdate
	public void prePersist() {
		// Fail fast with a descriptive message (still an NPE, as before)
		// instead of an anonymous NullPointerException on the next line.
		Objects.requireNonNull(serviceURI, "serviceURI must be set before a RemoteAPI can be persisted");
		stringServiceURI = serviceURI.toString();
	}

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public URI getServiceURI() {
		return serviceURI;
	}

	public void setServiceURI(URI serviceURI) {
		this.serviceURI = serviceURI;
	}

	public String getDescription() {
		return description;
	}

	public void setDescription(String description) {
		this.description = description;
	}

	@Override
	public String toString() {
		return "RemoteAPI [" + serviceURI + ", " + description + "]";
	}

	@Override
	public boolean equals(Object other) {
		if (other instanceof RemoteAPI) {
			RemoteAPI p = (RemoteAPI) other;
			return Objects.equals(serviceURI, p.serviceURI);
		}
		return false;
	}

	/**
	 * Orders by service URI.
	 * <p>
	 * NOTE(review): throws {@link NullPointerException} if either side has a
	 * null {@code serviceURI} (e.g. a freshly constructed, not-yet-loaded
	 * entity) — confirm callers never compare unloaded instances.
	 */
	@Override
	public int compareTo(RemoteAPI o) {
		return serviceURI.compareTo(o.serviceURI);
	}

	@Override
	public int hashCode() {
		return Objects.hash(serviceURI);
	}
}
package ca.corefacility.bioinformatics.irida.model.user;

import java.util.Objects;

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;

import org.hibernate.envers.Audited;

import org.springframework.security.core.GrantedAuthority;

/**
 * Roles for authorization in the application. Instances are created only
 * through the predefined constants or {@link #valueOf(String)}; identity is
 * based on the role name, which also serves as the Spring Security authority
 * string.
 *
 * @author Franklin Bristow &lt;[email protected]&gt;
 * @author Thomas Matthews &lt;[email protected]&gt;
 */
@Entity
@Table(name = "system_role")
@Audited
public class Role implements Comparable<Role>, GrantedAuthority {
	private static final long serialVersionUID = 7595149386708058927L;

	/** Role for unauthenticated (anonymous) access. */
	public static final Role ROLE_ANONYMOUS = new Role("ROLE_ANONYMOUS");

	/** Administrative role. */
	public static final Role ROLE_ADMIN = new Role("ROLE_ADMIN");

	/** Regular user role. */
	public static final Role ROLE_USER = new Role("ROLE_USER");

	/** Manager role. */
	public static final Role ROLE_MANAGER = new Role("ROLE_MANAGER");

	/** Sequencer role. */
	public static final Role ROLE_SEQUENCER = new Role("ROLE_SEQUENCER");

	/** OAuth2 client role. */
	public static final Role ROLE_CLIENT = new Role("ROLE_CLIENT");

	@Id
	private String name;

	@NotNull
	private String description;

	private Role() {
	}

	private Role(String name) {
		this();
		this.name = name;
	}

	private Role(String name, String description) {
		this(name);
		this.description = description;
	}

	/** Orders roles lexicographically by name. */
	@Override
	public int compareTo(Role r) {
		return name.compareTo(r.name);
	}

	/** Two roles are equal when their names match. */
	@Override
	public boolean equals(Object other) {
		if (!(other instanceof Role)) {
			return false;
		}
		Role that = (Role) other;
		return Objects.equals(name, that.name);
	}

	@Override
	public int hashCode() {
		return Objects.hash(name);
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getDescription() {
		return description;
	}

	public void setDescription(String description) {
		this.description = description;
	}

	/** The role name doubles as the granted-authority string. */
	@Override
	public String getAuthority() {
		return name;
	}

	/**
	 * Return a {@link Role} for the given string value
	 *
	 * @param value
	 *            The string value to create a {@link Role} for
	 * @return A new {@link Role} instance for the given string value
	 */
	public static Role valueOf(String value) {
		return new Role(value);
	}
}
package com.adioss.security.certificate;

import java.math.BigInteger;
import java.security.KeyPair;
import java.security.cert.X509Certificate;
import java.util.*;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier;
import org.bouncycastle.asn1.x509.BasicConstraints;
import org.bouncycastle.asn1.x509.ExtendedKeyUsage;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.GeneralName;
import org.bouncycastle.asn1.x509.GeneralNames;
import org.bouncycastle.asn1.x509.KeyPurposeId;
import org.bouncycastle.asn1.x509.KeyUsage;
import org.bouncycastle.asn1.x509.SubjectKeyIdentifier;
import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.X509v1CertificateBuilder;
import org.bouncycastle.cert.X509v3CertificateBuilder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.bouncycastle.cert.jcajce.JcaX509ExtensionUtils;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.bouncycastle.pkcs.PKCS10CertificationRequestBuilder;

/**
 * Builds demo X.509 certificates with Bouncy Castle: a self-signed V1
 * certificate, a V3 root CA, an intermediate CA signed by the root, and an
 * end-entity certificate signed by the intermediate.
 * <p>
 * All certificates share the same validity window (START_DATE..END_DATE,
 * fixed at class-load time) and a millisecond-timestamp serial number.
 */
class CertificateGenerator {
    // 604,800,000 ms — fits comfortably in an int; widened to long when added
    // to System.currentTimeMillis() below.
    private static final int VALIDITY_PERIOD = 7 * 24 * 60 * 60 * 1000; // one week
    private static final String SIGNATURE_ALGORITHM = "SHA256WithRSAEncryption";
    // Backdated 50 s to tolerate small clock skew between machines.
    private static final Date START_DATE = new Date(System.currentTimeMillis() - 50000);
    private static final Date END_DATE = new Date(System.currentTimeMillis() + VALIDITY_PERIOD);

    /**
     * Generate a V1 version(ex usage is CA root) of X.509 self signed certificate
     *
     * @param keyPair used to generate certificate: public key in certificate, sign with private key (self signed)
     * @return a self signed {@link X509Certificate}
     */
    static X509Certificate generateX509V1Certificate(KeyPair keyPair) throws Exception {
        BigInteger serial = BigInteger.valueOf(System.currentTimeMillis());
        // NOTE(review): the builder's parameter order is (issuer, serial,
        // notBefore, notAfter, subject, publicKeyInfo), so here the ISSUER is
        // "CN=Test Certificate" and the SUBJECT is "CN=Fake Issuer DN" — the
        // names suggest the two arguments may be swapped; confirm intent.
        X509v1CertificateBuilder x509v1CertificateBuilder = new X509v1CertificateBuilder(new X500Name("CN=Test Certificate"), serial, START_DATE, END_DATE, new X500Name("CN=Fake Issuer DN"), SubjectPublicKeyInfo.getInstance(keyPair.getPublic().getEncoded()));
        ContentSigner contentSigner = new JcaContentSignerBuilder(SIGNATURE_ALGORITHM).build(keyPair.getPrivate());
        X509CertificateHolder x509CertificateHolder = x509v1CertificateBuilder.build(contentSigner);
        return new JcaX509CertificateConverter().getCertificate(x509CertificateHolder);
    }

    /**
     * Same source code as {@code generateX509V1Certificate} but with a V3 of X.509: root CA certificate (V3 version of X.509 self signed certificate)
     *
     * @param keyPair      the CA key pair (self signed)
     * @param subjectValue the DN used for both subject and issuer
     */
    static X509Certificate generateRootCert(KeyPair keyPair, String subjectValue) throws Exception {
        X500Name x500Name = new X500Name(subjectValue);
        BigInteger serial = BigInteger.valueOf(System.currentTimeMillis());
        // Self-signed: the same DN is used as issuer and subject.
        X509v3CertificateBuilder x509v3CertificateBuilder = new X509v3CertificateBuilder(x500Name, serial, START_DATE, END_DATE, x500Name, SubjectPublicKeyInfo.getInstance(keyPair.getPublic().getEncoded()));
        // basicConstraints(pathLen=0): can sign only the next level. Critical: true
        x509v3CertificateBuilder.addExtension(Extension.basicConstraints, true, new BasicConstraints(0));
        // can sign certificates or encrypt other keys. Critical: true
        x509v3CertificateBuilder.addExtension(Extension.keyUsage, true, new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyCertSign));
        ContentSigner contentSigner = new JcaContentSignerBuilder(SIGNATURE_ALGORITHM).build(keyPair.getPrivate());
        // generate certificate
        X509CertificateHolder x509CertificateHolder = x509v3CertificateBuilder.build(contentSigner);
        return new JcaX509CertificateConverter().getCertificate(x509CertificateHolder);
    }

    /**
     * Create an intermediate CA, sign by root CA, used to sign other certificates
     *
     * @param keyPair       the intermediate CA's key pair (its public key goes in the certificate)
     * @param caRootKeyPair the root CA's key pair (its private key signs the certificate)
     * @param caCert        the root CA certificate (provides the issuer DN)
     * @param subjectValue  the intermediate CA's subject DN
     */
    static X509Certificate generateIntermediateCA(KeyPair keyPair, KeyPair caRootKeyPair, X509Certificate caCert, String subjectValue) throws Exception {
        ContentSigner contentSigner = new JcaContentSignerBuilder(SIGNATURE_ALGORITHM).build(caRootKeyPair.getPrivate());
        X500Name subject = new X500Name(subjectValue);
        // NOTE(review): getSubjectDN() is deprecated; getSubjectX500Principal()
        // is the recommended replacement — TODO confirm before changing.
        X500Name issuer = new X500Name(caCert.getSubjectDN().getName());
        SubjectPublicKeyInfo subjectPublicKeyInfo = SubjectPublicKeyInfo.getInstance(keyPair.getPublic().getEncoded());
        // NOTE(review): the CSR below is signed with the ISSUING CA's key, not
        // the subject's key as PKCS#10 normally requires. The self-signature is
        // never verified here (the CSR is only used to carry subject + public
        // key into the builder), but confirm this is intentional.
        PKCS10CertificationRequestBuilder certificationRequestBuilder = new PKCS10CertificationRequestBuilder(subject, subjectPublicKeyInfo);
        PKCS10CertificationRequest request = certificationRequestBuilder.build(contentSigner);
        BigInteger serial = BigInteger.valueOf(System.currentTimeMillis());
        X500Name subject1 = request.toASN1Structure().getCertificationRequestInfo().getSubject();
        X509v3CertificateBuilder x509v3CertificateBuilder = new X509v3CertificateBuilder(issuer, serial, START_DATE, END_DATE, subject1, request.getSubjectPublicKeyInfo());
        // basicConstraints(pathLen=MAX): can sign a chain of any depth. Critical: true
        x509v3CertificateBuilder.addExtension(Extension.basicConstraints, true, new BasicConstraints(Integer.MAX_VALUE));
        // can sign certificates or encrypt other keys. Critical: true
        x509v3CertificateBuilder.addExtension(Extension.keyUsage, true, new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyCertSign));
        // add issuer subjectKey identifier (derived from the root CA's public key). Critical: false
        AuthorityKeyIdentifier authorityKeyIdentifier = new JcaX509ExtensionUtils().createAuthorityKeyIdentifier(caCert.getPublicKey());
        x509v3CertificateBuilder.addExtension(Extension.authorityKeyIdentifier, false, authorityKeyIdentifier);
        // add subjectKey identifier (derived from this certificate's public key). Critical: false
        SubjectKeyIdentifier subjectKeyIdentifier = new JcaX509ExtensionUtils().createSubjectKeyIdentifier(keyPair.getPublic());
        x509v3CertificateBuilder.addExtension(Extension.subjectKeyIdentifier, false, subjectKeyIdentifier);
        // put a DNS SAN. Critical: false
        GeneralNames generalNames = new GeneralNames(new GeneralName[]{new GeneralName(GeneralName.dNSName, subjectValue + ".intermediate.ca")});
        x509v3CertificateBuilder.addExtension(Extension.subjectAlternativeName, false, generalNames);
        // generate certificate
        X509CertificateHolder x509CertificateHolder = x509v3CertificateBuilder.build(contentSigner);
        return new JcaX509CertificateConverter().getCertificate(x509CertificateHolder);
    }

    /**
     * Create basic certificate signed by intermediate CA
     *
     * @param keyPair                       the end entity's key pair
     * @param intermediateCaKeyPair         the intermediate CA's key pair (signs the certificate)
     * @param intermediateCaKeyCertificate  the intermediate CA certificate (provides the issuer DN)
     * @param subjectValue                  the end entity's subject DN
     */
    static X509Certificate generateEndEntityCert(KeyPair keyPair, KeyPair intermediateCaKeyPair, X509Certificate intermediateCaKeyCertificate, String subjectValue) throws Exception {
        ContentSigner contentSigner = new JcaContentSignerBuilder(SIGNATURE_ALGORITHM).build(intermediateCaKeyPair.getPrivate());
        X500Name subject = new X500Name(subjectValue);
        X500Name issuer = new X500Name(intermediateCaKeyCertificate.getSubjectDN().getName());
        SubjectPublicKeyInfo subjectPublicKeyInfo = SubjectPublicKeyInfo.getInstance(keyPair.getPublic().getEncoded());
        // Same CSR shortcut as in generateIntermediateCA — see the note there.
        PKCS10CertificationRequestBuilder certificationRequestBuilder = new PKCS10CertificationRequestBuilder(subject, subjectPublicKeyInfo);
        PKCS10CertificationRequest request = certificationRequestBuilder.build(contentSigner);
        BigInteger serial = BigInteger.valueOf(System.currentTimeMillis());
        X500Name subject1 = request.toASN1Structure().getCertificationRequestInfo().getSubject();
        X509v3CertificateBuilder x509v3CertificateBuilder = new X509v3CertificateBuilder(issuer, serial, START_DATE, END_DATE, subject1, request.getSubjectPublicKeyInfo());
        // not a CA. Critical: true
        x509v3CertificateBuilder.addExtension(Extension.basicConstraints, true, new BasicConstraints(false));
        // server auth. Critical: true
        x509v3CertificateBuilder.addExtension(Extension.extendedKeyUsage, true, new ExtendedKeyUsage(KeyPurposeId.id_kp_serverAuth));
        // public key is used for key transport. Critical: true
        x509v3CertificateBuilder.addExtension(Extension.keyUsage, true, new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyEncipherment));
        // add issuer subjectKey identifier. Critical: false
        AuthorityKeyIdentifier authorityKeyIdentifier = new JcaX509ExtensionUtils().createAuthorityKeyIdentifier(intermediateCaKeyCertificate.getPublicKey());
        x509v3CertificateBuilder.addExtension(Extension.authorityKeyIdentifier, false, authorityKeyIdentifier);
        // add subjectKey identifier. Critical: false
        SubjectKeyIdentifier subjectKeyIdentifier = new JcaX509ExtensionUtils().createSubjectKeyIdentifier(keyPair.getPublic());
        x509v3CertificateBuilder.addExtension(Extension.subjectKeyIdentifier, false, subjectKeyIdentifier);
        // put a DNS SAN
        GeneralNames generalNames = new GeneralNames(new GeneralName[]{new GeneralName(GeneralName.dNSName, subjectValue + ".end.certificate.ca")});
        x509v3CertificateBuilder.addExtension(Extension.subjectAlternativeName, false, generalNames);
        // generate certificate
        X509CertificateHolder x509CertificateHolder = x509v3CertificateBuilder.build(contentSigner);
        return new JcaX509CertificateConverter().getCertificate(x509CertificateHolder);
    }

    // Utility class: no instances.
    private CertificateGenerator() {
    }
}
package ca.corefacility.bioinformatics.irida.service.impl.unit.sample;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;

import org.junit.Before;
import org.junit.Test;

import com.google.common.collect.Lists;

import ca.corefacility.bioinformatics.irida.exceptions.AnalysisAlreadySetException;
import ca.corefacility.bioinformatics.irida.exceptions.SequenceFileAnalysisException;
import ca.corefacility.bioinformatics.irida.model.genomeFile.AssembledGenomeAnalysis;
import ca.corefacility.bioinformatics.irida.model.joins.Join;
import ca.corefacility.bioinformatics.irida.model.joins.impl.ProjectSampleJoin;
import ca.corefacility.bioinformatics.irida.model.project.Project;
import ca.corefacility.bioinformatics.irida.model.sample.Sample;
import ca.corefacility.bioinformatics.irida.model.sample.SampleSequenceFileJoin;
import ca.corefacility.bioinformatics.irida.model.sample.SampleSequencingObjectJoin;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile;
import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisFastQC;
import ca.corefacility.bioinformatics.irida.repositories.AssembledGenomeAnalysisRepository;
import ca.corefacility.bioinformatics.irida.repositories.analysis.AnalysisRepository;
import ca.corefacility.bioinformatics.irida.repositories.joins.project.ProjectSampleJoinRepository;
import ca.corefacility.bioinformatics.irida.repositories.joins.sample.SampleSequenceFileJoinRepository;
import ca.corefacility.bioinformatics.irida.repositories.joins.sample.SampleSequencingObjectJoinRepository;
import ca.corefacility.bioinformatics.irida.repositories.sample.SampleRepository;
import ca.corefacility.bioinformatics.irida.repositories.sequencefile.SequenceFilePairRepository;
import ca.corefacility.bioinformatics.irida.service.impl.sample.SampleServiceImpl;
import ca.corefacility.bioinformatics.irida.service.sample.SampleService;

/**
 * Unit tests for {@link SampleServiceImpl}. All repository collaborators are
 * Mockito mocks; only the service's orchestration logic is exercised.
 */
public class SampleServiceImplTest {

	private SampleService sampleService;
	private SampleRepository sampleRepository;
	private ProjectSampleJoinRepository psjRepository;
	private SampleSequenceFileJoinRepository ssfRepository;
	private AnalysisRepository analysisRepository;
	private SequenceFilePairRepository sequenceFilePairRepository;
	private AssembledGenomeAnalysisRepository assembledGenomeAnalysisRepository;
	private SampleSequencingObjectJoinRepository ssoRepository;
	private Validator validator;
	private AssembledGenomeAnalysis assembledGenome1;
	private AssembledGenomeAnalysis assembledGenome2;

	/**
	 * Variation in a floating point number to be considered equal.
	 */
	private static final double deltaFloatEquality = 0.000001;

	@Before
	public void setUp() {
		sampleRepository = mock(SampleRepository.class);
		psjRepository = mock(ProjectSampleJoinRepository.class);
		ssfRepository = mock(SampleSequenceFileJoinRepository.class);
		analysisRepository = mock(AnalysisRepository.class);
		sequenceFilePairRepository = mock(SequenceFilePairRepository.class);
		assembledGenomeAnalysisRepository = mock(AssembledGenomeAnalysisRepository.class);
		ssoRepository = mock(SampleSequencingObjectJoinRepository.class);
		assembledGenome1 = mock(AssembledGenomeAnalysis.class);
		assembledGenome2 = mock(AssembledGenomeAnalysis.class);
		ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
		validator = factory.getValidator();
		sampleService = new SampleServiceImpl(sampleRepository, psjRepository, ssfRepository, analysisRepository,
				ssoRepository, sequenceFilePairRepository, assembledGenomeAnalysisRepository, validator);
	}

	@Test
	public void testFindAssembliesForSampleNoAssemblies() {
		Sample s = new Sample();
		s.setId(1L);

		// A pair with no assembled genome should contribute nothing.
		SequenceFilePair pair = new SequenceFilePair();
		when(sequenceFilePairRepository.getSequenceFilePairsForSample(s)).thenReturn(Lists.newArrayList(pair));

		assertEquals("Invalid number of assemblies found", 0, sampleService.findAssembliesForSample(s).size());
	}

	@Test
	public void testFindAssembliesForSampleOneAssemblies() {
		Sample s = new Sample();
		s.setId(1L);

		SequenceFilePair pair = new SequenceFilePair();
		pair.setAssembledGenome(assembledGenome1);

		when(assembledGenomeAnalysisRepository.getAssembledGenomeForSequenceFilePair(pair))
				.thenReturn(assembledGenome1);
		when(sequenceFilePairRepository.getSequenceFilePairsForSample(s)).thenReturn(Lists.newArrayList(pair));

		assertEquals("Invalid number of assemblies found", 1, sampleService.findAssembliesForSample(s).size());
	}

	@Test
	public void testFindAssembliesForSampleTwoAssemblies() {
		Sample s = new Sample();
		s.setId(1L);

		SequenceFilePair pair1 = new SequenceFilePair();
		pair1.setAssembledGenome(assembledGenome1);
		SequenceFilePair pair2 = new SequenceFilePair();
		pair2.setAssembledGenome(assembledGenome2);

		when(sequenceFilePairRepository.getSequenceFilePairsForSample(s)).thenReturn(Lists.newArrayList(pair1, pair2));
		when(assembledGenomeAnalysisRepository.getAssembledGenomeForSequenceFilePair(pair1)).thenReturn(
				assembledGenome1);
		when(assembledGenomeAnalysisRepository.getAssembledGenomeForSequenceFilePair(pair2)).thenReturn(
				assembledGenome2);

		assertEquals("Invalid number of assemblies found", 2, sampleService.findAssembliesForSample(s).size());
	}

	@Test
	public void testGetSampleForProject() {
		Project p = new Project();
		p.setId(1111L);
		Sample s = new Sample();
		s.setId(2222L);

		ProjectSampleJoin join = new ProjectSampleJoin(p, s);
		List<Join<Project, Sample>> joins = new ArrayList<>();
		joins.add(join);

		when(psjRepository.getSamplesForProject(p)).thenReturn(joins);

		sampleService.getSampleForProject(p, s.getId());

		verify(psjRepository).getSamplesForProject(p);
	}

	@Test
	public void testRemoveSequenceFileFromSample() {
		Sample s = new Sample();
		s.setId(1111L);
		SequenceFile sf = new SequenceFile();
		sf.setId(2222L);
		SingleEndSequenceFile obj = new SingleEndSequenceFile(sf);
		obj.setId(2L);
		SampleSequencingObjectJoin join = new SampleSequencingObjectJoin(s, obj);

		when(ssoRepository.readObjectForSample(s, obj.getId())).thenReturn(join);

		sampleService.removeSequencingObjectFromSample(s, obj);

		verify(ssoRepository).delete(join);
	}

	@Test
	public void testMergeSamples() {
		// For every sample in toMerge, the service should:
		// 1. call SequenceFileRepository to get the sequence files in that
		// sample,
		// 2. call SequenceFileRepository to add the sequence files to
		// mergeInto,
		// 3. call SampleRepository to persist the sample as deleted.
		final int SIZE = 3;

		Sample s = s(1L);
		Project project = p(1L);

		Sample[] toMerge = new Sample[SIZE];
		SequenceFile[] toMerge_sf = new SequenceFile[SIZE];
		SampleSequenceFileJoin[] s_sf_joins = new SampleSequenceFileJoin[SIZE];
		SampleSequenceFileJoin[] s_sf_original = new SampleSequenceFileJoin[SIZE];
		ProjectSampleJoin[] p_s_joins = new ProjectSampleJoin[SIZE];
		for (long i = 0; i < SIZE; i++) {
			int p = (int) i;
			toMerge[p] = s(i + 2);
			toMerge_sf[p] = sf(i + 2);
			s_sf_joins[p] = new SampleSequenceFileJoin(s, toMerge_sf[p]);
			p_s_joins[p] = new ProjectSampleJoin(project, toMerge[p]);

			List<Join<Project, Sample>> projectSampleJoins = new ArrayList<>();
			projectSampleJoins.add(p_s_joins[p]);

			List<Join<Sample, SequenceFile>> sampleSequenceFileJoins = new ArrayList<>();
			SampleSequenceFileJoin join = new SampleSequenceFileJoin(toMerge[p], toMerge_sf[p]);
			sampleSequenceFileJoins.add(join);
			s_sf_original[p] = join;

			when(ssfRepository.getFilesForSample(toMerge[p])).thenReturn(sampleSequenceFileJoins);
			when(ssfRepository.save(s_sf_joins[p])).thenReturn(s_sf_joins[p]);
			when(ssfRepository.readFileForSample(toMerge[p], toMerge_sf[p])).thenReturn(join);
			when(psjRepository.getProjectForSample(toMerge[p])).thenReturn(projectSampleJoins);
			// for deletion
			when(psjRepository.readSampleForProject(project, toMerge[p])).thenReturn(p_s_joins[p]);
		}

		List<Join<Project, Sample>> joins = new ArrayList<>();
		joins.add(new ProjectSampleJoin(project, s));
		when(psjRepository.getProjectForSample(s)).thenReturn(joins);

		Sample saved = sampleService.mergeSamples(project, s, toMerge);

		verify(psjRepository).getProjectForSample(s);
		for (int i = 0; i < SIZE; i++) {
			verify(ssfRepository).getFilesForSample(toMerge[i]);
			verify(ssfRepository).save(s_sf_joins[i]);
			verify(ssfRepository).delete(s_sf_original[i]);
			verify(sampleRepository).delete(toMerge[i].getId());
			verify(psjRepository).getProjectForSample(toMerge[i]);
			verify(psjRepository).delete(p_s_joins[i]);
		}

		assertEquals("The saved sample should be the same as the sample to merge into.", s, saved);
	}

	@Test
	public void testRejectSampleMergeDifferentProjects() {
		Sample s1 = new Sample();
		s1.setId(1L);
		Sample s2 = new Sample();
		s2.setId(2L);
		Project p1 = new Project();
		p1.setId(1L);
		p1.setName("project 1");
		Project p2 = new Project();
		p2.setId(2L);
		p2.setName("project 2");

		List<Join<Project, Sample>> p1_s1 = new ArrayList<>();
		p1_s1.add(new ProjectSampleJoin(p1, s1));
		List<Join<Project, Sample>> p2_s2 = new ArrayList<>();
		p2_s2.add(new ProjectSampleJoin(p2, s2));

		when(psjRepository.getProjectForSample(s1)).thenReturn(p1_s1);
		when(psjRepository.getProjectForSample(s2)).thenReturn(p2_s2);

		try {
			sampleService.mergeSamples(p1, s1, s2);
			fail("Samples from different projects were allowed to be merged.");
		} catch (IllegalArgumentException e) {
			// expected: merging across projects must be rejected
		} catch (Exception e) {
			e.printStackTrace();
			fail("Failed for an unknown reason; stack trace preceded.");
		}

		verify(psjRepository).getProjectForSample(s1);
		verify(psjRepository).getProjectForSample(s2);
	}

	/**
	 * Tests out successfully getting the coverage from a sample with no
	 * sequence files.
	 *
	 * @throws SequenceFileAnalysisException
	 */
	@Test
	public void testGetCoverageForSampleSuccessZero() throws SequenceFileAnalysisException {
		Sample s1 = new Sample();
		s1.setId(1L);

		when(ssfRepository.getFilesForSample(s1)).thenReturn(new ArrayList<Join<Sample, SequenceFile>>());

		double coverage = sampleService.estimateCoverageForSample(s1, 10);
		assertEquals(0, coverage, deltaFloatEquality);
	}

	/**
	 * Tests out successfully getting the coverage from a sample with a sequence
	 * file.
	 *
	 * @throws SequenceFileAnalysisException
	 * @throws AnalysisAlreadySetException
	 */
	@Test
	public void testGetCoverageForSampleSuccess() throws SequenceFileAnalysisException, AnalysisAlreadySetException {
		Sample s1 = new Sample();
		s1.setId(1L);

		SequenceFile sf1 = new SequenceFile();
		sf1.setId(2222L);

		SampleSequenceFileJoin join = new SampleSequenceFileJoin(s1, sf1);

		AnalysisFastQC analysisFastQC1 = AnalysisFastQC.sloppyBuilder().executionManagerAnalysisId("id")
				.totalBases(1000L).build();
		sf1.setFastQCAnalysis(analysisFastQC1);

		when(ssfRepository.getFilesForSample(s1)).thenReturn(Arrays.asList(join));
		when(analysisRepository.findFastqcAnalysisForSequenceFile(sf1)).thenReturn(analysisFastQC1);

		// 1000 total bases over a 500-base reference = 2x coverage
		double coverage = sampleService.estimateCoverageForSample(s1, 500L);
		assertEquals(2.0, coverage, deltaFloatEquality);
	}

	/**
	 * Tests out passing an invalid reference length.
	 *
	 * @throws SequenceFileAnalysisException
	 */
	@Test(expected = IllegalArgumentException.class)
	public void testGetCoverageForSampleInvalidReferenceLength() throws SequenceFileAnalysisException {
		sampleService.estimateCoverageForSample(new Sample(), 0L);
	}

	/**
	 * Tests out successfully getting the total bases from a sample with no
	 * sequence files.
	 *
	 * @throws SequenceFileAnalysisException
	 */
	@Test
	public void testGetTotalBasesForSampleSuccessZero() throws SequenceFileAnalysisException {
		Sample s1 = new Sample();
		s1.setId(1L);

		when(ssfRepository.getFilesForSample(s1)).thenReturn(new ArrayList<Join<Sample, SequenceFile>>());

		long actualBases = sampleService.getTotalBasesForSample(s1);
		assertEquals(0, actualBases);
	}

	/**
	 * Tests out successfully getting the total bases from a sample with one
	 * sequence file.
	 *
	 * @throws SequenceFileAnalysisException
	 * @throws AnalysisAlreadySetException
	 */
	@Test
	public void testGetTotalBasesForSampleSuccessOne() throws SequenceFileAnalysisException,
			AnalysisAlreadySetException {
		Sample s1 = new Sample();
		s1.setId(1L);

		SequenceFile sf1 = new SequenceFile();
		sf1.setId(2222L);

		SampleSequenceFileJoin join = new SampleSequenceFileJoin(s1, sf1);

		AnalysisFastQC analysisFastQC1 = AnalysisFastQC.sloppyBuilder().executionManagerAnalysisId("id")
				.totalBases(1000L).build();
		sf1.setFastQCAnalysis(analysisFastQC1);

		when(ssfRepository.getFilesForSample(s1)).thenReturn(Arrays.asList(join));
		when(analysisRepository.findFastqcAnalysisForSequenceFile(sf1)).thenReturn(analysisFastQC1);

		long actualBases = sampleService.getTotalBasesForSample(s1);
		assertEquals(1000, actualBases);
	}

	/**
	 * Tests out successfully getting the total bases from a sample with two
	 * sequence files.
	 *
	 * @throws SequenceFileAnalysisException
	 * @throws AnalysisAlreadySetException
	 */
	@Test
	public void testGetTotalBasesForSampleSuccessTwo() throws SequenceFileAnalysisException,
			AnalysisAlreadySetException {
		Sample s1 = new Sample();
		s1.setId(1L);

		SequenceFile sf1 = new SequenceFile();
		sf1.setId(2222L);
		SequenceFile sf2 = new SequenceFile();
		// BUGFIX: this previously re-assigned sf1's id, leaving sf2 without one.
		sf2.setId(3333L);

		SampleSequenceFileJoin join1 = new SampleSequenceFileJoin(s1, sf1);
		SampleSequenceFileJoin join2 = new SampleSequenceFileJoin(s1, sf2);

		AnalysisFastQC analysisFastQC1 = AnalysisFastQC.sloppyBuilder().executionManagerAnalysisId("id")
				.totalBases(1000L).build();
		sf1.setFastQCAnalysis(analysisFastQC1);

		AnalysisFastQC analysisFastQC2 = AnalysisFastQC.sloppyBuilder().executionManagerAnalysisId("id2")
				.totalBases(1000L).build();
		sf2.setFastQCAnalysis(analysisFastQC2);

		when(ssfRepository.getFilesForSample(s1)).thenReturn(Arrays.asList(join1, join2));
		when(analysisRepository.findFastqcAnalysisForSequenceFile(sf1)).thenReturn(analysisFastQC1);
		when(analysisRepository.findFastqcAnalysisForSequenceFile(sf2)).thenReturn(analysisFastQC2);

		long actualBases = sampleService.getTotalBasesForSample(s1);
		assertEquals(2000, actualBases);
	}

	/**
	 * Tests out failing to get the total bases from a sample with one sequence
	 * file due to missing FastQC
	 *
	 * @throws SequenceFileAnalysisException
	 */
	@Test(expected = SequenceFileAnalysisException.class)
	public void testGetTotalBasesForSampleFailNoFastQC() throws SequenceFileAnalysisException {
		Sample s1 = new Sample();
		s1.setId(1L);

		SequenceFile sf1 = new SequenceFile();
		sf1.setId(2222L);

		SampleSequenceFileJoin join = new SampleSequenceFileJoin(s1, sf1);

		when(ssfRepository.getFilesForSample(s1)).thenReturn(Arrays.asList(join));

		sampleService.getTotalBasesForSample(s1);
	}

	/**
	 * Tests out failing to get the total bases from a sample with one sequence
	 * file due to too many FastQC
	 *
	 * @throws SequenceFileAnalysisException
	 */
	// NOTE(review): this setup is identical to testGetTotalBasesForSampleFailNoFastQC
	// and never mocks multiple FastQC results — it passes for the "no FastQC"
	// reason, not the "too many" one. TODO: mock the multiple-results case.
	@Test(expected = SequenceFileAnalysisException.class)
	public void testGetTotalBasesForSampleFailMultipleFastQC() throws SequenceFileAnalysisException {
		Sample s1 = new Sample();
		s1.setId(1L);

		SequenceFile sf1 = new SequenceFile();
		sf1.setId(2222L);

		SampleSequenceFileJoin join = new SampleSequenceFileJoin(s1, sf1);

		when(ssfRepository.getFilesForSample(s1)).thenReturn(Arrays.asList(join));

		sampleService.getTotalBasesForSample(s1);
	}

	/** Fixture helper: a {@link Sample} with the given id. */
	private Sample s(Long id) {
		Sample s = new Sample();
		s.setId(id);
		return s;
	}

	/** Fixture helper: a {@link SequenceFile} with the given id and a real temp file. */
	private SequenceFile sf(Long id) {
		SequenceFile sf = new SequenceFile();
		sf.setId(id);
		try {
			sf.setFile(Files.createTempFile(null, null));
		} catch (IOException e) {
			// Fail loudly instead of silently producing a file-less fixture.
			throw new UncheckedIOException(e);
		}
		return sf;
	}

	/** Fixture helper: a {@link Project} with the given id. */
	private Project p(Long id) {
		Project p = new Project();
		p.setId(id);
		return p;
	}
}
package com.codeski.dynmap.structures;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.logging.Logger;

import com.codeski.nbt.NBTReader;
import com.codeski.nbt.tags.NBT;
import com.codeski.nbt.tags.NBTByte;
import com.codeski.nbt.tags.NBTCompound;
import com.codeski.nbt.tags.NBTInteger;
import com.codeski.nbt.tags.NBTList;
import com.codeski.nbt.tags.NBTString;
import com.google.common.base.Joiner;

import org.bukkit.Bukkit;
import org.bukkit.World;
import org.bukkit.World.Environment;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.world.WorldLoadEvent;
import org.bukkit.event.world.WorldUnloadEvent;
import org.bukkit.plugin.java.JavaPlugin;
import org.dynmap.DynmapCommonAPI;
import org.dynmap.markers.MarkerAPI;
import org.dynmap.markers.MarkerSet;
import org.mcstats.MetricsLite;

/**
 * Bukkit plugin that renders Minecraft structure locations (villages,
 * temples, fortresses, ...) as Dynmap markers. One watcher thread per
 * eligible world monitors the world's {@code data/} folder for structure
 * file changes and refreshes markers on change.
 */
public class DynmapStructuresPlugin extends JavaPlugin implements Listener {
	/**
	 * Watches a single world's {@code data/} directory and updates Dynmap
	 * markers whenever one of the enabled structure files changes.
	 */
	private class DynmapStructuresRunnable implements Runnable {
		private final File directory;
		private volatile boolean stop = false;
		private final World world;

		public DynmapStructuresRunnable(World world) {
			this.world = world;
			directory = new File(this.world.getWorldFolder(), "data/");
		}

		@Override
		public void run() {
			logger.info("Adding thread for world '" + world.getName() + "'.");
			Path path = Paths.get(directory.toURI());
			try (WatchService watcher = path.getFileSystem().newWatchService()) {
				path.register(watcher, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY);
				while (!stop) {
					// Blocks until a file event arrives (or we are interrupted).
					WatchKey key = watcher.take();
					List<WatchEvent<?>> events = key.pollEvents();
					if (events.size() == 0)
						continue;
					// Collect the enabled structure files touched by this batch of events.
					List<String> changed = new ArrayList<>();
					for (WatchEvent<?> event : events) {
						String eventFile = event.context().toString();
						for (String str : enabled)
							if (str.equalsIgnoreCase(eventFile) && !changed.contains(str))
								changed.add(str);
					}
					if (changed.size() > 0)
						this.update(changed.toArray(new String[changed.size()]));
					if (!key.reset()) {
						logger.warning("Something went wrong with the watch service and it must be stopped. Sorry!");
						stop = true;
					}
				}
			} catch (InterruptedException e) {
				// Expected on world unload: removeWorld() interrupts this thread
				// to break out of watcher.take(). Exit quietly.
			} catch (Exception e) {
				e.printStackTrace(System.err);
			}
			logger.info("Removing thread for world '" + world.getName() + "'.");
		}

		/** Asks the watch loop to exit after its current iteration. */
		public void stop() {
			stop = true;
		}

		/**
		 * Re-reads the given structure data files and (re)creates one Dynmap
		 * marker per valid structure they contain.
		 *
		 * @param changed structure file names (e.g. {@code "Village.dat"}) to parse
		 */
		public void update(String[] changed) {
			logger.info("Updating markers for world '" + world.getName() + "'.");
			logger.info("Updating: " + Joiner.on(", ").join(changed));
			for (String str : changed)
				try {
					File file = new File(directory, str);
					if (!file.exists())
						continue;
					NBTCompound structures = NBTReader.read(file).<NBTCompound>get("data").<NBTCompound>get("Features");
					if (structures == null || structures.getPayload() == null)
						continue;
					for (NBT<?> temp : structures.getPayload()) {
						NBTCompound structure = (NBTCompound) temp;
						String id = structure.<NBTString>get("id").getPayload();
						String wn = world.getName();
						int x = structure.<NBTInteger>get("ChunkX").getPayload();
						int z = structure.<NBTInteger>get("ChunkZ").getPayload();
						if (str.equalsIgnoreCase("Village.dat") || str.equalsIgnoreCase("BOPVillage.dat")) {
							// Make sure this Village is actually in the world
							if (structure.<NBTByte>get("Valid") == null || structure.<NBTByte>get("Valid").getPayload() == 0)
								continue;
						} else if (str.equalsIgnoreCase("Temple.dat") || str.equalsIgnoreCase("BOPTemple.dat")) {
							// Check if this Temple is from Biomes O Plenty
							if (id.equalsIgnoreCase("BOPTemple"))
								id = "Temple";
							// Check if this Temple exists and if it's actually something else
							String type = "";
							boolean moved = false;
							List<NBT<?>> children = structure.<NBTList>get("Children").getPayload();
							if (children.size() > 0 && children.get(0) instanceof NBTCompound) {
								for (NBT<?> child : ((NBTCompound) children.get(0)).getPayload()) {
									// Check if this Temple has been moved to allow generation
									if (child.getName().equalsIgnoreCase("id"))
										type = child.getPayload().toString();
									if (child.getName().equalsIgnoreCase("HPos"))
										moved = ((NBTInteger) child).getPayload() != -1;
									// Check if this Temple is actually an Igloo or Witch hut
									if (child.getName().equalsIgnoreCase("id") && ((NBTString) child).getPayload().equalsIgnoreCase("Iglu"))
										id = "Igloo";
									else if (child.getName().equalsIgnoreCase("Witch") && ((NBTByte) child).getPayload() > 0)
										id = "Witch";
								}
							}
							if (!type.equalsIgnoreCase("TeDP") && !moved)
								continue;
							if (id == null)
								continue;
							// Respect the per-structure-type configuration toggles.
							if (id.equalsIgnoreCase("Igloo") && !configuration.getBoolean("structures.igloo"))
								continue;
							else if (id.equalsIgnoreCase("Temple") && !configuration.getBoolean("structures.temple"))
								continue;
							else if (id.equalsIgnoreCase("Witch") && !configuration.getBoolean("structures.witch"))
								continue;
						} else if (str.equalsIgnoreCase("Monument.dat")) {
							// Make sure this Monument is actually in the world
							if (structure.<NBTList>get("Processed").getPayload().size() == 0)
								continue;
						} else if (str.equalsIgnoreCase("Fortress.dat")) {
							// If this world is not Nether try to get one that is; otherwise
							// skip the fortress. (Braces added: the original dangling else
							// bound to the inner if, which is exactly this behavior.)
							if (world.getEnvironment() != Environment.NETHER
									&& Bukkit.getWorld(world.getName() + "_nether") != null
									&& Bukkit.getWorld(world.getName() + "_nether").getEnvironment() == Environment.NETHER) {
								wn = world.getName() + "_nether";
							} else {
								continue;
							}
						}
						if (id == null)
							continue;
						String label = id;
						if (noLabels)
							label = "";
						else if (includeCoordinates)
							label = id + " [" + x * 16 + "," + z * 16 + "]";
						// Chunk coordinates * 16 = block coordinates; y fixed at 64.
						set.createMarker(id + "," + x + "," + z, label, wn, x * 16, 64, z * 16,
								api.getMarkerIcon("structures." + id.toLowerCase(Locale.ROOT)), false);
					}
				} catch (IOException e) {
					e.printStackTrace(System.err);
				}
		}
	}

	private MarkerAPI api;
	private FileConfiguration configuration;
	private String[] enabled;
	private final String[] images = { "Fortress", "Igloo", "Mineshaft", "Monument", "Stronghold", "Temple", "Village", "Witch" };
	private boolean includeCoordinates;
	private Logger logger;
	private boolean noLabels;
	private final HashMap<World, DynmapStructuresRunnable> runnables = new HashMap<>();
	private MarkerSet set;
	private final HashMap<World, Thread> threads = new HashMap<>();

	@Override
	public void onEnable() {
		// BUG FIX: the logger must be saved before anything that logs. It was
		// previously assigned after the metrics block, so a metrics failure
		// hit logger.warning(...) on a null logger.
		logger = this.getLogger();
		// Set up the metrics
		try {
			MetricsLite metrics = new MetricsLite(this);
			metrics.start();
		} catch (IOException e) {
			logger.warning("Unable to enable metrics - something went wrong!");
			e.printStackTrace();
		}
		// Set up the configuration
		this.saveDefaultConfig();
		configuration = this.getConfig();
		configuration.options().copyDefaults(true);
		this.saveConfig();
		// Register for events
		this.getServer().getPluginManager().registerEvents(this, this);
		// Check if Dynmap is even enabled
		if (Bukkit.getPluginManager().isPluginEnabled("dynmap")) {
			// Set up our Dynmap layer
			api = ((DynmapCommonAPI) Bukkit.getPluginManager().getPlugin("dynmap")).getMarkerAPI();
			set = api.createMarkerSet(configuration.getString("layer.name").toLowerCase(Locale.ROOT),
					configuration.getString("layer.name"), null, false);
			set.setHideByDefault(configuration.getBoolean("layer.hidebydefault"));
			set.setLayerPriority(configuration.getInt("layer.layerprio"));
			noLabels = configuration.getBoolean("layer.nolabels");
			int minZoom = configuration.getInt("layer.minzoom");
			if (minZoom > 0)
				set.setMinZoom(minZoom);
			includeCoordinates = configuration.getBoolean("layer.inc-coord");
			// Create the marker icons
			for (String str : images) {
				InputStream in = this.getClass().getResourceAsStream("/" + str.toLowerCase(Locale.ROOT) + ".png");
				if (in != null) {
					if (api.getMarkerIcon("structures." + str.toLowerCase(Locale.ROOT)) == null)
						api.createMarkerIcon("structures." + str.toLowerCase(Locale.ROOT), str, in);
					else
						api.getMarkerIcon("structures." + str.toLowerCase(Locale.ROOT)).setMarkerIconImage(in);
				}
			}
			// Build an array of files to parse if changed
			List<String> enabled = new ArrayList<>();
			if (configuration.getBoolean("structures.fortress"))
				enabled.add("Fortress.dat");
			if (configuration.getBoolean("structures.mineshaft"))
				enabled.add("Mineshaft.dat");
			if (configuration.getBoolean("structures.monument"))
				enabled.add("Monument.dat");
			if (configuration.getBoolean("structures.stronghold"))
				enabled.add("Stronghold.dat");
			if (configuration.getBoolean("structures.igloo") || configuration.getBoolean("structures.temple")
					|| configuration.getBoolean("structures.witch")) {
				enabled.add("BOPTemple.dat");
				enabled.add("Temple.dat");
			}
			if (configuration.getBoolean("structures.village")) {
				enabled.add("BOPVillage.dat");
				enabled.add("Village.dat");
			}
			this.enabled = enabled.toArray(new String[enabled.size()]);
			// Parse the worlds that have already been loaded
			for (World w : Bukkit.getWorlds())
				this.addWorld(w);
		}
	}

	@EventHandler
	public void onWorldLoad(WorldLoadEvent event) {
		this.addWorld(event.getWorld());
	}

	@EventHandler
	public void onWorldUnload(WorldUnloadEvent event) {
		this.removeWorld(event.getWorld());
	}

	/**
	 * Starts marker updates and a low-priority watcher thread for the given
	 * world, if it is a structure-generating NORMAL or NETHER world.
	 */
	private void addWorld(World world) {
		switch (world.getEnvironment()) {
			case NORMAL:
			case NETHER:
				if (world.canGenerateStructures()) {
					// Update markers for this world
					DynmapStructuresRunnable r = new DynmapStructuresRunnable(world);
					r.update(enabled);
					// Add a thread to watch this world for changes
					Thread t = new Thread(r);
					t.setPriority(Thread.MIN_PRIORITY);
					t.start();
					runnables.put(world, r);
					threads.put(world, t);
				}
				break;
			default:
		}
	}

	/**
	 * Stops and discards the watcher for the given world, if one exists.
	 *
	 * BUG FIX: worlds that never passed the addWorld() filter (END worlds, or
	 * worlds without structure generation) still fire WorldUnloadEvent; the
	 * previous unconditional runnables.get(world).stop() threw an NPE. Also
	 * interrupts the thread so it wakes from the blocking watcher.take().
	 */
	private void removeWorld(World world) {
		DynmapStructuresRunnable runnable = runnables.remove(world);
		if (runnable != null)
			runnable.stop();
		Thread thread = threads.remove(world);
		if (thread != null)
			thread.interrupt();
	}
}
package com.concurrentperformance.ringingmaster.engine.notation.impl; import com.concurrentperformance.ringingmaster.engine.NumberOfBells; import com.concurrentperformance.ringingmaster.engine.helper.PlainCourseHelper; import com.concurrentperformance.ringingmaster.engine.method.MethodLead; import com.concurrentperformance.ringingmaster.engine.method.MethodRow; import com.concurrentperformance.ringingmaster.engine.method.impl.MethodBuilder; import com.concurrentperformance.ringingmaster.engine.notation.NotationBody; import com.concurrentperformance.ringingmaster.engine.touch.proof.Proof; import com.concurrentperformance.ringingmaster.generated.notation.persist.SerializableNotation; import com.ringingmaster.extraction.CentralCouncilMethodExtractor; import com.ringingmaster.extraction.MethodExtractor; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; public class LeadHeadCalculatorTest { private final Logger log = LoggerFactory.getLogger(this.getClass()); private final MethodExtractor methodExtractor = new CentralCouncilMethodExtractor(); @Test public void checkAllCCLibrary() { long count = methodExtractor .extractNotationsToStream() // .filter(serializableNotation -> serializableNotation.getName().startsWith("Cheeky Little Place")) // .filter(serializableNotation -> serializableNotation.getStage() == 5) .filter(this::calculatedLeadHeadNotEqualsToSuppliedLH) // .peek(serializableNotation -> log.warn(serializableNotation.getName() + " " + serializableNotation.getStage())) .count(); log.warn("[{}] methods calculated different to supplied LeadHead", count); } boolean calculatedLeadHeadNotEqualsToSuppliedLH(SerializableNotation serializableNotation) { log.info(serializableNotation.toString()); String ccName = serializableNotation.getName(); int ccStage = serializableNotation.getStage(); boolean ccIsFoldedPalindrome = serializableNotation.isFoldedPalindrome(); String ccNotation = 
serializableNotation.getNotation(); String ccNotation2 = serializableNotation.getNotation2(); String ccLeadHead = serializableNotation.getLeadHead(); NotationBuilder notationBuilder = NotationBuilder.getInstance(); notationBuilder.setNumberOfWorkingBells(NumberOfBells.valueOf(ccStage)); if (!ccIsFoldedPalindrome) { notationBuilder.setUnfoldedNotationShorthand(ccNotation); } else { notationBuilder.setFoldedPalindromeNotationShorthand(ccNotation, ccNotation2); } notationBuilder.setName(ccName); NotationBody notationBody = notationBuilder.build(); Proof proof = PlainCourseHelper.buildPlainCourse(notationBody, "TEST", false); MethodLead lead = proof.getCreatedMethod().getLead(0); //log.warn(lead.toString()); //assertEquals(notationBody.getNameIncludingNumberOfBells() + " Lead Head Code", ccLeadHead, notationBody.getLeadHeadCode()); if (!ccLeadHead.equals(notationBody.getLeadHeadCode())) { log.warn("[{}] {} [{}](calculated) vs [{}](library) NOT OK", notationBody.getNumberOfWorkingBells().getBellCount(), notationBody.getNameIncludingNumberOfBells(), notationBody.getLeadHeadCode(), ccLeadHead); return true; } else { return false; } } @Test public void lookupLeadHeadFunctions() { MethodRow row = MethodBuilder.parse(NumberOfBells.BELLS_9, "124638597"); assertEquals("124638597", row.getDisplayString(false)); assertEquals("f", LeadHeadCalculator.lookupLeadHeadCode(row, LeadHeadCalculator.LeadHeadType.NEAR)); assertEquals("m", LeadHeadCalculator.lookupLeadHeadCode(row, LeadHeadCalculator.LeadHeadType.FAR)); } }
package com.englishtown.bitbucket.hook; import com.atlassian.bitbucket.concurrent.BucketProcessor; import com.atlassian.bitbucket.i18n.I18nService; import com.atlassian.bitbucket.permission.Permission; import com.atlassian.bitbucket.repository.Repository; import com.atlassian.bitbucket.repository.RepositoryService; import com.atlassian.bitbucket.scm.Command; import com.atlassian.bitbucket.scm.ScmCommandBuilder; import com.atlassian.bitbucket.scm.ScmService; import com.atlassian.bitbucket.scm.git.command.GitCommandExitHandler; import com.atlassian.bitbucket.server.ApplicationPropertiesService; import com.atlassian.bitbucket.user.SecurityService; import com.google.common.base.Strings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import java.net.URI; import java.net.URISyntaxException; import java.time.Duration; import java.util.List; import java.util.Locale; import static com.englishtown.bitbucket.hook.MirrorRepositoryHook.PROP_PREFIX; public class MirrorBucketProcessor implements BucketProcessor<MirrorRequest> { static final String PROP_TIMEOUT = PROP_PREFIX + "timeout";
package org.sagebionetworks.web.unitclient.widget.entity.team;

import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.List;

import org.junit.Before;
import org.junit.Test;
import org.sagebionetworks.repo.model.MembershipInvitation;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.schema.adapter.JSONObjectAdapter;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.JSONObjectAdapterImpl;
import org.sagebionetworks.web.client.GlobalApplicationState;
import org.sagebionetworks.web.client.SynapseClientAsync;
import org.sagebionetworks.web.client.security.AuthenticationController;
import org.sagebionetworks.web.client.transform.NodeModelCreator;
import org.sagebionetworks.web.client.utils.Callback;
import org.sagebionetworks.web.client.widget.team.OpenUserInvitationsWidget;
import org.sagebionetworks.web.client.widget.team.OpenUserInvitationsWidgetView;
import org.sagebionetworks.web.shared.MembershipInvitationBundle;
import org.sagebionetworks.web.test.helper.AsyncMockStubber;

import com.google.gwt.user.client.rpc.AsyncCallback;

/**
 * Unit tests for {@link OpenUserInvitationsWidget}: verifies that
 * configuring the widget loads open team invitations into the view, and
 * that removing an invitation deletes it and refreshes the list.
 */
public class OpenUserInvitationsWidgetTest {

	SynapseClientAsync mockSynapseClient;
	GlobalApplicationState mockGlobalApplicationState;
	OpenUserInvitationsWidgetView mockView;
	String teamId = "123";
	OpenUserInvitationsWidget widget;
	AuthenticationController mockAuthenticationController;
	NodeModelCreator mockNodeModelCreator;
	Callback mockTeamUpdatedCallback;
	JSONObjectAdapter adapter = new JSONObjectAdapterImpl();
	UserProfile testProfile;
	MembershipInvitation testInvite;

	@Before
	public void before() throws JSONObjectAdapterException {
		mockGlobalApplicationState = mock(GlobalApplicationState.class);
		mockSynapseClient = mock(SynapseClientAsync.class);
		mockView = mock(OpenUserInvitationsWidgetView.class);
		mockAuthenticationController = mock(AuthenticationController.class);
		mockNodeModelCreator = mock(NodeModelCreator.class);
		mockTeamUpdatedCallback = mock(Callback.class);
		widget = new OpenUserInvitationsWidget(mockView, mockSynapseClient, mockGlobalApplicationState,
				mockAuthenticationController, mockNodeModelCreator);

		// Model creator returns canned profile/invitation objects for any JSON.
		testProfile = new UserProfile();
		testProfile.setOwnerId("42");
		testProfile.setFirstName("Bob");
		when(mockNodeModelCreator.createJSONEntity(anyString(), eq(UserProfile.class))).thenReturn(testProfile);
		testInvite = new MembershipInvitation();
		testInvite.setTeamId(teamId);
		testInvite.setUserId(testProfile.getOwnerId());
		testInvite.setMessage("This is a test invite");
		when(mockNodeModelCreator.createJSONEntity(anyString(), eq(MembershipInvitation.class))).thenReturn(testInvite);

		// By default, Synapse calls succeed: one open invitation, deletes return null.
		List<MembershipInvitationBundle> testReturn = new ArrayList<MembershipInvitationBundle>();
		testReturn.add(new MembershipInvitationBundle());
		AsyncMockStubber.callSuccessWith(testReturn).when(mockSynapseClient)
				.getOpenTeamInvitations(anyString(), any(AsyncCallback.class));
		AsyncMockStubber.callSuccessWith(null).when(mockSynapseClient)
				.deleteMembershipInvitation(anyString(), any(AsyncCallback.class));
	}

	/** configure() should fetch open invitations and push them to the view. */
	@SuppressWarnings("unchecked")
	@Test
	public void testConfigure() throws Exception {
		widget.configure(teamId, mockTeamUpdatedCallback);
		verify(mockSynapseClient).getOpenTeamInvitations(anyString(), any(AsyncCallback.class));
		verify(mockView).configure(anyList(), anyList());
	}

	/** A failed invitation fetch should surface an error message in the view. */
	// BUG FIX: this method was missing @Test, so it was never executed by JUnit.
	@SuppressWarnings("unchecked")
	@Test
	public void testConfigureFailure() throws Exception {
		AsyncMockStubber.callFailureWith(new Exception("unhandled exception")).when(mockSynapseClient)
				.getOpenTeamInvitations(anyString(), any(AsyncCallback.class));
		widget.configure(teamId, mockTeamUpdatedCallback);
		verify(mockSynapseClient).getOpenTeamInvitations(anyString(), any(AsyncCallback.class));
		verify(mockView).showErrorMessage(anyString());
	}

	/** Removing an invitation deletes it, notifies the team callback, and reloads. */
	@SuppressWarnings("unchecked")
	@Test
	public void testDeleteOpenInvite() throws Exception {
		widget.configure(teamId, mockTeamUpdatedCallback);
		widget.removeInvitation("123");
		verify(mockSynapseClient).deleteMembershipInvitation(anyString(), any(AsyncCallback.class));
		verify(mockTeamUpdatedCallback).invoke();
		verify(mockView).configure(anyList(), anyList());
	}

	/** A failed delete should surface an error message in the view. */
	// BUG FIX: this method was missing @Test, so it was never executed by JUnit.
	@SuppressWarnings("unchecked")
	@Test
	public void testDeleteOpenInviteFailure() throws Exception {
		AsyncMockStubber.callFailureWith(new Exception("unhandled exception")).when(mockSynapseClient)
				.deleteMembershipInvitation(anyString(), any(AsyncCallback.class));
		widget.configure(teamId, mockTeamUpdatedCallback);
		widget.removeInvitation("123");
		verify(mockSynapseClient).getOpenTeamInvitations(anyString(), any(AsyncCallback.class));
		verify(mockView).showErrorMessage(anyString());
	}
}
package com.github.anba.es6draft.runtime.internal; import static com.github.anba.es6draft.runtime.types.Null.NULL; import static com.github.anba.es6draft.runtime.types.Undefined.UNDEFINED; import java.lang.annotation.Annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodHandles.Lookup; import java.lang.invoke.MethodType; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.ArrayList; import java.util.Arrays; import java.util.EnumMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Map.Entry; import com.github.anba.es6draft.repl.StopExecutionException; import com.github.anba.es6draft.runtime.AbstractOperations; import com.github.anba.es6draft.runtime.ExecutionContext; import com.github.anba.es6draft.runtime.types.BuiltinSymbol; import com.github.anba.es6draft.runtime.types.Callable; import com.github.anba.es6draft.runtime.types.Intrinsics; import com.github.anba.es6draft.runtime.types.Property; import com.github.anba.es6draft.runtime.types.PropertyDescriptor; import com.github.anba.es6draft.runtime.types.ScriptObject; import com.github.anba.es6draft.runtime.types.builtins.NativeFunction; import com.github.anba.es6draft.runtime.types.builtins.OrdinaryObject; /** * Utility class to set-up initial properties for objects */ public final class Properties { private Properties() { } /** * Compatiblity extension marker */ @Documented @Target({ ElementType.TYPE }) @Retention(RetentionPolicy.RUNTIME) public static @interface CompatibilityExtension { CompatibilityOption value(); } /** * Built-in prototype */ @Documented @Target({ 
ElementType.FIELD }) @Retention(RetentionPolicy.RUNTIME) public static @interface Prototype { } /** * Built-in function property */ @Documented @Target({ ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) public static @interface Function { /** * Function name */ String name(); /** * Function symbol */ BuiltinSymbol symbol() default BuiltinSymbol.NONE; /** * Function arity */ int arity(); /** * Function attributes, default to <code>{[[Writable]]: true, [[Enumerable]]: * false, [[Configurable]]: true}</code> */ Attributes attributes() default @Attributes(writable = true, enumerable = false, configurable = true); } /** * Built-in value property */ @Documented @Target({ ElementType.FIELD }) @Retention(RetentionPolicy.RUNTIME) public static @interface Value { /** * Value name */ String name(); /** * Value symbol */ BuiltinSymbol symbol() default BuiltinSymbol.NONE; /** * Value attributes, default to <code>{[[Writable]]: true, [[Enumerable]]: * false, [[Configurable]]: true}</code> */ Attributes attributes() default @Attributes(writable = true, enumerable = false, configurable = true); } /** * Built-in accessor property */ @Documented @Target({ ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) public static @interface Accessor { /** * Accessor name */ String name(); /** * Accessor symbol */ BuiltinSymbol symbol() default BuiltinSymbol.NONE; enum Type { Getter, Setter } /** * Accessor type */ Type type(); /** * Accessor attributes, default to <code>{[[Enumerable]]: * false, [[Configurable]]: true}</code> */ Attributes attributes() default @Attributes(writable = false /*unused*/, enumerable = false, configurable = true); } /** * Built-in function property as an alias function */ @Documented @Target({ ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) public static @interface AliasFunction { /** * Function name */ String name(); /** * Function symbol */ BuiltinSymbol symbol() default BuiltinSymbol.NONE; /** * Function attributes, default to 
<code>{[[Writable]]: true, [[Enumerable]]: * false, [[Configurable]]: true}</code> */ Attributes attributes() default @Attributes(writable = true, enumerable = false, configurable = true); } /** * Built-in function property as an alias function */ @Documented @Target({ ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) public static @interface AliasFunctions { AliasFunction[] value(); } @Documented @Target({ ElementType.ANNOTATION_TYPE }) @Retention(RetentionPolicy.RUNTIME) public static @interface Attributes { boolean writable(); boolean enumerable(); boolean configurable(); } @Documented @Target(ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public static @interface Optional { /** * Default runtime type */ Default value() default Default.Undefined; /** * Default boolean value, only applicable if {@code value()} is {@link Default#Boolean} */ boolean booleanValue() default false; /** * Default string value, only applicable if {@code value()} is {@link Default#String} */ String stringValue() default ""; /** * Default number value, only applicable if {@code value()} is {@link Default#Number} */ double numberValue() default Double.NaN; enum Default { NONE, Undefined, Null, Boolean, String, Number; static Object defaultValue(Optional optional) { switch (optional.value()) { case Undefined: return UNDEFINED; case Null: return NULL; case Boolean: return optional.booleanValue(); case Number: return optional.numberValue(); case String: return optional.stringValue(); case NONE: default: return null; } } } } private static ClassValue<ObjectLayout> internalLayouts = new ClassValue<ObjectLayout>() { @Override protected ObjectLayout computeValue(Class<?> type) { return createInternalObjectLayout(type); } }; private static ClassValue<ObjectLayout> externalLayouts = new ClassValue<ObjectLayout>() { @Override protected ObjectLayout computeValue(Class<?> type) { return createExternalObjectLayout(type); } }; private static class ObjectLayout { 
// Tail of the ObjectLayout cache entry: every member is filled in lazily while scanning a
// holder class and stays null when the holder declares no member of that kind.
CompatibilityExtension extension;
Prototype proto = null;
Object protoValue = null;
Map<Value, Object> values = null;
Map<Function, MethodHandle> functions = null;
Map<Accessor, MethodHandle> accessors = null;
List<Entry<AliasFunction, Function>> aliases = null;
}

/**
 * Sets the {@link Prototype} and creates own properties for {@link Value}, {@link Function} and
 * {@link Accessor} fields.
 *
 * @param owner the script object that receives the properties
 * @param cx the execution context used for realm access and value conversion
 * @param holder the Java class whose annotated members describe the properties
 */
public static void createProperties(ScriptObject owner, ExecutionContext cx, Class<?> holder) {
    if (holder.getName().startsWith(INTERNAL_PACKAGE)) {
        // Internal built-ins may also declare prototypes, values, accessors and aliases;
        // they are always backed by an OrdinaryObject.
        assert owner instanceof OrdinaryObject;
        createInternalProperties((OrdinaryObject) owner, cx, holder);
    } else {
        // External holders only contribute @Function-annotated instance methods.
        createExternalProperties(owner, cx, holder);
    }
}

// Package prefix that identifies internal built-in object definitions.
private static final String INTERNAL_PACKAGE = "com.github.anba.es6draft.runtime.objects.";

/**
 * Per-context bundle of conversion method handles used as argument and return filters when
 * adapting annotated Java methods to native script functions. The instance handles are the
 * shared static handles with the {@link ExecutionContext} pre-bound where one is required.
 */
public static final class Converter {
    private final MethodHandle ToBooleanMH;
    private final MethodHandle ToStringMH;
    private final MethodHandle ToFlatStringMH;
    private final MethodHandle ToNumberMH;
    private final MethodHandle ToObjectMH;
    private final MethodHandle ToCallableMH;
    private final MethodHandle ToBooleanArrayMH;
    private final MethodHandle ToStringArrayMH;
    private final MethodHandle ToFlatStringArrayMH;
    private final MethodHandle ToNumberArrayMH;
    private final MethodHandle ToObjectArrayMH;
    private final MethodHandle ToCallableArrayMH;
    private final MethodHandle ToScriptExceptionMH;

    Converter(ExecutionContext cx) {
        // ToBoolean takes no context, so the static handle is used directly.
        ToBooleanMH = _ToBooleanMH;
        ToStringMH = MethodHandles.insertArguments(_ToStringMH, 0, cx);
        ToFlatStringMH = MethodHandles.insertArguments(_ToFlatStringMH, 0, cx);
        ToNumberMH = MethodHandles.insertArguments(_ToNumberMH, 0, cx);
        ToObjectMH = MethodHandles.insertArguments(_ToObjectMH, 0, cx);
        ToCallableMH = MethodHandles.insertArguments(_ToCallableMH, 0, cx);
        ToBooleanArrayMH = _ToBooleanArrayMH;
        ToStringArrayMH = MethodHandles.insertArguments(_ToStringArrayMH, 0, cx);
        ToFlatStringArrayMH = MethodHandles.insertArguments(_ToFlatStringArrayMH, 0, cx);
        ToNumberArrayMH = MethodHandles.insertArguments(_ToNumberArrayMH, 0, cx);
        ToObjectArrayMH = MethodHandles.insertArguments(_ToObjectArrayMH, 0, cx);
        ToCallableArrayMH = MethodHandles.insertArguments(_ToCallableArrayMH, 0, cx);
        ToScriptExceptionMH = MethodHandles.insertArguments(_ToScriptExceptionMH, 0, cx);
    }

    /**
     * Returns the conversion filter for a scalar parameter type, or {@code null} when the
     * parameter is {@code Object} and no conversion is needed.
     *
     * @throws IllegalArgumentException for unsupported parameter types
     */
    private MethodHandle filterFor(Class<?> c) {
        if (c == Object.class) {
            return null;
        } else if (c == Double.TYPE) {
            return ToNumberMH;
        } else if (c == Boolean.TYPE) {
            return ToBooleanMH;
        } else if (c == String.class) {
            return ToFlatStringMH;
        } else if (c == CharSequence.class) {
            return ToStringMH;
        } else if (c == ScriptObject.class) {
            return ToObjectMH;
        } else if (c == Callable.class) {
            return ToCallableMH;
        }
        throw new IllegalArgumentException();
    }

    /**
     * Returns the element-wise conversion filter for a var-args (array) parameter type, or
     * {@code null} for {@code Object[]}.
     *
     * @throws IllegalArgumentException for unsupported component types
     */
    private MethodHandle arrayfilterFor(Class<?> c) {
        assert c.isArray();
        c = c.getComponentType();
        if (c == Object.class) {
            return null;
        } else if (c == Double.TYPE) {
            return ToNumberArrayMH;
        } else if (c == Boolean.TYPE) {
            return ToBooleanArrayMH;
        } else if (c == String.class) {
            return ToFlatStringArrayMH;
        } else if (c == CharSequence.class) {
            return ToStringArrayMH;
        } else if (c == ScriptObject.class) {
            return ToObjectArrayMH;
        } else if (c == Callable.class) {
            return ToCallableArrayMH;
        }
        throw new IllegalArgumentException();
    }

    // Shared, unbound handles to the scalar conversion operations.
    private static final MethodHandle _ToBooleanMH;
    private static final MethodHandle _ToStringMH;
    private static final MethodHandle _ToFlatStringMH;
    private static final MethodHandle _ToNumberMH;
    private static final MethodHandle _ToObjectMH;
    static {
        Lookup lookup = MethodHandles.publicLookup();
        try {
            _ToStringMH = lookup.findStatic(AbstractOperations.class, "ToString", MethodType
                    .methodType(CharSequence.class, ExecutionContext.class, Object.class));
            _ToFlatStringMH = lookup.findStatic(AbstractOperations.class, "ToFlatString",
                    MethodType.methodType(String.class, ExecutionContext.class, Object.class));
            _ToNumberMH = lookup.findStatic(AbstractOperations.class, "ToNumber",
                    MethodType.methodType(Double.TYPE, ExecutionContext.class, Object.class));
            _ToBooleanMH = lookup.findStatic(AbstractOperations.class, "ToBoolean",
                    MethodType.methodType(Boolean.TYPE, Object.class));
            _ToObjectMH = lookup.findStatic(AbstractOperations.class, "ToObject", MethodType
                    .methodType(ScriptObject.class, ExecutionContext.class, Object.class));
        } catch (NoSuchMethodException | IllegalAccessException e) {
            // Lookup failures indicate a programming error, not a runtime condition.
            throw new IllegalStateException(e);
        }
    }

    private static final MethodHandle _ToCallableMH;
    static {
        Lookup lookup = MethodHandles.publicLookup();
        try {
            // CheckCallable is declared as (Object, ExecutionContext); permute to the
            // (ExecutionContext, Object) order used by every other converter handle.
            MethodHandle mh = lookup.findStatic(ScriptRuntime.class, "CheckCallable", MethodType
                    .methodType(Callable.class, Object.class, ExecutionContext.class));
            _ToCallableMH = MethodHandles.permuteArguments(mh, MethodType.methodType(
                    Callable.class, ExecutionContext.class, Object.class), 1, 0);
        } catch (NoSuchMethodException | IllegalAccessException e) {
            throw new IllegalStateException(e);
        }
    }

    // Shared, unbound handles to the array (var-args) conversion helpers below.
    private static final MethodHandle _ToBooleanArrayMH;
    private static final MethodHandle _ToStringArrayMH;
    private static final MethodHandle _ToFlatStringArrayMH;
    private static final MethodHandle _ToNumberArrayMH;
    private static final MethodHandle _ToObjectArrayMH;
    private static final MethodHandle _ToCallableArrayMH;
    static {
        Lookup lookup = MethodHandles.publicLookup();
        try {
            _ToStringArrayMH = lookup.findStatic(Converter.class, "ToString", MethodType
                    .methodType(CharSequence[].class, ExecutionContext.class, Object[].class));
            _ToFlatStringArrayMH = lookup.findStatic(Converter.class, "ToFlatString",
                    MethodType.methodType(String[].class, ExecutionContext.class, Object[].class));
            _ToNumberArrayMH = lookup.findStatic(Converter.class, "ToNumber", MethodType
                    .methodType(double[].class, ExecutionContext.class, Object[].class));
            _ToBooleanArrayMH = lookup.findStatic(Converter.class, "ToBoolean",
                    MethodType.methodType(boolean[].class, Object[].class));
            _ToObjectArrayMH = lookup.findStatic(Converter.class, "ToObject", MethodType
                    .methodType(ScriptObject[].class, ExecutionContext.class, Object[].class));
            _ToCallableArrayMH = lookup.findStatic(Converter.class, "ToCallable", MethodType
                    .methodType(Callable[].class, ExecutionContext.class, Object[].class));
        } catch (NoSuchMethodException | IllegalAccessException e) {
            throw new IllegalStateException(e);
        }
    }

    /** Converts each element with ToBoolean. */
    public static boolean[] ToBoolean(Object[] source) {
        boolean[] target = new boolean[source.length];
        for (int i = 0; i < target.length; i++) {
            target[i] = AbstractOperations.ToBoolean(source[i]);
        }
        return target;
    }

    /** Converts each element with ToString. */
    public static CharSequence[] ToString(ExecutionContext cx, Object[] source) {
        CharSequence[] target = new CharSequence[source.length];
        for (int i = 0; i < target.length; i++) {
            target[i] = AbstractOperations.ToString(cx, source[i]);
        }
        return target;
    }

    /** Converts each element with ToFlatString. */
    public static String[] ToFlatString(ExecutionContext cx, Object[] source) {
        String[] target = new String[source.length];
        for (int i = 0; i < target.length; i++) {
            target[i] = AbstractOperations.ToFlatString(cx, source[i]);
        }
        return target;
    }

    /** Converts each element with ToNumber. */
    public static double[] ToNumber(ExecutionContext cx, Object[] source) {
        double[] target = new double[source.length];
        for (int i = 0; i < target.length; i++) {
            target[i] = AbstractOperations.ToNumber(cx, source[i]);
        }
        return target;
    }

    /** Converts each element with ToObject. */
    public static ScriptObject[] ToObject(ExecutionContext cx, Object[] source) {
        ScriptObject[] target = new ScriptObject[source.length];
        for (int i = 0; i < target.length; i++) {
            target[i] = AbstractOperations.ToObject(cx, source[i]);
        }
        return target;
    }

    /** Checks each element with CheckCallable. */
    public static Callable[] ToCallable(ExecutionContext cx, Object[] source) {
        Callable[] target = new Callable[source.length];
        for (int i = 0; i < target.length; i++) {
            target[i] = ScriptRuntime.CheckCallable(source[i], cx);
        }
        return target;
    }

    private static final MethodHandle _ToScriptExceptionMH;
    static {
        Lookup lookup = MethodHandles.publicLookup();
        try {
            _ToScriptExceptionMH = lookup.findStatic(Converter.class, "ToScriptException",
                    MethodType.methodType(ScriptException.class, ExecutionContext.class,
                            Exception.class));
        } catch (NoSuchMethodException | IllegalAccessException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Maps a Java exception thrown by a native implementation to a {@link ScriptException}.
     * {@link StopExecutionException} is rethrown unchanged and existing script exceptions
     * pass through; everything else becomes an internal error.
     */
    public static ScriptException ToScriptException(ExecutionContext cx, Exception cause) {
        if (cause instanceof StopExecutionException)
            throw (StopExecutionException) cause;
        if (cause instanceof ScriptException)
            return (ScriptException) cause;
        // Fall back to the exception class name when there is no message.
        String info = Objects.toString(cause.getMessage(), cause.getClass().getSimpleName());
        return Errors.throwInternalError(cx, Messages.Key.InternalError, info);
    }
}

/**
 * Creates properties for an external holder class: each non-static {@link Function}-annotated
 * method is bound to {@code owner} and installed as a native function property.
 */
private static void createExternalProperties(ScriptObject owner, ExecutionContext cx,
        Class<?> holder) {
    ObjectLayout layout = externalLayouts.get(holder);
    if (layout.functions != null) {
        Converter converter = new Converter(cx);
        for (Entry<Function, MethodHandle> entry : layout.functions.entrySet()) {
            Function function = entry.getKey();
            MethodHandle unreflect = entry.getValue();
            MethodHandle handle = getInstanceMethodHandle(converter, unreflect, owner);
            String name = function.name();
            int arity = function.arity();
            Attributes attrs = function.attributes();
            NativeFunction fun = new NativeFunction(cx.getRealm(), name, arity, handle);
            owner.defineOwnProperty(cx, name, propertyDescriptor(fun, attrs));
        }
    }
}

/**
 * Creates properties for an internal holder class: prototype, values, functions, accessors
 * and alias functions, in that order. Does nothing when the holder is guarded by a
 * {@link CompatibilityExtension} that is disabled in the current realm.
 */
private static void createInternalProperties(OrdinaryObject owner, ExecutionContext cx,
        Class<?> holder) {
    ObjectLayout layout = internalLayouts.get(holder);
    if (layout.extension != null && !cx.getRealm().isEnabled(layout.extension.value())) {
        // return if extension is not enabled
        return;
    }
    if (layout.proto != null) {
        createPrototype(owner, cx, layout.proto, layout.protoValue);
    }
    if (layout.values != null) {
        for (Entry<Value, Object> entry : layout.values.entrySet()) {
            createValue(owner, cx, entry.getKey(), entry.getValue());
        }
    }
    if (layout.functions != null) {
        for (Entry<Function, MethodHandle> entry : layout.functions.entrySet()) {
            createFunction(owner, cx, entry.getKey(), entry.getValue());
        }
    }
    if (layout.accessors != null) {
        // Getter/setter halves of a single property arrive as separate entries, so
        // descriptors are accumulated first and defined once complete.
        Map<String, PropertyDescriptor> accessors1 = new LinkedHashMap<>();
        Map<BuiltinSymbol, PropertyDescriptor> accessors2 = new EnumMap<>(BuiltinSymbol.class);
        for (Entry<Accessor, MethodHandle> entry : layout.accessors.entrySet()) {
            createAccessor(owner, cx, entry.getKey(), entry.getValue(), accessors1, accessors2);
        }
        completeAccessors(owner, cx, accessors1, accessors2);
    }
    if (layout.aliases != null) {
        for (Entry<AliasFunction, Function> entry : layout.aliases) {
            createAliasFunction(owner, cx, entry.getKey(), entry.getValue());
        }
    }
}

/**
 * Scans an external holder class and collects its non-static {@link Function}-annotated
 * methods into a new {@link ObjectLayout}.
 */
private static ObjectLayout createExternalObjectLayout(Class<?> holder) {
    try {
        ObjectLayout layout = new ObjectLayout();
        Lookup lookup = MethodHandles.publicLookup();
        for (Method method : holder.getDeclaredMethods()) {
            // External layouts only consider instance methods.
            if (Modifier.isStatic(method.getModifiers()))
                continue;
            Function function = method.getAnnotation(Function.class);
            if (function != null) {
                if (layout.functions == null) {
                    layout.functions = new LinkedHashMap<>();
                }
                layout.functions.put(function, lookup.unreflect(method));
            }
        }
        return layout;
    } catch (IllegalAccessException e) {
        throw new IllegalArgumentException(e);
    }
}

/**
 * Scans an internal holder class: static fields may carry {@link Value} or {@link Prototype}
 * (at most one prototype per holder), static methods may carry {@link Function},
 * {@link Accessor} and {@link AliasFunction}/{@link AliasFunctions} annotations.
 */
private static ObjectLayout createInternalObjectLayout(Class<?> holder) {
    try {
        ObjectLayout layout = new ObjectLayout();
        Lookup lookup = MethodHandles.publicLookup();
        layout.extension = holder.getAnnotation(CompatibilityExtension.class);
        boolean hasProto = false;
        for (Field field : holder.getDeclaredFields()) {
            // Internal layouts only consider static members.
            if (!Modifier.isStatic(field.getModifiers()))
                continue;
            Value value = field.getAnnotation(Value.class);
            Prototype prototype = field.getAnnotation(Prototype.class);
            // A field is either a value or a prototype, never both.
            assert value == null || prototype == null;
            if (value != null) {
                assert Modifier.isFinal(field.getModifiers());
                if (layout.values == null) {
                    layout.values = new LinkedHashMap<>();
                }
                layout.values.put(value, getRawValue(field));
            }
            if (prototype != null) {
                assert Modifier.isFinal(field.getModifiers());
                // Only a single prototype entry is allowed per holder.
                assert !hasProto;
                hasProto = true;
                layout.proto = prototype;
                layout.protoValue = getRawValue(field);
            }
        }
        for (Method method : holder.getDeclaredMethods()) {
            if (!Modifier.isStatic(method.getModifiers()))
                continue;
            Function function = method.getAnnotation(Function.class);
            Accessor accessor = method.getAnnotation(Accessor.class);
            AliasFunction alias = method.getAnnotation(AliasFunction.class);
            AliasFunctions aliases = method.getAnnotation(AliasFunctions.class);
            // A method is either a function or an accessor; aliases require a function.
            assert function == null || accessor == null;
            assert alias == null || function != null;
            assert aliases == null || function != null;
            if (function != null) {
                if (layout.functions == null) {
                    layout.functions = new LinkedHashMap<>();
                }
                layout.functions.put(function, getStaticMethodHandle(lookup, method));
            }
            if (accessor != null) {
                if (layout.accessors == null) {
                    layout.accessors = new LinkedHashMap<>();
                }
                layout.accessors.put(accessor, getStaticMethodHandle(lookup, method));
            }
            if (alias != null) {
                if (layout.aliases == null) {
                    layout.aliases = new ArrayList<>();
                }
                layout.aliases.add(new SimpleImmutableEntry<>(alias, function));
            }
            if (aliases != null) {
                if (layout.aliases == null) {
                    layout.aliases = new ArrayList<>();
                }
                for (AliasFunction a : aliases.value()) {
                    layout.aliases.add(new SimpleImmutableEntry<>(a, function));
                }
            }
        }
        return layout;
    } catch (IllegalAccessException e) {
        throw new IllegalArgumentException(e);
    }
}

/** Reads the value of a static field. */
private static Object getRawValue(Field field) throws IllegalAccessException {
    Object rawValue = field.get(null);
    return rawValue;
}

/**
 * Adapts an instance method to the calling convention expected by {@code NativeFunction}:
 * binds the receiver, applies per-parameter conversion filters, normalizes the return type
 * to {@code Object}, spreads the script argument array onto the parameters and converts
 * thrown Java exceptions to script exceptions.
 */
private static <T> MethodHandle getInstanceMethodHandle(Converter converter,
        MethodHandle unreflect, T owner) {
    MethodHandle handle = unreflect;
    handle = handle.bindTo(owner);
    boolean varargs = unreflect.isVarargsCollector();
    if (varargs) {
        // bindTo() removes the var-args collector flag, restore it
        MethodType type = unreflect.type();
        Class<?> parameterType = type.parameterType(type.parameterCount() - 1);
        handle = handle.asVarargsCollector(parameterType);
    }
    MethodType type = handle.type();
    int pcount = type.parameterCount();
    // The trailing var-args array, if any, is filtered element-wise instead.
    int actual = type.parameterCount() - (varargs ? 1 : 0);
    Class<?>[] params = type.parameterArray();
    MethodHandle[] filters = new MethodHandle[pcount];
    for (int p = 0; p < actual; ++p) {
        filters[p] = converter.filterFor(params[p]);
    }
    if (varargs) {
        filters[pcount - 1] = converter.arrayfilterFor(params[pcount - 1]);
    }
    handle = MethodHandles.filterArguments(handle, 0, filters);
    // Normalize the return type to Object; void becomes UNDEFINED.
    Class<?> returnType = type.returnType();
    if (returnType == Double.TYPE || returnType == Boolean.TYPE || returnType == String.class
            || returnType == CharSequence.class
            || ScriptObject.class.isAssignableFrom(returnType)) {
        handle = MethodHandles.explicitCastArguments(handle,
                handle.type().changeReturnType(Object.class));
    } else if (returnType == Void.TYPE) {
        handle = MethodHandles.filterReturnValue(handle,
                MethodHandles.constant(Object.class, UNDEFINED));
    } else if (returnType != Object.class) {
        throw new IllegalArgumentException();
    }
    // Pad/trim the incoming Object[] arguments to the expected parameter count.
    MethodHandle filter;
    if (varargs) {
        filter = MethodHandles.insertArguments(ParameterFilter.filterVarArgs, 0, actual);
    } else {
        filter = filter(actual);
    }
    MethodHandle spreader = MethodHandles.spreadInvoker(handle.type(), 0);
    handle = MethodHandles.insertArguments(spreader, 0, handle);
    handle = MethodHandles.filterArguments(handle, 0, filter);
    // Drop the (unused) thisValue argument supplied by the native-function convention.
    handle = MethodHandles.dropArguments(handle, 0, Object.class);
    // Route any thrown Exception through ToScriptException before rethrowing.
    MethodHandle thrower = MethodHandles.throwException(handle.type().returnType(),
            ScriptException.class);
    thrower = MethodHandles.filterArguments(thrower, 0, converter.ToScriptExceptionMH);
    handle = MethodHandles.catchException(handle, Exception.class, thrower);
    return handle;
}

/**
 * Adapts a static holder method to the
 * {@code (ExecutionContext, Object, Object[]) -> Object} calling convention, inserting
 * argument-count normalization and {@link Optional} parameter defaults where required.
 */
private static MethodHandle getStaticMethodHandle(Lookup lookup, Method method)
        throws IllegalAccessException {
    // check: (ExecutionContext, Object, Object[]) -> Object
    MethodHandle handle = lookup.unreflect(method);
    MethodType type = handle.type();
    StaticMethodKind kind = staticMethodKind(type);
    if (kind == StaticMethodKind.Invalid) {
        throw new IllegalArgumentException(handle.toString());
    }
    if (kind == StaticMethodKind.Spreader) {
        // First two parameters (context, thisValue) are passed through unchanged.
        int fixedArguments = 2;
        boolean varargs = handle.isVarargsCollector();
        int actual = type.parameterCount() - fixedArguments - (varargs ? 1 : 0);
        Object[] defaults = methodDefaults(method, fixedArguments, actual);
        MethodHandle filter;
        if (defaults != null && varargs) {
            filter = MethodHandles.insertArguments(ParameterFilter.filterVarArgsDefaults, 0,
                    actual, defaults);
        } else if (defaults != null) {
            filter = MethodHandles.insertArguments(ParameterFilter.filterDefaults, 0, actual,
                    defaults);
        } else if (varargs) {
            filter = MethodHandles.insertArguments(ParameterFilter.filterVarArgs, 0, actual);
        } else {
            filter = filter(actual);
        }
        MethodHandle spreader = MethodHandles.spreadInvoker(type, fixedArguments);
        spreader = MethodHandles.insertArguments(spreader, 0, handle);
        spreader = MethodHandles.filterArguments(spreader, fixedArguments, filter);
        handle = spreader;
    }
    return handle;
}

// Small cache of argument-count filters for common arities.
private static final MethodHandle filters[] = new MethodHandle[5];

/** Returns (caching small arities) a filter that normalizes Object[] args to length n. */
private static MethodHandle filter(int n) {
    assert n >= 0;
    if (n < filters.length) {
        MethodHandle filter = filters[n];
        if (filter == null) {
            filters[n] = filter = MethodHandles.insertArguments(ParameterFilter.filter, 0, n);
        }
        return filter;
    }
    return MethodHandles.insertArguments(ParameterFilter.filter, 0, n);
}

/**
 * Argument-array normalization helpers: pad missing arguments with UNDEFINED or declared
 * defaults, truncate surplus arguments, and collect trailing arguments for var-args calls.
 */
public static final class ParameterFilter {
    private ParameterFilter() {
    }

    private static final MethodHandle filterVarArgsDefaults;
    private static final MethodHandle filterDefaults;
    private static final MethodHandle filterVarArgs;
    private static final MethodHandle filter;
    static {
        Lookup lookup = MethodHandles.publicLookup();
        try {
            filterVarArgsDefaults = lookup.findStatic(ParameterFilter.class,
                    "filterVarArgsDefaults", MethodType.methodType(Object[].class,
                            Integer.TYPE, Object[].class, Object[].class));
            filterDefaults = lookup.findStatic(ParameterFilter.class, "filterDefaults",
                    MethodType.methodType(Object[].class, Integer.TYPE, Object[].class,
                            Object[].class));
            filterVarArgs = lookup.findStatic(ParameterFilter.class, "filterVarArgs",
                    MethodType.methodType(Object[].class, Integer.TYPE, Object[].class));
            filter = lookup.findStatic(ParameterFilter.class, "filter",
                    MethodType.methodType(Object[].class, Integer.TYPE, Object[].class));
        } catch (NoSuchMethodException | IllegalAccessException e) {
            throw new IllegalStateException(e);
        }
    }

    private static final Object[] EMPTY_ARRAY = new Object[] {};

    /**
     * Normalizes to n fixed arguments plus a trailing rest-array, filling missing
     * arguments from {@code defaultValues}.
     */
    public static Object[] filterVarArgsDefaults(int n, Object[] defaultValues, Object[] args) {
        assert n == defaultValues.length;
        Object[] arguments = Arrays.copyOf(args, n + 1, Object[].class);
        if (args.length == n) {
            arguments[n] = EMPTY_ARRAY;
        } else if (args.length > n) {
            // Surplus arguments are collected into the rest-array.
            arguments[n] = Arrays.copyOfRange(args, n, args.length, Object[].class);
        } else {
            int argslen = args.length;
            System.arraycopy(defaultValues, argslen, arguments, argslen, (n - argslen));
            arguments[n] = EMPTY_ARRAY;
        }
        return arguments;
    }

    /** Normalizes to exactly n arguments, filling missing ones from {@code defaultValues}. */
    public static Object[] filterDefaults(int n, Object[] defaultValues, Object[] args) {
        assert n == defaultValues.length;
        if (args.length == n) {
            return args;
        } else if (args.length > n) {
            Object[] arguments = Arrays.copyOf(args, n, Object[].class);
            return arguments;
        } else {
            Object[] arguments = Arrays.copyOf(args, n, Object[].class);
            int argslen = args.length;
            System.arraycopy(defaultValues, argslen, arguments, argslen, (n - argslen));
            return arguments;
        }
    }

    /**
     * Normalizes to n fixed arguments plus a trailing rest-array, padding missing
     * arguments with UNDEFINED.
     */
    public static Object[] filterVarArgs(int n, Object[] args) {
        Object[] arguments = Arrays.copyOf(args, n + 1, Object[].class);
        if (args.length == n) {
            arguments[n] = EMPTY_ARRAY;
        } else if (args.length > n) {
            arguments[n] = Arrays.copyOfRange(args, n, args.length, Object[].class);
        } else {
            Arrays.fill(arguments, args.length, n, UNDEFINED);
            arguments[n] = EMPTY_ARRAY;
        }
        return arguments;
    }

    /** Normalizes to exactly n arguments, padding missing ones with UNDEFINED. */
    public static Object[] filter(int n, Object[] args) {
        if (args.length == n) {
            return args;
        } else if (args.length > n) {
            Object[] arguments = Arrays.copyOf(args, n, Object[].class);
            return arguments;
        } else {
            Object[] arguments = Arrays.copyOf(args, n, Object[].class);
            Arrays.fill(arguments, args.length, n, UNDEFINED);
            return arguments;
        }
    }
}

/** Sets the [[Prototype]] of {@code owner} from a {@link Prototype}-annotated field. */
private static void createPrototype(OrdinaryObject owner, ExecutionContext cx, Prototype proto,
        Object rawValue) {
    Object value = resolveValue(cx, rawValue);
    assert value == null || value instanceof ScriptObject;
    ScriptObject prototype = (ScriptObject) value;
    owner.setPrototype(prototype);
}

/** Defines a data property from a {@link Value}-annotated field (name or built-in symbol). */
private static void createValue(OrdinaryObject owner, ExecutionContext cx, Value val,
        Object rawValue) {
    String name = val.name();
    BuiltinSymbol sym = val.symbol();
    Attributes attrs = val.attributes();
    Object value = resolveValue(cx, rawValue);
    if (sym == BuiltinSymbol.NONE) {
        owner.defineOwnProperty(cx, name, propertyDescriptor(value, attrs));
    } else {
        owner.defineOwnProperty(cx, sym.get(), propertyDescriptor(value, attrs));
    }
}

/** Defines a native function property from a {@link Function}-annotated static method. */
private static void createFunction(OrdinaryObject owner, ExecutionContext cx,
        Function function, MethodHandle mh) {
    String name = function.name();
    BuiltinSymbol sym = function.symbol();
    int arity = function.arity();
    Attributes attrs = function.attributes();
    // Bind the execution context as the leading argument.
    mh = MethodHandles.insertArguments(mh, 0, cx);
    NativeFunction fun = new NativeFunction(cx.getRealm(), name, arity, mh);
    if (sym == BuiltinSymbol.NONE) {
        owner.defineOwnProperty(cx, name, propertyDescriptor(fun, attrs));
    } else {
        owner.defineOwnProperty(cx, sym.get(), propertyDescriptor(fun, attrs));
    }
}

/**
 * Records one accessor half (getter or setter) into the pending descriptor maps; the
 * descriptors are installed later by {@link #completeAccessors}.
 */
private static void createAccessor(OrdinaryObject owner, ExecutionContext cx,
        Accessor accessor, MethodHandle mh, Map<String, PropertyDescriptor> accessors1,
        Map<BuiltinSymbol, PropertyDescriptor> accessors2) {
    String name = accessor.name();
    BuiltinSymbol sym = accessor.symbol();
    Accessor.Type type = accessor.type();
    // Getters take no arguments, setters exactly one.
    int arity = (type == Accessor.Type.Getter ? 0 : 1);
    Attributes attrs = accessor.attributes();
    mh = MethodHandles.insertArguments(mh, 0, cx);
    NativeFunction fun = new NativeFunction(cx.getRealm(), name, arity, mh);
    PropertyDescriptor desc;
    if (sym == BuiltinSymbol.NONE) {
        if ((desc = accessors1.get(name)) == null) {
            accessors1.put(name, desc = propertyDescriptor(null, null, attrs));
        }
    } else {
        if ((desc = accessors2.get(sym)) == null) {
            accessors2.put(sym, desc = propertyDescriptor(null, null, attrs));
        }
    }
    // Both halves of an accessor must agree on their attributes.
    assert !attrs.writable() && attrs.enumerable() == desc.isEnumerable()
            && attrs.configurable() == desc.isConfigurable();
    if (type == Accessor.Type.Getter) {
        assert desc.getGetter() == null;
        desc.setGetter(fun);
    } else {
        assert desc.getSetter() == null;
        desc.setSetter(fun);
    }
}

/** Defines the accumulated accessor descriptors on {@code owner}. */
private static void completeAccessors(OrdinaryObject owner, ExecutionContext cx,
        Map<String, PropertyDescriptor> accessors1,
        Map<BuiltinSymbol, PropertyDescriptor> accessors2) {
    if (accessors1 != null) {
        for (Entry<String, PropertyDescriptor> entry : accessors1.entrySet()) {
            owner.defineOwnProperty(cx, entry.getKey(), entry.getValue());
        }
    }
    if (accessors2 != null) {
        for (Entry<BuiltinSymbol, PropertyDescriptor> entry : accessors2.entrySet()) {
            owner.defineOwnProperty(cx, entry.getKey().get(), entry.getValue());
        }
    }
}

/**
 * Defines an alias property that reuses the function value of an already defined
 * {@link Function} property; the aliased function must have been created beforehand.
 */
private static void createAliasFunction(OrdinaryObject owner, ExecutionContext cx,
        AliasFunction alias, Function function) {
    String name = alias.name();
    BuiltinSymbol sym = alias.symbol();
    Attributes attrs = alias.attributes();
    Property fun;
    if (function.symbol() == BuiltinSymbol.NONE) {
        fun = owner.getOwnProperty(cx, function.name());
    } else {
        fun = owner.getOwnProperty(cx, function.symbol().get());
    }
    assert fun != null;
    if (sym == BuiltinSymbol.NONE) {
        owner.defineOwnProperty(cx, name, propertyDescriptor(fun.getValue(), attrs));
    } else {
        owner.defineOwnProperty(cx, sym.get(), propertyDescriptor(fun.getValue(), attrs));
    }
}

/** Resolves an {@link Intrinsics} placeholder to the realm's intrinsic object. */
private static Object resolveValue(ExecutionContext cx, Object value) {
    if (value instanceof Intrinsics) {
        value = cx.getIntrinsic((Intrinsics) value);
    }
    return value;
}

// Classification of static holder methods by signature shape.
private enum StaticMethodKind {
    Invalid, Spreader, ObjectArray
}

/**
 * Classifies a static method signature: {@code ObjectArray} when it already matches the
 * {@code (ExecutionContext, Object, Object[]) -> Object} convention, {@code Spreader} when
 * the trailing arguments must be spread from the argument array, {@code Invalid} otherwise.
 */
private static StaticMethodKind staticMethodKind(MethodType type) {
    int pcount = type.parameterCount();
    if (pcount < 2)
        return StaticMethodKind.Invalid;
    Class<?>[] params = type.parameterArray();
    int p = 0;
    // first two parameters are (ExecutionContext, Object=ThisValue)
    if (!(ExecutionContext.class.equals(params[p++]) && Object.class.equals(params[p++]))) {
        return StaticMethodKind.Invalid;
    }
    // always required to return Object (for now at least)
    if (!Object.class.equals(type.returnType())) {
        return StaticMethodKind.Invalid;
    }
    if (p + 1 == pcount && Object[].class.equals(params[p])) {
        // (ExecutionContext, Object, Object[]) case
        return StaticMethodKind.ObjectArray;
    }
    // otherwise all trailing arguments need to be of type Object
    for (; p < pcount; ++p) {
        if (Object.class.equals(params[p])) {
            continue;
        }
        if (p + 1 == pcount && Object[].class.equals(params[p])) {
            continue;
        }
        return StaticMethodKind.Invalid;
    }
    return StaticMethodKind.Spreader;
}

/**
 * Collects {@link Optional} parameter defaults for a method, or returns {@code null} when
 * no parameter declares one. Parameters without an explicit default get UNDEFINED.
 */
private static Object[] methodDefaults(Method method, int fixedArguments, int actual) {
    Object[] defaults = null;
    Annotation[][] parameterAnnotations = method.getParameterAnnotations();
    for (int parameter = 0; parameter < actual; ++parameter) {
        Annotation[] annotations = parameterAnnotations[parameter + fixedArguments];
        for (Annotation annotation : annotations) {
            Class<? extends Annotation> type = annotation.annotationType();
            if (type == Optional.class) {
                Optional optional = (Optional) annotation;
                if (defaults == null) {
                    // Allocate lazily; most methods have no optional parameters.
                    defaults = new Object[actual];
                    Arrays.fill(defaults, UNDEFINED);
                }
                defaults[parameter] = Optional.Default.defaultValue(optional);
            }
        }
    }
    return defaults;
}

/** Builds a data-property descriptor from the annotation attributes. */
private static PropertyDescriptor propertyDescriptor(Object value, Attributes attrs) {
    return new PropertyDescriptor(value, attrs.writable(), attrs.enumerable(),
            attrs.configurable());
}

/** Builds an accessor-property descriptor from the annotation attributes. */
private static PropertyDescriptor propertyDescriptor(Callable getter, Callable setter,
        Attributes attrs) {
    return new PropertyDescriptor(getter, setter, attrs.enumerable(), attrs.configurable());
}
}
package com.github.aureliano.achmed.os.service; import org.apache.log4j.Logger; import com.github.aureliano.achmed.command.CommandFacade; import com.github.aureliano.achmed.command.CommandResponse; import com.github.aureliano.achmed.helper.StringHelper; public class DebianService extends LinuxService { private static final Logger logger = Logger.getLogger(DebianService.class); private static final String SERVICE = "/usr/sbin/service"; public DebianService() { super(); } @Override public CommandResponse start() { if (this.isRunning()) { logger.debug("Service " + super.properties.getName() + " is already running."); return null; } if (StringHelper.isEmpty(super.properties.getBinary())) { return CommandFacade.executeCommand(SERVICE, super.properties.getName(), "start"); } return super.start(); } @Override public CommandResponse stop() { if (!this.isRunning()) { logger.debug("Service " + super.properties.getName() + " is not running."); return null; } if (StringHelper.isEmpty(super.properties.getBinary())) { return CommandFacade.executeCommand(SERVICE, super.properties.getName(), "stop"); } return super.stop(); } @Override public CommandResponse restart() { if ((super.properties.getHasRestart() != null) && (super.properties.getHasRestart())) { return CommandFacade.executeCommand(SERVICE, super.properties.getName(), "restart"); } return super.restart(); } @Override public boolean isRunning() { if ((super.properties.getHasStatus() != null) && (super.properties.getHasStatus())) { CommandResponse res = CommandFacade.executeCommand(SERVICE, super.properties.getName(), "status"); return (res.getExitStatusCode() == 0); } return super.isRunning(); } public CommandResponse enableBootstrap() { throw new UnsupportedOperationException("Not implemented yet."); } public CommandResponse disableBootstrap() { throw new UnsupportedOperationException("Not implemented yet."); } }
package org.jboss.as.test.integration.domain;

import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.*;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;

import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.client.helpers.domain.DomainClient;
import org.jboss.as.controller.operations.common.Util;
import org.jboss.as.test.integration.domain.management.util.DomainLifecycleUtil;
import org.jboss.as.test.integration.domain.management.util.DomainTestUtils;
import org.jboss.as.test.integration.domain.management.util.WildFlyManagedConfiguration;
import org.jboss.as.test.integration.management.util.MgmtOperationException;
import org.jboss.as.test.shared.TimeoutUtil;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.Property;
import org.junit.Assert;
import org.junit.Test;

/**
 * Checks that child resources that should be ordered (currently only jgroups protocols) keep
 * their position after a slave host reconnect. Core already tests indexed adds on reconnect;
 * this is a sanity check that no special describe handler overrides the default mechanism.
 *
 * @author Kabir Khan
 */
public class OrderedChildResourcesTestCase extends BuildConfigurationTestBase {

    public static final String slaveAddress = System.getProperty("jboss.test.host.slave.address", "127.0.0.1");

    private static final String SECONDARY_HOST_NAME = "secondary";
    private static final int ADJUSTED_SECOND = TimeoutUtil.adjust(1000);
    private static final String TARGET_PROTOCOL = "pbcast.STABLE";

    @Test
    public void testOrderedChildResources() throws Exception {
        final WildFlyManagedConfiguration masterConfig =
                createConfiguration("domain.xml", "host-primary.xml", getClass().getSimpleName());
        final DomainLifecycleUtil masterUtils = new DomainLifecycleUtil(masterConfig);

        final WildFlyManagedConfiguration slaveConfig =
                createConfiguration("domain.xml", "host-secondary.xml", getClass().getSimpleName(),
                        SECONDARY_HOST_NAME, slaveAddress, 19990);
        final DomainLifecycleUtil slaveUtils = new DomainLifecycleUtil(slaveConfig);

        try {
            masterUtils.start();
            slaveUtils.start();

            PathAddress stackAddress = PathAddress.pathAddress(PROFILE, "full-ha")
                    .append(SUBSYSTEM, "jgroups")
                    .append("stack", "tcp");

            // Snapshot the stack on both hosts; they must agree initially.
            final ModelNode originalMasterStack = readResource(masterUtils.getDomainClient(), stackAddress);
            originalMasterStack.protect();
            final ModelNode originalSlaveStack = readResource(slaveUtils.getDomainClient(), stackAddress);
            originalSlaveStack.protect();
            Assert.assertEquals(originalMasterStack, originalSlaveStack);

            // Locate the target protocol and remember its position in the ordered list.
            int index = -1;
            ModelNode value = null;
            List<Property> protocols = originalMasterStack.get(PROTOCOL).asPropertyList();
            for (int i = 0; i < protocols.size(); i++) {
                Property protocol = protocols.get(i);
                if (TARGET_PROTOCOL.equals(protocol.getName())) {
                    value = protocol.getValue();
                    index = i;
                    break;
                }
            }

            // Make sure that we found the protocol and that it is not at the end.
            Assert.assertTrue(0 <= index);
            Assert.assertTrue(index < originalMasterStack.get(PROTOCOL).keys().size() - 2);

            PathAddress targetProtocolAddress = stackAddress.append(PROTOCOL, TARGET_PROTOCOL);

            // Remove the protocol...
            DomainTestUtils.executeForResult(Util.createRemoveOperation(targetProtocolAddress),
                    masterUtils.getDomainClient());

            // ...reload the master into admin-only and re-add it at its old index...
            reloadMaster(masterUtils, true);
            ModelNode add = value.clone();
            add.get(OP).set(ADD);
            add.get(OP_ADDR).set(targetProtocolAddress.toModelNode());
            add.get(ADD_INDEX).set(index);
            DomainTestUtils.executeForResult(add, masterUtils.getDomainClient());

            // ...then back to normal mode and verify the slave sees it in the right place.
            reloadMaster(masterUtils, false);
            ModelNode slaveStack = readResource(slaveUtils.getDomainClient(), stackAddress);
            Assert.assertEquals(originalMasterStack, slaveStack);

            // Check that :read-operation-description has add-index defined; WFLY-6782
            ModelNode rodOp = Util.createOperation(READ_OPERATION_DESCRIPTION_OPERATION, targetProtocolAddress);
            rodOp.get(NAME).set(ADD);
            ModelNode result = DomainTestUtils.executeForResult(rodOp, masterUtils.getDomainClient());
            Assert.assertTrue(result.get(REQUEST_PROPERTIES).hasDefined(ADD_INDEX));
        } finally {
            try {
                slaveUtils.stop();
            } finally {
                masterUtils.stop();
            }
        }
    }

    /** Reads the full resource at {@code pathAddress}. */
    private ModelNode readResource(DomainClient client, PathAddress pathAddress)
            throws IOException, MgmtOperationException {
        ModelNode op = Util.createEmptyOperation(READ_RESOURCE_OPERATION, pathAddress);
        return DomainTestUtils.executeForResult(op, client);
    }

    /**
     * Reloads the master host controller. When leaving admin-only mode, polls until the
     * slave has re-registered or roughly twenty (adjusted) seconds have elapsed.
     */
    private void reloadMaster(DomainLifecycleUtil domainMasterLifecycleUtil, boolean adminOnly) throws Exception {
        ModelNode reload = Util.createEmptyOperation("reload", PathAddress.pathAddress(HOST, "master"));
        reload.get("admin-only").set(adminOnly);
        domainMasterLifecycleUtil.executeAwaitConnectionClosed(reload);
        domainMasterLifecycleUtil.connect();
        domainMasterLifecycleUtil.awaitHostController(System.currentTimeMillis());

        if (!adminOnly) {
            // Wait for the slave to reconnect by polling the host list.
            long deadline = System.currentTimeMillis() + 20 * ADJUSTED_SECOND;
            while (true) {
                Thread.sleep(ADJUSTED_SECOND);
                if (checkSlaveReconnected(domainMasterLifecycleUtil.getDomainClient())
                        || System.currentTimeMillis() >= deadline) {
                    break;
                }
            }
        }
    }

    /** Returns true once both hosts are registered and the secondary host is among them. */
    private boolean checkSlaveReconnected(DomainClient masterClient) throws Exception {
        ModelNode op = Util.createEmptyOperation(READ_CHILDREN_NAMES_OPERATION, PathAddress.EMPTY_ADDRESS);
        op.get(CHILD_TYPE).set(HOST);
        try {
            List<ModelNode> hosts = DomainTestUtils.executeForResult(op, masterClient).asList();
            if (hosts.size() != 2) {
                return false;
            }
            for (ModelNode host : hosts) {
                if (SECONDARY_HOST_NAME.equals(host.asString())) {
                    return true;
                }
            }
        } catch (Exception ignored) {
            // Best-effort poll: the host controller may still be reconnecting.
        }
        return false;
    }
}
package com.github.sd4324530.fastweixin.api.enums; /** * * @author peiyu * @since 1.2 */ public enum ResultType { SYSTEM_BUSY(-1), SUCCESS(0), /** * access_tokenAppSecretaccess_token */ APP_SECRET_ERROR(40001), ILLEGAL_TOKEN_TYPE(40002), /** * OpenID */ ILLEGAL_OPEN_ID(40003), ILLEGAL_MEDIA_TYPE(40004), ILLEGAL_FILE_TYPE(40005), ILLEGAL_FILE_SIZE(40006), ILLEGAL_MEDIA_ID(40007), ILLEGAL_MESSAGE_TYPE(40008), ILLEGAL_PICTURE_SIZE(40009), ILLEGAL_VOICE_SIZE(40010), ILLEGAL_VIDEO_SIZE(40011), ILLEGAL_THUMBNAIL_SIZE(40012), /** * APPID */ ILLEGAL_APP_ID(40013), /** * access_token */ ILLEGAL_ACCESS_TOKEN(40014), ILLEGAL_MENU_TYPE(40015), ILLEGAL_MENU_NUMBER(40016), ILLEGAL_BUTTON_NAME_LENTH(40018), /** * KEY */ ILLEGAL_BUTTON_KEY_LENTH(40019), /** * URL */ ILLEGAL_BUTTON_URL_LENTH(40020), ILLEGAL_MENU_VERSION(40021), ILLEGAL_SUB_MENU_LEVEL(40022), ILLEGAL_SUB_MENU_NUMBER(40023), ILLEGAL_SUB_MENU_TYPE(40024), ILLEGAL_SUB_MENU_LENTH(40025), /** * KEY */ ILLEGAL_SUB_MENU_KEY_LENTH(40026), /** * URL */ ILLEGAL_SUB_MENU_URL_LENTH(40027), ILLEGAL_MENU_USER(40028), /** * oauth_code */ ILLEGAL_OAUTH_CODE(40029), /** * refresh_token */ ILLEGAL_REFRESH_TOKEN(40030), /** * openid */ ILLEGAL_OPENID_LIST(40031), /** * openid */ ILLEGAL_OPENID_LIST_LENTH(40032), ILLEGAL_REQUEST_STRING(40033), ILLEGAL_PARAM(40035), ILLEGAL_REQUEST_TYPE(40038), /** * URL */ ILLEGAL_URL_LENTH(40039), ILLEGAL_GROUP_ID(40050), ILLEGAL_GROUP_NAME(40051), /** * access_token */ NO_ACCESS_TOKEN(41001), /** * appid */ NO_APPID(41002), /** * refresh_token */ NO_REFRESH_TOKEN(41003), /** * secret */ NO_SECRET(41004), NO_MEDIA_DATA(41005), /** * media_id */ NO_MEDIA_ID(41006), NO_SUB_MENU_DATA(41007), /** * oauth code */ NO_OAUTH_CODE(41008), /** * openid */ NO_OPEN_ID(41009), /** * access_token */ ACCESS_TOKEN_TIMEOUT(42001), /** * refresh_token */ REFRESH_TOKEN_TIMEOUT(42002), /** * oauth_code */ OAUTH_CODE_TIMEOUT(42003), /** * GET */ NEED_REQUEST_GET(43001), /** * POST */ NEED_REQUEST_POST(43002), /** * HTTPS 
*/ NEED_REQUEST_HTTPS(43003), NEED_USER_FOLLOW(43004), NEED_FRIEND(43005), MEDIA_FILE_IS_NULL(44001), /** * POST */ POST_DATA_IS_NULL(44002), NEWS_MESSAGE_IS_NULL(44003), TEXT_MESSAGE_IS_NULL(44004), MEDIA_DATA_OVER_LIMIT(45001), MESSAGE_CONTENT_OVER_LIMIT(45002), TITLE_OVER_LIMIT(45003), DESCRIPTION_OVER_LIMIT(45004), LINK_OVER_LIMIT(45005), PICTURE_LINK_OVER_LIMIT(45006), VOICE_TIME_OVER_LIMIT(45007), NEWS_MESSAGE_OVER_LIMIT(45008), INTERFACE_OVER_LIMIT(45009), MENU_OVER_LIMIT(45010), REVIEW_TIME_OVER_LIMIT(45015), NO_MODIFY_SYSTEM_GROUP(45016), GROUP_NAME_TOO_LONG(45017), GROUP_COUNT_TOO_MANY(45018), NOT_EXIST_MEDIA_DATA(46001), NOT_EXIST_MENU_VERSION(46002), NOT_EXIST_MENU_DATA(46003), NOT_EXIST_USER(46004), /** * JSON/XML */ JSON_OR_XML_ERROR(47001), /** * api */ API_NOT_ALLOW_CALL(48001), /** * api */ USER_NOT_ALLOW_API(50001); Integer code; private ResultType(Integer code) { this.code = code; } @Override public String toString() { return code.toString(); } }
package com.google.code.maven_replacer_plugin.file; import java.io.File; import java.io.IOException; public class FileUtils { public boolean fileNotExists(String filename) { if (filename == null || filename.trim().length() == 0) { return true; } return !new File(filename).exists(); } public void ensureFolderStructureExists(String file) { File outputFile = new File(file); if (outputFile.getParent() == null) { return; } if (!outputFile.isDirectory()) { File parentPath = new File(outputFile.getParent()); if (!parentPath.exists() && !parentPath.mkdirs()) { throw new Error("Error creating directory."); } } else { throw new IllegalArgumentException("OutputFile cannot be a directory: " + file); } } public String readFile(String file) throws IOException { return org.apache.commons.io.FileUtils.readFileToString(new File(file)); } public void writeToFile(String outputFile, String content) throws IOException { ensureFolderStructureExists(outputFile); org.apache.commons.io.FileUtils.writeStringToFile(new File(outputFile), content); } public String createFullPath(String... dirsAndFilename) { StringBuilder fullPath = new StringBuilder(); for (int i=0; i < dirsAndFilename.length - 1; i++) { fullPath.append(dirsAndFilename[i]); fullPath.append(File.separator); } fullPath.append(dirsAndFilename[dirsAndFilename.length - 1]); return fullPath.toString(); } }
package com.google.step.snippet.external;

import com.google.step.snippet.data.Card;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.regex.Pattern;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.json.JSONArray;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * {@link Client} implementation that builds snippet {@link Card}s from StackOverflow
 * questions via the StackExchange REST API: it resolves the question id from the URL,
 * fetches the top-voted answer, and extracts a bounded description plus code blocks.
 */
public final class StackOverflowClient implements Client {

  private static final String SEARCH_URL_TEMPLATE =
      "https://api.stackexchange.com/2.2/questions/%s?"
          + "order=desc&sort=activity&site=stackoverflow";
  private static final String QUESTION_URL_TEMPLATE =
      "https://api.stackexchange.com/2.2/questions/%s/answers?"
          + "order=desc&sort=votes&site=stackoverflow";
  // This URL specifies a custom StackExchange API filter that generates answer body.
  private static final String ANSWER_URL_TEMPLATE =
      "https://api.stackexchange.com/2.2/answers/%s?order"
          + "=desc&sort=activity&site=stackoverflow&filter=!9_bDE(fI5";
  // The URL is in the pattern of stackoverlow.com/questions/question_id/title.
  // The ID_INDEX help retrieve the question_id from parsed URL.
  private static final int ID_INDEX = 2;
  private static final String ITEM_PARAMETER = "items";
  private static final String TITLE_PARAMETER = "title";
  private static final String BODY_PARAMETER = "body";
  private static final String CODE_PARAMETER = "code";
  private static final String ANSWER_ID_PARAMETER = "answer_id";
  // Set 200 to be the maximum length of description for MVP.
  private static final int MAX_DESCRIPTION_LENGTH = 200;

  private final String cseId;

  public StackOverflowClient(String cseId) {
    this.cseId = cseId;
  }

  @Override
  public String getCseId() {
    return cseId;
  }

  /**
   * Creates and returns a {@code Card} for the given StackOverflow URL.
   *
   * @param url the URL of the StackOverflow question to create the card for
   * @return the created card, or {@code null} if a card could not be created
   */
  @Override
  public Card search(String url) {
    String questionId = getQuestionId(url);
    if (questionId == null) {
      return null;
    }
    String answerId = getAnswerId(questionId);
    if (answerId == null) {
      return null;
    }
    String title = getTitle(questionId);
    String answerBody = getAnswerBody(answerId);
    if (title == null || answerBody == null) {
      return null;
    }
    // No description or code is allowed for StackOverflow.
    String description = getDescription(answerBody);
    String code = getCode(answerBody);
    return new Card(title, code, url, description);
  }

  /**
   * Extracts the numeric question id from a StackOverflow question URL, or returns
   * {@code null} if the URL is malformed, too short, or the segment is not numeric.
   */
  private String getQuestionId(String url) {
    URI uri;
    try {
      uri = new URI(url);
    } catch (URISyntaxException e) {
      return null;
    }
    String[] segments = uri.getPath().split("/");
    // Guard against short paths (e.g. "https://stackoverflow.com/") which would
    // previously throw ArrayIndexOutOfBoundsException.
    if (segments.length <= ID_INDEX) {
      return null;
    }
    String questionId = segments[ID_INDEX];
    if (!Pattern.matches("[0-9]+", questionId)) {
      return null;
    }
    return questionId;
  }

  /** Returns the id of the most-voted answer for the question, or {@code null}. */
  private String getAnswerId(String questionId) {
    String questionUrl = String.format(QUESTION_URL_TEMPLATE, questionId);
    return getResponse(questionUrl, ANSWER_ID_PARAMETER);
  }

  /** Returns the question title for the given question id, or {@code null}. */
  private String getTitle(String questionId) {
    String searchUrl = String.format(SEARCH_URL_TEMPLATE, questionId);
    return getResponse(searchUrl, TITLE_PARAMETER);
  }

  /** Returns the HTML body of the answer with the given id, or {@code null}. */
  private String getAnswerBody(String answerId) {
    String answerUrl = String.format(ANSWER_URL_TEMPLATE, answerId);
    return getResponse(answerUrl, BODY_PARAMETER);
  }

  /**
   * Returns the concatenated paragraph HTML from the answer body, truncated to
   * {@link #MAX_DESCRIPTION_LENGTH} characters.
   */
  private String getDescription(String body) {
    Document doc = Jsoup.parse(body);
    // Combine all description in the answer body.
    Elements descriptionHtml = doc.select("p");
    StringBuilder description = new StringBuilder();
    for (Element e : descriptionHtml) {
      description.append(e.outerHtml());
      if (description.length() >= MAX_DESCRIPTION_LENGTH) {
        description.setLength(MAX_DESCRIPTION_LENGTH);
        break;
      }
    }
    return description.toString();
  }

  /** Returns the concatenated {@code <code>} element HTML from the answer body. */
  private String getCode(String body) {
    Document doc = Jsoup.parse(body);
    // Combine all code in the answer body.
    Elements codeHtml = doc.select(CODE_PARAMETER);
    StringBuilder code = new StringBuilder();
    for (Element e : codeHtml) {
      code.append(e.outerHtml());
    }
    return code.toString();
  }

  /**
   * Performs a GET against the StackExchange API and returns {@code fieldParam}
   * from the first item of the response, or {@code null} on any I/O failure,
   * non-200 status, or empty item list.
   *
   * <p>All HTTP resources are released via try-with-resources; previously the
   * client, response and reader were leaked on every call.
   */
  private String getResponse(String url, String fieldParam) {
    try (CloseableHttpClient httpClient = HttpClients.createDefault();
        CloseableHttpResponse response = httpClient.execute(new HttpGet(url))) {
      if (response.getStatusLine().getStatusCode() != 200) {
        return null;
      }
      HttpEntity entity = response.getEntity();
      if (entity == null) {
        return null;
      }
      StringBuilder responseBody = new StringBuilder();
      try (BufferedReader reader =
          new BufferedReader(new InputStreamReader(entity.getContent()))) {
        String line;
        while ((line = reader.readLine()) != null) {
          responseBody.append(line);
        }
      }
      JSONObject json = new JSONObject(responseBody.toString());
      JSONArray items = json.getJSONArray(ITEM_PARAMETER);
      if (items == null || items.length() == 0) {
        return null;
      }
      // The redundant second status-code check that followed here was dead code.
      return items.getJSONObject(0).get(fieldParam).toString();
    } catch (IOException e) {
      return null;
    }
  }
}
package com.itwheel.edigate.pricat.processor; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.sql.Connection; import javax.sql.DataSource; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.component.file.GenericFile; import org.milyn.edi.unedifact.d96a.D96AInterchangeFactory; import org.milyn.smooks.edi.unedifact.model.UNEdifactInterchange; import com.itwheel.edigate.utils.SpringLocator; public class ParsePricatProc implements Processor { private DataSource ediDs = null; public void setEdiDs(DataSource ediDs) { this.ediDs = ediDs; } public void process(Exchange exchange) throws Exception { GenericFile<File> fileObj = (GenericFile<File>)exchange.getIn().getBody(); if(fileObj != null) { System.out.println(fileObj.getFileName()); D96AInterchangeFactory d96aFactory = D96AInterchangeFactory.getInstance(); InputStream pricatis = new FileInputStream((File)fileObj.getBody()); UNEdifactInterchange interchange = d96aFactory.fromUNEdifact(pricatis); ParseEdi edi = new ParseEdi(); EdiPricatHeadBean head = edi.parse(interchange); // DataSource ds = (DataSource)SpringLocator.getBean("edi_ds"); Connection conn = this.ediDs.getConnection(); try { conn.setAutoCommit(false); EdiPricatDao dao = new EdiPricatDao(); dao.handler(conn, head); conn.commit(); } catch (Exception e) { conn.rollback(); } finally { conn.close(); } exchange.getOut().setHeaders(exchange.getIn().getHeaders()); exchange.getOut().setBody(interchange); } } }
package com.partagames.imageresizetool;

import org.apache.commons.cli.*;

import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

import static com.partagames.imageresizetool.Constants.*;

/**
 * Command-line tool that reads a comma-separated list of images, scales each to the
 * requested dimensions using the selected interpolation hint, and writes them to an
 * output folder in the chosen format.
 */
public class SimpleImageResizeTool {

    private static Options options;
    private static String[] imageFileStrings;
    private static Dimensions dimensions;
    private static Path outputFolder;
    private static String format = OUTPUT_IMAGE_FORMATS.get(0); // default png
    private static String scalingHint = SUPPORTED_SCALING_HINTS.get(1); // default bilinear

    private static final Map<String, BufferedImage> imageFiles = new HashMap<>();

    public static void main(String[] args) throws Exception {
        options = new Options();

        // required options
        options.addOption(Option.builder(ARG_DIMENSIONS_SHORT).longOpt(ARG_DIMENSIONS).hasArg(true).optionalArg(false)
                .desc("Target image dimensions in pixels (e.g 1280x720)").required(true).build());

        // optional options
        options.addOption(Option.builder(ARG_FORMAT_SHORT).longOpt(ARG_FORMAT).hasArg(true).optionalArg(false)
                .desc("Image output format (png,jpg,gif)").required(false).build());
        options.addOption(Option.builder(ARG_OUTPUT_SHORT).longOpt(ARG_OUTPUT).hasArg(true).optionalArg(false)
                .desc("Image output folder").required(false).build());
        options.addOption(Option.builder(ARG_HINT_SHORT).longOpt(ARG_HINT).hasArg(true).optionalArg(false)
                .desc("Scaling hint (n=nearest, b=bilinear)").required(false).build());
        options.addOption(Option.builder(ARG_HELP_SHORT).longOpt(ARG_HELP).hasArg(false)
                .desc("Shows this help message.").required(false).build());

        if (parseAndPrepareArguments(args, options)) {
            createBufferedImages();
            resizeAndWriteImages();
        }
    }

    /**
     * Parses all command line arguments and prepares them.
     *
     * @param args    Command line arguments.
     * @param options Apache CLI options
     * @return True if arguments were prepared correctly and we can continue execution
     */
    private static boolean parseAndPrepareArguments(String[] args, Options options) {
        final CommandLineParser parser = new DefaultParser();
        final CommandLine cmd;
        try {
            cmd = parser.parse(options, args);
        } catch (MissingOptionException | MissingArgumentException e) {
            System.out.println(e.getMessage() + "\n");
            printHelpAndUsage();
            return false;
        } catch (ParseException e2) {
            System.out.println("Error: There was a problem parsing the command line arguments, please check your command.\n");
            printHelpAndUsage();
            throw new RuntimeException(e2);
        }

        // show help
        if (cmd.hasOption(ARG_HELP)) {
            printHelpAndUsage();
            return false;
        }

        if (cmd.getArgList().isEmpty()) {
            System.out.println("Error: Missing argument: comma-separated list of images!\n");
            printHelpAndUsage();
            return false;
        } else {
            imageFileStrings = cmd.getArgList().get(0).split(",");
        }

        // prepare mandatory arguments
        if (cmd.hasOption(ARG_DIMENSIONS)) {
            final String[] dimensionStrings = cmd.getOptionValue(ARG_DIMENSIONS).split("x");
            try {
                dimensions = new Dimensions(Integer.parseInt(dimensionStrings[0]), Integer.parseInt(dimensionStrings[1]));
            } catch (Exception e) {
                System.out.println("Error: Dimension argument was not correct!\n");
                printHelpAndUsage();
                return false;
            }
        }

        // prepare optional arguments
        if (cmd.hasOption(ARG_OUTPUT)) {
            outputFolder = Paths.get(cmd.getOptionValue(ARG_OUTPUT));
        }
        if (cmd.hasOption(ARG_FORMAT)) {
            // was a hard-coded "format" literal; use the shared constant like the other options
            final String outputFormatString = cmd.getOptionValue(ARG_FORMAT).toLowerCase();
            if (Constants.OUTPUT_IMAGE_FORMATS.contains(outputFormatString)) {
                format = outputFormatString;
            } else {
                System.out.println("Error: Wrong output image format!\n");
                printHelpAndUsage();
                return false;
            }
        }
        if (cmd.hasOption(ARG_HINT)) {
            final String scalingHintString = cmd.getOptionValue(ARG_HINT);
            if (SUPPORTED_SCALING_HINTS.contains(scalingHintString)) {
                scalingHint = scalingHintString;
            } else {
                System.out.println("Error: Wrong scaling hint!\n");
                printHelpAndUsage();
                return false;
            }
        }
        return true;
    }

    /**
     * Prints help and usage.
     */
    private static void printHelpAndUsage() {
        // automatically generate the help statement
        final HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("resizer [options ...] [/folder/image1,/folder/image2 ...]", options);
    }

    /**
     * Reads the images to memory, skipping files that are missing or unsupported.
     */
    private static void createBufferedImages() {
        for (final String imageFileString : imageFileStrings) {
            try {
                final BufferedImage image = ImageIO.read(new File(imageFileString));
                // ImageIO.read returns null (without throwing) when no registered
                // reader supports the file; storing null caused an NPE in scale().
                if (image == null) {
                    System.out.println("Warning: File " + imageFileString + " missing, corrupted or not supported, ignoring...");
                    continue;
                }
                imageFiles.put(imageFileString, image);
            } catch (IOException e) {
                System.out.println("Warning: File " + imageFileString + " missing, corrupted or not supported, ignoring...");
            }
        }
    }

    /**
     * Resizes and writes the images to the given or default output folder.
     */
    private static void resizeAndWriteImages() {
        // default output folder unless one was given as a cli option
        final File outputFolderFile = (outputFolder != null) ? outputFolder.toFile() : new File("output/");
        if (!outputFolderFile.exists() && !outputFolderFile.mkdirs()) {
            // previously the mkdirs() result was silently ignored
            System.out.println("Error: Cannot create output folder " + outputFolderFile.getPath());
            return;
        }

        // resize and write images
        for (final Map.Entry<String, BufferedImage> entry : imageFiles.entrySet()) {
            final String fileName = extractFileNameFromPath(entry.getKey());
            final BufferedImage scaledImage = scale(entry.getValue(), dimensions.width, dimensions.height);
            try {
                ImageIO.write(scaledImage, format, new File(outputFolderFile.getPath() + "/" + dimensions.width + "_x_" + dimensions.height + "_" + fileName + "." + format));
            } catch (IOException e) {
                System.out.println("Error: Cannot write " + entry.getKey() + " to output folder. Ignoring...");
            }
        }
    }

    /**
     * Extracts file name from full file path.
     *
     * @param filePath File path
     * @return File name
     */
    private static String extractFileNameFromPath(String filePath) {
        final Path p = Paths.get(filePath);
        return p.getFileName().toString();
    }

    /**
     * Scales an image to the desired dimensions.
     *
     * @param img  Original image
     * @param newW Target width
     * @param newH Target height
     * @return Scaled image
     */
    public static BufferedImage scale(BufferedImage img, int newW, int newH) {
        final int w = img.getWidth();
        final int h = img.getHeight();

        // BufferedImage(int,int,int) rejects TYPE_CUSTOM (0), which getType() returns
        // for some decoded images; fall back to ARGB for such sources.
        final int type = img.getType() == BufferedImage.TYPE_CUSTOM
                ? BufferedImage.TYPE_INT_ARGB : img.getType();
        final BufferedImage dimg = new BufferedImage(newW, newH, type);
        final Graphics2D g = dimg.createGraphics();

        // use provided rendering hint, default is bilinear
        switch (scalingHint) {
            case "n":
                g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR);
                break;
            case "b":
                g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
                break;
        }
        g.drawImage(img, 0, 0, newW, newH, 0, 0, w, h, null);
        g.dispose();
        return dimg;
    }
}
package com.puyixiaowo.fblog.controller.afu; import com.puyixiaowo.fblog.bean.admin.afu.AfuBean; import com.puyixiaowo.fblog.bean.admin.afu.AfuTypeBean; import com.puyixiaowo.fblog.bean.sys.PageBean; import com.puyixiaowo.fblog.bean.sys.ResponseBean; import com.puyixiaowo.fblog.controller.BaseController; import com.puyixiaowo.fblog.exception.DBObjectExistsException; import com.puyixiaowo.fblog.service.AfuService; import com.puyixiaowo.fblog.service.AfuTypeService; import com.puyixiaowo.fblog.utils.DBUtils; import com.puyixiaowo.fblog.utils.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import spark.Request; import spark.Response; /** * @author Moses * @date 2017-08-26 */ public class AfuApiController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(AfuApiController.class); public static String apiAfus(Request request, Response response) { PageBean pageBean = getPageBean(request); if (!verifySign(request)) { pageBean.errorMessage(""); return pageBean.serialize(); } AfuBean afuBean = null; try { afuBean = getParamsEntity(request, AfuBean.class, false); pageBean = AfuService.selectAfuPageBean(afuBean, pageBean); } catch (Exception e) { logger.error(e.getMessage()); } return pageBean.serialize(); } public static String apiAfusEdit(Request request, Response response) { ResponseBean responseBean = new ResponseBean(); if (!verifySign(request)) { responseBean.errorMessage(""); return responseBean.serialize(); } AfuBean afuBean = null; try { afuBean = getParamsEntity(request, AfuBean.class, true); AfuBean afuBeanDB = DBUtils.selectOne("select * from afu where name=:name", afuBean); if (afuBeanDB != null) { afuBeanDB.setContent(afuBean.getContent()); afuBean = afuBeanDB; } AfuTypeBean afuTypeBean = AfuTypeService.getAfuTypeById(afuBean.getType()); if (afuTypeBean == null) { responseBean.errorMessage(""); } if (afuTypeBean.getStatus() == 0) { responseBean.errorMessage(""); } else { 
afuBean.setType(afuTypeBean.getId()); afuBean.setCreateTime(System.currentTimeMillis()); DBUtils.insertOrUpdate(afuBean); } } catch (DBObjectExistsException e) { responseBean.errorMessage(""); } catch (Exception e) { responseBean.errorMessage(e.getMessage()); } return responseBean.serialize(); } public static String apiAfusDelete(Request request, Response response) { ResponseBean responseBean = new ResponseBean(); if (!verifySign(request)) { responseBean.errorMessage(""); return responseBean.serialize(); } String id = request.queryParams("id"); String name = request.queryParams("name"); String typeStr = request.queryParams("type"); Long type = Long.valueOf(StringUtils.isBlank(typeStr) ? "0" : typeStr); try { if (StringUtils.isNotBlank(id)) { DBUtils.deleteByIds(AfuBean.class, id); } else if (StringUtils.isNotBlank(name) && type > 0){ AfuBean afuBean = new AfuBean(); afuBean.setName(name); afuBean.setType(type); DBUtils.executeSql("delete from afu where name = :name and type=:type", afuBean); } } catch (Exception e) { responseBean.errorMessage(e.getMessage()); } return responseBean.serialize(); } }
package com.redhat.ceylon.compiler.loader;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.DeclarationKind;
import com.redhat.ceylon.compiler.typechecker.model.FunctionalParameter;
import com.redhat.ceylon.compiler.typechecker.model.Getter;
import com.redhat.ceylon.compiler.typechecker.model.Interface;
import com.redhat.ceylon.compiler.typechecker.model.IntersectionType;
import com.redhat.ceylon.compiler.typechecker.model.Method;
import com.redhat.ceylon.compiler.typechecker.model.MethodOrValue;
import com.redhat.ceylon.compiler.typechecker.model.Module;
import com.redhat.ceylon.compiler.typechecker.model.ModuleImport;
import com.redhat.ceylon.compiler.typechecker.model.Parameter;
import com.redhat.ceylon.compiler.typechecker.model.ParameterList;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.model.Scope;
import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.compiler.typechecker.model.TypeParameter;
import com.redhat.ceylon.compiler.typechecker.model.UnionType;
import com.redhat.ceylon.compiler.typechecker.model.Value;
import com.redhat.ceylon.compiler.typechecker.model.ValueParameter;

/** Generates the metamodel for all objects in a module.
 * The model is an in-memory tree of maps: the root map describes the module
 * (name, version, dependencies); below it, one map per package, and inside each
 * package nested maps for classes, interfaces, objects, methods and attributes,
 * keyed by the short KEY_* / METATYPE_* constants defined here.
 *
 * @author Enrique Zamudio
 */
public class MetamodelGenerator {

    // Map keys for the different member categories inside a container.
    public static final String KEY_CLASSES      = "$c";
    public static final String KEY_INTERFACES   = "$i";
    public static final String KEY_OBJECTS      = "$o";
    public static final String KEY_METHODS      = "$m";
    public static final String KEY_ATTRIBUTES   = "$at";
    public static final String KEY_ANNOTATIONS  = "$an";
    // Map keys for the properties of a single declaration.
    public static final String KEY_TYPE         = "$t";
    public static final String KEY_TYPES        = "$ts";
    public static final String KEY_TYPE_PARAMS  = "$tp";
    public static final String KEY_METATYPE     = "$mt";
    public static final String KEY_MODULE       = "$md";
    public static final String KEY_NAME         = "$nm";
    public static final String KEY_PACKAGE      = "$pk";
    public static final String KEY_PARAMS       = "$ps";
    public static final String KEY_SELF_TYPE    = "$st";
    // Annotation flags (value is always "1" when present).
    public static final String ANN_DEFAULT      = "$def";
    public static final String ANN_SHARED       = "$shr";
    public static final String ANN_ACTUAL       = "$act";
    public static final String ANN_FORMAL       = "$fml";
    // Values stored under KEY_METATYPE to tag what kind of declaration a map is.
    public static final String METATYPE_CLASS          = "cls";
    public static final String METATYPE_INTERFACE      = "ifc";
    public static final String METATYPE_OBJECT         = "obj";
    public static final String METATYPE_METHOD         = "mthd";
    public static final String METATYPE_ATTRIBUTE      = "attr";
    public static final String METATYPE_GETTER         = "gttr";
    public static final String METATYPE_TYPE_PARAMETER = "tpm";
    public static final String METATYPE_PARAMETER      = "prm";
    //public static final String METATYPE_TYPE_CONSTRAINT = "tc";

    // Root of the metamodel tree; package maps are added lazily by findParent().
    private final Map<String, Object> model = new HashMap<String, Object>();
    // The module being described; used to decide when a type needs a KEY_MODULE entry.
    private final Module module;

    /** Seeds the root map with the module's name, version and (if any) its
     * dependency list, each dependency encoded as "name/version". */
    public MetamodelGenerator(Module module) {
        this.module = module;
        model.put("$mod-name", module.getNameAsString());
        model.put("$mod-version", module.getVersion());
        if (!module.getImports().isEmpty()) {
            ArrayList<String> imps = new ArrayList<String>(module.getImports().size());
            for (ModuleImport mi : module.getImports()) {
                imps.add(String.format("%s/%s", mi.getModule().getNameAsString(),
                        mi.getModule().getVersion()));
            }
            model.put("$mod-deps", imps);
        }
    }

    /** Returns the in-memory model as a collection of maps.
     * The top-level map represents the module. */
    public Map<String, Object> getModel() {
        return Collections.unmodifiableMap(model);
    }

    /** Locates (creating the package map if needed) the map into which the given
     * declaration should be stored: the package map for toplevel declarations, or
     * the map of the innermost enclosing type for members. Walks the container
     * chain outside-in, looking the name up in each member-category submap.
     * NOTE(review): for nested members, a missing intermediate container yields
     * null (toplevel lookup) — callers such as encodeMethod() null-check this,
     * but encodeClass()/encodeInterface()/encodeObject() do not; confirm ordering. */
    @SuppressWarnings("unchecked")
    private Map<String, Object> findParent(Declaration d) {
        Map<String,Object> pkgmap = (Map<String,Object>)model.get(d.getUnit().getPackage().getNameAsString());
        if (pkgmap == null) {
            // First declaration seen for this package: create its map lazily.
            pkgmap = new HashMap<String, Object>();
            if (d.getUnit().getPackage().isShared()) {
                pkgmap.put("$pkg-shared", "1");
            }
            model.put(d.getUnit().getPackage().getNameAsString(), pkgmap);
        }
        if (d.isToplevel()) {
            return pkgmap;
        }
        // Collect the names of the enclosing type declarations, outermost first.
        ArrayList<String> names = new ArrayList<String>();
        Scope sc = d.getContainer();
        while (sc.getContainer() != null) {
            if (sc instanceof TypeDeclaration) {
                names.add(0, ((TypeDeclaration) sc).getName());
            }
            sc = sc.getContainer();
        }
        // Descend from the package map through each member-category submap.
        Map<String, Object> last = pkgmap;
        for (String name : names) {
            if (last == pkgmap) {
                // Toplevel containers live directly in the package map.
                last = (Map<String, Object>)last.get(name);
            } else if (last.containsKey(KEY_METHODS) && ((Map<String,Object>)last.get(KEY_METHODS)).containsKey(name)) {
                last = (Map<String,Object>)((Map<String,Object>)last.get(KEY_METHODS)).get(name);
            } else if (last.containsKey(KEY_ATTRIBUTES) && ((Map<String,Object>)last.get(KEY_ATTRIBUTES)).containsKey(name)) {
                last = (Map<String,Object>)((Map<String,Object>)last.get(KEY_ATTRIBUTES)).get(name);
            } else if (last.containsKey(KEY_CLASSES) && ((Map<String,Object>)last.get(KEY_CLASSES)).containsKey(name)) {
                last = (Map<String,Object>)((Map<String,Object>)last.get(KEY_CLASSES)).get(name);
            } else if (last.containsKey(KEY_INTERFACES) && ((Map<String,Object>)last.get(KEY_INTERFACES)).containsKey(name)) {
                last = (Map<String,Object>)((Map<String,Object>)last.get(KEY_INTERFACES)).get(name);
            } else if (last.containsKey(KEY_OBJECTS) && ((Map<String,Object>)last.get(KEY_OBJECTS)).containsKey(name)) {
                last = (Map<String,Object>)((Map<String,Object>)last.get(KEY_OBJECTS)).get(name);
            }
        }
        return last;
    }

    /** Create a map for the specified ProducedType.
     * Includes name, package, module and type parameters, unless it's a union or intersection
     * type, in which case it contains a "comp" key with an "i" or "u" and a key "types" with
     * the list of types that compose it. */
    private Map<String, Object> typeMap(ProducedType pt) {
        TypeDeclaration d = pt.getDeclaration();
        Map<String, Object> m = new HashMap<String, Object>();
        if (d instanceof UnionType || d instanceof IntersectionType) {
            // Unions expose their members as case types, intersections as satisfied types.
            List<ProducedType> subtipos = d instanceof UnionType ? d.getCaseTypes() : d.getSatisfiedTypes();
            List<Map<String,Object>> subs = new ArrayList<Map<String,Object>>(subtipos.size());
            for (ProducedType sub : subtipos) {
                subs.add(typeMap(sub));
            }
            m.put("comp", d instanceof UnionType ? "u" : "i");
            m.put(KEY_TYPES, subs);
            return m;
        }
        m.put(KEY_NAME, d.getName());
        if (d.getDeclarationKind()==DeclarationKind.TYPE_PARAMETER) {
            //For types that reference type parameters, we're done
            return m;
        }
        com.redhat.ceylon.compiler.typechecker.model.Package pkg = d.getUnit().getPackage();
        m.put(KEY_PACKAGE, pkg.getNameAsString());
        if (!pkg.getModule().equals(module)) {
            // Only record the module when the type comes from a different module.
            m.put(KEY_MODULE, pkg.getModule().getNameAsString());
        }
        putTypeParameters(m, pt);
        return m;
    }

    /** Returns a map with the same info as {@link #typeParameterMap(ProducedType)} but with
     * an additional key "variance" if it's covariant ("out") or contravariant ("in"). */
    private Map<String, Object> typeParameterMap(TypeParameter tp) {
        Map<String, Object> map = new HashMap<String, Object>();
        map.put(MetamodelGenerator.KEY_NAME, tp.getName());
        if (tp.isCovariant()) {
            map.put("variance", "out");
        } else if (tp.isContravariant()) {
            map.put("variance", "in");
        }
        if (tp.getSelfType() != null) {
            map.put(KEY_SELF_TYPE, typeMap(tp.getSelfType()));
        }
        // A type parameter has either upper bounds ("satisfies") or enumerated
        // bounds ("of"), never both here.
        if (tp.getSatisfiedTypes() != null && !tp.getSatisfiedTypes().isEmpty()) {
            encodeTypes(tp.getSatisfiedTypes(), map, "satisfies");
        } else if (tp.getCaseTypes() != null && !tp.getCaseTypes().isEmpty()) {
            encodeTypes(tp.getCaseTypes(), map, "of");
        }
        if (tp.isSequenced()) {
            map.put("seq", "1");
        }
        return map;
    }

    /** Create a map for the ProducedType, as a type parameter.
     * Includes name, package, module and type parameters, unless it's a union or intersection
     * type, in which case it will contain a "comp" key with an "i" or "u", and a list of the types
     * that compose it. */
    private Map<String, Object> typeParameterMap(ProducedType pt) {
        Map<String, Object> m = new HashMap<String, Object>();
        TypeDeclaration d = pt.getDeclaration();
        m.put(KEY_METATYPE, METATYPE_TYPE_PARAMETER);
        if (d instanceof UnionType || d instanceof IntersectionType) {
            List<ProducedType> subtipos = d instanceof UnionType ? d.getCaseTypes() : d.getSatisfiedTypes();
            List<Map<String,Object>> subs = new ArrayList<Map<String,Object>>(subtipos.size());
            for (ProducedType sub : subtipos) {
                subs.add(typeMap(sub));
            }
            m.put("comp", d instanceof UnionType ? "u" : "i");
            m.put(KEY_TYPES, subs);
            return m;
        }
        m.put(KEY_NAME, d.getName());
        if (d.getDeclarationKind()==DeclarationKind.TYPE_PARAMETER) {
            //Don't add package, etc
            return m;
        }
        com.redhat.ceylon.compiler.typechecker.model.Package pkg = d.getUnit().getPackage();
        m.put(KEY_PACKAGE, pkg.getNameAsString());
        if (!pkg.getModule().equals(module)) {
            m.put(KEY_MODULE, d.getUnit().getPackage().getModule().getNameAsString());
        }
        putTypeParameters(m, pt);
        return m;
    }

    /** Adds the type arguments of {@code pt} (if any) to the container map
     * under KEY_TYPE_PARAMS, each encoded via typeParameterMap(ProducedType). */
    private void putTypeParameters(Map<String, Object> container, ProducedType pt) {
        if (pt.getTypeArgumentList() != null && !pt.getTypeArgumentList().isEmpty()) {
            List<Map<String, Object>> list = new ArrayList<Map<String, Object>>(pt.getTypeArgumentList().size());
            for (ProducedType tparm : pt.getTypeArgumentList()) {
                list.add(typeParameterMap(tparm));
            }
            container.put(KEY_TYPE_PARAMS, list);
        }
    }

    /** Create a list of maps from the list of type parameters.
     * Returns null for a null or empty input list.
     * @see #typeParameterMap(TypeParameter) */
    private List<Map<String, Object>> typeParameters(List<TypeParameter> tpl) {
        if (tpl != null && !tpl.isEmpty()) {
            List<Map<String, Object>> l = new ArrayList<Map<String,Object>>(tpl.size());
            for (TypeParameter tp : tpl) {
                l.add(typeParameterMap(tp));
            }
            return l;
        }
        return null;
    }

    /** Create a list of maps for the parameter list. Each map will be a parameter, including
     * name, type, default value (if any), and whether it's sequenced.
     * Returns null for an empty parameter list. */
    private List<Map<String,Object>> parameterListMap(ParameterList plist) {
        List<Parameter> parms = plist.getParameters();
        if (parms.size() > 0) {
            List<Map<String,Object>> p = new ArrayList<Map<String,Object>>(parms.size());
            for (Parameter parm : parms) {
                Map<String, Object> pm = new HashMap<String, Object>();
                pm.put(KEY_NAME, parm.getName());
                pm.put(KEY_METATYPE, METATYPE_PARAMETER);
                if (parm.isSequenced()) {
                    pm.put("seq", "1");
                }
                if (parm.isDefaulted()) {
                    pm.put(ANN_DEFAULT, "1");
                }
                if (parm.getTypeDeclaration().getDeclarationKind()==DeclarationKind.TYPE_PARAMETER) {
                    // Type-parameter-typed parameters are stored by bare name only.
                    pm.put(KEY_TYPE, parm.getTypeDeclaration().getName());
                } else {
                    pm.put(KEY_TYPE, typeMap(parm.getType()));
                }
                if (parm instanceof ValueParameter) {
                    // "$pt" distinguishes value ("v") from functional ("f") parameters.
                    pm.put("$pt", "v");
                    if (((ValueParameter) parm).isHidden()) {
                        pm.put("$hdn", "1");
                    }
                } else if (parm instanceof FunctionalParameter) {
                    pm.put("$pt", "f");
                    // A functional parameter carries its own (possibly multiple) parameter lists.
                    List<List<Map<String, Object>>> _paramLists = new ArrayList<List<Map<String,Object>>>(
                            ((FunctionalParameter)parm).getParameterLists().size());
                    for (ParameterList subplist : ((FunctionalParameter)parm).getParameterLists()) {
                        List<Map<String,Object>> params = parameterListMap(subplist);
                        if (params == null) {
                            params = Collections.emptyList();
                        }
                        _paramLists.add(params);
                    }
                    if (_paramLists.size() > 1 || !_paramLists.get(0).isEmpty()) {
                        pm.put(KEY_PARAMS, _paramLists);
                    }
                } else {
                    throw new IllegalStateException("Unknown parameter type " + parm.getClass().getName());
                }
                //TODO do these guys need anything else?
                /*if (parm.getDefaultArgument() != null) {
                    //This could be compiled to JS...
                    pm.put(ANN_DEFAULT, parm.getDefaultArgument().getSpecifierExpression().getExpression().getTerm().getText());
                }*/
                p.add(pm);
            }
            return p;
        }
        return null;
    }

    /** Create and store the metamodel info for a method. Only toplevel methods and
     * members are encoded; local methods are skipped entirely. */
    @SuppressWarnings("unchecked")
    public void encodeMethod(Method d) {
        Map<String, Object> parent;
        if (d.isToplevel() || d.isMember()) {
            parent = findParent(d);
            if (parent == null) {
                //System.out.println("orphaned method - How the hell did this happen? " + that.getLocation() + " @ " + that.getUnit().getFilename());
                return;
            }
            if (!d.isToplevel()) {
                // Members go inside the container's KEY_METHODS submap.
                if (!parent.containsKey(KEY_METHODS)) {
                    parent.put(KEY_METHODS, new HashMap<String,Object>());
                }
                parent = (Map<String, Object>)parent.get(KEY_METHODS);
            }
        } else {
            return;
        }
        Map<String, Object> m = new HashMap<String, Object>();
        m.put(KEY_METATYPE, METATYPE_METHOD);
        m.put(KEY_NAME, d.getName());
        List<Map<String, Object>> tpl = typeParameters(d.getTypeParameters());
        if (tpl != null) {
            m.put(KEY_TYPE_PARAMS, tpl);
        }
        m.put(KEY_TYPE, typeMap(d.getType()));
        // Encode every parameter list (methods can be curried).
        List<List<Map<String, Object>>> paramLists = new ArrayList<List<Map<String,Object>>>(d.getParameterLists().size());
        for (ParameterList plist : d.getParameterLists()) {
            List<Map<String,Object>> params = parameterListMap(plist);
            if (params == null) {
                params = Collections.emptyList();
            }
            paramLists.add(params);
        }
        if (paramLists.size() > 1 || !paramLists.get(0).isEmpty()) {
            m.put(KEY_PARAMS, paramLists);
        }
        //Certain annotations
        encodeSharedActualFormalDefault(d, m);
        parent.put(d.getName(), m);
        //We really don't need to go inside a method's body
        //super.visit(that);
    }

    /** Create and store the metamodel info for an attribute.
     * Variable attributes are additionally flagged with "var". */
    public void encodeAttribute(Value d) {
        Map<String, Object> m = encodeAttributeOrGetter(d);
        if (m != null && d.isVariable()) {
            m.put("var", "1");
        }
    }

    /** Create and store the metamodel info for a class: type parameters, self type,
     * supertype, satisfied/case types, initializer parameters and annotation flags.
     * NOTE(review): unlike encodeMethod(), findParent() is called before the
     * toplevel/member check and its result is not null-checked — confirm intent. */
    @SuppressWarnings("unchecked")
    public void encodeClass(com.redhat.ceylon.compiler.typechecker.model.Class d) {
        Map<String, Object> parent = findParent(d);
        if (d.isToplevel() || d.isMember()) {
            if (!d.isToplevel()) {
                if (!parent.containsKey(KEY_CLASSES)) {
                    parent.put(KEY_CLASSES, new HashMap<String,Object>());
                }
                parent = (Map<String,Object>)parent.get(KEY_CLASSES);
            }
        } else {
            return;
        }
        Map<String, Object> m = new HashMap<String, Object>();
        m.put(KEY_METATYPE, METATYPE_CLASS);
        m.put(KEY_NAME, d.getName());
        //Type parameters
        List<Map<String, Object>> tpl = typeParameters(d.getTypeParameters());
        if (tpl != null) {
            m.put(KEY_TYPE_PARAMS, tpl);
        }
        //self type
        if (d.getSelfType() != null) {
            m.put(KEY_SELF_TYPE, typeMap(d.getSelfType()));
        }
        //Extends
        if (d.getExtendedType() != null) {
            m.put("super", typeMap(d.getExtendedType()));
        }
        //Satisfies
        encodeTypes(d.getSatisfiedTypes(), m, "satisfies");
        //Initializer parameters
        List<Map<String,Object>> inits = parameterListMap(d.getParameterList());
        if (inits != null && !inits.isEmpty()) {
            m.put(KEY_PARAMS, inits);
        }
        //Case types
        encodeTypes(d.getCaseTypes(), m, "of");
        //Certain annotations
        encodeSharedActualFormalDefault(d, m);
        if (d.isAbstract()) {
            m.put("abstract", "1");
        }
        if (d.isAnonymous()) {
            m.put("$anon", "1");
        }
        parent.put(d.getName(), m);
    }

    /** Create and store the metamodel info for an interface: type parameters,
     * self type (stored by name only here), satisfied/case types and annotations. */
    @SuppressWarnings("unchecked")
    public void encodeInterface(Interface d) {
        Map<String, Object> parent = findParent(d);
        if (d.isToplevel() || d.isMember()) {
            if (!d.isToplevel()) {
                if (!parent.containsKey(KEY_INTERFACES)) {
                    parent.put(KEY_INTERFACES, new HashMap<String,Object>());
                }
                parent = (Map<String,Object>)parent.get(KEY_INTERFACES);
            }
        } else {
            return;
        }
        Map<String, Object> m = new HashMap<String, Object>();
        m.put(KEY_METATYPE, METATYPE_INTERFACE);
        m.put(KEY_NAME, d.getName());
        //Type parameters
        List<Map<String, Object>> tpl = typeParameters(d.getTypeParameters());
        if (tpl != null) {
            m.put(KEY_TYPE_PARAMS, tpl);
        }
        //self type
        if (d.getSelfType() != null) {
            // NOTE(review): stored as a bare name here, unlike encodeClass() which
            // stores a full type map — confirm this asymmetry is intentional.
            m.put(KEY_SELF_TYPE, d.getSelfType().getDeclaration().getName());
        }
        //satisfies
        encodeTypes(d.getSatisfiedTypes(), m, "satisfies");
        //Case types
        encodeTypes(d.getCaseTypes(), m, "of");
        //Certain annotations
        encodeSharedActualFormalDefault(d, m);
        parent.put(d.getName(), m);
    }

    /** Create and store the metamodel info for an object declaration: its
     * supertype, satisfied types, and the shared flag. */
    @SuppressWarnings("unchecked")
    public void encodeObject(Value d) {
        Map<String, Object> parent = findParent(d);
        if (d.isToplevel() || d.isMember()) {
            if (!d.isToplevel()) {
                if (!parent.containsKey(KEY_OBJECTS)) {
                    parent.put(KEY_OBJECTS, new HashMap<String, Object>());
                }
                parent = (Map<String,Object>)parent.get(KEY_OBJECTS);
            }
        } else {
            return;
        }
        Map<String, Object> m = new HashMap<String, Object>();
        m.put(KEY_METATYPE, METATYPE_OBJECT);
        m.put(KEY_NAME, d.getName());
        //Extends
        m.put("super", typeMap(d.getTypeDeclaration().getExtendedType()));
        //Satisfies
        encodeTypes(d.getTypeDeclaration().getSatisfiedTypes(), m, "satisfies");
        //Certain annotations
        if (d.isShared()) {
            m.put(ANN_SHARED, "1");
        }
        parent.put(d.getName(), m);
    }

    /** Shared encoding for attributes and getters: stores name, metatype
     * (METATYPE_GETTER for Getter instances, otherwise METATYPE_ATTRIBUTE),
     * type and annotation flags. Returns the created map, or null when the
     * declaration is local (inside a control block, method, etc.) and skipped. */
    @SuppressWarnings("unchecked")
    private Map<String, Object> encodeAttributeOrGetter(MethodOrValue d) {
        Map<String, Object> m = new HashMap<String, Object>();
        Map<String, Object> parent;
        if (d.isToplevel() || d.isMember()) {
            parent = findParent(d);
            if (parent == null) {
                return null;
            }
            if (!d.isToplevel()) {
                if (!parent.containsKey(KEY_ATTRIBUTES)) {
                    parent.put(KEY_ATTRIBUTES, new HashMap<String, Object>());
                }
                parent = (Map<String,Object>)parent.get(KEY_ATTRIBUTES);
            }
        } else {
            //Ignore attributes inside control blocks, methods, etc.
            return null;
        }
        m.put(KEY_NAME, d.getName());
        m.put(KEY_METATYPE, d instanceof Getter ? METATYPE_GETTER : METATYPE_ATTRIBUTE);
        m.put(KEY_TYPE, typeMap(d.getType()));
        encodeSharedActualFormalDefault(d, m);
        parent.put(d.getName(), m);
        return m;
    }

    /** Create and store the metamodel info for a getter. */
    public void encodeGetter(Getter d) {
        encodeAttributeOrGetter(d);
    }

    /** Encodes the list of types and puts them under the specified key in the map.
     * Does nothing for a null or empty list. */
    private void encodeTypes(List<ProducedType> types, Map<String,Object> m, String key) {
        if (types == null || types.isEmpty()) return;
        List<Map<String, Object>> sats = new ArrayList<Map<String,Object>>(types.size());
        for (ProducedType st : types) {
            sats.add(typeMap(st));
        }
        m.put(key, sats);
    }

    /** Sets the shared/actual/formal/default annotation flags ("1") on the map
     * for whichever of those annotations the declaration carries. */
    private void encodeSharedActualFormalDefault(Declaration d, Map<String, Object> m) {
        if (d.isShared()) {
            m.put(ANN_SHARED, "1");
        }
        if (d.isActual()) {
            m.put(ANN_ACTUAL, "1");
        }
        if (d.isFormal()) {
            m.put(ANN_FORMAL, "1");
        }
        if (d.isDefault()) {
            m.put(ANN_DEFAULT, "1");
        }
    }
}
package com.tenable.io.api.exports.models;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.tenable.io.api.models.SeverityLevel;
import com.tenable.io.api.models.VulnerabilityState;

import java.util.List;
import java.util.Map;

/**
 * Filter criteria for a vulnerabilities export request. Fields left at their
 * default values are omitted from the serialized JSON (NON_DEFAULT).
 */
@JsonInclude( JsonInclude.Include.NON_DEFAULT )
public class VulnsExportFilters {
    private List<SeverityLevel> severity;
    private List<VulnerabilityState> state;
    private List<String> pluginFamily;
    // Unix timestamps (seconds); 0 means "not set" and is not serialized
    private long since;
    private String cidrRange;
    private long firstFound;
    private long lastFound;
    private long lastFixed;
    private Map<String, String[]> tags;

    /**
     * Sets the list of severities to include in the export.
     * Defaults to all severity levels.
     *
     * @param severity the list of vulnerability severities
     */
    public void setSeverity( List<SeverityLevel> severity ) {
        this.severity = severity;
    }

    /**
     * Gets the list of severities.
     *
     * @return the severities
     */
    public List<SeverityLevel> getSeverity() {
        return this.severity;
    }

    /**
     * Sets the list of vulnerability states to include in the export (OPEN, REOPENED, or FIXED).
     * Defaults to 'OPEN' and 'REOPENED'.
     *
     * @param state the list of vulnerability states
     */
    public void setState( List<VulnerabilityState> state ) {
        this.state = state;
    }

    /**
     * Gets the list of vulnerability states.
     *
     * @return the list of vulnerability states
     */
    public List<VulnerabilityState> getState() {
        return this.state;
    }

    /**
     * Sets the list of plugin families of the exported vulnerabilities.
     * This filter is case sensitive.
     *
     * @param pluginFamily the list of plugin family names
     */
    @JsonProperty( "plugin_family" )
    public void setPluginFamily( List<String> pluginFamily ) {
        this.pluginFamily = pluginFamily;
    }

    /**
     * Gets the list of plugin family names.
     *
     * @return the list of plugin family names
     */
    @JsonProperty( "plugin_family" )
    public List<String> getPluginFamily() {
        return this.pluginFamily;
    }

    /**
     * Sets the start date (in Unix time) for the range of new or updated vulnerability data to export.
     * Defaults to all regardless of date.
     *
     * @param since the start date in Unix time
     */
    public void setSince( long since ) {
        this.since = since;
    }

    /**
     * Gets the start date (in Unix time) for new or updated vulnerability data.
     *
     * @return the start date in Unix time
     */
    public long getSince() {
        return this.since;
    }

    /**
     * Sets the start date (in Unix time) for the range of vulnerability data you want to export,
     * based on when a scan first found a vulnerability on an asset.
     *
     * @param firstFound the start date in Unix time
     */
    public void setFirstFound( long firstFound ) {
        this.firstFound = firstFound;
    }

    /**
     * Gets the first-found start date.
     *
     * @return the start date in Unix time
     */
    public long getFirstFound() {
        return this.firstFound;
    }

    /**
     * Sets the start date (in Unix time) for the range of vulnerability data you want to export,
     * based on when a scan last found a vulnerability on an asset.
     *
     * @param lastFound the start date in Unix time
     */
    public void setLastFound( long lastFound ) {
        this.lastFound = lastFound;
    }

    /**
     * Gets the last-found start date.
     *
     * @return the start date in Unix time
     */
    public long getLastFound() {
        return this.lastFound;
    }

    /**
     * Sets the start date (in Unix time) for the range of vulnerability data you want to export,
     * based on when the vulnerability state was changed to fixed.
     *
     * @param lastFixed the start date in Unix time
     */
    public void setLastFixed( long lastFixed ) {
        this.lastFixed = lastFixed;
    }

    /**
     * Gets the last-fixed start date.
     *
     * @return the start date in Unix time
     */
    public long getLastFixed() {
        return this.lastFixed;
    }

    /**
     * Sets the cidr_range filter for the assets to include in the export.
     *
     * @param cidrRange the cidr range
     */
    public void setCidrRange( String cidrRange ) {
        this.cidrRange = cidrRange;
    }

    /**
     * Gets the cidr_range filter.
     *
     * @return the cidr range
     */
    public String getCidrRange() {
        return this.cidrRange;
    }

    /**
     * Setting this filter will return vulnerabilities for all assets that have been assigned the specified tags.
     *
     * @param tags the asset tags
     */
    public void setTags( Map<String, String[]> tags ) {
        this.tags = tags;
    }

    /**
     * Gets the asset tags filter.
     *
     * @return the asset tags
     */
    public Map<String, String[]> getTags() {
        return tags;
    }
}
package com.virjar.sipsoup.parse.expression.node; import com.virjar.sipsoup.parse.expression.OperatorEnv; public abstract class WrapperUnit extends AlgorithmUnit { private AlgorithmUnit delegate = null; protected abstract String targetName(); protected AlgorithmUnit wrap() { if (delegate == null) { delegate = OperatorEnv.createByName(targetName()); delegate.setLeft(left); delegate.setRight(right); } return delegate; } }
package de.cinovo.cloudconductor.api.interfaces;

import javax.annotation.security.RolesAllowed;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;

import de.cinovo.cloudconductor.api.MediaType;
import de.cinovo.cloudconductor.api.model.ConfigValue;

/**
 * REST interface for reading and editing template/service configuration values.
 */
@Path("/config")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public interface IConfigValue {

	/**
	 * @return a collection of known templates
	 */
	@GET
	@RolesAllowed({"VIEW_CONFIGVALUES", "EDIT_CONFIGVALUES"})
	String[] getAvailableTemplates();

	/**
	 * Returns configuration of the given template as Key-Value Pairs
	 *
	 * @param template the template name
	 * @return set of stacked config values
	 */
	@GET
	@Path("/{template}")
	@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_JAVAARGS, MediaType.APPLICATION_JAVAPROPS})
	@RolesAllowed({"VIEW_CONFIGVALUES", "EDIT_CONFIGVALUES", "USE_AGENT_API"})
	ConfigValue[] get(@PathParam("template") String template);

	/**
	 * Deletes all configuration values for the given template.
	 *
	 * @param templateName the name of the template
	 */
	@DELETE
	@Path("/{template}")
	@RolesAllowed({"EDIT_CONFIGVALUES"})
	void deleteForTemplate(@PathParam("template") String templateName);

	/**
	 * Returns all configuration keys of a template in a non-stacked variant
	 *
	 * @param template the template name
	 * @return array of configuration values
	 */
	@GET
	@Path("/{template}/unstacked")
	@RolesAllowed({"VIEW_CONFIGVALUES", "EDIT_CONFIGVALUES", "USE_AGENT_API"})
	ConfigValue[] getUnstacked(@PathParam("template") String template);

	/**
	 * Returns configuration of the given service within the template as Key-Value Pairs
	 *
	 * @param template the template name
	 * @param service the name of the service
	 * @return array of key value pairs representing the configuration of the service within the template
	 */
	@GET
	@Path("/{template}/{service}")
	@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_JAVAARGS, MediaType.APPLICATION_JAVAPROPS})
	@RolesAllowed({"VIEW_CONFIGVALUES", "EDIT_CONFIGVALUES", "USE_AGENT_API"})
	ConfigValue[] get(@PathParam("template") String template, @PathParam("service") String service);

	/**
	 * Deletes all configuration values for given template and service.
	 *
	 * @param template the name of the template
	 * @param service the name of the service
	 */
	@DELETE
	@Path("/{template}/{service}")
	@RolesAllowed({"EDIT_CONFIGVALUES"})
	void deleteForService(@PathParam("template") String template, @PathParam("service") String service);

	/**
	 * Returns the value for a key of the given service within the template as Key-Value Pairs
	 *
	 * @param template the template name
	 * @param service the name of the service
	 * @param key the name of the key
	 * @return the value of the key of the service within the template
	 */
	// the "service" path parameter uses a greedy regex so service names may contain slashes
	@GET
	@Path("/{template}/{service" + ":.*}/{key}")
	@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_JAVAARGS, MediaType.APPLICATION_JAVAPROPS})
	@RolesAllowed({"VIEW_CONFIGVALUES", "EDIT_CONFIGVALUES", "USE_AGENT_API"})
	String get(@PathParam("template") String template, @PathParam("service") String service, @PathParam("key") String key);

	/**
	 * Returns the value for a key of the given service within the template as Key-Value Pair, but does no parent matching
	 *
	 * @param template the template name
	 * @param service the name of the service
	 * @param key the name of the key
	 * @return the value of the key of the service within the template
	 */
	@GET
	@Path("/{template}/{service" + ":.*}/{key}/exact")
	@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_JAVAARGS, MediaType.APPLICATION_JAVAPROPS})
	@RolesAllowed({"VIEW_CONFIGVALUES", "EDIT_CONFIGVALUES", "USE_AGENT_API"})
	String getExact(@PathParam("template") String template, @PathParam("service") String service, @PathParam("key") String key);

	/**
	 * Adds a new key-value pair to the configuration of a service within a template
	 *
	 * @param config the config value
	 */
	@PUT
	@RolesAllowed({"EDIT_CONFIGVALUES"})
	void save(ConfigValue config);

	/**
	 * Delete a ConfigValue
	 *
	 * @param template the template name
	 * @param service the name of the service
	 * @param key the name of the key
	 */
	@DELETE
	@Path("/{template}/{service" + ":.*}/{key}")
	@RolesAllowed({"EDIT_CONFIGVALUES"})
	void delete(@PathParam("template") String template, @PathParam("service") String service, @PathParam("key") String key);
}
package de.felixschulze.teamcity; /** * Helper for TeamCity Service Messages * * @author <a href="[email protected]">Felix Schulze</a> */ public class TeamCityStatusMessageHelper { public static String escapeString(String string) { String tmp = string.replace("|", "||"); tmp = tmp.replace("'", "|'"); tmp = tmp.replace("\"", "|'"); tmp = tmp.replaceAll("\\n", "|n"); tmp = tmp.replaceAll("\\r", "|r"); tmp = tmp.replace("[", "|["); tmp = tmp.replace("]", "|]"); return tmp; } public static String buildNumberString(String version) { return "##teamcity[buildNumber '" + TeamCityStatusMessageHelper.escapeString(version) + "']"; } public static String buildStatusString(TeamCityStatusType type, String text) { return "##teamcity[buildStatus status='" + type.toString() + "' text='" + TeamCityStatusMessageHelper.escapeString(text) + "']"; } /** * @deprecated As of release 1.0, replaced by {@link #buildStatusString(TeamCityStatusType type, String text)} */ @Deprecated public static String buildStatusFailureString(TeamCityStatusType type, String text) { return TeamCityStatusMessageHelper.buildStatusString(type, text); } public static String buildMessageString(TeamCityStatusType type, String text, String errorDetails) { if (type == TeamCityStatusType.ERROR) { return "##teamcity[message text='" + TeamCityStatusMessageHelper.escapeString(text) + "' errorDetails='" + TeamCityStatusMessageHelper.escapeString(errorDetails) + "' status='" + type.toString() + "']"; } return "##teamcity[message status='" + type.toString() + "' text='" + TeamCityStatusMessageHelper.escapeString(text) + "']"; } public static String buildProgressString(TeamCityProgressType type, String text) { if (type == TeamCityProgressType.START) { return "##teamcity[progressStart '" + TeamCityStatusMessageHelper.escapeString(text) + "']"; } else if (type == TeamCityProgressType.FINISH) { return "##teamcity[progressFinish '" + TeamCityStatusMessageHelper.escapeString(text) + "']"; } else if (type == 
TeamCityProgressType.MESSAGE) { return "##teamcity[progressMessage '" + TeamCityStatusMessageHelper.escapeString(text) + "']"; } return ""; } public static String importDataString(TeamCityImportDataType type, String path) { return "##teamcity[importData type='" + type.toString() + "' path='" + path + "']"; } public static String testSuiteStarted(String name) { return "##teamcity[testSuiteStarted name='" + TeamCityStatusMessageHelper.escapeString(name) + "']"; } public static String testSuiteFinished(String name) { return "##teamcity[testSuiteFinished name='" + TeamCityStatusMessageHelper.escapeString(name) + "']"; } public static String testStarted(String name) { return "##teamcity[testStarted name='" + TeamCityStatusMessageHelper.escapeString(name) + "']"; } public static String testFailed(String name, String message, String details) { return "##teamcity[testFailed name='" + TeamCityStatusMessageHelper.escapeString(name) + "' message='" + TeamCityStatusMessageHelper.escapeString(message) + "' details='" + TeamCityStatusMessageHelper.escapeString(details) + "']"; } public static String testIgnored(String name, String message) { return "##teamcity[testIgnored name='" + TeamCityStatusMessageHelper.escapeString(name) + "' message='" + TeamCityStatusMessageHelper.escapeString(message) + "']"; } public static String testFinished(String name, long duration) { return "##teamcity[testFinished name='" + TeamCityStatusMessageHelper.escapeString(name) + "' duration='" + duration + "']"; } }
package de.tblsoft.solr.pipeline.filter; import de.tblsoft.solr.pipeline.AbstractFilter; import de.tblsoft.solr.pipeline.bean.Document; import de.tblsoft.solr.pipeline.bean.DocumentBuilder; import de.tblsoft.solr.util.IOUtils; import org.apache.commons.io.FileUtils; import org.jsoup.parser.Parser; import javax.script.Compilable; import javax.script.CompiledScript; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import java.io.File; import java.util.ArrayList; import java.util.List; public class RichJavaScriptFilter extends AbstractFilter { private String filename; private String script; private ScriptEngine engine; private CompiledScript compiledScript; Parser htmlParser; Parser xmlParser; @Override public void init() { String internalFilename = getProperty("filename", null); script = getProperty("script", null); ScriptEngineManager mgr = new ScriptEngineManager(); engine = mgr.getEngineByName("JavaScript"); htmlParser = Parser.htmlParser(); xmlParser = Parser.xmlParser(); try { if(internalFilename != null) { filename = IOUtils.getAbsoluteFile(getBaseDir(),internalFilename); script = FileUtils.readFileToString(new File(filename)); } Compilable compEngine = (Compilable) engine; compiledScript = compEngine.compile(script); } catch (Exception e) { throw new RuntimeException(e); } super.init(); } @Override public void document(Document document) { List<Document> docs = new ArrayList<>(); docs.add(document); try { engine.put("htmlParser", htmlParser); engine.put("xmlParser", xmlParser); engine.put("docs", docs); engine.put("documentBuilder", new DocumentBuilder()); compiledScript.eval(); for(Document d : docs) { super.document(d); } } catch (Exception e) { throw new RuntimeException(e); } } @Override public void end() { super.end(); } }
package edu.washington.cs.dericp.diffutils.change; /** * <p>This class represents a change to a single line in a patch. In a patch * file, lines may be either deleted inserted, or remain the same. In the case * that a line remains the same between two different revisions of a file, the * line can be known as a context line.</p> */ public class LineChange { /** * There are three types of LineChanges: insertion, deletion, * context. Context LineChanges are lines which are not changes * at all but are lines present * in both the original and revised versions of the file. */ public enum Type { INSERTION, DELETION, CONTEXT } // the content of the line that this LineChange represents private String content; // the line number of the content of this LineChange in the original file // if a line was inserted between the original and revised versions of a // file, there is no need for an originalLineNumber since the line did not // previously exist in the original file private int originalLineNumber; // the line number of the content of this LineChange in the revised file // if a line was deleted between the original and revised versions of a // file, there is no need for a revisedLineNumber since the line no longer // exists in the new file private int revisedLineNumber; // the type of this LineChange private Type type; /** * Constructs a new LineChange. * * @param content the content of the change i.e. what is to be deleted * or inserted and should be a full line * @param originalLineNumber where this change occurs in the original file * @param revisedLineNumber where this change is seen in the revised file * @param type the type of change to be created */ public LineChange(String content, int originalLineNumber, int revisedLineNumber, Type type) { this.content = content; this.originalLineNumber = originalLineNumber; this.revisedLineNumber = revisedLineNumber; this.type = type; } /** * Sets the type of this change. 
* * @param type the type of change that this LineChange will now represent */ public void setType(Type type) { this.type = type; } /** * Returns the content of this LineChange. * * @return the content of this LineChange */ public String getContent() { return content; } /** * Returns the original line number of this LineChange. * * @return where this change occurs in the original file */ public int getOriginalLineNumber() { return originalLineNumber; } /** * Returns the revised line number of this LineChange * * @return where this change is seen in the revised file */ public int getRevisedLineNumber() { return revisedLineNumber; } /** * Returns the type of this LineChange. * * @return the type of this LineChange. */ public Type getType() { return type; } @Override public boolean equals(Object o) { if (!(o instanceof LineChange)) { return false; } LineChange other = (LineChange) o; return content.equals(other.content) && originalLineNumber == other.originalLineNumber && revisedLineNumber == other.revisedLineNumber && type == other.type; } @Override public int hashCode() { int result = 17; result = 31 * result + content.hashCode(); result = 31 * result + originalLineNumber; result = 31 * result + revisedLineNumber; result = 31 * result + type.hashCode(); return result; } }
package fi.csc.microarray.databeans.fs;

import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;

import fi.csc.microarray.client.ClientApplication;
import fi.csc.microarray.client.dialog.ChipsterDialog.DetailsVisibility;
import fi.csc.microarray.client.dialog.DialogInfo.Severity;
import fi.csc.microarray.client.operation.Operation;
import fi.csc.microarray.client.operation.OperationCategory;
import fi.csc.microarray.client.operation.OperationDefinition;
import fi.csc.microarray.client.operation.Operation.DataBinding;
import fi.csc.microarray.client.operation.parameter.Parameter;
import fi.csc.microarray.databeans.DataBean;
import fi.csc.microarray.databeans.DataFolder;
import fi.csc.microarray.databeans.DataItem;
import fi.csc.microarray.databeans.DataBean.Link;
import fi.csc.microarray.exception.MicroarrayException;
import fi.csc.microarray.util.IOUtils;

/**
 * Saves and loads contents of a DataManager. Snapshotting is based on journaling
 * paradigm, but due to pragmatical reasons both writing and reading are 2-pass
 * operations, not fitting the paradigm exactly.
 *
 * @author Aleksi Kallio
 *
 */
public class FSSnapshottingSession {

	// copy buffer size used when streaming bean content into zip entries
	private final int DATA_BLOCK_SIZE = 2048;
	// name of the zip entry that holds the textual metadata journal
	private static final String METADATA_FILENAME = "snapshot_metadata.txt";
	public final static String SNAPSHOT_EXTENSION = "cs";
	// bumped when the metadata format changes; loading rejects other versions
	private final int SNAPSHOT_VERSION = 2;
	private static final String ROOT_FOLDER_ID = "0";

	private FSDataManager manager;
	private ClientApplication application;

	// bidirectional numeric-id <-> item/operation maps, used both when
	// writing (fetchId) and when reading (fetchItem/fetchOperation)
	private int itemIdCounter = 0;
	private HashMap<Integer, DataItem> itemIdMap = new HashMap<Integer, DataItem>();
	private HashMap<DataItem, Integer> reversedItemIdMap = new HashMap<DataItem, Integer>();
	private int operationIdCounter = 0;
	private HashMap<Integer, Operation> operationIdMap = new HashMap<Integer, Operation>();
	private HashMap<Operation, Integer> reversedOperationIdMap = new HashMap<Operation, Integer>();

	public FSSnapshottingSession(FSDataManager manager, ClientApplication application) {
		this.manager = manager;
		this.application = application;
	}

	/**
	 * Writes the whole data manager contents into a zip session file. When the
	 * target file already exists, the new session is written to a temp file and
	 * swapped in via rename with a backup of the original, so a failed save
	 * cannot destroy the existing session.
	 *
	 * @param sessionFile target session file (may or may not already exist)
	 * @throws IOException if writing or any of the rename steps fails
	 */
	public void saveSnapshot(File sessionFile) throws IOException {

		boolean replaceOldSession = sessionFile.exists();

		File newSessionFile;
		File backupFile = null;
		if (replaceOldSession) {
			// TODO maybe avoid overwriting existing temp file
			newSessionFile = new File(sessionFile.getAbsolutePath() + "-temp.cs");
			backupFile = new File(sessionFile.getAbsolutePath() + "-backup.cs");
		} else {
			newSessionFile = sessionFile;
		}

		ZipOutputStream zipOutputStream = null;
		boolean createdSuccessfully = false;
		try {
			zipOutputStream = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(newSessionFile)));
			zipOutputStream.setLevel(1); // quite slow with bigger values

			// write data and gather metadata simultaneously
			StringBuffer metadata = new StringBuffer("");
			metadata.append("VERSION " + SNAPSHOT_VERSION + "\n");

			// generate all ids
			generateIdsRecursively((FSDataFolder)manager.getRootFolder());

			// 1st pass, write most metadata
			saveRecursively((FSDataFolder)manager.getRootFolder(), zipOutputStream, metadata);

			// 2nd pass for links (if written in one pass, input dependent operation parameters break when reading)
			saveLinksRecursively((FSDataFolder)manager.getRootFolder(), metadata);

			// NOTE(review): getBytes() uses the platform default charset — confirm
			// whether metadata should always be written as UTF-8
			writeFile(zipOutputStream, METADATA_FILENAME, new ByteArrayInputStream(metadata.toString().getBytes()));

			zipOutputStream.finish();
			zipOutputStream.close();

			// rename new session if replacing existing
			if (replaceOldSession) {
				// original to backup
				if (!sessionFile.renameTo(backupFile)) {
					throw new IOException("Creating backup " + sessionFile + " -> " + backupFile + " failed.");
				}
				// new to original
				if (newSessionFile.renameTo(sessionFile)) {
					createdSuccessfully = true;
					// remove backup
					backupFile.delete();
				} else {
					// try to move backup back to original
					// TODO remove new session file?
					if (backupFile.renameTo(sessionFile)) {
						throw new IOException("Moving new " + newSessionFile + " -> " + sessionFile + " failed, " +
								"restored original session file.");
					} else {
						throw new IOException("Moving new " + newSessionFile + " -> " + sessionFile + " failed, " +
								"also restoring original file failed, backup of original is " + backupFile);
					}
				}
			}
			// no existing session
			else {
				createdSuccessfully = true;
			}

		} catch (RuntimeException e) {
			// createdSuccesfully is false, so file will be deleted in finally block
			throw e;
		} catch (IOException e) {
			// createdSuccesfully is false, so file will be deleted in finally block
			throw e;
		} finally {
			IOUtils.closeIfPossible(zipOutputStream); // called twice for normal execution, not a problem
			if (!replaceOldSession && !createdSuccessfully) {
				newSessionFile.delete(); // do not leave bad session files hanging around
			}
		}
	}

	/** Streams the given input into a new zip entry with the given name. */
	private void writeFile(ZipOutputStream out, String name, InputStream in) throws IOException {
		int byteCount;
		ZipEntry cpZipEntry = new ZipEntry(name);
		out.putNextEntry(cpZipEntry );
		byte[] b = new byte[DATA_BLOCK_SIZE];
		while ( (byteCount = in.read(b, 0, DATA_BLOCK_SIZE)) != -1 ) {
			out.write(b, 0, byteCount);
		}
		out.closeEntry() ;
	}

	/**
	 * Assigns a numeric id to the folder and to every item below it.
	 * Returns the number of beans (non-folder items) encountered.
	 */
	private int generateIdsRecursively(FSDataFolder folder) throws IOException {
		int dataCount = 0;
		generateId(folder);
		for (DataItem data : folder.getChildren()) {
			if (data instanceof FSDataFolder) {
				int recDataCount = generateIdsRecursively((FSDataFolder)data);
				dataCount += recDataCount;
			} else {
				generateId((FSDataBean)data);
				dataCount++;
			}
		}
		return dataCount;
	}

	/**
	 * 1st pass: writes metadata for the folder and its subtree, streaming each
	 * bean's content file into the zip as it goes.
	 */
	private void saveRecursively(FSDataFolder folder, ZipOutputStream cpZipOutputStream, StringBuffer metadata) throws IOException {
		String folderId = fetchId(folder);
		saveDataFolderMetadata(folder, folderId, metadata);
		for (DataItem data : folder.getChildren()) {
			if (data instanceof FSDataFolder) {
				saveRecursively((FSDataFolder)data, cpZipOutputStream, metadata);
			} else {
				FSDataBean bean = (FSDataBean)data;
				saveDataBeanMetadata(bean, folderId, metadata);
				writeFile(cpZipOutputStream, bean.getContentFile().getName(), new FileInputStream(bean.getContentFile()));
			}
		}
	}

	/** Appends a DATAFOLDER record plus the common item records (NAME/CHILD). */
	private void saveDataFolderMetadata(FSDataFolder folder, String folderId, StringBuffer metadata) {
		metadata.append("DATAFOLDER " + folderId + "\n");
		saveDataItemMetadata(folder, folderId, metadata);
	}

	/**
	 * Appends DATABEAN/OPERATION/INPUTS/OPERATION_PARAMETER/OUTPUT/NOTES/
	 * CACHED_URL records for one bean. Operations shared by several beans are
	 * written only once and referenced by id afterwards.
	 */
	private void saveDataBeanMetadata(FSDataBean bean, String folderId, StringBuffer metadata) {
		String beanId = fetchId(bean);
		metadata.append("DATABEAN " + beanId + " " + bean.getContentFile().getName() + "\n");

		if (bean.getOperation() != null) {
			Operation operation = bean.getOperation();
			String operId;
			// write operation or lookup already written
			if (!operationIdMap.containsValue(operation) ) {
				operId = generateId(operation);
				metadata.append("OPERATION " + operId + " " + operation.getCategoryName() + "/" + operation.getName() + "\n");

				// some parameters need inputs at loading time => write these first
				if (operation.getBindings() != null) {
					String beanIds = "";
					for (DataBinding binding : operation.getBindings()) {
						beanIds += fetchId(binding.getData()) + " ";
					}
					metadata.append("INPUTS " + operId + " " + beanIds + "\n");
				}

				for (Parameter parameter : operation.getParameters()) {
					metadata.append("OPERATION_PARAMETER " + operId + " " + parameter.getName() + " " + parameter.getValue() + "\n");
				}

				// will be written in the 2nd pass
				// for (Link type : Link.values()) {
				//   for (DataBean target : bean.getLinkTargets(type)) {
				//     String targetId = fetchId(target);
				//     metadata.append("LINK " + type.name() + " " + beanId + " " + targetId + "\n");

			} else {
				operId = reversedOperationIdMap.get(operation).toString();
			}

			metadata.append("OUTPUT " + operId + " " + beanId + "\n");
		}

		if (bean.getNotes() != null) {
			// remove newlines from notes, they break loading
			metadata.append("NOTES " + beanId + " " + bean.getNotes().replace('\n', ' ') + "\n");
		}

		if (bean.getUrl() != null) {
			metadata.append("CACHED_URL " + beanId + " " + bean.getUrl() + "\n");
		}

		saveDataItemMetadata(bean, beanId, metadata);
	}

	// NOTE(review): the second parameter is the id of the item itself, despite
	// being named folderId (callers pass both folder and bean ids here)
	private void saveDataItemMetadata(DataItem data, String folderId, StringBuffer metadata) {
		metadata.append("NAME " + folderId + " " + data.getName() + "\n");
		if (data.getParent() != null) {
			metadata.append("CHILD " + folderId + " " + fetchId(data.getParent()) + "\n");
		}
	}

	/**
	 * Reads a session zip back into the data manager. First pass replays the
	 * metadata journal line by line (creating folders, beans, operations and
	 * links); OPERATION_PARAMETER and INPUTS records are deferred to a second
	 * pass because they depend on links being in place. Obsolete tools or
	 * parameters produce a warning dialog instead of failing the load.
	 *
	 * NOTE(review): the parentFolder argument is not referenced in this method
	 * — confirm whether it is intentionally unused.
	 *
	 * @return the newly created data items
	 */
	public List<DataItem> loadFromSnapshot(File snapshot, DataFolder parentFolder) throws IOException, MicroarrayException {

		ZipFile zipFile = new ZipFile(snapshot);

		// index all zip entries by name
		ZipEntry entry = null;
		Map<String,ZipEntry> entryMap = new HashMap<String,ZipEntry>();
		Enumeration<? extends ZipEntry> entries = zipFile.entries();
		while (entries.hasMoreElements()){
			entry = (ZipEntry)entries.nextElement();
			entryMap.put(entry.getName(), entry);
		}

		LinkedList<DataItem> newItems = new LinkedList<DataItem>();
		LinkedList<String> delayedProcessing = new LinkedList<String>();
		BufferedReader metadataIn = null;
		try {
			// load metadata and data
			metadataIn = new BufferedReader(new InputStreamReader(
					zipFile.getInputStream(entryMap.get(METADATA_FILENAME))));

			String firstLine = metadataIn.readLine();
			String supportedVersionLine = "VERSION " + SNAPSHOT_VERSION;
			if (!firstLine.contains(supportedVersionLine)) {
				throw new RuntimeException("unsupported stored session format: should be \"" + supportedVersionLine + "\", but was \"" + firstLine.replace("\n", "") + "\"");
			}

			// 1st pass
			for (String line = metadataIn.readLine(); line != null; line = metadataIn.readLine()) {

				if (line.startsWith("DATAFOLDER ")) {
					String[] split = line.split(" ");
					String id = split[1];
					DataFolder folder = manager.createFolder("");
					newItems.add(folder);
					mapId(id, folder);

				} else if (line.startsWith("DATABEAN ")) {
					String[] split = line.split(" ");
					String id = split[1];
					ZipEntry beanEntry = entryMap.get(split[2]);
					InputStream inputStream = zipFile.getInputStream(beanEntry);
					DataBean bean = manager.createDataBean("<empty>", inputStream);
					newItems.add(bean);
					mapId(id, bean);

				} else if (line.startsWith("NAME ")) {
					String[] split = line.split(" ");
					String id = split[1];
					String name = afterNthSpace(line, 2);
					DataItem item = fetchItem(id);
					item.setName(name);
					if (item instanceof DataBean) {
						// update content type now that we have the real filename available (this is needed!)
						DataBean bean = (DataBean)item;
						bean.setContentType(manager.guessContentType(name));
					}

				} else if (line.startsWith("OPERATION ")) {
					String[] split = line.split(" ");
					String id = split[1];
					String[] opData = afterNthSpace(line, 2).split("/");
					OperationDefinition od = application.locateOperationDefinition(opData[0], opData[1]);
					Operation op = null;
					if (od == null) {
						// create local operation definition object
						od = new OperationDefinition(opData[1], new OperationCategory(opData[0]), "", false);
						// warn if it was a real operation
						if (!OperationCategory.isPseudoCategory(od.getCategory())) {
							String message = "The session you opened contains a dataset which has been derived using an analysis tool which has been removed or renamed.\n\n" +
									"The dataset contents have not changed and you can use them as before, but the obsolete operation will not be usable in workflows.";
							String details = "Analysis tool: " + od.getCategoryName() + " / " + od.getName() + "\n";
							warnAboutObsoleteContent(message, details, null);
						}
					}
					op = new Operation(od, new DataBean[] { /* empty inputs currently */});
					mapId(id, op);

				} else if (line.startsWith("OPERATION_PARAMETER ")) {
					delayedProcessing.add(line); // process after derivation links are in place

				} else if (line.startsWith("OUTPUT ")) {
					String[] split = line.split(" ");
					String operId = split[1];
					Operation operation = fetchOperation(operId);
					String beanId = split[2];
					DataBean bean = (DataBean)fetchItem(beanId);
					bean.setOperation(operation);

				} else if (line.startsWith("INPUTS ")) {
					delayedProcessing.add(line); // process after derivation links are in place

				} else if (line.startsWith("CHILD ")) {
					String[] split = line.split(" ");
					String childId = split[1];
					String parentId = split[2];
					DataFolder parent;
					if (parentId.equals(ROOT_FOLDER_ID)) {
						parent = manager.getRootFolder();
					} else {
						parent = (DataFolder)fetchItem(parentId);
					}
					DataItem child = fetchItem(childId);
					parent.addChild(child);

				} else if (line.startsWith("NOTES ")) {
					String[] split = line.split(" ");
					String id = split[1];
					String notes = afterNthSpace(line, 2);
					DataBean item = (DataBean)fetchItem(id);
					item.setNotes(notes);

				} else if (line.startsWith("CACHED_URL ")) {
					String[] split = line.split(" ");
					String id = split[1];
					String url = split[2];
					FSDataBean bean = (FSDataBean)fetchItem(id);
					bean.setContentChanged(false);
					bean.setUrl(new URL(url));

				} else if (line.startsWith("LINK ")) {
					String[] split = line.split(" ");
					Link link= Link.valueOf(split[1]);
					String fromId = split[2];
					String toId = split[3];
					DataBean from = (DataBean)fetchItem(fromId);
					DataBean to = (DataBean)fetchItem(toId);
					// to be compatible with older session files that have duplicate links
					// check for duplicity here
					boolean exists = false;
					for (DataBean target : from.getLinkTargets(link)) {
						if (target == to) {
							// this link already exists, do not add it again
							exists = true;
							break;
						}
					}
					if (!exists) {
						from.addLink(link, to);
					}

				} else {
					throw new RuntimeException("metadata error in " + snapshot.getCanonicalPath() + ": line could not be processed \"" + line + "\"");
				}
			}
		} finally {
			if (metadataIn != null) {
				metadataIn.close();
			}
		}

		// 2nd pass
		for (String line : delayedProcessing) {

			if (line.startsWith("OPERATION_PARAMETER ")) {
				String[] split = line.split(" ", 4); // split to (max.) 4 pieces, i.e., do no skip trailing whitespace (happens when paramValue is empty)
				String operId = split[1];
				String paramName = split[2];
				String paramValue = split[3];
				Operation operation = fetchOperation(operId);
				Parameter parameter = operation.getParameter(paramName);
				if (parameter != null) {
					try {
						parameter.parseValue(paramValue);
					} catch (IllegalArgumentException e) {
						String message = "The session you opened contains a dataset with a parameter that references to an another dataset that was removed." +
								"Typically this happens when you break the connection between phenodata and datasets that it describes. " +
								"The dataset contents have not changed and you can use them as before, but the obsolete parameter has been removed from the history information of the dataset " +
								"and will not be saved in further sessions or workflows.";
						String details = "Analysis tool: " + operation.getCategoryName() + " / " + operation.getName() + "\nParameter with obsolete reference: " + paramName;
						warnAboutObsoleteContent(message, details, "");
					}
				} else {
					String message = "The session you opened contains a dataset which has been derived using an analysis tool with a parameter which has been removed or renamed.\n\n" +
							"The dataset contents have not changed and you can use them as before, but the obsolete parameter has been removed from the history information of the dataset " +
							"and will not be saved in further sessions or workflows.";
					String details = "Analysis tool: " + operation.getCategoryName() + " / " + operation.getName() + "\nObsolete parameter: " + paramName;
					String dataName = null;
					if (operation.getBindings() != null) {
						if (operation.getBindings().size() == 1) {
							dataName = operation.getBindings().get(0).getData().getName();
						}
					}
					warnAboutObsoleteContent(message, details, dataName);
				}

			} else if (line.startsWith("INPUTS ")) {
				String[] split = line.split(" ");
				String operId = split[1];
				Operation operation = fetchOperation(operId);
				LinkedList<DataBean> inputs = new LinkedList<DataBean>();
				for (int i = 2; i < split.length; i++) {
					String beanId = split[i];
					DataBean bean = (DataBean)fetchItem(beanId);
					if (bean.queryFeatures("/phenodata/").exists()) {
						continue; // skip phenodata, it is bound automatically
					}
					inputs.add(bean);
				}
				operation.bindInputs(inputs.toArray(new DataBean[] {}));

			} else {
				throw new RuntimeException("internal error, cannot parse: " + line);
			}
		}

		return newItems;
	}

	/** 2nd write pass: appends LINK records for every bean in the subtree. */
	private void saveLinksRecursively(FSDataFolder folder, StringBuffer metadata) {
		for (DataItem data : folder.getChildren()) {
			if (data instanceof FSDataFolder) {
				saveLinksRecursively((FSDataFolder)data, metadata);
			} else {
				DataBean bean = (DataBean)data;
				for (Link type : Link.values()) {
					for (DataBean target : bean.getLinkTargets(type)) {
						String beanId = fetchId(bean);
						String targetId = fetchId(target);
						metadata.append("LINK " + type.name() + " " + beanId + " " + targetId + "\n");
					}
				}
			}
		}
	}

	/** Shows an informational dialog about obsolete session content. */
	private void warnAboutObsoleteContent(String message, String details, String dataName) {
		String title = "Obsolete content in the session";
		String inputDataDesc = dataName != null ? ("When loading dataset " + dataName + ":\n") : "";
		String completeDetails = inputDataDesc + details;
		application.showDialog(title, message, completeDetails, Severity.INFO, true, DetailsVisibility.DETAILS_ALWAYS_VISIBLE);
	}

	/** Returns the remainder of the line after the nth space character. */
	private static String afterNthSpace(String line, int nth) {
		int from = 0;
		for (int i = 0; i < nth; i++) {
			from = line.indexOf(" ", from + 1);
		}
		return line.substring(from + 1);
	}

	/** Registers a loaded item under its journal id (both directions). */
	private void mapId(String id, DataItem item) {
		Integer iid = Integer.parseInt(id);
		itemIdMap.put(iid, item);
		reversedItemIdMap.put(item, iid);
	}

	/** Registers a loaded operation under its journal id (both directions). */
	private void mapId(String id, Operation operation) {
		Integer iid = Integer.parseInt(id);
		operationIdMap.put(iid, operation);
		reversedOperationIdMap.put(operation, iid);
	}

	/** Allocates the next item id for the given item (both directions). */
	private void generateId(DataItem data) {
		Integer id = itemIdCounter++;
		itemIdMap.put(id, data);
		reversedItemIdMap.put(data, id);
	}

	/** Allocates the next operation id and returns it as a string. */
	private String generateId(Operation operation) {
		Integer id = operationIdCounter++;
		operationIdMap.put(id, operation);
		reversedOperationIdMap.put(operation, id);
		return id.toString();
	}

	private DataItem fetchItem(String id) {
		Integer iid = Integer.parseInt(id);
		return itemIdMap.get(iid);
	}

	private Operation fetchOperation(String id) {
		Integer iid = Integer.parseInt(id);
		return operationIdMap.get(iid);
	}

	private String fetchId(DataItem item) {
		return reversedItemIdMap.get(item).toString();
	}
}
package fr.ens.transcriptome.nividic.platform.cli;

import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;

import org.rosuda.JRclient.RSrvException;
import org.rosuda.JRclient.Rconnection;

import fr.ens.transcriptome.nividic.om.BioAssay;
import fr.ens.transcriptome.nividic.om.ExpressionMatrix;
import fr.ens.transcriptome.nividic.om.ExpressionMatrixFactory;
import fr.ens.transcriptome.nividic.om.ExpressionMatrixRuntimeException;
import fr.ens.transcriptome.nividic.om.io.InputStreamBioAssayReader;
import fr.ens.transcriptome.nividic.om.io.IDMAReader;
import fr.ens.transcriptome.nividic.om.io.NividicIOException;
import fr.ens.transcriptome.nividic.om.io.SimpleExpressionMatrixWriter;
import fr.ens.transcriptome.nividic.om.r.RExpressionMatrix;

/**
 * Demo class for assembling an ExpressionMatrixDimension from IDMA files.
 * @author Lory Montout
 */
public final class MatrixDemo {

  /**
   * Main method. Builds the matrix from the directory given as first
   * argument, writes it to /tmp and runs the Rserve clustering demo.
   * @param args Arguments of the main method
   */
  public static void main(final String[] args) {

    ExpressionMatrix em = readMatrixM(args);
    saveMatrix(em);

    try {
      exec(em);
    } catch (RSrvException e) {
      e.printStackTrace();
    }

    // demoMA(args);
  }

  /**
   * Reads every *.txt IDMA file of the directory given as first argument and
   * assembles the bioassays into an expression matrix named "MatrixDemo".
   * @param args command-line arguments; args[0] is the input directory
   * @return the assembled matrix, or null if args is null
   */
  private static ExpressionMatrix readMatrixM(final String[] args) {

    if (args == null)
      return null;

    ArrayList<BioAssay> bioAssays = readBioAssays(new File(args[0]));

    // Create matrix
    ExpressionMatrix em = ExpressionMatrixFactory.createExpressionMatrix();
    em.setName("MatrixDemo");
    addBioAssays(em, bioAssays);

    return em;
  }

  /**
   * Reads all *.txt files of dir as IDMA bioassays (ID, BRIGHT and RATIO
   * fields only). Unreadable files are reported on stderr and skipped.
   * @param dir directory to scan
   * @return the successfully read bioassays (empty if dir is not listable)
   */
  private static ArrayList<BioAssay> readBioAssays(final File dir) {

    File[] filesToRead = dir.listFiles(new FileFilter() {
      public boolean accept(final File pathname) {
        return pathname.getName().endsWith(".txt");
      }
    });

    // listFiles() returns null if dir does not exist or is not a directory
    if (filesToRead == null)
      return new ArrayList<BioAssay>();

    ArrayList<BioAssay> result = new ArrayList<BioAssay>(filesToRead.length);

    for (int i = 0; i < filesToRead.length; i++) {

      try {
        // Read idma file
        InputStream is = new FileInputStream(filesToRead[i].getAbsolutePath());
        InputStreamBioAssayReader bar = new IDMAReader(is);
        bar.addFieldToRead(BioAssay.FIELD_NAME_ID);
        bar.addFieldToRead(BioAssay.FIELD_NAME_BRIGHT);
        bar.addFieldToRead(BioAssay.FIELD_NAME_RATIO);
        BioAssay b = bar.read();
        // name = file path without the "data/" prefix and ".txt" suffix
        b.setName((filesToRead[i].toString()).substring("data/".length(),
            (filesToRead[i].toString().length()) - ".txt".length()));
        is.close();
        result.add(b);
      } catch (FileNotFoundException e) {
        System.err.println("file " + filesToRead[i] + " not found");
      } catch (NividicIOException e) {
        System.err.println("error while reading " + filesToRead[i]);
      } catch (IOException e) {
        System.err.println("error while reading " + filesToRead[i]);
      }
    }

    return result;
  }

  /**
   * Adds every bioassay to the matrix, reporting assembly failures on stderr.
   */
  private static void addBioAssays(final ExpressionMatrix em,
      final ArrayList<BioAssay> bioAssays) {

    try {
      for (int i = 0; i < bioAssays.size(); i++) {
        em.addBioAssay(bioAssays.get(i));
      }
    } catch (ExpressionMatrixRuntimeException e) {
      System.err.println("error while assembling ");
    }
  }

  /**
   * Writes the matrix to outputFile in the simple tab-delimited format,
   * reporting failures on stderr.
   */
  private static void writeMatrix(final ExpressionMatrix em,
      final String outputFile) {

    try {
      OutputStream os = new FileOutputStream(outputFile);
      SimpleExpressionMatrixWriter emw = new SimpleExpressionMatrixWriter(os);
      emw.write(em);
      os.close();
    } catch (FileNotFoundException e) {
      System.err.println("file " + outputFile + " not found");
    } catch (NividicIOException e) {
      System.err.println("error while writing " + em.getName());
    } catch (IOException e) {
      System.err.println("error while writing " + em.getName());
    }
  }

  /** Saves the matrix to the demo output path. */
  private static void saveMatrix(final ExpressionMatrix em) {
    writeMatrix(em, "/tmp/testExpressionMatrix.txt");
  }

  /**
   * Variant demo: same as readMatrixM/saveMatrix but also adds the A
   * dimension before assembling. Currently unused (call commented out in
   * main).
   */
  private static void demoMA(final String[] args) {

    if (args == null)
      return;

    ArrayList<BioAssay> bioAssays = readBioAssays(new File(args[0]));

    // Create matrix
    ExpressionMatrix em = ExpressionMatrixFactory.createExpressionMatrix();
    em.setName("MatrixDemo");
    em.addDimension(BioAssay.FIELD_NAME_A);
    addBioAssays(em, bioAssays);

    // Save matrix
    writeMatrix(em, "/tmp/testExpressionMatrixMA.txt");
  }

  /**
   * Pushes the matrix into a local Rserve session, clusters the experiments
   * with a manhattan-distance hierarchical clustering and renders the
   * dendrogram to /tmp/DemoMatrix_cluster.eps.
   * @param em matrix to analyse
   * @throws RSrvException if communication with the R server fails
   */
  private static void exec(final ExpressionMatrix em) throws RSrvException {

    Rconnection con = new Rconnection("127.0.0.1");

    RExpressionMatrix rexp = new RExpressionMatrix();
    rexp.setConnexion(con);
    rexp.setMatrix(em);

    String rName = "em";

    rexp.put(rName);
    rexp.get(rName);

    // System.out.println("equals=" + em.equals(em2));
    // System.out.println(con.eval("names(em[,1:8])"));

    // Build "em2=em[,1:(dim(em)[2])]" — copy all columns of the matrix.
    StringBuffer sb = new StringBuffer();
    sb.append("em2=");
    sb.append(rName);
    sb.append("[,1:");
    sb.append("(dim(");
    sb.append(rName);
    sb.append(")[2])");
    // sb.append(em.getColumnCount() + 0);
    sb.append("]");

    // sending instruction
    // System.out.println(sb);
    con.voidEval(sb.toString());

    // cleaning the StringBuffer
    sb.setLength(0);

    con.voidEval("em.t=t(as.matrix(em))");
    con.voidEval("em.d=dist(em.t,method = 'manhattan')");
    con.voidEval("postscript('/tmp/DemoMatrix_cluster.eps')");
    con.voidEval("plot(hclust(em.d,method = \"average\"),main = \"experiences "
        + "cluster\",xlab = \"Experiences\",sub = 'hclust(dist"
        + "(*,manhattan),\"average\")')");
    con.voidEval("dev.off()");
  }

  private MatrixDemo() {
  }
}
package io.funtom.util.concurrent;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Runs tasks mutually exclusively per key: two tasks submitted under the same
 * key never run concurrently, while tasks under distinct keys may. Each key's
 * executor is reference counted and discarded as soon as the last thread using
 * the key finishes, so the internal maps never accumulate dead keys.
 */
public final class SynchronizedPerKeyExecutor<K> {

    private final Map<K, LockExecutor> executors = new HashMap<>();
    private final Map<K, Integer> keyUsersCount = new HashMap<>();

    /** Runs {@code task} under the mutual-exclusion lock associated with {@code key}. */
    public void execute(K key, Runnable task) {
        LockExecutor keyExecutor = acquireExecutor(key);
        try {
            keyExecutor.execute(task);
        } finally {
            releaseExecutor(key);
        }
    }

    /**
     * Runs {@code task} under the lock associated with {@code key} and returns
     * its result, propagating any checked exception it throws.
     */
    public <R> R submit(K key, Callable<R> task) throws Exception {
        LockExecutor keyExecutor = acquireExecutor(key);
        try {
            return keyExecutor.submit(task);
        } finally {
            releaseExecutor(key);
        }
    }

    /** Like {@link #submit}, but checked exceptions are rethrown unchecked. */
    public <R> R submitUnchecked(K key, Callable<R> task) {
        LockExecutor keyExecutor = acquireExecutor(key);
        try {
            return keyExecutor.submitUnchecked(task);
        } finally {
            releaseExecutor(key);
        }
    }

    // Bumps the reference count for key, creating its executor on first use.
    private synchronized LockExecutor acquireExecutor(K key) {
        Integer users = keyUsersCount.get(key);
        if (users != null) {
            keyUsersCount.put(key, users + 1);
            return executors.get(key);
        }
        keyUsersCount.put(key, 1);
        LockExecutor created = new LockExecutor(new ReentrantLock());
        executors.put(key, created);
        return created;
    }

    // Drops one reference for key; the last release discards the executor.
    private synchronized void releaseExecutor(K key) {
        int users = keyUsersCount.get(key);
        if (users > 1) {
            keyUsersCount.put(key, users - 1);
        } else {
            executors.remove(key);
            keyUsersCount.remove(key);
        }
    }
}
package jp.naist.sd.kenja.factextractor;

import java.io.File;
import java.io.IOException;

import com.google.common.base.Charsets;
import com.google.common.io.Files;

/**
 * TreeWriter that materialises a Tree/Blob hierarchy on the file system:
 * each Tree becomes a directory under baseDir and each Blob becomes a
 * US-ASCII file inside the directory of its enclosing tree.
 */
public class FileFormatTreeWriter extends TreeWriter {

  private File baseDir;

  // Directory currently being written into; tracks the tree recursion.
  private File currentDir;

  /**
   * Creates a writer rooted at baseDir, creating the directory (and any
   * missing parents) if needed.
   */
  public FileFormatTreeWriter(File baseDir) {
    this.baseDir = baseDir;
    ensureDirectory(baseDir);
    currentDir = baseDir;
  }

  /**
   * Creates dir (including missing parents) if it does not yet exist.
   * Failures are printed and otherwise ignored, matching the original
   * best-effort behaviour; a later write will surface any real problem.
   */
  private static void ensureDirectory(File dir) {
    if (!dir.exists()) {
      try {
        Files.createParentDirs(dir);
        dir.mkdir();
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  }

  @Override
  public void writeTree(Tree tree) {
    // Remember where we were so sibling subtrees are written next to each
    // other after this recursion unwinds.
    File parentDir = currentDir;

    if (!tree.isRoot()) {
      currentDir = new File(currentDir, tree.getName());
      if (!currentDir.exists())
        currentDir.mkdir();
    }

    for (Blob blob : tree.getBlobs()) {
      writeBlob(blob);
    }

    for (Tree childTree : tree.getChildTrees()) {
      writeTree(childTree);
    }

    currentDir = parentDir;
  }

  @Override
  public void writeBlob(Blob blob) {
    // Same creation logic the constructor uses (previously duplicated here).
    ensureDirectory(currentDir);

    File blobFile = new File(currentDir, blob.getName());
    try {
      blobFile.createNewFile();
      Files.write(blob.getBody(), blobFile, Charsets.US_ASCII);
    } catch (IOException e) {
      // Best-effort: report and continue with the remaining blobs.
      e.printStackTrace();
    }
  }
}
package org.wildfly.extension.undertow.security.jaspi;

import io.undertow.security.api.AuthenticatedSessionManager;
import io.undertow.security.api.AuthenticationMechanism;
import io.undertow.security.api.SecurityContext;
import io.undertow.security.idm.Account;
import io.undertow.server.ConduitWrapper;
import io.undertow.server.HttpServerExchange;
import io.undertow.servlet.handlers.ServletRequestContext;
import io.undertow.util.AttachmentKey;
import io.undertow.util.ConduitFactory;
import org.jboss.security.SecurityConstants;
import org.jboss.security.SecurityContextAssociation;
import org.jboss.security.auth.callback.JBossCallbackHandler;
import org.jboss.security.auth.message.GenericMessageInfo;
import org.jboss.security.identity.plugins.SimpleRole;
import org.jboss.security.identity.plugins.SimpleRoleGroup;
import org.jboss.security.plugins.auth.JASPIServerAuthenticationManager;
import org.wildfly.extension.undertow.logging.UndertowLogger;
import org.wildfly.extension.undertow.security.AccountImpl;

import javax.security.auth.Subject;
import javax.security.auth.message.AuthException;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.security.Principal;
import java.util.HashSet;
import java.util.Set;

import org.jboss.security.auth.callback.JASPICallbackHandler;
import org.jboss.security.identity.Role;
import org.jboss.security.identity.RoleGroup;
import org.xnio.conduits.StreamSinkConduit;

/**
 * <p>
 * {@link AuthenticationMechanism} implementation that enables JASPI-based authentication.
 * </p>
 *
 * @author Pedro Igor
 * @author <a href="mailto:[email protected]">Stefan Guilhen</a>
 */
public class JASPIAuthenticationMechanism implements AuthenticationMechanism {

    private static final String JASPI_HTTP_SERVLET_LAYER = "HttpServlet";
    private static final String MECHANISM_NAME = "JASPIC";
    private static final String JASPI_AUTH_TYPE = "javax.servlet.http.authType";
    private static final String JASPI_REGISTER_SESSION = "javax.servlet.http.registerSession";

    // Exposed to SAM modules through the MessageInfo map so they can reach Undertow internals.
    public static final AttachmentKey<HttpServerExchange> HTTP_SERVER_EXCHANGE_ATTACHMENT_KEY = AttachmentKey.create(HttpServerExchange.class);
    public static final AttachmentKey<SecurityContext> SECURITY_CONTEXT_ATTACHMENT_KEY = AttachmentKey.create(SecurityContext.class);

    // Guards against registering the secureResponse conduit wrapper twice for one exchange.
    private static final AttachmentKey<Boolean> ALREADY_WRAPPED = AttachmentKey.create(Boolean.class);

    private final String securityDomain;
    private final String configuredAuthMethod;

    public JASPIAuthenticationMechanism(final String securityDomain, final String configuredAuthMethod) {
        this.securityDomain = securityDomain;
        this.configuredAuthMethod = configuredAuthMethod;
    }

    /**
     * Runs the JASPIC validateRequest phase: feeds the request through the
     * configured server auth modules (SAMs), converts the resulting JBoss
     * security context into an Undertow {@link Account}, and registers a
     * response wrapper so secureResponse runs after the application.
     */
    @Override
    public AuthenticationMechanismOutcome authenticate(final HttpServerExchange exchange, final SecurityContext sc) {
        final ServletRequestContext requestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
        final JASPIServerAuthenticationManager sam = createJASPIAuthenticationManager();
        final GenericMessageInfo messageInfo = createMessageInfo(exchange, sc);
        final String applicationIdentifier = buildApplicationIdentifier(requestContext);
        final JASPICallbackHandler cbh = new JASPICallbackHandler();

        UndertowLogger.ROOT_LOGGER.debugf("validateRequest for layer [%s] and applicationContextIdentifier [%s]", JASPI_HTTP_SERVLET_LAYER, applicationIdentifier);

        Account cachedAccount = null;
        final JASPICSecurityContext jaspicSecurityContext = (JASPICSecurityContext) exchange.getSecurityContext();
        final AuthenticatedSessionManager sessionManager = exchange.getAttachment(AuthenticatedSessionManager.ATTACHMENT_KEY);

        if (sessionManager != null) {
            AuthenticatedSessionManager.AuthenticatedSession authSession = sessionManager.lookupSession(exchange);
            // FIX: lookupSession returns null when there is no authenticated session
            // for this exchange; dereferencing it unconditionally caused an NPE on
            // the first unauthenticated request.
            if (authSession != null) {
                cachedAccount = authSession.getAccount();
            }
            // if there is a cached account we set it in the security context so that the principal is available to
            // SAM modules via request.getUserPrincipal().
            if (cachedAccount != null) {
                jaspicSecurityContext.setCachedAuthenticatedAccount(cachedAccount);
            }
        }

        AuthenticationMechanismOutcome outcome = AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
        Account authenticatedAccount = null;

        boolean isValid = sam.isValid(messageInfo, new Subject(), JASPI_HTTP_SERVLET_LAYER, applicationIdentifier, cbh);
        jaspicSecurityContext.setCachedAuthenticatedAccount(null);

        if (isValid) {
            // The CBH filled in the JBOSS SecurityContext, we need to create an Undertow account based on that
            org.jboss.security.SecurityContext jbossSct = SecurityActions.getSecurityContext();
            authenticatedAccount = createAccount(cachedAccount, jbossSct);
        }

        // authType resolution (check message info first, then check for the configured auth method, then use mech-specific name).
        String authType = (String) messageInfo.getMap().get(JASPI_AUTH_TYPE);
        if (authType == null)
            authType = this.configuredAuthMethod != null ? this.configuredAuthMethod : MECHANISM_NAME;

        if (isValid && authenticatedAccount != null) {
            outcome = AuthenticationMechanismOutcome.AUTHENTICATED;

            // The SAM may request the container to cache the authenticated session.
            Object registerObj = messageInfo.getMap().get(JASPI_REGISTER_SESSION);
            boolean cache = false;
            if (registerObj != null && (registerObj instanceof String)) {
                cache = Boolean.valueOf((String) registerObj);
            }
            sc.authenticationComplete(authenticatedAccount, authType, cache);
        } else if (isValid && authenticatedAccount == null && !isMandatory(requestContext)) {
            // SAM validated but opted out of authentication and the resource is unprotected.
            outcome = AuthenticationMechanismOutcome.NOT_ATTEMPTED;
        } else {
            outcome = AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
            sc.authenticationFailed("JASPIC authentication failed.", authType);
        }

        // A SAM can wrap the HTTP request/response objects - update the servlet request context with the values found in the message info.
        ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
        servletRequestContext.setServletRequest((HttpServletRequest) messageInfo.getRequestMessage());
        servletRequestContext.setServletResponse((HttpServletResponse) messageInfo.getResponseMessage());

        secureResponse(exchange, sc, sam, messageInfo, cbh);

        return outcome;
    }

    /** JASPIC drives its own challenges through the SAM, so nothing to add here. */
    @Override
    public ChallengeResult sendChallenge(final HttpServerExchange exchange, final SecurityContext securityContext) {
        return new ChallengeResult(true);
    }

    // True if a SAM recorded an AuthException in the thread's security context data.
    private boolean wasAuthExceptionThrown() {
        return SecurityContextAssociation.getSecurityContext().getData().get(AuthException.class.getName()) != null;
    }

    private JASPIServerAuthenticationManager createJASPIAuthenticationManager() {
        return new JASPIServerAuthenticationManager(this.securityDomain, new JBossCallbackHandler());
    }

    // JASPIC application context identifier: "<virtual server> <context path>".
    private String buildApplicationIdentifier(final ServletRequestContext attachment) {
        ServletRequest servletRequest = attachment.getServletRequest();
        return servletRequest.getServletContext().getVirtualServerName() + " " + servletRequest.getServletContext().getContextPath();
    }

    /**
     * Builds the JASPIC MessageInfo carrying the servlet request/response plus
     * Undertow-specific attachments that SAM modules may need.
     */
    private GenericMessageInfo createMessageInfo(final HttpServerExchange exchange, final SecurityContext securityContext) {
        ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);

        GenericMessageInfo messageInfo = new GenericMessageInfo();
        messageInfo.setRequestMessage(servletRequestContext.getServletRequest());
        messageInfo.setResponseMessage(servletRequestContext.getServletResponse());
        messageInfo.getMap().put("javax.security.auth.message.MessagePolicy.isMandatory", isMandatory(servletRequestContext).toString());

        // additional context data, useful to provide access to Undertow resources during the modules processing
        messageInfo.getMap().put(SECURITY_CONTEXT_ATTACHMENT_KEY, securityContext);
        messageInfo.getMap().put(HTTP_SERVER_EXCHANGE_ATTACHMENT_KEY, exchange);

        return messageInfo;
    }

    /**
     * Converts the JBoss security context populated by the SAM into an Undertow
     * {@link Account}. Returns null when the SAM opted out (no user principal),
     * the cached account when the SAM re-authenticated the same principal, or a
     * freshly built account otherwise.
     */
    private Account createAccount(final Account cachedAccount, final org.jboss.security.SecurityContext jbossSct) {
        if (jbossSct == null) {
            throw UndertowLogger.ROOT_LOGGER.nullParamter("org.jboss.security.SecurityContext");
        }

        // null principal: SAM has opted out of the authentication process.
        Principal userPrincipal = jbossSct.getUtil().getUserPrincipal();
        if (userPrincipal == null) {
            return null;
        }

        // SAM handled the same principal found in the cached account: indicates we must use the cached account.
        if (cachedAccount != null && cachedAccount.getPrincipal() == userPrincipal) {
            // populate the security context using the cached account data.
            jbossSct.getUtil().createSubjectInfo(userPrincipal, ((AccountImpl) cachedAccount).getCredential(), null);
            RoleGroup roleGroup = new SimpleRoleGroup(SecurityConstants.ROLES_IDENTIFIER);
            for (String role : cachedAccount.getRoles())
                roleGroup.addRole(new SimpleRole(role));
            jbossSct.getUtil().setRoles(roleGroup);
            return cachedAccount;
        }

        // SAM handled a different principal or there is no cached account: build a new account.
        Set<String> stringRoles = new HashSet<String>();
        RoleGroup roleGroup = jbossSct.getUtil().getRoles();
        if (roleGroup != null) {
            for (Role role : roleGroup.getRoles()) {
                stringRoles.add(role.getRoleName());
            }
        }

        Object credential = jbossSct.getUtil().getCredential();

        return new AccountImpl(userPrincipal, stringRoles, credential);
    }

    /**
     * Registers a response conduit wrapper that invokes the JASPIC
     * secureResponse phase once the application has produced the response,
     * unless the response already started or a wrapper was registered before.
     */
    private void secureResponse(final HttpServerExchange exchange, final SecurityContext securityContext, final JASPIServerAuthenticationManager sam, final GenericMessageInfo messageInfo, final JASPICallbackHandler cbh) {
        if (exchange.getAttachment(ALREADY_WRAPPED) != null || exchange.isResponseStarted()) {
            return;
        }
        exchange.putAttachment(ALREADY_WRAPPED, true);
        // we add a response wrapper to properly invoke the secureResponse, after processing the destination
        exchange.addResponseWrapper(new ConduitWrapper<StreamSinkConduit>() {
            @Override
            public StreamSinkConduit wrap(final ConduitFactory<StreamSinkConduit> factory, final HttpServerExchange exchange) {
                ServletRequestContext requestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
                String applicationIdentifier = buildApplicationIdentifier(requestContext);

                if (!wasAuthExceptionThrown()) {
                    UndertowLogger.ROOT_LOGGER.debugf("secureResponse for layer [%s] and applicationContextIdentifier [%s].", JASPI_HTTP_SERVLET_LAYER, applicationIdentifier);
                    sam.secureResponse(messageInfo, new Subject(), JASPI_HTTP_SERVLET_LAYER, applicationIdentifier, cbh);

                    // A SAM can unwrap the HTTP request/response objects - update the servlet request context with the values found in the message info.
                    ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
                    servletRequestContext.setServletRequest((HttpServletRequest) messageInfo.getRequestMessage());
                    servletRequestContext.setServletResponse((HttpServletResponse) messageInfo.getResponseMessage());
                }
                return factory.create();
            }
        });
    }

    /**
     * <p>The authentication is mandatory if the servlet has http constraints (eg.: {@link
     * javax.servlet.annotation.HttpConstraint}).</p>
     *
     * @param attachment
     * @return
     */
    private Boolean isMandatory(final ServletRequestContext attachment) {
        return attachment.getExchange().getSecurityContext() != null && attachment.getExchange().getSecurityContext().isAuthenticationRequired();
    }
}
package org.openoffice.xmerge.converter.xml.sxw.wordsmith; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.openoffice.xmerge.DocumentMerger; import org.openoffice.xmerge.MergeException; import org.openoffice.xmerge.ConverterCapabilities; import org.openoffice.xmerge.converter.xml.sxw.SxwDocument; import org.openoffice.xmerge.merger.DiffAlgorithm; import org.openoffice.xmerge.merger.Difference; import org.openoffice.xmerge.merger.NodeMergeAlgorithm; import org.openoffice.xmerge.merger.Iterator; import org.openoffice.xmerge.merger.DiffAlgorithm; import org.openoffice.xmerge.merger.diff.ParaNodeIterator; import org.openoffice.xmerge.merger.diff.IteratorLCSAlgorithm; import org.openoffice.xmerge.merger.merge.DocumentMerge; import org.openoffice.xmerge.merger.merge.CharacterBaseParagraphMerge; import org.openoffice.xmerge.util.Debug; /** * Wordsmith implementation of <code>DocumentMerger</code> * for the {@link * org.openoffice.xmerge.converter.xml.sxw.wordsmith.PluginFactoryImpl * PluginFactoryImpl}.</p> */ public class DocumentMergerImpl implements DocumentMerger { private ConverterCapabilities cc_; private org.openoffice.xmerge.Document orig = null; public DocumentMergerImpl(org.openoffice.xmerge.Document doc, ConverterCapabilities cc) { cc_ = cc; this.orig = doc; } public void merge(org.openoffice.xmerge.Document modifiedDoc) throws MergeException { SxwDocument wdoc1 = (SxwDocument) orig; SxwDocument wdoc2 = (SxwDocument) modifiedDoc; Document doc1 = wdoc1.getContentDOM(); Document doc2 = wdoc2.getContentDOM(); Iterator i1 = new ParaNodeIterator(cc_, doc1.getDocumentElement()); Iterator i2 = new ParaNodeIterator(cc_, doc2.getDocumentElement()); DiffAlgorithm diffAlgo = new IteratorLCSAlgorithm(); // find out the paragrah level diffs Difference[] diffTable = diffAlgo.computeDiffs(i1, i2); if (Debug.isFlagSet(Debug.INFO)) { Debug.log(Debug.INFO, "Diff Result: "); for (int i = 0; i < diffTable.length; i++) { Debug.log(Debug.INFO, 
diffTable[i].debug()); } } // merge the paragraphs NodeMergeAlgorithm charMerge = new CharacterBaseParagraphMerge(); DocumentMerge docMerge = new DocumentMerge(cc_, charMerge); Iterator result = null; docMerge.applyDifference(i1, i2, diffTable); } }
package org.datacite.mds.web.ui.controller;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;

import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.validation.Valid;
import javax.validation.ValidationException;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.datacite.mds.domain.Allocator;
import org.datacite.mds.domain.AllocatorOrDatacentre;
import org.datacite.mds.domain.Datacentre;
import org.datacite.mds.domain.Dataset;
import org.datacite.mds.domain.Media;
import org.datacite.mds.domain.Metadata;
import org.datacite.mds.service.HandleException;
import org.datacite.mds.service.HandleService;
import org.datacite.mds.service.SecurityException;
import org.datacite.mds.util.SecurityUtils;
import org.datacite.mds.util.Utils;
import org.datacite.mds.util.ValidationUtils;
import org.datacite.mds.validation.ValidationHelper;
import org.datacite.mds.web.api.NotFoundException;
import org.datacite.mds.web.ui.UiController;
import org.datacite.mds.web.ui.model.CreateDatasetModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.roo.addon.web.mvc.controller.RooWebScaffold;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.validation.FieldError;
import org.springframework.validation.ObjectError;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.multipart.support.ByteArrayMultipartFileEditor;

/**
 * UI controller for managing datasets: listing, creation (with first metadata
 * upload), update of the DOI target URL, and lookup by DOI.
 */
@RooWebScaffold(path = "datasets", formBackingObject = Dataset.class, delete = false, populateMethods = false)
@RequestMapping("/datasets")
@Controller
public class DatasetController implements UiController {

    private static Logger log = Logger.getLogger(DatasetController.class);

    @Autowired
    HandleService handleService;

    @Autowired
    ValidationHelper validationHelper;

    // Lets uploaded metadata files bind directly to byte[] model properties.
    @InitBinder
    void initBinder(WebDataBinder binder) {
        binder.registerCustomEditor(byte[].class, new ByteArrayMultipartFileEditor());
    }

    /**
     * Shows one dataset with its media entries, its metadata versions, a
     * pretty-printed copy of the latest metadata and the currently resolved
     * handle URL.
     */
    @RequestMapping(value = "/{id}", method = RequestMethod.GET)
    public String show(@PathVariable("id") Long id, Model model) {
        Dataset dataset = Dataset.findDataset(id);
        model.addAttribute("dataset", dataset);
        List<Media> medias = Media.findMediasByDataset(dataset).getResultList();
        model.addAttribute("medias", medias);
        List<Metadata> metadatas = Metadata.findMetadatasByDataset(dataset).getResultList();
        model.addAttribute("metadatas", metadatas);
        try {
            Metadata metadata = metadatas.get(0);
            model.addAttribute("metadata", metadata);
            byte[] xml = metadata.getXml();
            model.addAttribute("prettyxml", Utils.formatXML(xml));
        } catch (Exception ignored) {
            // Deliberate best-effort: a dataset may have no metadata yet or the
            // XML may be unformattable; the view copes with missing attributes.
        }
        model.addAttribute("resolvedUrl", resolveDoi(dataset));
        model.addAttribute("itemId", id);
        return "datasets/show";
    }

    // Resolves the DOI via the handle system, mapping failures to short
    // human-readable placeholders for the UI.
    private String resolveDoi(Dataset dataset) {
        try {
            String url = handleService.resolve(dataset.getDoi());
            return url;
        } catch (NotFoundException e) {
            return "not resolveable";
        } catch (HandleException e) {
            return "handle error";
        }
    }

    /**
     * Supplies the datacentre select box: a datacentre user only sees itself,
     * an allocator sees all of its datacentres.
     */
    @ModelAttribute("datacentres")
    public Collection<Datacentre> populateDatacentres() throws SecurityException {
        if (SecurityUtils.isLoggedInAsDatacentre()) {
            Datacentre datacentre = SecurityUtils.getCurrentDatacentre();
            return Arrays.asList(datacentre);
        } else {
            Allocator allocator = SecurityUtils.getCurrentAllocator();
            return Datacentre.findAllDatacentresByAllocator(allocator);
        }
    }

    /**
     * Lists the current user's datasets, optionally paged (page is 1-based,
     * default page size 10).
     */
    @RequestMapping(method = RequestMethod.GET)
    public String list(@RequestParam(value = "page", required = false) Integer page,
            @RequestParam(value = "size", required = false) Integer size, Model model) throws SecurityException {
        AllocatorOrDatacentre user = SecurityUtils.getCurrentAllocatorOrDatacentre();
        if (page != null || size != null) {
            int sizeNo = size == null ? 10 : size.intValue();
            model.addAttribute("datasets", Dataset.findDatasetEntriesByAllocatorOrDatacentre(user, page == null ? 0 : (page
                    .intValue() - 1) * sizeNo, sizeNo));
            float nrOfPages = (float) Dataset.countDatasetsByAllocatorOrDatacentre(user) / sizeNo;
            model.addAttribute("maxPages", (int) ((nrOfPages > (int) nrOfPages || nrOfPages == 0.0) ? nrOfPages + 1 : nrOfPages));
        } else {
            model.addAttribute("datasets", Dataset.findDatasetsByAllocatorOrDatacentre(user));
        }
        return "datasets/list";
    }

    @RequestMapping(params = "form", method = RequestMethod.GET)
    public String createForm(Model uiModel) {
        uiModel.addAttribute("createDatasetModel", new CreateDatasetModel());
        return "datasets/create";
    }

    /**
     * Creates a dataset together with its first metadata record: validates the
     * form, checks the datacentre quota, validates both entities, then mints
     * (or, if minting fails, updates) the DOI before persisting.
     */
    @RequestMapping(method = RequestMethod.POST)
    public String create(@Valid CreateDatasetModel createDatasetModel, BindingResult result, Model model) {
        Dataset dataset = new Dataset();
        Metadata metadata = new Metadata();
        dataset.setDoi(createDatasetModel.getDoi());
        dataset.setDatacentre(createDatasetModel.getDatacentre());
        dataset.setUrl(createDatasetModel.getUrl());
        metadata.setDataset(dataset);

        if (!result.hasErrors()) {
            try {
                byte[] xml = createDatasetModel.getXml();
                if (xml.length > 0)
                    metadata.setXml(createDatasetModel.getXml());
                else
                    throw new ValidationException("may not be empty");
            } catch (ValidationException e) {
                result.addError(new FieldError("", "xml", e.getMessage()));
            }
        }

        if (!result.hasErrors()) {
            try {
                SecurityUtils.checkQuota(dataset.getDatacentre());
            } catch (SecurityException e) {
                ObjectError error = new ObjectError("", e.getMessage());
                result.addError(error);
            }
        }

        if (!result.hasErrors()) {
            try {
                validationHelper.validate(dataset);
                validationHelper.validate(metadata);
            } catch (ConstraintViolationException ex) {
                for (ConstraintViolation<?> v : ex.getConstraintViolations()) {
                    FieldError error = new FieldError("", v.getPropertyPath().toString(), v.getMessage());
                    result.addError(error);
                }
            }
        }

        if (!StringUtils.isEmpty(dataset.getUrl()) && !result.hasErrors()) {
            try {
                handleService.create(dataset.getDoi(), dataset.getUrl());
                dataset.setMinted(new Date());
                log.info(dataset.getDatacentre().getSymbol() + " successfuly minted (via UI) " + dataset.getDoi());
            } catch (HandleException e) {
                log.debug("minting DOI failed; try to update it");
                try {
                    handleService.update(dataset.getDoi(), dataset.getUrl());
                    log.info(dataset.getDatacentre().getSymbol() + " successfuly updated (via UI) " + dataset.getDoi());
                } catch (HandleException ee) {
                    String message = "HandleService: " + ee.getMessage();
                    FieldError error = new FieldError("", "doi", dataset.getDoi(), false, null, null, message);
                    result.addError(error);
                }
            }
        }

        if (result.hasErrors()) {
            model.addAttribute("createDatasetModel", createDatasetModel);
            return "datasets/create";
        }
        dataset.persist();
        dataset.getDatacentre().incQuotaUsed(Datacentre.ForceRefresh.YES);
        metadata.persist();
        model.asMap().clear();
        return "redirect:/datasets/" + dataset.getId().toString();
    }

    /**
     * Updates a dataset's URL: first tries to update the handle; if that
     * fails, falls back to minting it.
     */
    @RequestMapping(method = RequestMethod.PUT)
    public String update(@Valid Dataset dataset, BindingResult result, Model model) {
        // FIX: use the null-safe check (consistent with create()); the previous
        // dataset.getUrl().isEmpty() threw a NullPointerException when the URL
        // field was absent from the form submission.
        if (!StringUtils.isEmpty(dataset.getUrl()) && !result.hasErrors()) {
            try {
                handleService.update(dataset.getDoi(), dataset.getUrl());
                log.info(dataset.getDatacentre().getSymbol() + " successfuly updated (via UI) " + dataset.getDoi());
            } catch (HandleException e) {
                log.debug("updating DOI failed; try to mint it");
                try {
                    handleService.create(dataset.getDoi(), dataset.getUrl());
                    dataset.setMinted(new Date());
                    log.info(dataset.getDatacentre().getSymbol() + " successfuly minted (via UI) " + dataset.getDoi());
                } catch (HandleException e1) {
                    // FIX: report the minting failure (e1); the original code
                    // reported the earlier update failure (e) instead, hiding
                    // the actual cause from the user.
                    ObjectError error = new ObjectError("", "HandleService: " + e1.getMessage());
                    result.addError(error);
                }
            }
        }
        if (result.hasErrors()) {
            model.addAttribute("dataset", dataset);
            return "datasets/update";
        }
        dataset.merge();
        model.asMap().clear();
        return "redirect:/datasets/" + dataset.getId().toString();
    }

    /** Redirects a DOI search to the dataset's show page when it exists. */
    @RequestMapping(params = "find=ByDoiEquals", method = RequestMethod.GET)
    public String findDatasetsByDoiEquals(@RequestParam("doi") String doi, Model model) {
        Dataset dataset = Dataset.findDatasetByDoi(doi);
        model.asMap().clear();
        return (dataset == null) ? "datasets/show" : "redirect:/datasets/" + dataset.getId();
    }

    @RequestMapping(value = "/{id}", params = "form", method = RequestMethod.GET)
    public String updateForm(@PathVariable("id") Long id, Model model) {
        Dataset dataset = Dataset.findDataset(id);
        model.addAttribute("dataset", dataset);
        model.addAttribute("resolvedUrl", resolveDoi(dataset));
        return "datasets/update";
    }
}
package org.xwiki.test.rest;

import java.util.Locale;
import java.util.UUID;

import javax.ws.rs.core.MediaType;

import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.NameValuePair;
import org.apache.commons.httpclient.methods.DeleteMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.PutMethod;
import org.junit.Assert;
import org.junit.Test;
import org.xwiki.rest.Relations;
import org.xwiki.rest.model.jaxb.History;
import org.xwiki.rest.model.jaxb.HistorySummary;
import org.xwiki.rest.model.jaxb.Link;
import org.xwiki.rest.model.jaxb.Page;
import org.xwiki.rest.model.jaxb.PageSummary;
import org.xwiki.rest.model.jaxb.Pages;
import org.xwiki.rest.model.jaxb.Space;
import org.xwiki.rest.model.jaxb.Spaces;
import org.xwiki.rest.model.jaxb.Syntaxes;
import org.xwiki.rest.model.jaxb.Translation;
import org.xwiki.rest.model.jaxb.Wiki;
import org.xwiki.rest.model.jaxb.Wikis;
import org.xwiki.rest.resources.SyntaxesResource;
import org.xwiki.rest.resources.pages.PageChildrenResource;
import org.xwiki.rest.resources.pages.PageHistoryResource;
import org.xwiki.rest.resources.pages.PageResource;
import org.xwiki.rest.resources.pages.PageTranslationResource;
import org.xwiki.rest.resources.wikis.WikisResource;
import org.xwiki.test.rest.framework.AbstractHttpTest;
import org.xwiki.test.rest.framework.TestConstants;

/**
 * Functional tests for the REST page resources: GET/PUT/POST/DELETE of pages,
 * translations, history and children. Each test talks to a live wiki instance
 * through the HTTP helpers inherited from {@link AbstractHttpTest}.
 */
public class PageResourceTest extends AbstractHttpTest
{
    /**
     * Navigates the HATEOAS link chain wikis -> spaces -> pages to fetch the
     * Main.WebHome page, asserting each hop returns 200 OK.
     *
     * @return the unmarshalled Main.WebHome page
     */
    private Page getFirstPage() throws Exception
    {
        GetMethod getMethod = executeGet(getFullUri(WikisResource.class));
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        Wikis wikis = (Wikis) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());
        Assert.assertTrue(wikis.getWikis().size() > 0);

        Wiki wiki = wikis.getWikis().get(0);

        // Follow the SPACES relation of the first wiki.
        Link spacesLink = getFirstLinkByRelation(wiki, Relations.SPACES);
        Assert.assertNotNull(spacesLink);
        getMethod = executeGet(spacesLink.getHref());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        Spaces spaces = (Spaces) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());
        Assert.assertTrue(spaces.getSpaces().size() > 0);

        // Locate the "Main" space.
        Space space = null;
        for (final Space s : spaces.getSpaces()) {
            if ("Main".equals(s.getName())) {
                space = s;
                break;
            }
        }

        // get the pages list for the space
        Link pagesInSpace = getFirstLinkByRelation(space, Relations.PAGES);
        Assert.assertNotNull(pagesInSpace);
        getMethod = executeGet(pagesInSpace.getHref());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        Pages pages = (Pages) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());
        Assert.assertTrue(pages.getPageSummaries().size() > 0);

        // Find WebHome and follow its PAGE relation.
        Link pageLink = null;
        for (final PageSummary ps : pages.getPageSummaries()) {
            if ("WebHome".equals(ps.getName())) {
                pageLink = getFirstLinkByRelation(ps, Relations.PAGE);
                Assert.assertNotNull(pageLink);
                break;
            }
        }
        Assert.assertNotNull(pageLink);

        getMethod = executeGet(pageLink.getHref());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        Page page = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

        return page;
    }

    /** Checks the page representation exposes a SELF link and that all links are valid. */
    @Override
    @Test
    public void testRepresentation() throws Exception
    {
        Page page = getFirstPage();

        Link link = getFirstLinkByRelation(page, Relations.SELF);
        Assert.assertNotNull(link);

        checkLinks(page);
    }

    /** A GET on a non-existing page must return 404. */
    @Test
    public void testGETNotExistingPage() throws Exception
    {
        GetMethod getMethod =
            executeGet(getUriBuilder(PageResource.class).build(getWiki(), "NOTEXISTING", "NOTEXISTING").toString());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_NOT_FOUND, getMethod.getStatusCode());
    }

    /** PUT of an XML page representation updates title and content (202 Accepted). */
    @Test
    public void testPUTPage() throws Exception
    {
        final String CONTENT = String.format("This is a content (%d)", System.currentTimeMillis());
        final String TITLE = String.format("Title (%s)", UUID.randomUUID().toString());

        Page originalPage = getFirstPage();

        Page newPage = objectFactory.createPage();
        newPage.setContent(CONTENT);
        newPage.setTitle(TITLE);

        Link link = getFirstLinkByRelation(originalPage, Relations.SELF);
        Assert.assertNotNull(link);

        PutMethod putMethod = executePutXml(link.getHref(), newPage, "Admin", "admin");
        Assert.assertEquals(getHttpMethodInfo(putMethod), HttpStatus.SC_ACCEPTED, putMethod.getStatusCode());

        Page modifiedPage = (Page) unmarshaller.unmarshal(putMethod.getResponseBodyAsStream());

        Assert.assertEquals(CONTENT, modifiedPage.getContent());
        Assert.assertEquals(TITLE, modifiedPage.getTitle());
    }

    /** A text/plain PUT replaces only the page content. */
    @Test
    public void testPUTPageWithTextPlain() throws Exception
    {
        final String CONTENT = String.format("This is a content (%d)", System.currentTimeMillis());

        Page originalPage = getFirstPage();

        Link link = getFirstLinkByRelation(originalPage, Relations.SELF);
        Assert.assertNotNull(link);

        PutMethod putMethod = executePut(link.getHref(), CONTENT, MediaType.TEXT_PLAIN, "Admin", "admin");
        Assert.assertEquals(getHttpMethodInfo(putMethod), HttpStatus.SC_ACCEPTED, putMethod.getStatusCode());

        Page modifiedPage = (Page) unmarshaller.unmarshal(putMethod.getResponseBodyAsStream());

        Assert.assertEquals(CONTENT, modifiedPage.getContent());
    }

    /** An unauthenticated PUT must be rejected with 401. */
    @Test
    public void testPUTPageUnauthorized() throws Exception
    {
        Page page = getFirstPage();
        page.setContent("New content");

        Link link = getFirstLinkByRelation(page, Relations.SELF);
        Assert.assertNotNull(link);

        PutMethod putMethod = executePutXml(link.getHref(), page);
        Assert.assertEquals(getHttpMethodInfo(putMethod), HttpStatus.SC_UNAUTHORIZED, putMethod.getStatusCode());
    }

    /** A PUT on a non-existing page creates it (201) with content, title and parent. */
    @Test
    public void testPUTNonExistingPage() throws Exception
    {
        final String SPACE_NAME = "Test";
        final String PAGE_NAME = String.format("Test-%d", System.currentTimeMillis());
        final String CONTENT = String.format("Content %d", System.currentTimeMillis());
        final String TITLE = String.format("Title %d", System.currentTimeMillis());
        final String PARENT = "Main.WebHome";

        Page page = objectFactory.createPage();
        page.setContent(CONTENT);
        page.setTitle(TITLE);
        page.setParent(PARENT);

        PutMethod putMethod =
            executePutXml(getUriBuilder(PageResource.class).build(getWiki(), SPACE_NAME, PAGE_NAME).toString(), page,
                "Admin", "admin");
        Assert.assertEquals(getHttpMethodInfo(putMethod), HttpStatus.SC_CREATED, putMethod.getStatusCode());

        Page modifiedPage = (Page) unmarshaller.unmarshal(putMethod.getResponseBodyAsStream());

        Assert.assertEquals(CONTENT, modifiedPage.getContent());
        Assert.assertEquals(TITLE, modifiedPage.getTitle());
        Assert.assertEquals(PARENT, modifiedPage.getParent());
    }

    /** A PUT whose body is not a valid page representation must return 400. */
    @Test
    public void testPUTWithInvalidRepresentation() throws Exception
    {
        Page page = getFirstPage();
        Link link = getFirstLinkByRelation(page, Relations.SELF);

        PutMethod putMethod =
            executePut(link.getHref(),
                "<?xml version=\"1.0\" encoding=\"UTF-8\"?><invalidPage><content/></invalidPage>", MediaType.TEXT_XML);
        Assert.assertEquals(getHttpMethodInfo(putMethod), HttpStatus.SC_BAD_REQUEST, putMethod.getStatusCode());
    }

    /**
     * Creates the given page through the REST API if it does not exist yet.
     *
     * @return true when the page had to be created, false when it already existed
     */
    private boolean createPageIfDoesntExist(String spaceName, String pageName, String content) throws Exception
    {
        String uri = getUriBuilder(PageResource.class).build(getWiki(), spaceName, pageName).toString();

        GetMethod getMethod = executeGet(uri);

        if (getMethod.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
            Page page = objectFactory.createPage();
            page.setContent(content);

            PutMethod putMethod = executePutXml(uri, page, "Admin", "admin");
            Assert.assertEquals(getHttpMethodInfo(putMethod), HttpStatus.SC_CREATED, putMethod.getStatusCode());

            getMethod = executeGet(uri);
            Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

            return true;
        }

        return false;
    }

    /** PUT a translation in a random ISO language, then GET it back and walk its translation links. */
    @Test
    public void testPUTGETTranslation() throws Exception
    {
        createPageIfDoesntExist(TestConstants.TEST_SPACE_NAME, TestConstants.TRANSLATIONS_PAGE_NAME, "Translations");

        // PUT
        String[] languages = Locale.getISOLanguages();
        final String languageId = languages[random.nextInt(languages.length)];

        Page page = objectFactory.createPage();
        page.setContent(languageId);

        PutMethod putMethod =
            executePutXml(
                getUriBuilder(PageTranslationResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME,
                    TestConstants.TRANSLATIONS_PAGE_NAME, languageId).toString(), page, "Admin", "admin");
        Assert.assertEquals(getHttpMethodInfo(putMethod), HttpStatus.SC_CREATED, putMethod.getStatusCode());

        // GET
        GetMethod getMethod =
            executeGet(getUriBuilder(PageTranslationResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME,
                TestConstants.TRANSLATIONS_PAGE_NAME, languageId).toString());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        Page modifiedPage = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

        // FIX: this assertion was duplicated in the original; once is enough.
        Assert.assertEquals(languageId, modifiedPage.getLanguage());

        Assert.assertTrue(modifiedPage.getTranslations().getTranslations().size() > 0);

        for (Translation translation : modifiedPage.getTranslations().getTranslations()) {
            getMethod = executeGet(getFirstLinkByRelation(translation, Relations.PAGE).getHref());
            Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

            modifiedPage = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

            Assert.assertEquals(modifiedPage.getLanguage(), translation.getLanguage());

            checkLinks(translation);
        }
    }

    /** The default page exists (200) but a bogus translation id yields 404. */
    @Test
    public void testGETNotExistingTranslation() throws Exception
    {
        createPageIfDoesntExist(TestConstants.TEST_SPACE_NAME, TestConstants.TRANSLATIONS_PAGE_NAME, "Translations");

        GetMethod getMethod =
            executeGet(getUriBuilder(PageResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME,
                TestConstants.TRANSLATIONS_PAGE_NAME).toString());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        getMethod =
            executeGet(getUriBuilder(PageTranslationResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME,
                TestConstants.TRANSLATIONS_PAGE_NAME, "NOT_EXISTING").toString());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_NOT_FOUND, getMethod.getStatusCode());
    }

    /** An authenticated DELETE removes the page (204) and a subsequent GET returns 404. */
    @Test
    public void testDELETEPage() throws Exception
    {
        final String pageName = String.format("Test-%d", random.nextLong());

        createPageIfDoesntExist(TestConstants.TEST_SPACE_NAME, pageName, "Test page");

        DeleteMethod deleteMethod =
            executeDelete(getUriBuilder(PageResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME, pageName)
                .toString(), "Admin", "admin");
        Assert.assertEquals(getHttpMethodInfo(deleteMethod), HttpStatus.SC_NO_CONTENT, deleteMethod.getStatusCode());

        GetMethod getMethod =
            executeGet(getUriBuilder(PageResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME, pageName)
                .toString());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_NOT_FOUND, getMethod.getStatusCode());
    }

    /** An unauthenticated DELETE is rejected (401) and the page remains accessible. */
    @Test
    public void testDELETEPageNoRights() throws Exception
    {
        final String pageName = String.format("Test-%d", random.nextLong());

        createPageIfDoesntExist(TestConstants.TEST_SPACE_NAME, pageName, "Test page");

        DeleteMethod deleteMethod =
            executeDelete(getUriBuilder(PageResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME, pageName)
                .toString());
        Assert.assertEquals(getHttpMethodInfo(deleteMethod), HttpStatus.SC_UNAUTHORIZED, deleteMethod.getStatusCode());

        GetMethod getMethod =
            executeGet(getUriBuilder(PageResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME, pageName)
                .toString());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());
    }

    /** Every history entry of Main.WebHome must link back to a retrievable page version. */
    @Test
    public void testPageHistory() throws Exception
    {
        GetMethod getMethod =
            executeGet(getUriBuilder(PageResource.class).build(getWiki(), "Main", "WebHome").toString());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        Page originalPage = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

        String pageHistoryUri =
            getUriBuilder(PageHistoryResource.class).build(getWiki(), originalPage.getSpace(), originalPage.getName())
                .toString();

        getMethod = executeGet(pageHistoryUri);
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        History history = (History) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

        for (HistorySummary historySummary : history.getHistorySummaries()) {
            getMethod = executeGet(getFirstLinkByRelation(historySummary, Relations.PAGE).getHref());
            Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

            Page page = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

            checkLinks(page);

            for (Translation translation : page.getTranslations().getTranslations()) {
                checkLinks(translation);
            }
        }
    }

    /** Same as testPageHistory but for the translations test page. */
    @Test
    public void testPageTranslationHistory() throws Exception
    {
        String pageHistoryUri =
            getUriBuilder(PageHistoryResource.class).build(getWiki(), TestConstants.TEST_SPACE_NAME,
                TestConstants.TRANSLATIONS_PAGE_NAME).toString();

        GetMethod getMethod = executeGet(pageHistoryUri);
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        History history = (History) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

        for (HistorySummary historySummary : history.getHistorySummaries()) {
            getMethod = executeGet(getFirstLinkByRelation(historySummary, Relations.PAGE).getHref());
            Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

            Page page = (Page) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

            checkLinks(page);
            checkLinks(page.getTranslations());
        }
    }

    /** The children list of Main.WebHome is non-empty and every child has valid links. */
    @Test
    public void testGETPageChildren() throws Exception
    {
        GetMethod getMethod =
            executeGet(getUriBuilder(PageChildrenResource.class).build(getWiki(), "Main", "WebHome").toString());
        Assert.assertEquals(getHttpMethodInfo(getMethod), HttpStatus.SC_OK, getMethod.getStatusCode());

        Pages pages = (Pages) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());
        Assert.assertTrue(pages.getPageSummaries().size() > 0);

        for (PageSummary pageSummary : pages.getPageSummaries()) {
            checkLinks(pageSummary);
        }
    }

    /** A form-url-encoded POST with method=PUT override updates title and content. */
    @Test
    public void testPOSTPageFormUrlEncoded() throws Exception
    {
        final String CONTENT = String.format("This is a content (%d)", System.currentTimeMillis());
        final String TITLE = String.format("Title (%s)", UUID.randomUUID().toString());

        Page originalPage = getFirstPage();

        Link link = getFirstLinkByRelation(originalPage, Relations.SELF);
        Assert.assertNotNull(link);

        NameValuePair[] nameValuePairs = new NameValuePair[2];
        nameValuePairs[0] = new NameValuePair("title", TITLE);
        nameValuePairs[1] = new NameValuePair("content", CONTENT);

        PostMethod postMethod =
            executePostForm(String.format("%s?method=PUT", link.getHref()), nameValuePairs, "Admin", "admin");
        Assert.assertEquals(getHttpMethodInfo(postMethod), HttpStatus.SC_ACCEPTED, postMethod.getStatusCode());

        Page modifiedPage = (Page) unmarshaller.unmarshal(postMethod.getResponseBodyAsStream());

        Assert.assertEquals(CONTENT, modifiedPage.getContent());
        Assert.assertEquals(TITLE, modifiedPage.getTitle());
    }

    /** Switching the page syntax to another available syntax is persisted. */
    @Test
    public void testPUTPageSyntax() throws Exception
    {
        Page originalPage = getFirstPage();

        GetMethod getMethod = executeGet(getFullUri(SyntaxesResource.class));
        Syntaxes syntaxes = (Syntaxes) unmarshaller.unmarshal(getMethod.getResponseBodyAsStream());

        // Pick any syntax different from the page's current one.
        // NOTE(review): stays null if only one syntax is configured — assumes at least two.
        String newSyntax = null;
        for (String syntax : syntaxes.getSyntaxes()) {
            if (!syntax.equals(originalPage.getSyntax())) {
                newSyntax = syntax;
                break;
            }
        }

        originalPage.setSyntax(newSyntax);

        Link link = getFirstLinkByRelation(originalPage, Relations.SELF);
        Assert.assertNotNull(link);

        PutMethod putMethod = executePutXml(link.getHref(), originalPage, "Admin", "admin");
        Assert.assertEquals(getHttpMethodInfo(putMethod), HttpStatus.SC_ACCEPTED, putMethod.getStatusCode());

        Page modifiedPage = (Page) unmarshaller.unmarshal(putMethod.getResponseBodyAsStream());

        Assert.assertEquals(newSyntax, modifiedPage.getSyntax());
    }
}
package org.openbmp.db_rest.helpers;

import java.io.IOException;

import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.HttpMethod;

/**
 * Servlet filter enforcing HTTP Basic authentication on all REST endpoints
 * except the login endpoint, and answering CORS preflight (OPTIONS) requests
 * directly without invoking the rest of the filter chain.
 */
public class RestAuthenticationFilter implements javax.servlet.Filter {
    /** Name of the request header carrying the Basic credentials. */
    public static final String AUTHENTICATION_HEADER = "Authorization";

    /**
     * OPTIONS requests get CORS headers and a 200 without touching the chain.
     * Every other request must either target the login path or carry valid
     * credentials; otherwise a 401 with a WWW-Authenticate challenge is sent.
     */
    public void doFilter(ServletRequest request, ServletResponse response,
            FilterChain filter) throws IOException, ServletException {
        if (request instanceof HttpServletRequest) {
            if (((HttpServletRequest) request).getMethod().equals(HttpMethod.OPTIONS.toString())) {
                // CORS preflight: reply immediately, chain is intentionally not continued.
                if (response instanceof HttpServletResponse) {
                    HttpServletResponse httpServletResponse = (HttpServletResponse) response;
                    httpServletResponse.addHeader("Access-Control-Allow-Credentials", "true");
                    httpServletResponse.addHeader("Access-Control-Allow-Headers", "Accept, Authorization");
                    httpServletResponse.addHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, HEAD");
                    httpServletResponse.addHeader("Access-Control-Allow-Origin", "*");
                    httpServletResponse.setStatus(200);
                }
            } else {
                String path = ((HttpServletRequest) request).getPathInfo();

                boolean authenticationStatus;

                // FIX: getPathInfo() may return null (Servlet spec) — the original
                // path.contains(...) would then throw NPE. A null path is treated
                // like any non-login path: credentials are required.
                if (path == null || !path.contains("/auth/login")) {
                    HttpServletRequest httpServletRequest = (HttpServletRequest) request;
                    String authCredentials = httpServletRequest
                            .getHeader(AUTHENTICATION_HEADER);

                    // better injected
                    AuthenticationService authenticationService = new AuthenticationService();

                    authenticationStatus = authenticationService
                            .authenticate(authCredentials);
                } else {
                    // Login endpoint is reachable without prior authentication.
                    authenticationStatus = true;
                }

                if (!authenticationStatus) {
                    if (response instanceof HttpServletResponse) {
                        HttpServletResponse httpServletResponse = (HttpServletResponse) response;
                        httpServletResponse
                                .setStatus(HttpServletResponse.SC_UNAUTHORIZED);

                        // Allow the browser to prompt for user/pass using basic auth
                        httpServletResponse.setHeader("WWW-Authenticate", "Basic");
                    }
                } else {
                    filter.doFilter(request, response);
                }
            }
        }
    }

    public void destroy() {
    }

    public void init(FilterConfig arg0) throws ServletException {
    }
}
package org.radarcns.management.config;

import io.github.jhipster.security.AjaxLogoutSuccessHandler;
import io.github.jhipster.security.Http401UnauthorizedEntryPoint;
import org.radarcns.management.security.JwtAuthenticationFilter;
import org.springframework.beans.factory.BeanInitializationException;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpMethod;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.data.repository.query.SecurityEvaluationContextExtension;

import javax.annotation.PostConstruct;
import javax.servlet.Filter;

/**
 * Spring Security configuration: wires the user-details service into the
 * authentication manager and exposes the security-related beans.
 * Method-level security (@PreAuthorize/@Secured) is enabled class-wide.
 */
@Configuration
@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true, securedEnabled = true)
public class SecurityConfiguration extends WebSecurityConfigurerAdapter {

    // Both dependencies are constructor-injected by Spring.
    private final AuthenticationManagerBuilder authenticationManagerBuilder;

    private final UserDetailsService userDetailsService;

    public SecurityConfiguration(AuthenticationManagerBuilder authenticationManagerBuilder,
            UserDetailsService userDetailsService) {
        this.authenticationManagerBuilder = authenticationManagerBuilder;
        this.userDetailsService = userDetailsService;
    }

    /**
     * Registers the user-details service and BCrypt encoder with the
     * authentication manager once dependency injection has completed.
     * Any failure is rethrown as a fatal bean-initialization error.
     */
    @PostConstruct
    public void init() {
        try {
            authenticationManagerBuilder
                .userDetailsService(userDetailsService)
                .passwordEncoder(passwordEncoder());
        } catch (Exception e) {
            throw new BeanInitializationException("Security configuration failed", e);
        }
    }

    // JHipster handler returning 200 on logout instead of a redirect.
    @Bean
    public AjaxLogoutSuccessHandler ajaxLogoutSuccessHandler() {
        return new AjaxLogoutSuccessHandler();
    }

    // Entry point replying 401 (not a login-page redirect) for unauthenticated requests.
    @Bean
    public Http401UnauthorizedEntryPoint http401UnauthorizedEntryPoint() {
        return new Http401UnauthorizedEntryPoint();
    }

    // BCrypt with default strength; used by init() above for credential checks.
    @Bean
    public PasswordEncoder passwordEncoder() {
        return new BCryptPasswordEncoder();
    }

    // Static assets are excluded from the security filter chain.
    // NOTE(review): method continues beyond this chunk — remainder not visible here.
    @Override
    public void configure(WebSecurity web) throws Exception {
        web.ignoring()
            .antMatchers("/app*.{js,html}")
package org.starlambdawars.collectors;

import org.starlambdawars.beans.ForceAlignment;
import org.starlambdawars.beans.MovieType;
import org.starlambdawars.beans.StarWarsCharacter;
import org.starlambdawars.beans.StarWarsMovie;
import org.starlambdawars.mapper.StarWarsMovieMapper;
import org.starlambdawars.utils.DataLoader;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Groups Star Wars movies and characters using stream collectors.
 * Loads the movie list once at construction time via {@link DataLoader}.
 */
public class StarWarsMovieCollector {

    // Fields are assigned exactly once in the constructor, so they are final;
    // declared up front per convention (originally trailed the methods).
    private final DataLoader loader;
    private final List<StarWarsMovie> movies;
    private final StarWarsMovieMapper mapper;

    /**
     * Loads all movies from the backing data source.
     *
     * @throws IOException if the movie data cannot be read
     */
    public StarWarsMovieCollector() throws IOException {
        loader = new DataLoader();
        movies = loader.loadMovies();
        mapper = new StarWarsMovieMapper();
    }

    // NOTE(review): work-in-progress grouping methods kept below as in the
    // original; they reference the private helpers that follow.
    // public Map<ForceAlignment, List<StarWarsCharacter>> mapForceByCharacters() {
    //     List<StarWarsCharacter> characters = mapper.allCharacters();
    //     return characters
    //         .stream()
    //         .map(a -> a.getForceAlignment())
    //         .collect(
    //             Collectors.toMap(
    //                 findByForceAlignment(a, characters)

    // public Map<MovieType, List<StarWarsMovie>> mapTypeByMovies() {
    //     return movies
    //         .stream()
    //         .map(k -> k.getKind())
    //         .collect(
    //             Collectors.toMap(
    //                 Function.identity(),
    //                 findByKind(k, movies)

    /**
     * Returns the characters whose force alignment equals the given one.
     *
     * @param alignment  alignment to match
     * @param characters candidate characters
     * @return matching characters, possibly empty
     */
    private List<StarWarsCharacter> findByForceAlignment(ForceAlignment alignment,
            List<StarWarsCharacter> characters) {
        return characters
            .stream()
            .filter(c -> c.getForceAlignment().equals(alignment))
            .collect(Collectors.toList());
    }

    /**
     * Returns the titles of the movies of the given kind.
     *
     * @param kind   movie type to match
     * @param movies candidate movies
     * @return matching titles, possibly empty
     */
    private List<String> findByKind(MovieType kind, List<StarWarsMovie> movies) {
        return movies
            .stream()
            .filter(m -> m.getKind().equals(kind))
            .map(StarWarsMovie::getTitle)
            .collect(Collectors.toList());
    }
}