001/*
002 * Copyright (c) 2003-2010 The Regents of the University of California.
003 * All rights reserved.
004 *
005 * '$Author: crawl $'
006 * '$Date: 2015-10-28 21:08:39 +0000 (Wed, 28 Oct 2015) $' 
007 * '$Revision: 34137 $'
008 * 
009 * Permission is hereby granted, without written agreement and without
010 * license or royalty fees, to use, copy, modify, and distribute this
011 * software and its documentation for any purpose, provided that the above
012 * copyright notice and the following two paragraphs appear in all copies
013 * of this software.
014 *
015 * IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY
016 * FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
017 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF
018 * THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF
019 * SUCH DAMAGE.
020 *
021 * THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES,
022 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
023 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE
024 * PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE UNIVERSITY OF
025 * CALIFORNIA HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
026 * ENHANCEMENTS, OR MODIFICATIONS.
027 *
028 */
029
030package org.ecoinformatics.seek.datasource.eml.eml2;
031
032import java.awt.BorderLayout;
033import java.io.File;
034import java.io.FileInputStream;
035import java.io.InputStream;
036import java.io.InputStreamReader;
037import java.io.Reader;
038import java.net.URL;
039import java.sql.ResultSet;
040import java.sql.ResultSetMetaData;
041import java.sql.SQLException;
042import java.util.Collection;
043import java.util.Enumeration;
044import java.util.Hashtable;
045import java.util.Iterator;
046import java.util.List;
047import java.util.Vector;
048
049import javax.swing.JFrame;
050import javax.swing.JPanel;
051import javax.swing.JScrollPane;
052import javax.swing.JTable;
053import javax.swing.JTextArea;
054import javax.swing.table.TableModel;
055
056import org.apache.commons.logging.Log;
057import org.apache.commons.logging.LogFactory;
058import org.ecoinformatics.ecogrid.client.IdentifierServiceClient;
059import org.ecoinformatics.seek.dataquery.DBTableNameResolver;
060import org.ecoinformatics.seek.dataquery.DBTablesGenerator;
061import org.ecoinformatics.seek.dataquery.HsqlDataQueryAction;
062import org.ecoinformatics.seek.datasource.DataSourceIcon;
063import org.ecoinformatics.seek.datasource.EcogridDataCacheItem;
064import org.ecoinformatics.seek.datasource.EcogridGZippedDataCacheItem;
065import org.ecoinformatics.seek.datasource.EcogridMetaDataCacheItem;
066import org.ecoinformatics.seek.datasource.EcogridTarArchivedDataCacheItem;
067import org.ecoinformatics.seek.datasource.EcogridZippedDataCacheItem;
068import org.ecoinformatics.seek.ecogrid.EcoGridService;
069import org.ecoinformatics.seek.ecogrid.EcoGridServicesController;
070import org.ecoinformatics.seek.ecogrid.quicksearch.ResultRecord;
071import org.ecoinformatics.seek.ecogrid.quicksearch.ResultTreeRoot;
072import org.ecoinformatics.seek.querybuilder.DBQueryDef;
073import org.ecoinformatics.seek.querybuilder.DBQueryDefParserEmitter;
074import org.ecoinformatics.seek.querybuilder.DBSchemaParserEmitter;
075import org.kepler.actor.preview.Previewable;
076import org.kepler.objectmanager.ActorMetadata;
077import org.kepler.objectmanager.cache.ActorCacheObject;
078import org.kepler.objectmanager.cache.CacheManager;
079import org.kepler.objectmanager.cache.DataCacheListener;
080import org.kepler.objectmanager.cache.DataCacheManager;
081import org.kepler.objectmanager.cache.DataCacheObject;
082import org.kepler.objectmanager.data.DataSourceControllerFactory;
083import org.kepler.objectmanager.data.DataType;
084import org.kepler.objectmanager.data.db.DSSchemaDef;
085import org.kepler.objectmanager.data.db.DSSchemaIFace;
086import org.kepler.objectmanager.data.db.DSTableFieldIFace;
087import org.kepler.objectmanager.data.db.Entity;
088import org.kepler.objectmanager.data.db.QBTableauFactory;
089import org.kepler.objectmanager.data.text.TextComplexFormatDataReader;
090import org.kepler.objectmanager.lsid.KeplerLSID;
091import org.kepler.util.DelimitedReader;
092import org.kepler.util.DotKeplerManager;
093import org.xml.sax.InputSource;
094
095import EDU.oswego.cs.dl.util.concurrent.CountDown;
096import EDU.oswego.cs.dl.util.concurrent.Latch;
097import ptolemy.actor.TypedIOPort;
098import ptolemy.actor.gui.style.TextStyle;
099import ptolemy.data.BooleanToken;
100import ptolemy.data.DoubleToken;
101import ptolemy.data.IntToken;
102import ptolemy.data.LongToken;
103import ptolemy.data.StringToken;
104import ptolemy.data.Token;
105import ptolemy.data.expr.FileParameter;
106import ptolemy.data.expr.Parameter;
107import ptolemy.data.expr.StringParameter;
108import ptolemy.data.type.BaseType;
109import ptolemy.data.type.Type;
110import ptolemy.kernel.CompositeEntity;
111import ptolemy.kernel.util.ChangeRequest;
112import ptolemy.kernel.util.IllegalActionException;
113import ptolemy.kernel.util.InternalErrorException;
114import ptolemy.kernel.util.NameDuplicationException;
115import ptolemy.kernel.util.NamedObj;
116import ptolemy.kernel.util.Settable;
117import ptolemy.kernel.util.StringAttribute;
118import ptolemy.kernel.util.Workspace;
119import ptolemy.moml.MoMLChangeRequest;
120import ptolemy.util.CancelException;
121import ptolemy.util.MessageHandler;
122import ptolemy.vergil.basic.KeplerDocumentationAttribute;
123import util.PersistentTableModel;
124import util.PersistentTableModelWindowListener;
125import util.PersistentVector;
126import util.StaticUtil;
127import util.TableSorter;
128
129/**
130 * <p>
131 * The Eml200DataSource is used to gain access to a wide variety of data
132 * packages that have been described using Ecological Metadata Language (EML).
133 * Each data package contains an EML metadata description and one or more data
134 * entities (data tables, spatial raster images, spatial vector images). The
135 * data packages can be accessed from the local filesystem or through any
136 * EcoGrid server that provides access to its collection of data objects.
137 * </p>
138 * <p>
139 * The metadata provided by the EML description of the data allows the data to
140 * be easily ingested into Kepler and exposed for use in downstream components.
141 * The Eml200DataSource handles all of the mechanical issues associated with
142 * parsing the metadata, downloading the data from remote servers if applicable,
143 * understanding the logical structure of the data, and emitting the data for
144 * downstream use when required. The supported data transfer protocols include
145 * http, ftp, file, ecogrid and srb.
146 * </p>
147 * <p>
148 * After parsing the EML metadata, the actor automatically reconfigures its
149 * exposed ports to provide one port for each attribute in the first entity that
150 * is described in the EML description. For example, if the first entity is a
151 * data table with four columns, the ports might be "Site", "Date", "Plot", and
152 * "Rainfall" if that's what the data set contained. These details are obtained
153 * from the EML document.
154 * </p>
155 * <p>
156 * By default, the ports created by the EML200DataSource represent fields in the
157 * data entities, and one tuple of data is emitted on these ports during each
158 * fire cycle. Alternatively, the actor can be configured to so that the ports
159 * instead represent an array of values for a field ("AsColumnVector"), or so
160 * that the ports represent an entire table of data ("AsTable") formatted in
161 * comma-separated-value (CSV) format.
162 * </p>
163 * <p>
164 * If more than one data entity is described in the EML metadata, then the
165 * output of the actor defaults to the first entity listed in the EML. To select
166 * the other entities, one must provide a query statement that describes the
167 * filter and join that should be used to produce the data to be output. This is
168 * accomplished by selecting 'Open actor', which shows the Query configuration
169 * dialog, which can be used to select the columns to be output and any
170 * filtering constraints to be applied.
171 * </p>
172 * 
173 * @author Matt Jones, Jing Tao, Chad Berkley
174 * @since kepler-1.0.0
175 * @Pt.ProposedRating Red (jones)
176 * @Pt.AcceptedRating Red (jones)
177 */
178public class Eml200DataSource extends ResultRecord implements
179                DataCacheListener, Previewable {
180
181        /*
182         * Brief discussion of threads:
183         * 
184         * The actor startup will call attributeChanged multiple time to configure
185         * the object. When the recordId and endpoint attributes are set, the object
186         * will attempt to load the EcoGridMetaDataCacheItem. This operation will
187         * fork a thread named "MetaData XYZZY".
188         * 
189         * The MetaData thread is configured to use the inner class MetadataComplete
190         * listener when it completes. The thread, in this listener, initializes the
191         * CountDown _entityCountDown and kicks off threads to load each entity. The
192         * thread then waits using _entityCountDown.acquire() until all the data
193         * entity thread are complete.
194         * 
195         * Each data entity is loaded using a class derived from
196         * EcogridDataCacheItem. The specific type of class is determined from ...
197         */
198
199        // /////////////////////////////////////////////////////////////////
200        // // private variables ////
201        static final String DATATYPES[] = { DataType.INT, DataType.FLOAT,
202                        DataType.DOUBLE, DataType.LONG, DataType.STR };
203
204        static final BaseType BASETYPES[] = { BaseType.INT, BaseType.DOUBLE,
205                        BaseType.DOUBLE, BaseType.LONG, BaseType.STRING };
206
207        static private Hashtable _TypeHash = new Hashtable();
208
209        // for looking using the default documentation from the actor lib
210        private static KeplerDocumentationAttribute defaultDocumentation = null;
211
212        static Log log;
213        static {
214                log = LogFactory
215                                .getLog("org.ecoinformatics.seek.datasource.eml.eml2.Eml200DataSource");
216        }
217
218        private QBTableauFactory _qbTableauFactory = null;
219
220        private DataSourceControllerFactory _nodeController = null;
221
222        /**
223         * Output indicator parameter.
224         */
225        private Eml200DataOutputFormatBase _dataOutputFormat = null;
226
227        private Vector<Entity> _entityList = new Vector<Entity>();
228        private Entity _selectedTableEntity = null;
229
230        private Vector _columns = null;
231
232        private DBQueryDef _queryDef = null;
233
234        private DSSchemaIFace _schemaDef = null;
235        private DSSchemaDef _schema = new DSSchemaDef();
236
237        private boolean _ignoreSchemaChange = false;
238
239        private boolean _schemaDefinitionIsSet = false;
240
241        private DBTablesGenerator _tableGenerator = null;
242
243        private EcogridDataCacheItem _selectedCachedDataItem = null;
244
245        private EcogridMetaDataCacheItem _cacheMetaDataItem = null;
246
247        /**
248         * _metadataCompleted is a Latch object (from oswego) which is used to
249         * synchronize the start of the workflow. The Latch is released when the
250         * _cacheMetaDataItem has been completed. It is also used to notify a thread
251         * blocked in initialized() when the stop method is executed.
252         */
253        private Latch _metadataCompleted = new Latch();
254
255        private InputStream _reader = null;
256
257        private HsqlDataQueryAction _resultSet = null;
258
259        /*
260         * Indicates when there is no more data in the _resultSet to output.
261         */
262        private boolean _endOfResultSet = false;
263
264        private DelimitedReader _simpleDelimitedReader = null;
265
266        private TextComplexFormatDataReader _complexFormatReader = null;
267
268        private CountDown _entityCountDown = null;
269        private int _numberOfEntities = 0;
270
271        private int _numberOfFailedDownloadEntities = 0;
272
273        private boolean _hasSQLCommand = false;
274
275        private String[] _selectedColumnLabelList = null;
276
277        private Type[] _selectedColumnTypeList = null;
278
279        private static final int INDEXFORNOTFOUND = -1;
280
281        private static final int DEFAULTINDEX = 0;
282
283        private DataSourceIcon _icon;
284
285        private String emlFile = null;
286        
287        private String emlFileFinalPath = null;
288
289        private Vector failedDownloadEntityName = new Vector();
290
291        /**
292         * Tracks if user has been asked about newer version of eml before (Kind of
293         * a hack to get around mysterious multiple attributeChanged() calls)
294         */
295        private int checkVersionPromptCount = 0;
296
297        /**
298         * The default endpoint for EcoGrid messages, which is overridden in the
299         * configuration file.
300         */
301        private static final String ENDPOINT = "http://ecogrid.ecoinformatics.org/knb/services/QueryService";
302
303        private static final String HTMLEXTENSION = ".html";
304
305        public static final Settable.Visibility DEFAULTFORSQL = Settable.EXPERT;
306
307        public static final Settable.Visibility DEFAULTFORSCHEMA = Settable.EXPERT;
308
309        private static final boolean SHOWGUIERROR = true;
310
311        // private boolean isDoneParseEmlFile = false;
312
313        // /////////////////////////////////////////////////////////////////
314        // // ports and parameters ////
315
316        /**
317         * The file path for locating an EML file that is available from a local
318         * file.
319         */
320        public FileParameter emlFilePath = null;
321
322        /**
323         * The file path for locating a data file that is available from the local
324         * file system
325         */
326        public FileParameter dataFilePath = null;
327
328        /**
329         * The SQL command which will be applied to the data entity to filter data
330         * values. This is usually generated using the Query configuration dialog.
331         */
332        public StringAttribute sqlDef = null;
333
334        /**
335         * Schema definition for the entities in this package. The schema definition
336         * is obtained automatically by parsing the EMl document and does not need
337         * to be edited by the end user.
338         */
339        public StringAttribute schemaDef = null;
340
341        /**
342         * The format of the output to be produced for the data entity. This
343         * parameter controls which ports are created for the actor and what data is
344         * emitted on those ports during each fire cycle. For example, this field
345         * can be configured to produce one port for each column in a data table, or
346         * one port that emits the entire data table at once in CSV format.
347         * Specifically, the output format choices are:
348         * <p>
349         * As Field: This is the default. One output port is created for each field
350         * (aka column/attribute/variable) that is described in the EML metadata for
351         * the data entity. If the SQL statement has been used to subset the data,
352         * then only those fields selected in the SQL statement will be configured
353         * as ports.
354         * </p>
355         * <p>
356         * As Table: The selected entity will be sent out as a string which contains
357         * the entire entity data. It has three output ports: DataTable - the data
358         * itself, Delimiter - delimiter to seperate fields, and NumColumns - the
359         * number of fields in the table.
360         * </p>
361         * <p>
362         * As Row: In this output format, one tuple of selected data is formatted as
363         * an array and sent out. It only has one output port (DataRow) and the data
364         * type is a record containing each of the individuals fields.
365         * </p>
366         * <p>
367         * As Byte Array: Selected data will be sent out as an array of bytes which
368         * are read from the data file. This is the raw data being sent in binary
369         * format. It has two output ports: BinaryData - contains data itself, and
370         * EndOfStream - a tag to indicate if it is end of data stream.
371         * </p>
372         * <p>
373         * As UnCompressed File Name: This format is only used when the entity is a
374         * compressed file (zip, tar et al). The compressed archive file is
375         * uncompressed after it is downloaded. It has only one output port which
376         * will contain an array of the filenames of all of the uncompressed files
377         * from the archive. If the parameter "Target File Extension in Compressed
378         * File" is provided, then instead the array that is returned will only
379         * contain the files with the file extension provided.
380         * </p>
381         * <p>
382         * As Cache File Name: Kepler stores downloaded data files from remote sites
383         * into its cache system. This output format will send the local cache file
384         * path for the entity so that workflow designers can directly access the
385         * cache files. It has two output ports: CacheLocalFileName - the local file
386         * path, and CacheResourceName - the data link in eml for this enity.
387         * </p>
388         * <p>
389         * As Column Vector: This output format is similar to "As Field". The
390         * difference is instead sending out a single value on each port, it sends
391         * out an array of all of the data for that field. The type of each port is
392         * an array of the base type for the field.
393         * </p>
394         * <p>
395         * As ColumnBased Record: This output format will send all data on one port
396         * using a Record structure that encapsulates the entire data object. The
397         * Record will contain one array for each of the fields in the data, and the
398         * type of that array will be determined by the type of the field it
399         * represents.
400         * </p>
401         */
402        public StringParameter dataOutputFormat = null;
403
404        /**
405         * This parameter specifies a file extension that is used to limit the array
406         * of filenames returned by the data source actor when "As unCompressed File
407         * Name" is selected as the ouput type. Please see more information in
408         * "As Uncompressed File Name" in the description of the output format
409         * parameter.
410         */
411        public StringParameter fileExtensionFilter = null;
412
413        /**
414         * This parameter determines if extra data columns that are NOT described in
415         * the EML should be ignored (isLenient=true) or if an error should be
416         * raised when the data and EML description do not match (isLenient=false)
417         * TRUE - extra data columns are ignored FALSE - an error is raised when
418         * data and metadata conflict
419         */
420        public Parameter isLenient = null;
421
422        /**
423         * This parameter determines if remote source should be queried for latest
424         * revision of metadata file. TRUE - check performed FALSE - do not check
425         * for latest version
426         */
427        public Parameter checkVersion = null;
428
429        /**
430         * If this EML package has mutiple entities, this parameter specifies which
431         * entity should be used for output. By default when this parameter is
432         * unset, data from the first entity described in an EML package is output.
433         * This parameter is only used if the SQL parameter is not used, or if the
434         * SQL parameter is used and the output format is one of "As Table",
435         * "As Byte Array", "As Uncompressed File Name", and "As Cache File Name".
436         */
437        public StringParameter selectedEntity = null;
438
439        /**
440         * Construct an actor with the given container and name.
441         * 
442         * @param container
443         *            The container.
444         * @param name
445         *            The name of this actor.
446         * @exception IllegalActionException
447         *                If the actor cannot be contained by the proposed
448         *                container.
449         * @exception NameDuplicationException
450         *                If the container already has an actor with this name.
451         * @since
452         */
453        public Eml200DataSource(CompositeEntity container, String name)
454                        throws NameDuplicationException, IllegalActionException {
455                super(container, name);
456                _icon = new DataSourceIcon(this);
457
458                emlFilePath = new FileParameter(this, "emlFilePath");
459                emlFilePath.setDisplayName("EML File");
460
461                dataFilePath = new FileParameter(this, "dataFilePath");
462                dataFilePath.setDisplayName("Data File");
463
464                schemaDef = new StringAttribute(this, "schemaDef");
465                TextStyle schemaDefTS = new TextStyle(schemaDef, "schemaDef");
466                schemaDef.setDisplayName("Schema Definition");
467                schemaDef.setVisibility(DEFAULTFORSCHEMA);
468
469                sqlDef = new StringAttribute(this, "sqlDef");
470                TextStyle sqlDefTS = new TextStyle(sqlDef, "sqlDef");
471                sqlDef.setDisplayName("SQL Command");
472                sqlDef.setVisibility(DEFAULTFORSQL);
473                selectedEntity = new StringParameter(this, "selectedEntity");
474                selectedEntity.setDisplayName("Selected Entity");
475                dataOutputFormat = new StringParameter(this, "dataOutputFormat");
476                dataOutputFormat.setDisplayName("Data Output Format");
477                dataOutputFormat.setExpression(Eml200DataOutputFormatFactory._AsField);
478                dataOutputFormat.addChoice(Eml200DataOutputFormatFactory._AsField);
479                dataOutputFormat.addChoice(Eml200DataOutputFormatFactory._AsTable);
480                dataOutputFormat.addChoice(Eml200DataOutputFormatFactory._AsRow);
481                dataOutputFormat.addChoice(Eml200DataOutputFormatFactory._AsByteArray);
482                dataOutputFormat
483                                .addChoice(Eml200DataOutputFormatFactory._AsUnzippedFileName);
484                dataOutputFormat.addChoice(Eml200DataOutputFormatFactory._AsFileName);
485                dataOutputFormat.addChoice(Eml200DataOutputFormatFactory._AsAllFileNames);
486                dataOutputFormat
487                                .addChoice(Eml200DataOutputFormatFactory._AsColumnVector);
488                dataOutputFormat
489                                .addChoice(Eml200DataOutputFormatFactory._AsColumnRecord);
490                _dataOutputFormat = Eml200DataOutputFormatFactory.newInstance(this);
491
492                fileExtensionFilter = new StringParameter(this, "fileExtensionFilter");
493                fileExtensionFilter.setDisplayName("File Extension Filter");
494
495                isLenient = new Parameter(this, "isLenient");
496                isLenient.setDisplayName("Allow lenient data parsing");
497                isLenient.setTypeEquals(BaseType.BOOLEAN);
498                isLenient.setToken(BooleanToken.FALSE);
499
500                checkVersion = new Parameter(this, "checkVersion");
501                checkVersion.setDisplayName("Check for latest version");
502                checkVersion.setTypeEquals(BaseType.BOOLEAN);
503                checkVersion.setToken(BooleanToken.FALSE);
504
505                // create tableau for editting the SQL String
506                _qbTableauFactory = new QBTableauFactory(this, "_tableauFactory");
507
508                // Create a node controller to control the context menu
509                _nodeController = new DataSourceControllerFactory(this,
510                                "_controllerFactory");
511
512                if (_TypeHash.size() == 0) {
513                        for (int i = 0; i < DATATYPES.length; i++) {
514                                _TypeHash.put(DATATYPES[i], BASETYPES[i]);
515                        }
516                }
517
518        }
519
520        /**
521         * Accessor to _columns member. Default permissions for use by
522         * Eml200DataOutputFormatBase derived classes only.
523         * 
524         *       */
525        public Vector getColumns() {
526                return _columns;
527        }
528
529        /**
530         * @return Returns the _selectedTableEntity.
531         */
532        Entity getSelectedTableEntity() {
533                return _selectedTableEntity;
534        }
535        
536        public Vector<Entity> getEntityList() {
537                return _entityList;
538        }
539
540        /**
541         * @return Returns the _selectedCachedDataItem.
542         */
543        EcogridDataCacheItem getSelectedCachedDataItem() {
544                return _selectedCachedDataItem;
545        }
546
547        /**
548         * @return Returns the fileExtensionFilter.
549         */
550        String getFileExtensionInZip() {
551                try {
552                        return fileExtensionFilter.stringValue();
553                } catch (IllegalActionException e) {
554                        return "";
555                }
556        }
557
558        /**
559         * @return Returns the _selectedColumnLabelList.
560         */
561        String[] getColumnLabels() {
562                return _selectedColumnLabelList;
563        }
564
565        /**
566         * @return Returns the _selectedColumnTypeList.
567         */
568        Type[] getColumnTypes() {
569                return _selectedColumnTypeList;
570        }
571
572        @Override
573    public void preinitialize() throws IllegalActionException {
574
575                // check for latest
576                this.checkForMostRecentRecordId(false);
577                
578                // First block for metadata download to finish.
579                try {
580                        _metadataCompleted.acquire();
581                        log.debug("Is stop requested? " + getDirector().isStopRequested());
582                } catch (InterruptedException e) {
583                        log.debug("Is stop requested? " + getDirector().isStopRequested());
584                        if (getDirector().isStopRequested()) {
585                                throw new IllegalActionException("Execution interrupted");
586                        }
587                }
588
589                super.preinitialize();
590        }
591
592        /**
593         * Initialize the actor prior to running in the workflow. This reads the
594         * metadata and configures the ports.
595         * 
596         * @throws IllegalActionException
597         */
598        @Override
599    public void initialize() throws IllegalActionException {
600                log.debug("In initialize method");
601
602                // Now block waiting for the entity data to finish.
603                try {
604                        synchronized (_entityCountDown) {
605                                while (_entityCountDown.currentCount() > 0
606                                                && (getDirector() != null && !getDirector()
607                                                                .isStopRequested())) {
608                                        _entityCountDown.wait();
609                                        log.debug("Is stop requested? "
610                                                        + getDirector().isStopRequested());
611                                }
612                        }
613                } catch (InterruptedException e) {
614                        throw new IllegalActionException("Downloads not completed");
615                } catch (Exception e) {
616                        throw new IllegalActionException("Download error encountered");
617                }
618
619                if (getDirector() != null && getDirector().isStopRequested()) {
620                        throw new IllegalActionException("Execution interrupted");
621                }
622
623                if (_selectedTableEntity == null) {
624                        throw new IllegalActionException("_selectedTableEnity is NULL!");
625                }
626
627                if (_selectedCachedDataItem == null) {
628                        _selectedCachedDataItem = (EcogridDataCacheItem) _selectedTableEntity
629                                        .getDataCacheObject();
630                }
631
632                if (_selectedCachedDataItem == null) {
633                        throw new IllegalActionException(
634                                        " The selected entity has a null data (Maybe data download failed)");
635                }
636
637                // This was the initializeAsTableRowOrField method
638                String sqlStr = "";
639                String sqlXMLStr = ((Settable) sqlDef).getExpression();
640                if (sqlXMLStr == null || sqlXMLStr.length() == 0) {
641                        sqlStr = "SELECT * FROM "
642                                        + (_selectedTableEntity.getMappedName() != null ? _selectedTableEntity
643                                                        .getMappedName()
644                                                        : _selectedTableEntity.getName());
645
646                } else {
647                        Hashtable mappedNameHash = new Hashtable();
648                        // should go through all enities
649                        int size = _entityList.size();
650                        if (size == 0) {
651                                // no entity in this package and throw an exception
652                                throw new IllegalActionException(
653                                                "There is no downloadable entity or no entity in this eml package");
654                        }
655                        for (int i = 0; i < size; i++) {
656                                Entity entity = (Entity) _entityList.elementAt(i);
657                                if (entity.getMappedName() != null) {
658                                        mappedNameHash
659                                                        .put(entity.getName(), entity.getMappedName());
660                                }
661                        }
662                        DBQueryDef queryDef = DBQueryDefParserEmitter.parseQueryDef(
663                                        _schemaDef, sqlXMLStr, mappedNameHash);
664
665                        sqlStr = DBQueryDefParserEmitter.createSQL(_schemaDef, queryDef);
666                }
667
668                log.debug("The sql command is " + sqlStr);
669                // excuted query
670                if (sqlStr != null && !sqlStr.trim().equals("")) {
671                        // if table gnerated successfully, we will run query
672                        if (_tableGenerator != null && _tableGenerator.getSuccessStatus()) {
673                                try {
674                                        _icon.setBusy();
675                                        _resultSet = new HsqlDataQueryAction();
676                                        _resultSet.setSQL(sqlStr);
677                                        _resultSet.actionPerformed(null);
678
679                                } catch (Exception e) {
680                                        log.debug("Error to run query is ", e);
681                                        throw new IllegalActionException(e.getMessage());
682                                }
683
684                        }
685                }
686                // if reustlset is null(this can be caused by db couldn't create table)
687                // and we don't have any sql command
688                // (this means we only have one data entity involve) or only has one
689                // entity at all
690                // we would like to try read the selected entity data from datachache
691                // rather than
692                // from db
693                if (_resultSet == null && (!_hasSQLCommand || _numberOfEntities == 1)) {
694                        // System.out.println("in result set is null!!!!!!!!!!!!!!!!!!");
695                        _reader = _selectedCachedDataItem.getDataInputStream();
696                        try {
697                                createDelimitedReader();
698                        } catch (Exception e) {
699                                log.debug("Error to run delimiter reader is  ", e);
700                                throw new IllegalActionException(e.getMessage());
701                        }
702
703                }
704                _icon.setReady();
705                // This was the initializeAsTableRowOrField method
706
707                // Set marker to say we have data. This might not be true though and is
708                // perhaps
709                // a bug. The correct thing to do, is check for data in the prefire and
710                // in the
711                // postfire.
712                _endOfResultSet = false;
713
714                _dataOutputFormat.initialize();
715
716        }
717
718        /**
719         * This method will read a row vector from data source, either from
720         * resultset which excuted by data query or delimiterdReader which reader
721         * from data inputtream - _reader. This method will be called in asFired
722         * method
723         */
724        public Vector gotRowVectorFromSource() throws Exception {
725                Vector rowVector = new Vector();
726                if (_resultSet != null) {
727
728                        ResultSet rs = _resultSet.getResultSet();
729                        ResultSetMetaData metadata = rs.getMetaData();
730                        int columnSize = metadata.getColumnCount();
731
732                        if (rs.next()) {
733                                for (int i = 0; i < columnSize; i++) {
734                                        String str = rs.getString(i + 1);
735                                        rowVector.add(str);
736                                }
737                        }
738                } else if (_reader != null
739                                && (!_hasSQLCommand || _numberOfEntities == 1)) {
740                        if (_selectedTableEntity.isSimpleDelimited()) {
741                                _simpleDelimitedReader
742                                                .setCollapseDelimiter(_selectedTableEntity
743                                                                .getCollapseDelimiter());
744                                _simpleDelimitedReader.setNumFooterLines(_selectedTableEntity
745                                                .getNumFooterLines());
746                                rowVector = _simpleDelimitedReader.getRowDataVectorFromStream();
747                        } else {
748                                rowVector = _complexFormatReader.getRowDataVectorFromStream();
749                        }
750
751                }
752
753                if (rowVector.isEmpty()) {
754                        _endOfResultSet = true;
755                }
756                return rowVector;
757        }
758
759        /*
760         * (non-Javadoc)
761         * 
762         * @see ptolemy.actor.lib.Source#prefire()
763         */
764        @Override
765    public boolean prefire() throws IllegalActionException {
766                return _dataOutputFormat.prefire();
767        }
768
769        /**
770         * Send a record's tokens over the ports on each fire event.
771         * 
772         * @exception IllegalActionException
773         *                If there is no director.
774         */
775        @Override
776    public void fire() throws IllegalActionException {
777                // log.debug("In fire method");
778                super.fire();
779                _dataOutputFormat.fire();
780        }
781
782        /**
783         * This method is only for output as byte array. Read the next bytes from
784         * the input stream into array. Fire method will send the array out. If
785         * there reached EOF , return false. Otherwise, return whatever the
786         * superclass returns.
787         * 
788         * @exception IllegalActionException
789         *                If there is a problem reading the file.
790         */
791        @Override
792    public boolean postfire() throws IllegalActionException {
793
794                if (!_dataOutputFormat.postfire()) {
795                        return false;
796                }
797
798                try {
799                        if (_resultSet != null && _resultSet.getResultSet().isAfterLast()) {
800                                return false;
801                        }
802                } catch (SQLException e) {
803                        throw new IllegalActionException(this, e,
804                                        "Unable to determine end of result set");
805                }
806
807                if (_endOfResultSet) {
808                        return false;
809                }
810
811                return super.postfire();
812
813        }
814
815        void initializePort(String aPortName, Type aPortType)
816                        throws IllegalActionException {
817                try {
818                        String columnName = aPortName.trim();
819                        // Create a new port for each Column in the resultset
820                        TypedIOPort port = (TypedIOPort) this.getPort(columnName);
821                        boolean aIsNew = (port == null);
822                        if (aIsNew) {
823                                // Create a new typed port and add it to this container
824                                port = new TypedIOPort(this, columnName, false, true);
825                                log.debug("Creating port [" + columnName + "]" + this);
826                        }
827                        port.setTypeEquals(aPortType);
828
829                } catch (ptolemy.kernel.util.NameDuplicationException nde) {
830                        throw new IllegalActionException(
831                                        "One or more attributes has the same name.  Please correct this and try again.");
832                }
833
834        }
835
836        /*
837         * Remove all ports which's name is not in the selected vector
838         */
839        void removeOtherOutputPorts(Collection nonRemovePortName)
840                        throws IllegalActionException {
841                // Use toArray() to make a deep copy of this.portList().
842                // Do this to prevent ConcurrentModificationExceptions.
843                TypedIOPort[] l = new TypedIOPort[0];
844                l = (TypedIOPort[]) this.portList().toArray(l);
845
846                for (int i = 0; i < l.length; i++) {
847                        TypedIOPort port = l[i];
848                        if (port == null || port.isInput()) {
849                                continue;
850                        }
851                        String currPortName = port.getName();
852                        if (!nonRemovePortName.contains(currPortName)) {
853                                try {
854                                        port.setContainer(null);
855                                } catch (Exception ex) {
856                                        throw new IllegalActionException(this,
857                                                        "Error removing port: " + currPortName);
858                                }
859                        }
860                }
861        }
862
863        /**
864         * Issue a ChangeRequest to change the output ports.
865         * 
866         * @throws ptolemy.kernel.util.IllegalActionException
867         */
868        private void reconfigurePorts(String why) {
869                log.debug("Creating reconfigure ports change request " + why);
870                this.requestChange(new ChangeRequest(this, why) {
871                        @Override
872            public void _execute() throws Exception {
873                                log.debug("Executing reconfigure ports change request "
874                                                + this.getDescription());
875                                _dataOutputFormat.reconfigurePorts();
876                        }
877                });
878        }
879
880        /**
881         * Callback for changes in attribute values.
882         */
883        @Override
884    public void attributeChanged(ptolemy.kernel.util.Attribute attribute)
885                        throws ptolemy.kernel.util.IllegalActionException {
886                log.debug("In attribute change method");
887                // System.out.println("In attribute change method!!!!!!!!!!!!!!!");
888                if (attribute == emlFilePath || attribute == dataFilePath) {
889                        log.debug("Processing new EML or data file path...");
890                        try {
891                                String url = ((Settable) emlFilePath).getExpression();
892                                log.debug("EML File Path is: " + emlFilePath);
893
894                                String dataurl = dataFilePath.getExpression();
895                                if (dataurl == null || dataurl.trim().equals("")) {
896                                        log.debug("Data file is null so returning...");
897                                        return;
898                                }
899
900                                if (url == null || url.trim().equals("")) {
901                                        log.debug("URL is null so returning...");
902                                        return;
903                                }
904
905                                if (emlFile != null && emlFile.equals(url)) {
906                                        log.debug("EML File is null so returning...");
907                                        return;
908                                }
909                                emlFile = url;
910                                String endpoint = null;
911                                if (!url.startsWith("http://") 
912                                                && !url.startsWith("https://")
913                                                && !url.startsWith("file:///")
914                                                && !url.startsWith("ftp://")
915                                                && !url.startsWith("ecogrid://")
916                                                && !url.startsWith("srb://")) {
917                                        log.debug("In url mangling block");
918                                        if (emlFilePath == null) {
919                                                return;
920                                        }
921                                        File emlFileName = emlFilePath.asFile();
922                                        //System.out.println("the name of eml file name" +emlFileName.getName());
923                                        setRecordId(emlFileName.getName());
924                                        if (emlFileName == null) {
925                                                return;
926                                        }
927                                        url = emlFileName.getPath();
928                                        // System.out.println("the url from getpath is "+url);
929                                        // it is file path we need add file protocal
930                                        if (url.startsWith("file:/") && !url.startsWith("file://")
931                                                        && !url.startsWith("file:///")) {
932
933                                                // somehow windows url will look like "file:/C:
934                                                url = url.replaceFirst("file:/", "file:///");
935
936                                        } else if (url.startsWith("file://")
937                                                        && !url.startsWith("file:///")) {
938
939                                                url = url.replaceFirst("file://", "file:///");
940                                        } else if (!url.startsWith("file:///")) {
941                                                // it is file path we need add file protocal
942                                                url = "file:///" + url;
943                                        }
944                                }
945                                emlFileFinalPath = url;
946                                log.debug("Final EML url is: " + emlFileFinalPath);
947
948                                _icon.setBusy();
949                                clearTableRelatedParameter(true);
950                                _cacheMetaDataItem = (EcogridMetaDataCacheItem) DataCacheManager
951                                                .getCacheItem(new MetadataComplete(),
952                                                                "MetaData " + emlFileFinalPath, endpoint,
953                                                                EcogridMetaDataCacheItem.class.getName());
954                                if (_cacheMetaDataItem.isEmpty()) {
955                                        _cacheMetaDataItem.setEndPoint(endpoint);
956                                        _cacheMetaDataItem.setRecordId(emlFileFinalPath);
957                                        _cacheMetaDataItem.start();
958                                } else {
959                                        log.debug("in not empty============");
960                                }
961                        } catch (Exception e) {
962                                e.printStackTrace();
963                        }
964
965                } else if (attribute == sqlDef) {
966                        String sqlDefStr = ((Settable) attribute).getDefaultExpression();
967                        sqlDefStr = ((Settable) attribute).getExpression();
968                        if (sqlDefStr.length() > 0) {
969                                _hasSQLCommand = true;
970                                _queryDef = DBQueryDefParserEmitter.parseQueryDef(_schemaDef,
971                                                sqlDefStr);
972                                _columns = _queryDef.getSelects();
973                                generteLabelListAndTypteListFromColumns();
974                                reconfigurePorts("Sql attribute changed");
975                        } else if (_dataOutputFormat instanceof Eml200DataOutputFormatField) {
976                                // if sql command is empty, we will think it will be select *
977                                // from selected table
978                                if (_selectedTableEntity != null) {
979                                        _columns = _selectedTableEntity.getFields();
980                                        generteLabelListAndTypteListFromColumns();
981                                        reconfigurePorts("Sql attribute changed");
982                                }
983                        }
984
985                } else if (attribute == schemaDef && !_ignoreSchemaChange) {
986                        // NOTE: We may skip setting it here because _ignoreSchemaChange
987                        // may be true
988                        String schemaDefStr = ((Settable) schemaDef).getExpression();
989
990                        // MOML may have a blank definition
991                        if (schemaDefStr.length() > 0) {
992                                _schemaDefinitionIsSet = true; // remember that we have been
993                                // set by the MOML
994
995                                log.debug("schemaDef >>" + schemaDefStr + "<<");
996
997                                _schemaDef = DBSchemaParserEmitter.parseSchemaDef(schemaDefStr);
998                        }
999
1000                } else if (attribute.getName().equals("checkVersion")) {
1001                        log.debug("=========================change checkVersion");
1002                        // use the endpoint to determine where to search for the most recent
1003                        // version
1004                        this.checkForMostRecentRecordId(true);
1005                } else if ((attribute.getName().equals(ResultRecord.RECORDID) || attribute
1006                                .getName().equals(ResultRecord.ENDPOINT))
1007                                && this.hasConnectionValues()
1008                                && !(attribute.getContainer().getContainer() instanceof ResultTreeRoot)) {
1009                        log.debug("=========================change recordid or endpoints");
1010                        if (getRecordId() != null && getEndpoint() != null) {
1011                                _icon.setBusy();
1012                                // start over!
1013                                clearTableRelatedParameter(false);
1014                                //_entityList = new Vector<Entity>();
1015                                _cacheMetaDataItem = (EcogridMetaDataCacheItem) DataCacheManager
1016                                                .getCacheItem(new MetadataComplete(), "MetaData "
1017                                                                + getRecordId(), getEndpoint(),
1018                                                                EcogridMetaDataCacheItem.class.getName());
1019                                if (_cacheMetaDataItem != null && _cacheMetaDataItem.isEmpty()) {
1020                                        _cacheMetaDataItem.setEndPoint(getEndpoint());
1021                                        _cacheMetaDataItem.setRecordId(getRecordId());
1022                                        _cacheMetaDataItem.start();
1023                                }
1024                        }
1025
1026                } else if (attribute == dataOutputFormat) {
1027                        String format = ((Settable) attribute).getExpression();
1028                        log.debug("=========================change dataOutputFormat "
1029                                        + format);
1030                        String strDataOutputFormat = dataOutputFormat.stringValue();
1031                        _dataOutputFormat = Eml200DataOutputFormatFactory.newInstance(
1032                                        strDataOutputFormat, this);
1033                        reconfigurePorts("Output type changed");
1034
1035                } else if (attribute == selectedEntity) {
1036                        // reset selected entity
1037                        String selectedEntityName = ((Settable) attribute).getExpression();
1038                        log.debug("=========================selected entity "
1039                                        + selectedEntityName);
1040                        setSelectedEntityValue(true);
1041                }
1042        }
1043
1044        /** Clone the Eml200DataSource into the specified workspace. */
1045        @Override
1046    public Object clone(Workspace workspace) throws CloneNotSupportedException {
1047        
1048        // see if the output port was removed. if so, recreate it before
1049        // call super.clone() since Entity.clone() expects all ports that
1050        // are class fields to have a non-null container. after calling
1051        // super.clone(), we reset the container to null.
1052        boolean createdOutput = false;
1053        if(output.getContainer() == null) {
1054            createdOutput = true;
1055            try {
1056                output = new TypedIOPort(this, "output", false, true);
1057            } catch (IllegalActionException | NameDuplicationException e) {
1058                throw new CloneNotSupportedException(
1059                    "Error creating output port: " + e.getMessage());
1060            }
1061        }
1062
1063        try {
1064            Eml200DataSource newObject = (Eml200DataSource) super.clone(workspace);
1065            newObject._dataOutputFormat = Eml200DataOutputFormatFactory.newInstance(newObject);
1066            newObject._entityList = new Vector<Entity>();
1067            newObject._icon = null;
1068            newObject._metadataCompleted = new Latch();
1069            newObject._nodeController = (DataSourceControllerFactory) newObject.getAttribute("_controllerFactory");
1070            newObject._qbTableauFactory = (QBTableauFactory) newObject.getAttribute("_tableauFactory");
1071            newObject._schemaDef = null;
1072            newObject._schema = new DSSchemaDef();
1073            newObject.failedDownloadEntityName = new Vector();                       
1074            return newObject;
1075        } finally {
1076            if(createdOutput) {
1077                try {
1078                    output.setContainer(null);
1079                } catch (IllegalActionException | NameDuplicationException e) {
1080                    throw new CloneNotSupportedException(
1081                        "Error removing output port: " + e.getMessage());
1082                }
1083            }
1084        }
1085        }
1086        
1087        /**
1088         * This method allows default documentation to be added to the actor
1089         * specified in the parameter. The KeplerDocumentation is retrieved from the
1090         * 'EML 2 Dataset' that exists in the actor library. The default
1091         * documentation is only loaded once since it will likely remain quite
1092         * static. If the given actor instance already contains the
1093         * KeplerDocumentation attribute, the existing attribute is preserved
1094         * (nothing is changed).
1095         * 
1096         * @param emlActor
1097         *            the instance to which documentation will be added
1098         */
1099        public static void generateDocumentationForInstance(
1100                        Eml200DataSource emlActor) {
1101                try {
1102                        // only look up the documentation only once
1103                        if (defaultDocumentation == null) {
1104                                Iterator cacheIter = CacheManager.getInstance()
1105                                                .getCacheObjectIterator();
1106                                while (cacheIter.hasNext()) {
1107                                        Object co = cacheIter.next();
1108                                        // is this an actor cache object?
1109                                        if (co instanceof ActorCacheObject) {
1110                                                ActorCacheObject aco = (ActorCacheObject) co;
1111                                                // for this class?
1112                                                if (aco.getClassName().equals(emlActor.getClassName())) {
1113                                                        // get the metadata
1114                                                        ActorMetadata am = aco.getMetadata();
1115                                                        // get the default documentation from the metadata
1116                                                        defaultDocumentation = am
1117                                                                        .getDocumentationAttribute();
1118                                                        log
1119                                                                        .debug("looked up default KeplerDocumentation contained in "
1120                                                                                        + am.getName());
1121                                                        break;
1122                                                }
1123                                        }
1124                                }
1125                        }
1126
1127                        // add the documentation for this actor if it is not there already
1128                        if (emlActor.getAttribute("KeplerDocumentation") == null) {
1129                                // make an instance of the documentation attribute for the input
1130                                // actor
1131                                KeplerDocumentationAttribute keplerDocumentation = new KeplerDocumentationAttribute(
1132                                                emlActor, "KeplerDocumentation");
1133
1134                                // copy the default and set it for this one
1135                                keplerDocumentation
1136                                                .createInstanceFromExisting(defaultDocumentation);
1137                                keplerDocumentation.setContainer(emlActor);
1138                                log.debug("set the KeplerDocumentation for actor instance: "
1139                                                + emlActor.getName());
1140
1141                        }
1142                } catch (Exception e) {
1143                        log
1144                                        .error("error encountered whilst generating default documentation for actor instance: "
1145                                                        + e.getMessage());
1146                        e.printStackTrace();
1147                }
1148        }
1149
1150        /**
1151         * First checks if checkVersion parameter is selected. If it is selected the
1152         * Ecogrid is checked for a more recent version of the recordid. If there is
1153         * a newer version available, user is asked if they would like to use the
1154         * newer version in the workflow. If they really do want to use the newer
1155         * version, the most recent recorid is set in the actor attributes.
1156         * 
1157         */
1158        private void checkForMostRecentRecordId(boolean prompt) {
1159
1160                // check if we even want to do this for the actor
1161                boolean boolCheckVersion = false;
1162                try {
1163                        boolCheckVersion = 
1164                                ((BooleanToken) this.checkVersion.getToken()).booleanValue();
1165                        log.debug("checkVersion flag=" + boolCheckVersion);
1166                } catch (IllegalActionException e1) {
1167                        log.error("could not parse checkVersion parameter");
1168                        e1.printStackTrace();
1169                }
1170                if (!boolCheckVersion) {
1171                        checkVersionPromptCount = 0; // ask again if they turn it on later
1172                        return;
1173                }
1174
1175                // check for newer version of the eml
1176                String mostRecentRecordId = getMostRecentRecordId();
1177
1178                // peek
1179                log.debug("Original recordId=" + getRecordId()
1180                                + " - Most recent recordId=" + mostRecentRecordId);
1181                // different?
1182                if (mostRecentRecordId != null
1183                                && !mostRecentRecordId.equalsIgnoreCase(getRecordId())) {
1184                        
1185                        // are we prompting?
1186                        if (prompt) {
1187                                if (checkVersionPromptCount < 1) {
1188                                        boolean response = MessageHandler
1189                                                        .yesNoQuestion("This workflow uses an old version of: "
1190                                                                        + getName()
1191                                                                        + "\nCurrent workflow version: "
1192                                                                        + getRecordId()
1193                                                                        + "\nMost recent repository version: "
1194                                                                        + mostRecentRecordId
1195                                                                        + "\nWould you like to update the workflow to use the most recent version?"
1196                                                                        + "\nNOTE:  Both the data and data structure can vary widely from version to version."
1197                                                                        + "\nNewer version available - " + getName());
1198        
1199                                        if (response == true) {
1200                                                // reset the check counter for 'yes' answer so that we'll be
1201                                                // able to ask again later
1202                                                checkVersionPromptCount = 0;
1203                                                setRecordId(mostRecentRecordId);
1204                                        } else {
1205                                                // I told you once, and i won't tell you again, I said NO!
1206                                                checkVersionPromptCount++;
1207                                        }
1208                                }
1209                        }
1210                        else {
1211                                setRecordId(mostRecentRecordId);
1212                        }
1213                }
1214        }
1215
1216        /**
1217         * This method will clear sql, schema and selected entity parameters in the
1218         * configure window.
1219         */
1220        private void clearTableRelatedParameter(boolean all)
1221                        throws ptolemy.kernel.util.IllegalActionException {
1222                // clean up entity list and sql and schema attribute
1223                _entityList = new Vector();
1224                if (all) {
1225                        selectedEntity.setExpression("");
1226                }
1227                selectedEntity.removeAllChoices();
1228                _queryDef = new DBQueryDef();
1229                sqlDef.setExpression("");
1230                _schema = new DSSchemaDef();
1231                schemaDef.setExpression("");
1232        }
1233
1234        /**
1235         * Creates the schema definition from the incoming data columns
1236         */
1237        private void createSchemaFromData(Entity tableEntity) {
1238                try // XXX shouldn't catch this exception here
1239                {
1240                        // _schemaDefinitionIsSet gets set when the schema has come
1241                        // from the MOML
1242                        // So if it is false here, then we set the attribute from the data.
1243                        //
1244                        // If after this the attr gets set from the MOML it will override
1245                        // what we set here.
1246                        // Entity tableEntity = dataItem.getEntity();
1247                        DataCacheObject dataItem = tableEntity.getDataCacheObject();
1248                        if (tableEntity != null) {
1249                                _schema.addTable(tableEntity);
1250                                _schemaDef = _schema;
1251
1252                                DBTableNameResolver nameResolver = new DBTableNameResolver();
1253                                try {
1254                                        tableEntity = nameResolver.resolveTableName(tableEntity);
1255                                } catch (Exception e) {
1256                                }
1257                                boolean refresh = false;
1258                                if (tableEntity.isSimpleDelimited()) {
1259                                        // for simple dimilter, we can use text table
1260                                        _tableGenerator = new DBTablesGenerator(tableEntity,
1261                                                        dataItem.getBaseFileName(), refresh);
1262                                } else {
1263                                        // for complex format, we should use inpustream to create
1264                                        // table
1265                                        InputStream input = dataItem.getDataInputStream();
1266                                        _tableGenerator = new DBTablesGenerator(tableEntity, input,
1267                                                        refresh);
1268                                }
1269
1270                                _tableGenerator.run(); // don't do thread
1271
1272                                String schemaDefXML = DBSchemaParserEmitter.emitXML(_schemaDef);
1273                                _ignoreSchemaChange = true;
1274                                ((Settable) schemaDef).setExpression(schemaDefXML);
1275                                _ignoreSchemaChange = false;
1276                        }
1277                } catch (IllegalActionException e) {
1278                        log.debug("In createSchemaFromData: " + e);
1279                }
1280
1281        }
1282
1283        @Override
1284    public void preview() {
1285
1286                String displayText = "PREVIEW NOT IMPLEMENTED FOR THIS ACTOR";
1287                JFrame frame = new JFrame(this.getName() + " Preview");
1288                frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
1289                JPanel panel = new JPanel(new BorderLayout());
1290                JScrollPane scrollPane = null;
1291                JTable jtable = null;
1292
1293                try {
1294
1295                        // set everything up (datawise)
1296                        this.initialize();
1297
1298                        // check the entity - different displays for different formats
1299                        // Compressed file
1300                        if (this._selectedTableEntity.getHasGZipDataFile()
1301                                        || this._selectedTableEntity.getHasTarDataFile()
1302                                        || this._selectedTableEntity.getHasZipDataFile()) {
1303                                displayText = "Selected entity is a compressed file.  \n"
1304                                                + "Preview not implemented for output format: "
1305                                                + this.dataOutputFormat.getExpression();
1306                                if (this._dataOutputFormat instanceof Eml200DataOutputFormatUnzippedFileName) {
1307                                        Eml200DataOutputFormatUnzippedFileName temp = (Eml200DataOutputFormatUnzippedFileName) this._dataOutputFormat;
1308                                        displayText = "Files: \n";
1309                                        for (int i = 0; i < temp.getTargetFilePathInZip().length; i++) {
1310                                                displayText += temp.getTargetFilePathInZip()[i] + "\n";
1311                                        }
1312                                }
1313
1314                        }
1315                        // SPATIALRASTERENTITY or SPATIALVECTORENTITY are "image entities"
1316                        // as far as the parser is concerned
1317                        else if (this._selectedTableEntity.getIsImageEntity()) {
1318                                // use the content of the cache file
1319                                displayText = new String(this.getSelectedCachedDataItem()
1320                                                .getData());
1321                        }
1322                        // TABLEENTITY
1323                        else {
1324                                // holds the rows for the table on disk with some in memory
1325                                String vectorTempDir = DotKeplerManager.getInstance().getCacheDirString();
1326                                // + "vector"
1327                                // + File.separator;
1328                                PersistentVector rowData = new PersistentVector(vectorTempDir);
1329
1330                                // go through the rows and add them to the persistent vector
1331                                // model
1332                                Vector row = this.gotRowVectorFromSource();
1333                                while (!row.isEmpty()) {
1334                                        rowData.addElement(row);
1335                                        row = this.gotRowVectorFromSource();
1336                                }
1337                                // the column headers for the table
1338                                Vector columns = this.getColumns();
1339
1340                                /*
1341                                 * with java 6, there is a more built-in sorting mechanism that
1342                                 * does not require the custom table sorter class
1343                                 */
1344                                TableModel tableModel = new PersistentTableModel(rowData,
1345                                                columns);
1346                                TableSorter tableSorter = new TableSorter(tableModel);
1347                                jtable = new JTable(tableSorter) {
1348                                        // make this table read-only by overriding the default
1349                                        // implementation
1350                                        @Override
1351                    public boolean isCellEditable(int row, int col) {
1352                                                return false;
1353                                        }
1354                                };
1355                                // sets up the listeners for sorting and such
1356                                tableSorter.setTableHeader(jtable.getTableHeader());
1357                                // set up the listener to trash persisted data when done
1358                                frame.addWindowListener(new PersistentTableModelWindowListener(
1359                                                (PersistentTableModel) tableModel));
1360                        }
1361                } catch (Exception e) {
1362                        displayText = "Problem encountered while generating preview: \n"
1363                                        + e.getMessage();
1364                        log.error(displayText);
1365                        e.printStackTrace();
1366                }
1367
1368                // make sure there is a jtable, otherwise show just a text version of
1369                // the data
1370                if (jtable != null) {
1371                        jtable.setVisible(true);
1372                        // jtable.setAutoResizeMode(JTable.AUTO_RESIZE_OFF);
1373                        scrollPane = new JScrollPane(jtable);
1374                } else {
1375                        JTextArea textArea = new JTextArea();
1376                        textArea.setColumns(80);
1377                        textArea.setText(displayText);
1378                        textArea.setVisible(true);
1379                        scrollPane = new JScrollPane(textArea);
1380                }
1381                scrollPane.setVisible(true);
1382                panel.setOpaque(true);
1383                panel.add(scrollPane, BorderLayout.CENTER);
1384                frame.setContentPane(panel);
1385                frame.pack();
1386                frame.setLocationRelativeTo(null);
1387                frame.setVisible(true);
1388        }
1389
1390        /**
1391         * Get a URL pointer to the documentation for this data source. The XML
1392         * source of the EML document is retrieved from the cache, and then passed
1393         * to an XSLT parser to be transformed into HTML format, which is saved in a
1394         * temporary file. The URL of the temporary file containing the HTML result
1395         * is returned.
1396         * 
1397         * @return URL the URL of the HTML file containing the documentation
1398         */
1399        @Override
1400    public URL getDocumentation() {
1401
1402                String namespace = getNamespace();
1403                if (namespace == null) {
1404                        namespace = EML2MetadataSpecification.EML200NAMESPACE;
1405                }
1406                log.debug("The name space is " + namespace);
1407                URL htmlUrl = null;
1408                // Get the metadata XML document and transform it to html
1409                if (_cacheMetaDataItem.isReady()) {
1410                        try {
1411                                String htmlFileName = _cacheMetaDataItem.getBaseFileName()
1412                                                + HTMLEXTENSION;
1413                                InputStream is = _cacheMetaDataItem.getDataInputStream();
1414                                InputStreamReader source = new InputStreamReader(is);
1415                                htmlUrl = StaticUtil.getMetadataHTMLurl(source, namespace,
1416                                                htmlFileName);
1417
1418                        } catch (Exception fnfe) {
1419                                log.debug("Could not open temporary output file.");
1420                        }
1421                }
1422                return htmlUrl;
1423        }
1424
1425        /**
1426         * Creates the delimtedReader from the data item inputstream
1427         */
1428        private void createDelimitedReader() throws Exception {
1429                /*
1430                 * String data = aDataStr.toString(); log.debug("-----------------\n" +
1431                 * data + "\n-----------------\n", 2);
1432                 */
1433
1434                // log.debug("entityhash: " + entityhash.toString());
1435                if (_selectedTableEntity.isSimpleDelimited()) {
1436                        boolean stripHeaderLine = true;
1437                        boolean isLenientBool = ((BooleanToken) this.isLenient.getToken())
1438                                        .booleanValue();
1439                        _simpleDelimitedReader = new DelimitedReader(_reader,
1440                                        _selectedTableEntity.getAttributes().length,
1441                                        _selectedTableEntity.getDelimiter(), _selectedTableEntity
1442                                                        .getNumHeaderLines(), _selectedTableEntity
1443                                                        .getRecordDelimiter(), _selectedTableEntity
1444                                                        .getNumRecords(), stripHeaderLine);
1445                        _simpleDelimitedReader.setLenient(isLenientBool);
1446                } else {
1447                        _complexFormatReader = new TextComplexFormatDataReader(_reader,
1448                                        _selectedTableEntity);
1449                }
1450
1451                // _dataVectors = dr.getTokenizedData(true);
1452        }
1453
1454        /**
1455         * Returns a Ptolemly type for a given Kepler DataSource Type
1456         * 
1457         * @param aType
1458         *            DataSource type
1459         * @return Ptolemy type
1460         */
1461        BaseType getBaseType(String aType) {
1462                BaseType type = (BaseType) _TypeHash.get(aType);
1463                if (type == null) {
1464                        return BaseType.UNKNOWN;
1465                }
1466                return type;
1467        }
1468
1469        /**
1470         * Inner class used for completion notification of EcoGridMetaDataCacheItem
1471         * objects.
1472         */
1473        private class MetadataComplete implements DataCacheListener {
1474
1475                @Override
1476        public void complete(DataCacheObject aItem) {
1477                        log.debug("MetadataComplete: " + this);
1478
1479                        try {
1480                                aItem.removeListener(this);
1481
1482                                if (!aItem.isReady()) {
1483                                        log.error("Unable to download MetaData");
1484                                        /*
1485                                         * if (SHOWGUIERROR) { throw new
1486                                         * IllegalActionException(this,
1487                                         * "Unable to download MetaData");
1488                                         * 
1489                                         * }
1490                                         */
1491                                        MessageHandler.error("Unable to download MetaData");
1492                                        _icon.setError();
1493                                        return;
1494                                }
1495
1496                                try {
1497                                        parsePackage(new InputStreamReader(new FileInputStream(
1498                                                        new File(aItem.getAbsoluteFileName()))));
1499                                } catch (Exception e) {
1500                                        log
1501                                                        .error(
1502                                                                        "Exception occurred during MetaDataCompletion",
1503                                                                        e);
1504                                        /*
1505                                         * if (SHOWGUIERROR) { throw new
1506                                         * IllegalActionException(this,
1507                                         * "Unable to parse the MetaData: " +e.getMessage() ,
1508                                         * "alert", JOptionPane.ERROR_MESSAGE); }
1509                                         */
1510                                        MessageHandler.error("Unable to parse the MetaData: "
1511                                                        + e.getMessage());
1512                                        _icon.setError();
1513                                }
1514                                // if no sql command, we need set columns
1515                                if (!_hasSQLCommand) {
1516                                        log
1517                                                        .debug("There is no sql command attribute and set up columns in compelete method");
1518                                        _columns = _selectedTableEntity.getFields();
1519                                }
1520
1521                                generteLabelListAndTypteListFromColumns();
1522                                reconfigurePorts("Metadata Complete");
1523
1524                        } finally {
1525                                _metadataCompleted.release();
1526                        }
1527                        // System.out.println("metadata complete !!!!!!!!!!!!!!!!1");
1528                }
1529        }
1530
1531        // ------------------------------------------------------------------------
1532        // -- DataCacheListener
1533        // ------------------------------------------------------------------------
1534
1535        @Override
1536    public void complete(DataCacheObject aItem) {
1537                log.debug("complete: " + this);
1538
1539                log.debug("Class of aItem " + aItem.getClass().getName());
1540                aItem.removeListener(this);
1541                if (aItem.isReady()) {
1542                        log.debug("aItem is instanceof EcogridDataCacheItem");
1543
1544                        EcogridDataCacheItem item = (EcogridDataCacheItem) aItem;
1545                        String entityIdentifier = item.getEntityIdentifier();
1546                        int index = lookupEntityListName(entityIdentifier);
1547                        Entity entity = null;
1548                        if (index != INDEXFORNOTFOUND) {
1549                                entity = (Entity) _entityList.elementAt(index);
1550                                entity.setDataCacheObject(item);
1551                        }
1552
1553                        if (entity != null && !entity.getIsImageEntity()) {
1554
1555                                createSchemaFromData(entity);
1556
1557                        }
1558                } else if (aItem.isError()) {
1559                        log.debug("In failed download path");
1560                        EcogridDataCacheItem item = (EcogridDataCacheItem) aItem;
1561                        String entityIdentifier = item.getEntityIdentifier();
1562                        int index = lookupEntityListName(entityIdentifier);
1563                        Entity entity = null;
1564                        if (index != INDEXFORNOTFOUND) {
1565                                entity = (Entity) _entityList.elementAt(index);
1566                        }
1567                        // because fail to download, set null as data cache item
1568                        entity.setDataCacheObject(null);
1569                        _numberOfFailedDownloadEntities++;
1570                        failedDownloadEntityName.add(entityIdentifier);
1571                }
1572                // Decrement the number of downloads completed.
1573                _entityCountDown.release();
1574
1575                // Check for completion
1576                finished();
1577        }
1578
1579        /**
1580         * Parses the package and fills in the names and types arrays.
1581         * 
1582         * @param eml
1583         * @throws IllegalActionException
1584         */
1585        private void parsePackage(Reader eml) throws IllegalActionException {
1586                Eml200Parser eml2parser;
1587
1588                try { // parse the package for the names and types of the atts
1589                        log.debug("creating parser");
1590                        eml2parser = new Eml200Parser();
1591                        log.debug("parsing...");
1592                        eml2parser.parse(new InputSource(eml));
1593                        if(getNamespace() == null ){
1594                          log.debug("the namespace from parser is " + eml2parser.getNameSpace() + " and set it to ResultRecord");
1595                          setNamespace(eml2parser.getNameSpace());
1596                        }
1597                        // get if this package has image entity
1598                        // _hasImageEntity = eml2parser.getHasImageEntity();
1599                        // log.debug("This pakcage has image entity " + _hasImageEntity);
1600                        log.debug("Done parsing");
1601                } catch (Exception e) {
1602      e.printStackTrace();
1603                        throw new IllegalActionException("Error parsing the eml package: "
1604                                        + e.getMessage());
1605                }
1606
1607                getData(eml2parser); // fills in the _dataVectors data member
1608                setOptionsForSelectedEntityAttribute();
1609
1610                // finished();
1611        }
1612
1613        private void finished() {
1614
1615                if (_entityCountDown.currentCount() > 0) {
1616                        return;
1617                }
1618
1619                try {
1620                        // this method will set up columns info if not sql command
1621                        setSelectedEntityValue(false);
1622
1623                } catch (Exception e) {
1624                        log.debug("The error in set up selected entity is ", e);
1625
1626                }
1627                if (_numberOfFailedDownloadEntities == 0) {
1628                        _icon.setReady();
1629                } else {
1630                        log.error("Some downloads failed");
1631                        StringBuffer entityNameList = new StringBuffer();
1632                        for (int i = 0; i < failedDownloadEntityName.size(); i++) {
1633                                if (entityNameList.length() > 0) {
1634                                        entityNameList.append(", ");
1635                                }
1636                                String name = (String) failedDownloadEntityName.elementAt(i);
1637
1638                                entityNameList.append(name);
1639                        }
1640
1641                        _icon.setError();
1642                        
1643                        //make an exception, but just use the message handler for it
1644                        String msg = 
1645                                "Data entity/entities: "
1646                                + entityNameList.toString()
1647                                + " failed to be downloaded, please check the data link in metadata";
1648                        
1649                        InternalErrorException exception = 
1650                                new InternalErrorException(
1651                                        this,
1652                                        null,
1653                                        "Download error");
1654                        try {
1655                                MessageHandler.warning(msg, exception);
1656                        } catch (CancelException e) {
1657                                //do nothing
1658                        }
1659
1660                }
1661
1662        }
1663
1664        /**
1665         * get the data based on the contents of the package
1666         * 
1667         * @param parser
1668         *            the parser that has already parsed the package.
1669         */
1670        private void getData(Eml200Parser parser) throws IllegalActionException {
1671                if (parser != null) {
1672                        _numberOfEntities = parser.getEntityCount();
1673                        _entityCountDown = new CountDown(_numberOfEntities);
1674                        /*
1675                         * if (parser.getEntityCount() > 1) { throw new
1676                         * IllegalActionException(
1677                         * "Currently this parser only deals with one entity. " +
1678                         * "Please use a package with only one entity.");
1679                         */
1680                        if (_numberOfEntities == 0) {
1681                                throw new IllegalActionException(
1682                                                "There must be at least one entity in the EML package.");
1683                        }
1684                        Hashtable entityList = parser.getEntityHash();
1685                        // initialize selected entity
1686                        _selectedTableEntity = (Entity) entityList.get(entityList.keys()
1687                                        .nextElement());
1688                        // log.debug("entityhash: " + _entityList.toString());
1689
1690                        // start a thread to get data chachedItem
1691                        Enumeration enu = entityList.elements();
1692                        while (enu.hasMoreElements()) {
1693
1694                                Entity singleEntity = (Entity) enu.nextElement();
1695                                log.debug("Adding Entity " + singleEntity);
1696                                // System.out.println("Data URL = "+singleEntity.getURL());
1697                                String dataurl = dataFilePath.getExpression();
1698                                // check for absolute path - relative was not resolving correctly
1699                                try {
1700                                        URL url = dataFilePath.asURL();
1701                                        if (url != null) {
1702                                                dataurl = url.getPath();
1703                                        }
1704                                } catch (Exception e) {
1705                                        // do nothing - just ignore it
1706                                }
1707                                // use the dataurl parameter if it has been entered
1708                                // ie replace the url in the Entity object
1709                                // use: to set local data file
1710                                if (dataurl != null && dataurl.length() > 0) {
1711                                        if (dataurl.startsWith("/")) {
1712                                                dataurl = "file://" + dataurl;
1713                                        } else if (dataurl.startsWith("file:/")) {
1714                                                if ((!dataurl.startsWith("file://"))
1715                                                                && (!dataurl.startsWith("file:///"))) {
1716                                                        dataurl = dataurl
1717                                                                        .replaceFirst("file:/", "file:///");
1718                                                }
1719                                        } else {
1720                                                dataurl = "file:///" + dataurl;
1721                                        }
1722                                        singleEntity.setURL(dataurl);
1723                                        // System.out.println("Data URL(1) = "+singleEntity.getURL());
1724                                }
1725                                _entityList.add(singleEntity);
1726                                getDataItemFromCache(singleEntity);
1727                        }
1728
1729                }
1730        }
1731
1732        /**
1733         * This method will start a thread to get the cached data item at tableEntity.getURL()
1734         * 
1735         * @param tableEntity
1736         */
1737        private void getDataItemFromCache(Entity tableEntity) {
1738                if (tableEntity == null) {
1739                        log.debug("The table enity is null and couldn't get cached data item");
1740                        return;
1741                }
1742                String fileURLStr = tableEntity.getURL();
1743                log.debug("Data URL is: " + fileURLStr);
1744                // we need to distinguish zip file and generate
1745                // String compression = tableEntity.getCompressionMethod();
1746                EcogridDataCacheItem cachedDataItem = null;
1747                String dataItemName = "Data " + fileURLStr + " from " + this.getEndpoint();
1748                if (tableEntity.getHasZipDataFile()) {
1749                        log.debug("This is a zip data cacheItem");
1750                        cachedDataItem = (EcogridZippedDataCacheItem) DataCacheManager
1751                                        .getCacheItem(this, dataItemName, tableEntity.getName(), fileURLStr,
1752                                                        EcogridZippedDataCacheItem.class.getName());
1753                        // _isZipDataFile = true;
1754                } else if (tableEntity.getHasGZipDataFile()) {
1755                        log.debug("This is a gzip data cacheItem");
1756                        cachedDataItem = (EcogridGZippedDataCacheItem) DataCacheManager
1757                                        .getCacheItem(this, dataItemName, tableEntity.getName(), fileURLStr,
1758                                                        EcogridGZippedDataCacheItem.class.getName());
1759                        if (tableEntity.getHasTarDataFile()) {
1760                                log.debug("This is gizp and tar data cache item");
1761                                cachedDataItem.setIsTarFile(true);
1762                        }
1763
1764                } else if (tableEntity.getHasTarDataFile()) {
1765                        log.debug("This is a tar data cacheItem");
1766                        cachedDataItem = (EcogridTarArchivedDataCacheItem) DataCacheManager
1767                                        .getCacheItem(this, dataItemName, tableEntity.getName(), fileURLStr,
1768                                                        EcogridTarArchivedDataCacheItem.class.getName());
1769                } else {
1770                        log.debug("This is a uncompressed data cacheItem");
1771                        cachedDataItem = (EcogridDataCacheItem) DataCacheManager
1772                                        .getCacheItem(this, dataItemName, tableEntity.getName(), fileURLStr,
1773                                                        EcogridDataCacheItem.class.getName()); 
1774                }
1775                if (cachedDataItem.isEmpty()) {
1776                        String endPoint = null;
1777                        try {
1778                                // System.out.println("before get endpoint .........");
1779                                endPoint = this.getEndpoint();
1780                                // System.out.println("after get endpoint ............");
1781                        } catch (Exception e) {
1782                                log.debug("the exeption for get endpoint is " + e.getMessage());
1783                        }
1784
1785                        if (endPoint == null) {
1786                                cachedDataItem.setEndPoint(ENDPOINT);
1787                        } else {
1788                                cachedDataItem.setEndPoint(endPoint);
1789                        }
1790                        // add entity identifier order to track the cachedata item associated
1791                        // with which entity(In complete method)
1792                        cachedDataItem.setEntityIdentifier(tableEntity.getName());
1793                        cachedDataItem.start();
1794                }
1795
1796        }
1797
1798        /**
1799         * This method will set options for "Selected Entity" after getting all data
1800         * entities. The options will be the list of entity names.
1801         */
1802        private void setOptionsForSelectedEntityAttribute() {
1803                if (_entityList != null) {
1804                        int length = _entityList.size();
1805                        for (int i = 0; i < length; i++) {
1806                                Entity entity = (Entity) _entityList.elementAt(i);
1807                                String entityName = entity.getName();
1808                                if (entityName != null && !entityName.trim().equals("")) {
1809                                        selectedEntity.addChoice(entityName);
1810                                }
1811
1812                        }
1813
1814                }
1815        }
1816
1817        /**
1818         * Method to set up selected entity. If Attribute "selectedEntity" already
1819         * has value, look up the enitytList and found the selected entity. If
1820         * attribute "selectedEntity doesn't have any value, we choose the index 0
1821         * as selected entity.
1822         */
1823        private void setSelectedEntityValue(boolean fromAttributeChange)
1824                        throws IllegalActionException {
1825
1826                String selectedEntityName = selectedEntity.stringValue();
1827                log.debug("The selected entity name is " + selectedEntityName);
1828                if (!_entityList.isEmpty()) {
1829
1830                        if (selectedEntityName != null
1831                                        && !selectedEntityName.trim().equals("")) {
1832                                // already has a selected entity in momol
1833                                log.debug("There is a selected entity in actor");
1834                                int selectedIndex = lookupEntityListName(selectedEntityName);
1835                                // System.out.println("index of selected entity is "+selectedIndex);
1836                                if (selectedIndex == INDEXFORNOTFOUND) {
1837                                        throw new IllegalActionException(
1838                                                        "The selected Entity in momol cound't be found");
1839                                } else {
1840                                        _selectedTableEntity = (Entity) _entityList
1841                                                        .elementAt(selectedIndex);
1842                                        if (!_hasSQLCommand) {
1843                                                _columns = _selectedTableEntity.getFields();
1844                                        }
1845                                        _selectedCachedDataItem = (EcogridDataCacheItem) _selectedTableEntity
1846                                                        .getDataCacheObject();
1847                                        generteLabelListAndTypteListFromColumns();
1848                                        reconfigurePorts("Selected Entity changed");
1849                                }
1850                        } else {
1851                                // no selected enity in moml and we need selected one
1852                                log.debug("There is NOT a selected entity in actor");
1853                                _selectedTableEntity = (Entity) _entityList
1854                                                .elementAt(DEFAULTINDEX);
1855                                _selectedCachedDataItem = (EcogridDataCacheItem) _selectedTableEntity
1856                                                .getDataCacheObject();
1857                                reconfigurePorts("Selected Entity changed");
1858                                String entityName = _selectedTableEntity.getName();
1859                                log.debug("set the default entity name " + entityName
1860                                                + "because the there is no selected entity");
1861                                selectedEntity.setExpression(entityName);
1862                                if (!fromAttributeChange) {
1863                                        log
1864                                                        .debug("send a moml request for adding selected Entity parameter");
1865                                        StringBuffer buffer = new StringBuffer();
1866                                        buffer.append("<property name=\"");
1867                                        buffer.append("selectedEntity");
1868                                        buffer.append("\" class=\"");
1869                                        buffer.append(selectedEntity.getClassName());
1870                                        buffer.append("\" value=\"");
1871                                        buffer.append(entityName);
1872                                        buffer.append("\"/>");
1873                                        String moml = buffer.toString();
1874                                        log.debug("The moml string is " + moml);
1875                                        NamedObj container = (NamedObj) this.getContainer();
1876                                        NamedObj composite = (NamedObj) container.getContainer();
1877                                        MoMLChangeRequest request = new MoMLChangeRequest(this,
1878                                                        this, moml.toString());
1879                                        request.setUndoable(true);
1880                                        this.requestChange(request);
1881
1882                                }
1883                        }
1884                }
1885        }
1886
1887        /**
1888         * Method to find the entity index in EntityList which has the same name as
1889         * the given string. If no entity index is found, -1 will be returned.
1890         */
1891        private int lookupEntityListName(String givenString) {
1892                log.debug("Looking for entity named " + givenString);
1893                int index = INDEXFORNOTFOUND;
1894                if (givenString != null && !givenString.trim().equals("")) {
1895                        int size = _entityList.size();
1896                        for (int i = 0; i < size; i++) {
1897                                Entity entity = (Entity) _entityList.elementAt(i);
1898                                String entityName = entity.getName();
1899                                if (entityName != null && !entityName.trim().equals("")
1900                                                && entityName.equals(givenString)) {
1901                                        index = i;
1902
1903                                }
1904                        }
1905                }
1906                log.debug("The selected index is " + index);
1907                return index;
1908        }
1909
1910        /**
1911         * This method will generate selected column list.
1912         */
1913        private void generteLabelListAndTypteListFromColumns() {
1914
1915                if (_columns != null) {
1916                        int size = _columns.size();
1917                        _selectedColumnLabelList = new String[size];
1918                        _selectedColumnTypeList = new Type[size];
1919                        for (int i = 0; i < size; i++) {
1920                                DSTableFieldIFace column = (DSTableFieldIFace) _columns
1921                                                .elementAt(i);
1922                                _selectedColumnLabelList[i] = column.getName();
1923                                String type = column.getDataType();
1924                                _selectedColumnTypeList[i] = getBaseType(type);
1925
1926                        }
1927                }
1928
1929        }
1930
1931        /**
1932         * Method to transform a string array to token array based on given type.
1933         */
1934        static Token[] transformStringVectorToTokenArray(Vector stringVector,
1935                        Type type, Vector missingValuecode) throws IllegalActionException {
1936                if (stringVector == null) {
1937                        return null;
1938                }
1939                int size = stringVector.size();
1940                Token[] columnToken = new Token[size];
1941                for (int j = 0; j < size; j++) {
1942                        String eleStr = (String) stringVector.elementAt(j);
1943                        log.debug("The column value " + eleStr);
1944                        Token val = transformStringToToken(eleStr, type, missingValuecode,
1945                                        null);
1946                        columnToken[j] = val;
1947                }
1948
1949                return columnToken;
1950        }
1951
1952        /**
1953         * Method to transform a string to token based on given type and
1954         * missingValue.
1955         */
1956        static Token transformStringToToken(String eleStr, Type type,
1957                        Vector missingValue, String columnName)
1958                        throws IllegalActionException {
1959                Token val = null;
1960                if (missingValue != null && !missingValue.isEmpty()) {
1961                        if (missingValue.contains(eleStr)) {
1962                                eleStr = null;
1963                        }
1964                }
1965                String elementError = "Element \"";
1966                String errorMessage1 = "\" couldn't be in the ";
1967                String errorMessage2 = " column " + columnName
1968                                + ". It probably is a missing value code, however metadata "
1969                                + "doesn't describe it. Please make a double check.";
1970                // find the data type for each att
1971                if (type == BaseType.INT) {
1972
1973                        if (eleStr != null && !eleStr.equals("")) {
1974                                try {
1975                                        val = new IntToken(new Integer(eleStr).intValue());
1976                                } catch (NumberFormatException e) {
1977                                        throw (new IllegalActionException(elementError + eleStr
1978                                                        + errorMessage1 + "integer" + errorMessage2));
1979                                }
1980                        } else {
1981                                // eleStr = null;
1982                                val = IntToken.NIL;
1983                                // val.nil();
1984                        }
1985
1986                } else if (type == BaseType.DOUBLE) {
1987                        if (eleStr != null && !eleStr.equals("")) {
1988                                try {
1989                                        val = new DoubleToken(new Double(eleStr).doubleValue());
1990                                } catch (NumberFormatException e) {
1991                                        throw (new IllegalActionException(elementError + eleStr
1992                                                        + errorMessage1 + "numerical" + errorMessage2));
1993                                }
1994                        } else {
1995                                // eleStr = null;
1996                                val = DoubleToken.NIL;
1997                        }
1998
1999                } else if (type == BaseType.LONG) {
2000                        if (eleStr != null && !eleStr.equals("")) {
2001                                try {
2002                                        val = new LongToken(new Long(eleStr).longValue());
2003                                } catch (NumberFormatException e) {
2004                                        throw (new IllegalActionException(elementError + eleStr
2005                                                        + errorMessage1 + "numerical" + errorMessage2));
2006                                }
2007                        } else {
2008                                // eleStr = null;
2009                                val = LongToken.NIL;
2010                                // val.nil();
2011                        }
2012
2013                } else if (type == BaseType.STRING) {
2014                        if (eleStr != null) {
2015                                val = new StringToken(eleStr);
2016                        } else {
2017                                // eleStr = "nil";
2018                                val = StringToken.NIL;
2019                                // val.nil();
2020                        }
2021
2022                } else {
2023                        val = new IntToken(0);
2024                }
2025                return val;
2026        }
2027
2028        /**
2029         * Callback method that indicates that the workflow is being stopped. This
2030         * method is executed from the Director when the user presses the "stop"
2031         * button. All this does is release the execution thread if it happens to be
2032         * blocked in the initialize method waiting for _metaDataCompleted or
2033         * _entityCountDown to complete.
2034         */
2035        @Override
2036    public void stop() {
2037                log.debug("Stopping");
2038
2039                synchronized (_metadataCompleted) {
2040                        _metadataCompleted.notifyAll();
2041                }
2042                synchronized (_entityCountDown) {
2043                        _entityCountDown.notifyAll();
2044                }
2045                // TODO Auto-generated method stub
2046                super.stop();
2047        }
2048
2049        /**
2050         * This method will determine if the resultset is complete.
2051         * 
2052         * @return true if the result has been completely retrieved
2053         */
2054        public boolean isEndOfResultset() throws SQLException {
2055                if (_resultSet != null && _resultSet.getResultSet() != null
2056                                && _resultSet.getResultSet().isAfterLast()) {
2057                        return true;
2058                } else {
2059                        return false;
2060                }
2061        }
2062
2063        /*
2064         * This method will remove RecordDetails attribute from this entity. Those
2065         * attribute is useful for ResultRecord, but is useless for this class and
2066         * we wouldn't let it show up.
2067         */
2068        private void removeResultRecordDetailsAtrribute()
2069                        throws IllegalActionException, NameDuplicationException {
2070                // System.out.println("at begining");
2071                List list = this.attributeList();
2072                if (list != null) {
2073                        // System.out.println("attribute list is not null");
2074                        for (int j = 0; j < list.size(); j++) {
2075                                // System.out.println("the attribute list's size is "+recordDetailList.size());
2076                                ptolemy.kernel.util.Attribute att = (ptolemy.kernel.util.Attribute) list
2077                                                .get(j);
2078                                String attName = att.getName();
2079                                // System.out.println("------- the attribute "+att.getName());
2080                                Vector recordDetailList = this.getRecordDetailList();
2081
2082                                if (recordDetailList != null && attName != null
2083                                                && recordDetailList.contains(attName)) {
2084                                        // System.out.println("------- remove the attribute "+att.getName());
2085                                        att.setContainer(null);
2086
2087                                }
2088                        }
2089                }
2090
2091        }
2092
2093        /**
2094         * First checks if checkVersion parameter is selected. If it is selected the
2095         * Ecogrid is checked for a more recent version of the recordid. If there is
2096         * a newer version available, user is asked if they would like to use the
2097         * newer version in the workflow. If they really do want to use the newer
2098         * version, the most recent recorid is set in the actor attributes.
2099         * 
2100         */
2101        private String getMostRecentRecordId() {
2102        
2103                // look up the identifcation service based on the query/get service
2104                EcoGridService queryService = 
2105                        EcoGridServicesController.getInstance().getService(getEndpoint());
2106                EcoGridService lsidService = 
2107                        EcoGridServicesController.getInstance().getService(
2108                                        queryService.getServiceGroup(),
2109                                        EcoGridServicesController.IDENTIFIERSERVICETYPE);
2110                log.debug("identifier service endpoint: " + lsidService.getEndPoint());
2111        
2112                // check for newer version of the eml
2113                String mostRecentRecordId = null;
2114                try {
2115        
2116                        // translate the recordId to an lsid
2117                        KeplerLSID recordLsid = 
2118                                new KeplerLSID(getRecordId(), "kepler-project.org");
2119                        log.debug("translated recordLsid=" + recordLsid);
2120        
2121                        // look up the next revision
2122                        IdentifierServiceClient lsidClient = 
2123                                new IdentifierServiceClient(lsidService.getEndPoint());
2124                        KeplerLSID temp = 
2125                                new KeplerLSID(lsidClient.getNextRevision(recordLsid.toString()));
2126                        log.debug("next recordLsid=" + temp);
2127        
2128                        // subtract from the next revision to get the current latest
2129                        mostRecentRecordId = 
2130                                temp.getNamespace() 
2131                                + "." + temp.getObject()
2132                                + "." + (temp.getRevision().longValue() - 1);
2133                        log.debug("mostRecentRecordId=" + mostRecentRecordId);
2134                        
2135                } catch (Exception e) {
2136                        log.error("Problem looking up most recent record for id: "
2137                                        + getRecordId() + "\nError is: " + e.getMessage());
2138                        e.printStackTrace();
2139                }
2140                
2141                return mostRecentRecordId;
2142        }
2143        
2144        /**
2145   * Overwrite the method in Parent class -- ResultRecord
2146   */
2147  @Override
2148public Reader getFullRecord() {
2149    //System.out.println("in eml actor ==================");
2150    Reader recordReader = null;
2151    if(this.getEndpoint() != null) {
2152      //System.out.println("end point is not null");
2153      return super.getFullRecord();
2154    } else  {
2155      //System.out.println("end point is === null");
2156      //System.out.println("final path is "+emlFileFinalPath);
2157      String endpoint = null;
2158      EcogridMetaDataCacheItem item = (EcogridMetaDataCacheItem) DataCacheManager
2159          .getCacheItem(new MetadataComplete(),
2160              "MetaData " + emlFileFinalPath, endpoint,
2161              EcogridMetaDataCacheItem.class.getName());
2162      if (item.isEmpty()) {
2163        //System.out.println(" in item is empty branch");
2164        item.setEndPoint(endpoint);
2165        item.setRecordId(emlFileFinalPath);
2166        item.start();
2167        while (!item.isError() && !item.isReady()) {
2168          // do nothing, just waiting
2169          //System.out.println("Waiting!!!!!!!!!!");
2170        }
2171      } 
2172      // make sure the item is finished    
2173      // when it is ready
2174      InputStream stream = item.getDataInputStream();
2175      if (stream != null) {
2176        //System.out.println(" stream is not null");
2177        recordReader = new InputStreamReader(stream);
2178      }
2179      //System.out.println("the return reader is "+recordReader);
2180      return recordReader;
2181    }
2182  
2183  }
2184}