| | |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Dgn7fileException.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Dgn7fileHeader.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Dgn7fileReader.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Dgn7fileWriter.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Element.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/ElementFactory.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/ElementType.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/EllipseElement.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/FrammeAttributeData.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/Dgn7OracleReaderTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/Dgn7TextElementReaderTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/Dgn7fileReaderTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/Dgn7fileWriterTest.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/ElementFactoryTest.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/OracleTarget.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/Demo.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/HV88491-1.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/HV88491_0888888.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/HV88494_0.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/dgnseed2d.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/testHV.dgn -text |
| | | xdgnjobs/ximple-elmparser/pom.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-elmparser/src/main/java/com/ximple/eofms/XElementFetcher.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-elmparser/src/main/resources/com/ximple/eofms/XElementParser.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-elmparser/src/main/resources/com/ximple/eofms/XElementParser_zh_TW.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-elmparser/src/main/resources/log4j.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/log4j.properties -text |
| | | xdgnjobs/ximple-jobcarrier/pom.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-jobcarrier/quartz.properties -text |
| | | xdgnjobs/ximple-jobcarrier/quartz_jobs.xml -text |
| | | xdgnjobs/ximple-jobcarrier/src/main/java/com/ximple/eofms/XQuartzJobCarrier.java svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/src/main/java/com/ximple/eofms/XQuartzJobWizard.java svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/com/ximple/eofms/XQuartzJobWizard.properties svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/log4j.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/quartz.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/quartz_jobs.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/quartz_jobs_edb.xml -text svneol=unset#text/xml |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/quartz_jobs_shapefiles.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-jobcarrier/src/test/java/com/ximple/eofms/XQuartzJobCarrierTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/pom.xml svneol=native#text/xml |
| | |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/AbstractOracleDatabaseJob.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/DataReposVersionManager.java -text |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/DummyFeatureConvertJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2EdbGeoJob.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2MySQLJob.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2OraSDOJob.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/AbstractDgnFileJobContext.java -text |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/AbstractOracleJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/OracleUpgradeJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/AbstractDgnToEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/AbstractOracleToEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/DummyFeatureConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/FeatureDgnConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/GeneralDgnConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/IndexDgnConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/OracleConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/AbstractDgnToMySQLJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/AbstractOracleToMySQLJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/DummyFeatureConvertMySQlJobContext.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/ByteArrayCompressor.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/ColorTableMapping.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/DefaultColorTable.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/DigesterUtils.java -text |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/EPSG3825GeometryConverterDecorator.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/EPSG3826GeometryConverterDecorator.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/FeatureTypeBuilderUtil.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-spatialjob/src/main/resources/conf/DefaultConvertShpFilter.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-spatialjob/src/main/resources/conf/DefaultMapGroups.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/filter/ElementDispatcherTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/util/FeatureTypeBuilderUtilTest.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/util/FileUtilsTest.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/test/resources/com/ximple/eofms/filter/test-data/testElementFilter.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-spatialjob/src/test/resources/com/ximple/eofms/filter/test-data/testRules.xml svneol=native#text/xml |
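These entries appear to record per-file Subversion properties carried through the migration: svneol=native#text/plain would correspond to svn:eol-style=native plus svn:mime-type=text/plain, while -text svneol=unset leaves EOL translation off for binary content such as the .dgn test data. As a sketch, re-applying one entry by hand with stock svn commands (the path is taken from the list above) might look like:

    svn propset svn:eol-style native xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Element.java
    svn propset svn:mime-type text/plain xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Element.java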
| | |
| | | <oracle.jdbc>true</oracle.jdbc> |
| | | <test.maxHeapSize>512M</test.maxHeapSize> |
| | | <src.output>${basedir}/target</src.output> |
| | | <java5>1.5</java5> |
| | | <xdgnio.version>1.0.1</xdgnio.version> |
| | | <gt2.version>2.4.5</gt2.version> |
| | | <java5>1.6</java5> |
| | | <xdgnio.version>2.1.2</xdgnio.version> |
| | | <gt.version>10.6.x</gt.version> |
| | | <failIfNoTests>false</failIfNoTests> |
| | | <stress.skip.pattern></stress.skip.pattern> |
| | | <online.skip.pattern></online.skip.pattern> |
| | | <!--java.awt.headless>false</java.awt.headless--> |
| | | <allow.test.failure.ignore>false</allow.test.failure.ignore> |
| | | </properties> |
| | | |
| | | <profiles> |
| | |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-javadoc-plugin</artifactId> |
| | | <version>2.6.1</version> |
| | | <configuration> |
| | | <source>1.5</source> |
| | | </configuration> |
| | |
| | | </site> |
| | | </distributionManagement> |
| | | </profile> |
| | | |
| | | <!-- =========================================================== --> |
| | | <!-- Build Configuration --> |
| | | <!-- copies all JARs into a single directory.                 --> |
| | | <!-- =========================================================== --> |
| | | <profile> |
| | | <id>collect</id> |
| | | <build> |
| | | <plugins> |
| | | <plugin> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-jar-collector</artifactId> |
| | | <version>${project.version}</version> |
| | | <executions> |
| | | <execution> |
| | | <goals> |
| | | <goal>collect</goal> |
| | | </goals> |
| | | </execution> |
| | | </executions> |
| | | </plugin> |
| | | </plugins> |
| | | </build> |
| | | </profile> |
| | | </profiles> |
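The collect profile above binds the ximple-jar-collector plugin's collect goal into the build. A typical invocation, assuming the plugin module has been installed into the local repository first, would be:

    mvn install -Pcollect

As the mojo source later in this changeset shows, each module's JAR and its compile/runtime dependencies are then gathered under a "binaries" subdirectory of the root project's target directory.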
| | | |
| | | <scm> |
| | |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <packaging>pom</packaging> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | <name>ximple-dgnjobs</name> |
| | | <url>http://www.ximple.com.tw</url> |
| | | |
| | |
| | | <url>http://www.ximple.com.tw</url> |
| | | </organization> |
| | | |
| | | <inceptionYear>2008</inceptionYear> |
| | | <inceptionYear>2014</inceptionYear> |
| | | |
| | | <!-- =========================================================== --> |
| | | <!-- Issue management and mailing lists.                      --> |
| | |
| | | <!-- =========================================================== --> |
| | | <dependencyManagement> |
| | | <dependencies> |
| | | <!-- GeoAPI and its dependencies --> |
| | | <dependency> |
| | | <groupId>org.opengis</groupId> |
| | | <artifactId>geoapi-nogenerics</artifactId> |
| | | <version>2.1.1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>javax.units</groupId> |
| | | <artifactId>jsr108</artifactId> |
| | | <version>0.01</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>com.vividsolutions</groupId> |
| | | <artifactId>jts</artifactId> |
| | | <version>1.9</version> |
| | | <version>1.13</version> |
| | | </dependency> |
| | | |
| | | <!-- Apache --> |
| | |
| | | <dependency> |
| | | <groupId>commons-beanutils</groupId> |
| | | <artifactId>commons-beanutils</artifactId> |
| | | <version>1.7</version> |
| | | <version>1.8.2</version> |
| | | </dependency> |
| | | --> |
| | | <dependency> |
| | |
| | | <version>3.2.1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-digester</groupId> |
| | | <artifactId>commons-digester</artifactId> |
| | | <version>1.8</version> |
| | | <groupId>org.apache.commons</groupId> |
| | | <artifactId>commons-digester3</artifactId> |
| | | <version>3.2</version> |
| | | <!--classifier>with-deps</classifier--> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-pool</groupId> |
| | | <artifactId>commons-pool</artifactId> |
| | | <version>1.4</version> |
| | | <version>1.6</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-logging</groupId> |
| | |
| | | <dependency> |
| | | <groupId>commons-cli</groupId> |
| | | <artifactId>commons-cli</artifactId> |
| | | <version>1.1</version> |
| | | <version>1.2</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-io</groupId> |
| | | <artifactId>commons-io</artifactId> |
| | | <version>2.4</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>commons-net</groupId> |
| | | <artifactId>commons-net</artifactId> |
| | | <version>2.1</version> |
| | | </dependency> |
| | | |
| | | |
| | | <dependency> |
| | | <groupId>log4j</groupId> |
| | | <artifactId>log4j</artifactId> |
| | | <version>1.2.15</version> |
| | | <version>1.2.17</version> |
| | | <!-- Same as the dependency in commons-logging --> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.apache.poi</groupId> |
| | | <artifactId>poi</artifactId> |
| | | <version>3.2-FINAL</version> |
| | | <version>3.9</version> |
| | | </dependency> |
| | | |
| | | <!-- geotools --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-api</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-api</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-main</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-main</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-shapefile</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-shapefile</artifactId> |
| | | <version>${gt2.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-sample-data</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-sample-data</artifactId> |
| | | <version>${gt.version}</version> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-data</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-data</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-jdbc</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-opengis</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-oracle-spatial</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-metadata</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-postgis</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-jdbc</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-mysql</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-oracle</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-postgis</artifactId> |
| | | <version>${gt.version}</version> |
| | | <!--exclusions> |
| | | <exclusion> |
| | | <groupId>org.postgis</groupId> |
| | | <artifactId>postgis-driver</artifactId> |
| | | </exclusion> |
| | | </exclusions--> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-mysql</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | |
| | | <!-- gt-main and gt-sample-data both pull in gt-referencing (possibly at different versions), so pin it here as a tie breaker for dependency mediation --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-referencing</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <artifactId>gt-referencing</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-hsql</artifactId> |
| | | <version>${gt.version}</version> |
| | | <!--exclusions> |
| | | <exclusion> |
| | | <groupId>com.h2database</groupId> |
| | | <artifactId>h2</artifactId> |
| | | </exclusion> |
| | | </exclusions--> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-wkt</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>org.jdom</groupId> |
| | | <artifactId>jdom</artifactId> |
| | | <version>1.1</version> |
| | | <version>1.1.3</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>org.apache.velocity</groupId> |
| | | <artifactId>velocity</artifactId> |
| | | <version>1.5</version> |
| | | </dependency> |
| | | |
| | | <!-- We need this to make the referencing module useful --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-epsg-hsql</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <scope>test</scope> |
| | | <version>1.7</version> |
| | | </dependency> |
| | | |
| | | <!-- ORACLE --> |
| | |
| | | <dependency> |
| | | <groupId>com.oracle</groupId> |
| | | <artifactId>ojdbc5</artifactId> |
| | | <version>11.1.0</version> |
| | | <version>11.1.0.7.0</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>com.oracle</groupId> |
| | |
| | | <version>11.1.0</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>postgresql</groupId> |
| | | <artifactId>postgresql</artifactId> |
| | | <version>8.3-603.jdbc3</version> |
| | | </dependency> |
| | | <version>8.4-702.jdbc3</version> |
| | | </dependency--> |
| | | <dependency> |
| | | <groupId>org.postgis</groupId> |
| | | <artifactId>postgis-driver</artifactId> |
| | | <version>1.3.3</version> |
| | | <version>2.0.2</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>mysql</groupId> |
| | | <artifactId>mysql-connector-java</artifactId> |
| | | <version>5.1.6</version> |
| | | </dependency> |
| | | <version>5.1.18</version> |
| | | </dependency--> |
| | | |
| | | <!-- opensymphony --> |
| | | <!--dependency> |
| | | <groupId>com.h2database</groupId> |
| | | <artifactId>h2</artifactId> |
| | | <version>1.3.163</version> |
| | | </dependency--> |
| | | |
| | | <!-- EnterpriseDB --> |
| | | <dependency> |
| | | <groupId>opensymphony</groupId> |
| | | <artifactId>quartz</artifactId> |
| | | <version>1.6.5</version> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>xedb-gt-geospatial</artifactId> |
| | | <version>0.1.1</version> |
| | | </dependency> |
| | | |
| | | |
| | | <!-- quartz-scheduler--> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz</artifactId> |
| | | <version>2.2.1</version> |
| | | <exclusions> |
| | | <exclusion> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | </exclusion> |
| | | </exclusions> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz-jobs</artifactId> |
| | | <version>2.2.1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | <version>1.7.5</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-log4j12</artifactId> |
| | | <version>1.7.5</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>jcl-over-slf4j</artifactId> |
| | | <version>1.7.5</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.awl</groupId> |
| | | <artifactId>awl</artifactId> |
| | | <version>1.1.0-rc</version> |
| | | </dependency> |
| | | |
| | | <!--dependency> |
| | | <groupId>it.geosolutions</groupId> |
| | | <artifactId>geoserver-manager</artifactId> |
| | | <version>1.6-SNAPSHOT</version> |
| | | </dependency--> |
| | | |
| | | <dependency> |
| | | <groupId>xerces</groupId> |
| | | <artifactId>xercesImpl</artifactId> |
| | | <version>2.11.0</version> |
| | | </dependency> |
| | | <!-- Tests or legacy --> |
| | | <dependency> |
| | | <groupId>org.testng</groupId> |
| | | <artifactId>testng</artifactId> |
| | | <version>5.8</version> |
| | | <classifier>jdk15</classifier> |
| | | <version>6.8.7</version> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | </dependencies> |
| | |
| | | <!-- Dependencies to be inherited by all modules. --> |
| | | <!-- =========================================================== --> |
| | | <dependencies> |
| | | <dependency> |
| | | <artifactId>geoapi-nogenerics</artifactId> |
| | | <groupId>org.opengis</groupId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <artifactId>jsr108</artifactId> |
| | | <groupId>javax.units</groupId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>com.vividsolutions</groupId> |
| | | <artifactId>jts</artifactId> |
| | |
| | | <groupId>commons-collections</groupId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-digester</groupId> |
| | | <artifactId>commons-digester</artifactId> |
| | | <groupId>org.apache.commons</groupId> |
| | | <artifactId>commons-digester3</artifactId> |
| | | <!--classifier>with-deps</classifier--> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-pool</groupId> |
| | |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <artifactId>gt2-api</artifactId> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-api</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <artifactId>gt2-main</artifactId> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-main</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-hsql</artifactId> |
| | | </dependency> |
| | | |
| | | <!--dependency> |
| | | <groupId>com.h2database</groupId> |
| | | <artifactId>h2</artifactId> |
| | | </dependency--> |
| | | |
| | | <dependency> |
| | | <artifactId>testng</artifactId> |
| | | <groupId>xerces</groupId> |
| | | <artifactId>xercesImpl</artifactId> |
| | | </dependency> |
| | | |
| | | <!--dependency> |
| | | <groupId>it.geosolutions</groupId> |
| | | <artifactId>geoserver-manager</artifactId> |
| | | <exclusions> |
| | | <exclusion> |
| | | <groupId>commons-io</groupId> |
| | | <artifactId>commons-io</artifactId> |
| | | </exclusion> |
| | | </exclusions> |
| | | </dependency--> |
| | | |
| | | <dependency> |
| | | <groupId>org.testng</groupId> |
| | | <classifier>jdk15</classifier> |
| | | <artifactId>testng</artifactId> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | </dependencies> |
| | |
| | | <plugins> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-assembly-plugin</artifactId> |
| | | <version>2.1</version> |
| | | <configuration> |
| | | <descriptors> |
| | | <descriptor>build/maven/assembly/binaryDist.xml</descriptor> |
| | | <descriptor>build/maven/assembly/sourceDist.xml</descriptor> |
| | | </descriptors> |
| | | </configuration> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-clean-plugin</artifactId> |
| | | <version>2.1.1</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-clover-plugin</artifactId> |
| | | <version>2.3</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-compiler-plugin</artifactId> |
| | | <version>2.0.2</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-install-plugin</artifactId> |
| | | <version>2.1</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-jar-plugin</artifactId> |
| | | <version>2.1</version> |
| | | <artifactId>maven-eclipse-plugin</artifactId> |
| | | <version>2.5</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-javadoc-plugin</artifactId> |
| | | <version>2.4</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-plugin-plugin</artifactId> |
| | | <version>2.3</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-pmd-plugin</artifactId> |
| | | <version>2.4</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-project-info-reports-plugin</artifactId> |
| | | <version>2.0.1</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-resources-plugin</artifactId> |
| | | <version>2.2</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-site-plugin</artifactId> |
| | | <version>2.0-beta-5</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-surefire-plugin</artifactId> |
| | | <version>2.4.2</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-surefire-report-plugin</artifactId> |
| | | <version>2.4.2</version> |
| | | </plugin> |
| | | |
| | | <!-- http://www.ibiblio.org/maven2/org/codehaus/mojo/ --> |
| | | <plugin> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>changelog-maven-plugin</artifactId> |
| | | <version>2.0-beta-1</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>changes-maven-plugin</artifactId> |
| | | <version>2.0-beta-1</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>jxr-maven-plugin</artifactId> |
| | | <version>2.0-beta-1</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>taglist-maven-plugin</artifactId> |
| | | <version>2.2</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>jalopy-maven-plugin</artifactId> |
| | | <version>1.0-SNAPSHOT</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-source-plugin</artifactId> |
| | | <configuration> |
| | | <outputDirectory>${src.output}</outputDirectory> |
| | | <attach>false</attach> |
| | | </configuration> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-eclipse-plugin</artifactId> |
| | | <version>2.4</version> |
| | | <version>2.6.1</version> |
| | | </plugin> |
| | | </plugins> |
| | | </pluginManagement> |
| | | |
| | | <!-- http://www.ibiblio.org/maven2/org/apache/maven/wagon/ --> |
| | | <!-- |
| | | <extensions> |
| | | <extension> |
| | | <groupId>org.apache.maven.wagon</groupId> |
| | |
| | | <version>1.0-beta-2</version> |
| | | </extension> |
| | | </extensions> |
| | | --> |
| | | |
| | | <plugins> |
| | | <!-- ======================================================= --> |
| | |
| | | <convention>gt2/jalopygeotools.xml</convention> |
| | | <failOnError>false</failOnError> |
| | | </configuration> |
| | | <!-- |
| | | <dependencies> |
| | | <dependency> |
| | | <groupId>org.geotools.maven</groupId> |
| | | <artifactId>gt2-build-configs</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | </dependencies> |
| | | --> |
| | | </plugin> |
| | | |
| | | |
| | |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-compiler-plugin</artifactId> |
| | | <configuration> |
| | | <source>1.5</source> |
| | | <source>1.6</source> |
| | | <!-- The -source argument for the Java compiler. --> |
| | | <target>1.5</target> |
| | | <target>1.6</target> |
| | | <!-- The -target argument for the Java compiler. --> |
| | | <debug>true</debug> |
| | | <!-- Whether to include debugging information. --> |
| | | <encoding>ISO-8859-1</encoding> |
| | | <encoding>UTF-8</encoding> |
| | | <!-- The -encoding argument for the Java compiler. --> |
| | | </configuration> |
| | | </plugin> |
| | |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-surefire-plugin</artifactId> |
| | | <version>2.14.1</version> |
| | | <configuration> |
| | | <includes> |
| | | <include>**/*Test.java</include> |
| | |
| | | <!-- ======================================================= --> |
| | | <!-- Code coverage --> |
| | | <!-- ======================================================= --> |
| | | <!-- |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-clover-plugin</artifactId> |
| | | <version>2.4</version> |
| | | <configuration> |
| | | <jdk>1.5</jdk> |
| | | <licenseLocation> |
| | | http://svn.geotools.org/geotools/branches/2.4.x/build/maven/build-configs/src/main/resources/gt2/clover.license |
| | | http://svn.geotools.org/geotools/branches/2.6.x/build/maven/build-configs/src/main/resources/gt2/clover.license |
| | | </licenseLocation> |
| | | <flushPolicy>directed</flushPolicy> |
| | | </configuration> |
| | |
| | | <phase>pre-site</phase> |
| | | <goals> |
| | | <goal>instrument</goal> |
| | | <!-- aggregation is disabled due to the bug: --> |
| | | <!-- http://jira.codehaus.org/browse/MCLOVER-34 --> |
| | | </goals> |
| | | </execution> |
| | | </executions> |
| | |
| | | <dependency> |
| | | <groupId>org.geotools.maven</groupId> |
| | | <artifactId>gt2-build-configs</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | </dependencies> |
| | | </plugin> |
| | | |
| | | --> |
| | | |
| | | <!-- ======================================================= --> |
| | | <!-- JAR packaging. --> |
| | |
| | | </plugin> |
| | | |
| | | <!-- ======================================================= --> |
| | | <!-- JavaDoc packaging. --> |
| | | <!-- ======================================================= --> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-javadoc-plugin</artifactId> |
| | | </plugin> |
| | | |
| | | <!-- ======================================================= --> |
| | | <!-- Source packaging. --> |
| | | <!-- ======================================================= --> |
| | | <plugin> |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-maven</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | |
| | |
| | | <!-- =========================================================== --> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-jar-collector</artifactId> |
| | | <version>2.1.1</version> |
| | | <packaging>maven-plugin</packaging> |
| | | <name>JAR files collector</name> |
| | | |
| | |
| | | private String jarName; |
| | | |
| | | /** |
| | | * Project dependencies. |
| | | * |
| | | * @parameter expression="${project.artifacts}" |
| | | * @required |
| | | */ |
| | | private Set /*<Artifact>*/ dependencies; |
| | | |
| | | /** |
| | | * The Maven project running this plugin. |
| | | * |
| | | * @parameter expression="${project}" |
| | |
| | | * Gets the parent "target" directory. |
| | | */ |
| | | MavenProject parent = project; |
| | | |
| | | while (parent.hasParent()) { |
| | | parent = parent.getParent(); |
| | | } |
| | | |
| | | collectDirectory = parent.getBuild().getDirectory(); |
| | | |
| | | /* |
| | | * Now collects the JARs. |
| | | */ |
| | |
| | | * such file. Some modules use pom packaging, which does not produce any JAR file. |
| | | */ |
| | | final File jarFile = new File(outputDirectory, jarName + ".jar"); |
| | | |
| | | if (!jarFile.isFile()) { |
| | | return; |
| | | } |
| | | |
| | | /* |
| | | * Get the "target" directory of the parent pom.xml and make sure it exists. |
| | | */ |
| | | File collect = new File(collectDirectory); |
| | | |
| | | if (!collect.exists()) { |
| | | if (!collect.mkdir()) { |
| | | throw new MojoExecutionException("Failed to create target directory."); |
| | | throw new MojoExecutionException("Failed to create target directory: " + collect.getAbsolutePath()); |
| | | } |
| | | } |
| | | |
| | | if (collect.getCanonicalFile().equals(jarFile.getParentFile().getCanonicalFile())) { |
| | | /* |
| | | * The parent's directory is the same as this module's directory. |
| | |
| | | */ |
| | | return; |
| | | } |
| | | |
| | | /* |
| | | * Creates a "binaries" subdirectory inside the "target" directory. |
| | | */ |
| | | collect = new File(collect, SUB_DIRECTORY); |
| | | |
| | | if (!collect.exists()) { |
| | | if (!collect.mkdir()) { |
| | | throw new MojoExecutionException("Failed to create binaries directory."); |
| | | } |
| | | } |
| | | |
| | | int count = 1; |
| | | FileUtils.copyFileToDirectory(jarFile, collect); |
| | | |
| | | Set<Artifact> dependencies = project.getDependencyArtifacts(); |
| | | if (dependencies != null) { |
| | | for (final Iterator it = dependencies.iterator(); it.hasNext();) { |
| | | final Artifact artifact = (Artifact) it.next(); |
| | | for (final Artifact artifact : dependencies) { |
| | | System.out.println("+++++++++++++++++++++++ DEP: " + artifact.getDependencyTrail()); |
| | | final String scope = artifact.getScope(); |
| | | |
| | | if ((scope != null) // Maven 2.0.6 bug? |
| | | && (scope.equalsIgnoreCase(Artifact.SCOPE_COMPILE) |
| | | || scope.equalsIgnoreCase(Artifact.SCOPE_RUNTIME))) { |
| | | if (scope != null && // Maven 2.0.6 bug? |
| | | (scope.equalsIgnoreCase(Artifact.SCOPE_COMPILE) || |
| | | scope.equalsIgnoreCase(Artifact.SCOPE_RUNTIME))) |
| | | { |
| | | final File file = artifact.getFile(); |
| | | final File copy = new File(collect, file.getName()); |
| | | |
| | | if (!copy.exists()) { |
| | | /* |
| | | * Copies the dependency only if it was not already copied. Note that |
| | | * the module's JAR was copied unconditionally above (because it may |
| | | * be the result of a new compilation). If a GeoTools JAR from the |
| | | * dependencies list changed, it will be copied unconditionally when |
| | | * the module for that JAR is processed by Maven. |
| | | */ |
| | | FileUtils.copyFileToDirectory(file, collect); |
| | | count++; |
| | | if (!artifact.getGroupId().startsWith("com.ximple.eofms")) { |
| | | final File copy = new File(collect, file.getName()); |
| | | if (copy.exists()) { |
| | | /* |
| | | * Copies the dependency only if it was not already copied. Note that |
| | | * the module's JAR was copied unconditionally above (because it may |
| | | * be the result of a new compilation). If a GeoTools JAR from the |
| | | * dependencies list changed, it will be copied unconditionally when |
| | | * the module for that JAR is processed by Maven. |
| | | */ |
| | | continue; |
| | | } |
| | | } |
| | | FileUtils.copyFileToDirectory(file, collect); |
| | | } |
| | | } |
| | | } |
| | | |
| | | getLog().info("Copied " + count + " JAR to parent directory."); |
| | | } |
| | | } |
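For orientation, the fields and body above belong to a Maven 2 style mojo driven by javadoc annotations (note the @parameter/@required tags earlier in the file). A minimal skeleton of such a class follows; the class name and phase binding are illustrative, not taken from this changeset:

    import org.apache.maven.plugin.AbstractMojo;
    import org.apache.maven.plugin.MojoExecutionException;
    import org.apache.maven.project.MavenProject;

    /**
     * Collects the module's JAR and its compile/runtime dependencies
     * into a "binaries" directory under the root project's target.
     *
     * @goal collect
     * @phase package
     */
    public class JarCollectorMojo extends AbstractMojo {
        /**
         * The Maven project running this plugin.
         *
         * @parameter expression="${project}"
         * @required
         * @readonly
         */
        private MavenProject project;

        public void execute() throws MojoExecutionException {
            // walk up to the root project and copy JARs, as in the body above
        }
    }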
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-build</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | |
| | |
| | | <dependency> |
| | | <groupId>org.apache.maven</groupId> |
| | | <artifactId>maven-plugin-api</artifactId> |
| | | <version>2.0.4</version> |
| | | <version>2.2.1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.apache.maven</groupId> |
| | | <artifactId>maven-project</artifactId> |
| | | <version>2.0.4</version> |
| | | <version>2.2.1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.codehaus.plexus</groupId> |
| | | <artifactId>plexus-utils</artifactId> |
| | | <version>1.2</version> |
| | | <version>2.1</version> |
| | | </dependency> |
| | | </dependencies> |
| | | |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | </parent> |
| | | |
| | | |
| | |
| | | <!-- =========================================================== --> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-build</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | <packaging>pom</packaging> |
| | | <name>Build tools for Ximple DgnJobs</name> |
| | | |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | </parent> |
| | | |
| | | <!-- =========================================================== --> |
| | |
| | | <!-- =========================================================== --> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnio</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | <packaging>jar</packaging> |
| | | <name>ximple-dgnio</name> |
| | | <url>http://www.ximple.com.tw</url> |
| | |
| | | <dependencies> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-sample-data</artifactId> |
| | | <artifactId>gt-sample-data</artifactId> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | <dependency> |
| | |
| | | <groupId>org.apache.poi</groupId> |
| | | <artifactId>poi</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-io</groupId> |
| | | <artifactId>commons-io</artifactId> |
| | | </dependency> |
| | | </dependencies> |
| | | |
| | | <!-- =========================================================== --> |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/26 06:41:45 PM |
| | | */ |
| | | public class ArcElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(ArcElement.class); |
| | | |
| | | public ArcElement(byte[] raw) { |
| | | ArcElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | package com.ximple.io.dgn7; |
| | | |
| | | import java.util.ArrayList; |
| | | import java.util.Arrays; |
| | | import java.util.Collection; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 03:44:56 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class ComplexChainElement extends Element implements ComplexElement, GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(ComplexChainElement.class); |
| | | |
| | | protected ArrayList<Element> list = new ArrayList<Element>(); |
| | | |
| | | public ComplexChainElement(byte[] raw) { |
| | | ComplexChainElement(byte[] raw) { |
| | | super(raw); |
| | | attrOffset = 4; |
| | | } |
| | |
| | | return factory.createMultiLineString(lines); |
| | | } |
| | | |
| | | public double getElementSize() { |
| | | public short getTotalLength() { |
| | | return raw[18]; |
| | | } |
| | | |
| | | protected void setTotalLength(short value) { |
| | | raw[18] = value; |
| | | } |
| | | |
| | | public short getNumOfElement() { |
| | | return raw[19]; |
| | | } |
| | | |
| | | protected void setNumOfElement(short value) { |
| | | raw[19] = value; |
| | | } |
| | | |
| | | |
| | | public short[] getAttributes() { |
| | | return Arrays.copyOfRange(raw, 20, 24); // words 20..23; copyOfRange's end index is exclusive |
| | | } |
| | | |
| | | protected void setAttributes(short[] values) { |
| | | if (values.length < 4) return; |
| | | System.arraycopy(values, 0, raw, 20, 24 - 20); |
| | | } |
| | | |
| | | public boolean isClosed() { |
| | | if (isEmpty()) { |
| | | return false; |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 04:17:37 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public interface ComplexElement extends List<Element> { |
| | | } |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 03:45:15 PM |
| | | */ |
| | | public class ComplexShapeElement extends Element implements ComplexElement, GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(ComplexShapeElement.class); |
| | | |
| | | ArrayList<Element> list = new ArrayList<Element>(); |
| | | |
| | | public ComplexShapeElement(byte[] raw) { |
| | | ComplexShapeElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * Dgn7OracleReader |
| | | * User: Ulysses |
| | | * Date: 2007/10/24 |
| | | * Time: 01:01:08 PM |
| | | * Time: |
| | | */ |
| | | public class Dgn7OracleReader implements Iterator<Element> { |
| | | private final static Logger logger = Logger.getLogger(Dgn7OracleReader.class); |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/17 01:21:00 PM |
| | | * @since 2006/5/17 |
| | | */ |
| | | public class Dgn7fileHeader { |
| | | private short elmtype; |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/17 01:24:10 PM |
| | | * @since 2006/5/17 |
| | | */ |
| | | public class Dgn7fileReader { |
| | | private static final Logger logger = LogManager.getLogger(Dgn7fileReader.class); |
| | |
| | | ByteBuffer buffer; |
| | | private ElementType fileElementType = ElementType.UNDEFINED; |
| | | private ByteBuffer headerTransfer; |
| | | private final Record record = new Record(); |
| | | private final Element.FileRecord record = new Element.FileRecord(); |
| | | private final boolean randomAccessEnabled; |
| | | private Lock lock; |
| | | private boolean useMemoryMappedBuffer; |
| | | private long currentOffset = 0L; |
| | | private StreamLogging streamLogger = new StreamLogging("Shapefile Reader"); |
| | | private StreamLogging streamLogger = new StreamLogging("Dgn7 Reader"); |
| | | private int maxElementId = 0; |
| | | |
| | | public Dgn7fileReader(ReadableByteChannel channel, boolean strict, boolean useMemoryMapped, Lock lock) |
| | | public Dgn7fileReader(FileChannel channel, boolean strict, boolean useMemoryMapped, Lock lock) |
| | | throws IOException, Dgn7fileException { |
| | | this.channel = channel; |
| | | this.useMemoryMappedBuffer = useMemoryMapped; |
| | |
| | | init(strict); |
| | | } |
| | | |
| | | public Dgn7fileReader(ReadableByteChannel channel, Lock lock) throws IOException, Dgn7fileException { |
| | | public Dgn7fileReader(FileChannel channel, Lock lock) throws IOException, Dgn7fileException { |
| | | this(channel, true, true, lock); |
| | | } |
| | | |
| | |
| | | return randomAccessEnabled; |
| | | } |
| | | |
| | | public Record nextElement() throws IOException, Dgn7fileException { |
| | | public Element.FileRecord nextElement() throws IOException, Dgn7fileException { |
| | | // need to update position |
| | | buffer.position(this.toBufferOffset(record.end)); |
| | | |
| | |
| | | record.length = elementLength; |
| | | record.signature = signature; |
| | | record.number = recordNumber; |
| | | record.buffer = buffer; |
| | | |
| | | // remember, we read one int already... |
| | | record.end = this.toFileOffset(buffer.position()) + elementLength - 4; |
| | |
| | | } |
| | | } |
| | | |
| | | public Record elementAt(int offset) throws IOException, UnsupportedOperationException, Dgn7fileException { |
| | | public Element.FileRecord elementAt(int offset) throws IOException, UnsupportedOperationException, Dgn7fileException { |
| | | if (randomAccessEnabled) { |
| | | this.goTo(offset); |
| | | |
| | |
| | | while (reader.hasNext()) { |
| | | size++; |
| | | |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | |
| | | } catch (IOException e) { |
| | | logger.warn("Stop read dgn file", e); |
| | | } catch (Dgn7fileException e) { |
| | | e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | reader.close(); |
| | | } |
| | | |
| | | System.out.println("count=" + count + " size=" + size); |
| | | logger.debug("count=" + count + " size=" + size); |
| | | // reader.close(); |
| | | } catch (IOException ioe) { |
| | | System.out.println(ioe); |
| | | ioe.printStackTrace(); |
| | | logger.warn(ioe.getMessage(), ioe); |
| | | } catch (Dgn7fileException e) { |
| | | e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | System.exit(0); |
| | | } |
| | | |
| | | public final class Record { |
| | | int length; |
| | | int number = 0; |
| | | int offset; // Relative to the whole file |
| | | int start = 0; // Relative to the current loaded buffer |
| | | short signature = 0; |
| | | |
| | | /** |
| | | * The minimum X value. |
| | | */ |
| | | public double minX; |
| | | |
| | | /** |
| | | * The minimum Y value. |
| | | */ |
| | | public double minY; |
| | | |
| | | /** |
| | | * The minimum Z value. |
| | | */ |
| | | public double minZ; |
| | | |
| | | /** |
| | | * The maximum X value. |
| | | */ |
| | | public double maxX; |
| | | |
| | | /** |
| | | * The maximum Y value. |
| | | */ |
| | | public double maxY; |
| | | |
| | | /** |
| | | * The maximum Z value. |
| | | */ |
| | | public double maxZ; |
| | | |
| | | // ElementType type; |
| | | int end = 0; // Relative to the whole file |
| | | Object element = null; |
| | | IElementHandler handler; |
| | | |
| | | public Object element() { |
| | | if (element == null) { |
| | | buffer.position(start); |
| | | buffer.order(ByteOrder.LITTLE_ENDIAN); |
| | | |
| | | if (handler == null) { |
| | | return null; |
| | | } |
| | | |
| | | element = handler.read(buffer, signature, length); |
| | | } |
| | | |
| | | return element; |
| | | } |
| | | |
| | | public int offset() { |
| | | return offset; |
| | | } |
| | | |
| | | /** |
| | | * A summary of the record. |
| | | */ |
| | | public String toString() { |
| | | return "Record " + number + " length " + length + " bounds " + minX + "," + minY + " " + maxX + "," + maxY; |
| | | } |
| | | } |
| | | } |
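Because this changeset moves the record type from the reader's inner Dgn7fileReader.Record to Element.FileRecord (see the paired lines above), call sites change in lockstep; a sketch of the migration:

    // before (ximple-dgnjobs 1.0.x)
    Dgn7fileReader.Record record = reader.nextElement();

    // after (ximple-dgnjobs 2.1.x)
    Element.FileRecord record = reader.nextElement();
    if (record.element() != null) {
        Element element = (Element) record.element();  // decoded lazily by the type's handler
    }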
New file |
| | |
| | | package com.ximple.io.dgn7;
|
| | |
|
| | | import com.ximple.util.DgnUtility;
|
| | | import org.apache.log4j.LogManager;
|
| | | import org.apache.log4j.Logger;
|
| | |
|
| | | import java.io.EOFException;
|
| | | import java.io.IOException;
|
| | | import java.nio.ByteBuffer;
|
| | | import java.nio.ByteOrder;
|
| | | import java.nio.MappedByteBuffer;
|
| | | import java.nio.ShortBuffer;
|
| | | import java.nio.channels.FileChannel;
|
| | | import java.nio.channels.ReadableByteChannel;
|
| | | import java.nio.channels.WritableByteChannel;
|
| | |
|
| | | public class Dgn7fileWriter {
|
| | | private static final Logger logger = LogManager.getLogger(Dgn7fileWriter.class);
|
| | |
|
| | | private Dgn7fileHeader header;
|
| | | private FileChannel channel;
|
| | | ByteBuffer buffer;
|
| | | private ElementType fileElementType = ElementType.UNDEFINED;
|
| | | private ByteBuffer headerTransfer;
|
| | | private final Element.FileRecord record = new Element.FileRecord();
|
| | | private final boolean randomAccessEnabled;
|
| | | private Lock lock;
|
| | | private boolean useMemoryMappedBuffer;
|
| | | private long currentOffset = 0L;
|
| | | private StreamLogging streamLogger = new StreamLogging("Dgn7 Writer");
|
| | | private int maxElementId = 0;
|
| | |
|
| | | public Dgn7fileWriter(FileChannel channel, boolean strict, boolean useMemoryMapped, Lock lock)
|
| | | throws IOException, Dgn7fileException {
|
| | | this.channel = channel;
|
| | | this.useMemoryMappedBuffer = useMemoryMapped;
|
| | | streamLogger.open();
|
| | | randomAccessEnabled = channel instanceof FileChannel;
|
| | | this.lock = lock;
|
| | | lock.lockRead();
|
| | | lock.lockWrite();
|
| | | // init(strict);
|
| | | }
|
| | |
|
| | | public Dgn7fileWriter(FileChannel channel, Lock lock) throws IOException, Dgn7fileException {
|
| | | this(channel, true, true, lock);
|
| | | }
|
| | |
|
| | | protected boolean hasNext() throws IOException {
|
| | | // mark current position
|
| | | int position = buffer.position();
|
| | |
|
| | | // ensure the proper position, regardless of read or handler behavior
|
| | | try {
|
| | | buffer.position(this.toBufferOffset(record.end));
|
| | | } catch (IllegalArgumentException e) {
|
| | | logger.warn("position=" + this.toBufferOffset(record.end), e);
|
| | |
|
| | | return false;
|
| | | }
|
| | |
|
| | | // no more data left
|
| | | if (buffer.remaining() < 4) {
|
| | | return false;
|
| | | }
|
| | |
|
| | | // looks good
|
| | | boolean hasNext = true;
|
| | | short type = buffer.getShort();
|
| | |
|
| | | if (type == -1) {
|
| | | hasNext = false;
|
| | | }
|
| | |
|
| | | // reset things to as they were
|
| | | buffer.position(position);
|
| | |
|
| | | return hasNext;
|
| | | }
|
| | |
|
| | | protected Element.FileRecord nextElement() throws IOException, Dgn7fileException {
|
| | | // need to update position
|
| | | buffer.position(this.toBufferOffset(record.end));
|
| | |
|
| | | // record header is big endian
|
| | | buffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | |
|
| | | // read shape record header
|
| | | int recordNumber = ++maxElementId;
|
| | | short signature = buffer.getShort();
|
| | |
|
| | | // byte type = (byte) (buffer.get() & 0x7f);
|
| | | byte type = (byte) ((signature >>> 8) & 0x007f);
|
| | |
|
| | | // silly Bentley say contentLength is in 2-byte words
|
| | | // and ByteByffer uses bytes.
|
| | | // track the record location
|
| | | int elementLength = (buffer.getShort() * 2) + 4;
|
| | |
|
| | | if (!buffer.isReadOnly() && !useMemoryMappedBuffer) {
|
| | | // capacity is less than required for the record
|
| | | // copy the old into the newly allocated
|
| | | if (buffer.capacity() < elementLength) {
|
| | | this.currentOffset += buffer.position();
|
| | |
|
| | | ByteBuffer old = buffer;
|
| | |
|
| | | // ensure enough capacity for one more record header
|
| | | buffer = Dgn7fileReader.ensureCapacity(buffer, elementLength, useMemoryMappedBuffer);
|
| | | buffer.put(old);
|
| | | fill(buffer, channel);
|
| | | buffer.position(0);
|
| | | } else
|
| | |
|
| | | // remaining is less than record length
|
| | | // compact the remaining data and read again,
|
| | | // allowing enough room for one more record header
|
| | | if (buffer.remaining() < elementLength) {
|
| | | this.currentOffset += buffer.position();
|
| | | buffer.compact();
|
| | | fill(buffer, channel);
|
| | | buffer.position(0);
|
| | | }
|
| | | }
|
| | |
|
| | | // shape record is all little endian
|
| | | // buffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | | // read the type, handlers don't need it
|
| | | ElementType recordType = ElementType.forID(type);
|
| | |
|
| | | logger.debug("nextElement at " + this.toBufferOffset(record.end) + ":type=" + type);
|
| | |
|
| | | // this usually happens if the handler logic is bunk,
|
| | | // but bad files could exist as well...
|
| | |
|
| | | /*
|
| | | * if (recordType != ElementType.NULL && recordType != fileElementType)
|
| | | * {
|
| | | * throw new IllegalStateException("ShapeType changed illegally from " + fileElementType + " to " + recordType);
|
| | | * }
|
| | | */
|
| | |
|
| | | // peek at bounds, then reset for handler
|
| | | // many handler's may ignore bounds reading, but we don't want to
|
| | | // second guess them...
|
| | | buffer.mark();
|
| | |
|
| | | if (recordType.isMultiPoint()) {
|
| | | int lowCoorX = buffer.getInt();
|
| | |
|
| | | lowCoorX = DgnUtility.convertFromDGN(lowCoorX);
|
| | | record.minX = DgnUtility.converUnitToCoord(lowCoorX);
|
| | |
|
| | | int lowCoorY = buffer.getInt();
|
| | |
|
| | | lowCoorY = DgnUtility.convertFromDGN(lowCoorY);
|
| | | record.minY = DgnUtility.converUnitToCoord(lowCoorY);
|
| | |
|
| | | int lowCoorZ = buffer.getInt();
|
| | |
|
| | | lowCoorZ = DgnUtility.convertFromDGN(lowCoorZ);
|
| | | record.minZ = DgnUtility.converUnitToCoord(lowCoorZ);
|
| | |
|
| | | int highCoorX = buffer.getInt();
|
| | |
|
| | | highCoorX = DgnUtility.convertFromDGN(highCoorX);
|
| | | record.maxX = DgnUtility.converUnitToCoord(highCoorX);
|
| | |
|
| | | int highCoorY = buffer.getInt();
|
| | |
|
| | | highCoorY = DgnUtility.convertFromDGN(highCoorY);
|
| | | record.maxY = DgnUtility.converUnitToCoord(highCoorY);
|
| | |
|
| | | int highCoorZ = buffer.getInt();
|
| | |
|
| | | highCoorZ = DgnUtility.convertFromDGN(highCoorZ);
|
| | | record.maxZ = DgnUtility.converUnitToCoord(highCoorZ);
|
| | | }
|
| | |
|
| | | buffer.reset();
|
| | | record.offset = record.end;
|
| | |
|
| | | // update all the record info.
|
| | | record.length = elementLength;
|
| | | record.signature = signature;
|
| | | record.number = recordNumber;
|
| | | record.buffer = buffer;
|
| | |
|
| | | // remember, we read one int already...
|
| | | record.end = this.toFileOffset(buffer.position()) + elementLength - 4;
|
| | | // record.end = this.toFileOffset(buffer.position()) + elementLength;
|
| | |
|
| | | // mark this position for the reader
|
| | | record.start = buffer.position();
|
| | |
|
| | | // clear any cached record
|
| | | record.handler = recordType.getElementHandler();
|
| | | record.element = null;
|
| | |
|
| | | return record;
|
| | | }
|
| | |
|
| | | private void init(boolean strict) throws IOException, Dgn7fileException {
|
| | | header = readHeader(channel, strict);
|
| | |
|
| | | if (useMemoryMappedBuffer) {
|
| | | FileChannel fc = channel;
|
| | |
|
| | | buffer = fc.map(FileChannel.MapMode.READ_WRITE, 0, fc.size());
|
| | |
|
| | | // buffer.position(100);
|
| | | buffer.position(header.size());
|
| | | this.currentOffset = 0;
|
| | | } else {
|
| | | // force useMemoryMappedBuffer to false
|
| | | this.useMemoryMappedBuffer = false;
|
| | |
|
| | | // start with 8K buffer
|
| | | buffer = ByteBuffer.allocateDirect(8 * 1024);
|
| | | fill(buffer, channel);
|
| | | buffer.flip();
|
| | | this.currentOffset = header.size();
|
| | | }
|
| | |
|
| | | headerTransfer = ByteBuffer.allocate(4);
|
| | | headerTransfer.order(ByteOrder.LITTLE_ENDIAN);
|
| | |
|
| | | // make sure the record end is set now...
|
| | | record.end = toFileOffset(buffer.position());
|
| | | }
|
| | |
|
| | | public static Dgn7fileHeader readHeader(FileChannel channel, boolean strict) throws IOException {
|
| | | ByteBuffer buffer = ByteBuffer.allocateDirect(4);
|
| | |
|
| | | if (fill(buffer, channel) == -1) {
|
| | | throw new EOFException("Premature end of header");
|
| | | }
|
| | |
|
| | | buffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | |
|
| | | int length = buffer.getShort(2) * 2;
|
| | | ByteBuffer old = buffer;
|
| | |
|
| | | old.position(0);
|
| | |
|
| | | // ensure enough capacity for one more record header
|
| | | buffer = ByteBuffer.allocateDirect(length + 4);
|
| | | buffer.put(old);
|
| | |
|
| | | if (fill(buffer, channel) == -1) {
|
| | | throw new EOFException("Premature end of header");
|
| | | }
|
| | |
|
| | | buffer.position(0);
|
| | |
|
| | | Dgn7fileHeader header = new Dgn7fileHeader();
|
| | |
|
| | | header.read(buffer, strict);
|
| | |
|
| | | return header;
|
| | | }
|
| | |
|
| | | protected static int fill(ByteBuffer buffer, FileChannel channel) throws IOException {
|
| | | int r = buffer.remaining();
|
| | |
|
| | | // channel reads return -1 when EOF or other error
|
| | | // because these may be non-blocking reads, 0 is a valid return value
|
| | | while ((buffer.remaining() > 0) && (r != -1)) {
|
| | | r = channel.read(buffer);
|
| | | }
|
| | |
|
| | | if (r == -1) {
|
| | | buffer.limit(buffer.position());
|
| | | }
|
| | |
|
| | | return r;
|
| | | }
|
| | |
|
| | | private void allocateBuffers() {
|
| | | buffer = ByteBuffer.allocateDirect(16 * 1024);
|
| | | }
|
| | |
|
| | | private void checkShapeBuffer(int size) {
|
| | | if (buffer == null || buffer.capacity() < size) {
|
| | | if (buffer != null) // clean the old direct buffer before replacing it
|
| | | NIOUtilities.clean(buffer);
|
| | | buffer = ByteBuffer.allocateDirect(size);
|
| | | }
|
| | | }
|
| | |
|
| | | private void drain() throws IOException {
|
| | | buffer.flip();
|
| | | while (buffer.remaining() > 0)
|
| | | channel.write(buffer);
|
| | | buffer.flip().limit(buffer.capacity()); // reset for refilling: position=0, limit=capacity (same net effect as clear())
|
| | | }
|
| | |
|
| | | private int toBufferOffset(int offset) {
|
| | | return (int) (offset - currentOffset);
|
| | | }
|
| | |
|
| | | private int toFileOffset(int offset) {
|
| | | return (int) (currentOffset + offset);
|
| | | }
|
| | |
|
| | | public void writeElement(Element element) throws IOException {
|
| | | if (element == null) return;
|
| | | if (element.getElementType().isComplexElement()) {
|
| | | writeTo(element);
|
| | | ComplexElement complexElement = (ComplexElement) element;
|
| | | for (Element component : complexElement) {
|
| | | writeTo(component);
|
| | | }
|
| | | } else {
|
| | | writeTo(element);
|
| | | }
|
| | | }
|
| | |
|
| | | private void writeTo(Element element) throws IOException {
|
| | | ByteBuffer writeBuffer = ByteBuffer.allocateDirect(element.raw.length * 2);
|
| | | writeBuffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | | for (short word : element.raw) {
|
| | | writeBuffer.putShort(word);
|
| | | }
|
| | | writeBuffer.rewind();
|
| | |
|
| | | channel.write(writeBuffer);
|
| | | }
|
| | |
|
| | |
|
| | |
|
| | | public void toEnd() throws IOException, Dgn7fileException {
|
| | | while (hasNext()) {
|
| | | nextElement();
|
| | | }
|
| | | }
|
| | |
|
| | | public void close() throws IOException {
|
| | | lock.unlockWrite();
|
| | | lock.unlockRead();
|
| | |
|
| | | if (channel.isOpen()) {
|
| | | channel.close();
|
| | | streamLogger.close();
|
| | | }
|
| | |
|
| | | if (buffer instanceof MappedByteBuffer) {
|
| | | NIOUtilities.clean(buffer);
|
| | | }
|
| | |
|
| | | channel = null;
|
| | | header = null;
|
| | | }
|
| | |
|
| | | public void writeEOF() throws IOException {
|
| | | ByteBuffer writeBuffer = ByteBuffer.allocateDirect(2);
|
| | | writeBuffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | | writeBuffer.putShort((short) -1);
|
| | | channel.write(writeBuffer);
|
| | |
|
| | | }
|
| | | }
|
| | |
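| | | // A minimal sketch of driving the reader above; it mirrors the test code later in
| | | // this changeset, and the "testHV.dgn" path is borrowed from those tests (an
| | | // assumption here, not part of this diff):
| | | // FileChannel fc = new FileInputStream(new File("testHV.dgn")).getChannel();
| | | // Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock());
| | | // while (reader.hasNext()) {
| | | //     Element.FileRecord record = reader.nextElement();  // re-fills the buffer as needed
| | | //     if (record.element() != null) {
| | | //         Element element = (Element) record.element();  // decoded lazily by the type handler
| | | //     }
| | | // }
| | |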
| | | import com.ximple.util.DgnUtility; |
| | | |
| | | /** |
| | | * Record |
| | | * FileRecord |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 11:14:50 AM |
| | | */ |
| | | public class Element { |
| | | public static final int CONSTRUCTION_CLASS = 0; |
| | |
| | | protected short[] raw; |
| | | protected byte attrOffset = 0; |
| | | protected ByteBuffer rawBuffer; |
| | | protected boolean newElement = false; |
| | | |
| | | public Element(byte[] raw) { |
| | | Element(byte[] raw) { |
| | | // this.raw = raw; |
| | | this.raw = new short[raw.length / 2]; |
| | | rawBuffer = ByteBuffer.wrap(raw); |
| | |
| | | |
| | | public int getLineStyle() { |
| | | return (raw[17] & 0x0007); |
| | | } |
| | | |
| | | protected void setLineStyle(int value) { |
| | | if (value > -1 && value < 8) |
| | | raw[17] = (short) ((raw[17] & 0xfff8) | (value & 0x0007)); |
| | | else |
| | | throw new IllegalArgumentException("Out of Range!"); |
| | | } |
| | | |
| | | public Envelope getRange() { |
| | |
| | | raw[5] = (short) (temp >> 16 & 0x0000ffff); |
| | | raw[4] = (short) (temp & 0x0000ffff); |
| | | |
| | | // lowZ |
| | | raw[7] = 0; |
| | | raw[8] = 0; |
| | | |
| | | int highCoorX = DgnUtility.converCoordToUnit(bbox.getMaxX()); |
| | | temp = DgnUtility.converToDGN(highCoorX); |
| | | raw[9] = (short) (temp >> 16 & 0x0000ffff); |
| | |
| | | temp = DgnUtility.converToDGN(highCoorY); |
| | | raw[11] = (short) (temp >> 16 & 0x0000ffff); |
| | | raw[10] = (short) (temp & 0x0000ffff); |
| | | |
| | | // highZ |
| | | raw[13] = (short) 0xffff; |
| | | raw[12] = (short) 0xffff; |
| | | |
| | | } |
| | | |
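| | | // The packing above splits one 32-bit DGN unit value across two 16-bit raw words, |
| | | // high word first; a sketch of the matching read-back (an assumption, not part of |
| | | // this diff -- the reader class does the same via convertFromDGN): |
| | | // int temp = ((raw[5] & 0xffff) << 16) | (raw[4] & 0xffff); |
| | | // double minX = DgnUtility.converUnitToCoord(DgnUtility.convertFromDGN(temp)); |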
| | | public boolean isComponentElement() { |
| | | return (short) ((raw[0] >>> 7) & 0x0001) == 1; |
| | | } |
| | | |
| | | protected void setComponentElement(boolean value) { |
| | | raw[0] = (short) ((raw[0] & 0xff7f) | (value ? 0x0080 : 0x0)); |
| | | } |
| | | |
| | | public boolean removeUserAttributeData(int iLinkageId) { |
| | |
| | | return (short) ((raw[0] >>> 15) & 0x0001) == 1; |
| | | } |
| | | |
| | | protected void setDeleted(boolean value) { |
| | | raw[0] = (short) ((raw[0] & 0x7fff) | ((((value) ? 1 : 0) << 15) & 0x8000)); |
| | | } |
| | | |
| | | public int getColorIndex() { |
| | | return ((raw[17] >>> 8) & 0x00ff); |
| | | } |
| | | |
| | | protected void setColorIndex(int value) { |
| | | if (value > -1 && value < 256) |
| | | { |
| | | raw[17] = (short) ((raw[17] & 0x00ff) | (value << 8 & 0xff00)); |
| | | } else throw new IllegalArgumentException("Out of Range!"); |
| | | } |
| | | |
| | | public int getType() { |
| | | return ((raw[0] >>> 8) & 0x007f); |
| | | } |
| | | |
| | | protected void setType(int value) { |
| | | raw[0] = (short) ((raw[0] & 0x80ff) | (value << 8) & 0x3f00); |
| | | } |
| | | |
| | | public ElementType getElementType() { |
| | |
| | | } |
| | | } |
| | | |
| | | public short getFollowLength() { |
| | | return raw[1]; |
| | | } |
| | | |
| | | protected void setFollowLength(short value) { |
| | | assert (raw.length >= value + 2); |
| | | raw[1] = value; |
| | | } |
| | | |
| | | public void addUserAttributeData(byte[] pDataBlock, Class dataClass, int iLinkageId) throws Element.Exception { |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | return true; |
| | | } |
| | | |
| | | public int getUserAttributeDataOffset() { |
| | | return (raw[15] + 16); |
| | | } |
| | | |
| | | public List<UserAttributeData> getUserAttributeData() { |
| | |
| | | public Exception() { |
| | | } |
| | | |
| | | // Constructs a Record.Exception with no detail message. |
| | | // Constructs an ElementRecord.Exception with no detail message. |
| | | public Exception(String oStrMessage) { |
| | | super(oStrMessage); |
| | | } |
| | |
| | | return elementType; |
| | | } |
| | | |
| | | public Object read(ByteBuffer buffer, short signature, int length) { |
| | | public Element read(ByteBuffer buffer, short signature, int length) { |
| | | byte[] dst = new byte[length]; |
| | | try { |
| | | buffer.get(dst, 4, dst.length - 4); |
| | |
| | | return new Element(raw); |
| | | } |
| | | } |
| | | |
| | | public static final class FileRecord { |
| | | int length; |
| | | int number = 0; |
| | | int offset; // Relative to the whole file |
| | | int start = 0; // Relative to the current loaded buffer |
| | | short signature = 0; |
| | | |
| | | /** |
| | | * The minimum X value. |
| | | */ |
| | | public double minX; |
| | | |
| | | /** |
| | | * The minimum Y value. |
| | | */ |
| | | public double minY; |
| | | |
| | | /** |
| | | * The minimum Z value. |
| | | */ |
| | | public double minZ; |
| | | |
| | | /** |
| | | * The maximum X value. |
| | | */ |
| | | public double maxX; |
| | | |
| | | /** |
| | | * The maximum Y value. |
| | | */ |
| | | public double maxY; |
| | | |
| | | /** |
| | | * The maximum Z value. |
| | | */ |
| | | public double maxZ; |
| | | |
| | | // ElementType type; |
| | | int end = 0; // Relative to the whole file |
| | | Object element = null; |
| | | IElementHandler handler; |
| | | ByteBuffer buffer; |
| | | |
| | | public Object element() { |
| | | if (element == null) { |
| | | buffer.position(start); |
| | | buffer.order(ByteOrder.LITTLE_ENDIAN); |
| | | |
| | | if (handler == null) { |
| | | return null; |
| | | } |
| | | |
| | | element = handler.read(buffer, signature, length); |
| | | } |
| | | |
| | | return element; |
| | | } |
| | | |
| | | public int offset() { |
| | | return offset; |
| | | } |
| | | |
| | | /** |
| | | * A summary of the record. |
| | | */ |
| | | public String toString() { |
| | | return "FileRecord " + number + " length " + length + " bounds " + minX + "," + minY + " " + maxX + "," + maxY; |
| | | } |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.io.dgn7;
|
| | |
|
| | | import com.vividsolutions.jts.geom.Coordinate;
|
| | | import com.vividsolutions.jts.geom.GeometryFactory;
|
| | | import com.vividsolutions.jts.geom.LineString;
|
| | | import com.vividsolutions.jts.geom.LinearRing;
|
| | |
|
| | | import java.util.Arrays;
|
| | | import java.util.LinkedList;
|
| | |
|
| | | public class ElementFactory {
|
| | | static final int DEFAULT_ELMHEAD_LENGTH = 28;
|
| | | static final int DEFAULT_DISPHEAD_LENGTH = 8;
|
| | | static final int MINIMAL_ELEMLENGTH = 18 * 2;
|
| | |
|
| | | static final int MAXINUM_LINESTRING_PTLEN = 100;
|
| | |
|
| | | private static ElementFactory elementFactory = new ElementFactory();
|
| | | private static GeometryFactory factory = new GeometryFactory();
|
| | |
|
| | | public static Element createLineString(LineString linestring) {
|
| | | Coordinate[] pts = linestring.getCoordinates();
|
| | | LineStringElement element = elementFactory.createLineStringElement(pts);
|
| | | element.setRange(linestring.getEnvelopeInternal());
|
| | | return element;
|
| | | }
|
| | |
|
| | | public static Element createShape(LinearRing ring) {
|
| | | Coordinate[] pts = ring.getCoordinates();
|
| | | int elmsize = MINIMAL_ELEMLENGTH + 2 + pts.length * 8;
|
| | | ShapeElement element = new ShapeElement(new byte[elmsize]);
|
| | | for (int i = 0; i < pts.length; i++) {
|
| | | element.setX(i, pts[i].x);
|
| | | element.setY(i, pts[i].y);
|
| | | }
|
| | | element.setVerticeSize(pts.length);
|
| | | element.setType(ElementType.SHAPE.id);
|
| | | element.setFollowLength((short) ((elmsize / 2) - 2));
|
| | | element.setRange(ring.getEnvelopeInternal());
|
| | | element.setLevelIndex(0);
|
| | | element.setColorIndex(0);
|
| | | element.setWeight(0);
|
| | | element.setLineStyle(0);
|
| | | return element;
|
| | | }
|
| | |
|
| | | private LineStringElement createLineStringElement(Coordinate[] pts) {
|
| | | int elmsize = MINIMAL_ELEMLENGTH + 2 + pts.length * 8;
|
| | | LineStringElement element = new LineStringElement(new byte[elmsize]);
|
| | | for (int i = 0; i < pts.length; i++) {
|
| | | element.setX(i, pts[i].x);
|
| | | element.setY(i, pts[i].y);
|
| | | }
|
| | | element.setVerticeSize(pts.length);
|
| | | element.setType(ElementType.LINESTRING.id);
|
| | | element.setFollowLength((short) ((elmsize / 2) - 2));
|
| | | element.setLevelIndex(0);
|
| | | element.setColorIndex(0);
|
| | | element.setWeight(0);
|
| | | element.setLineStyle(0);
|
| | | return element;
|
| | | }
|
| | |
|
| | | private ShapeElement createShapeElement(Coordinate[] pts) {
|
| | | int elmsize = MINIMAL_ELEMLENGTH + 2 + pts.length * 8;
|
| | | ShapeElement element = new ShapeElement(new byte[elmsize]);
|
| | | for (int i = 0; i < pts.length; i++) {
|
| | | element.setX(i, pts[i].x);
|
| | | element.setY(i, pts[i].y);
|
| | | }
|
| | | element.setVerticeSize(pts.length);
|
| | | element.setType(ElementType.SHAPE.id);
|
| | | element.setFollowLength((short) ((elmsize / 2) - 2));
|
| | | element.setLevelIndex(0);
|
| | | element.setColorIndex(0);
|
| | | element.setWeight(0);
|
| | | element.setLineStyle(0);
|
| | | return element;
|
| | | }
|
| | |
|
| | | public static Element createComplexChain(LineString linestring) {
|
| | | LinkedList<LineStringElement> elms = new LinkedList<LineStringElement>();
|
| | | Coordinate[] allpts = linestring.getCoordinates();
|
| | | int segsize = allpts.length / MAXINUM_LINESTRING_PTLEN;
|
| | | int currentpos = 0;
|
| | | int totalLength = 0;
|
| | | for (int seg = 0; seg < segsize; seg++) {
|
| | | Coordinate[] pts = Arrays.copyOfRange(allpts,
|
| | | currentpos, currentpos + MAXINUM_LINESTRING_PTLEN + 1, Coordinate[].class);
|
| | | LineStringElement element = elementFactory.createLineStringElement(pts);
|
| | | currentpos += MAXINUM_LINESTRING_PTLEN;
|
| | | element.setRange(element.toGeometry(factory).getEnvelopeInternal());
|
| | | element.setComponentElement(true);
|
| | | element.setLevelIndex(0);
|
| | | totalLength += element.raw.length;
|
| | | elms.add(element);
|
| | | }
|
| | | int remain = allpts.length % MAXINUM_LINESTRING_PTLEN;
|
| | | Coordinate[] pts = Arrays.copyOfRange(allpts,
|
| | | currentpos, currentpos + remain, Coordinate[].class);
|
| | | LineStringElement element = elementFactory.createLineStringElement(pts);
|
| | | element.setRange(element.toGeometry(factory).getEnvelopeInternal());
|
| | | element.setComponentElement(true);
|
| | | element.setLevelIndex(0);
|
| | | elms.add(element);
|
| | | totalLength += element.raw.length;
|
| | |
|
| | | ComplexChainElement result = new ComplexChainElement(new byte[MINIMAL_ELEMLENGTH + 12]);
|
| | | result.addAll(elms);
|
| | | result.setRange(linestring.getEnvelopeInternal());
|
| | | result.setType(ElementType.COMPLEXCHAIN.id);
|
| | | result.setFollowLength((short) (((MINIMAL_ELEMLENGTH + 12) / 2) - 2));
|
| | | result.setNumOfElement((short) elms.size());
|
| | | totalLength += result.raw.length;
|
| | | totalLength -= 19;
|
| | | result.setTotalLength((short) totalLength);
|
| | | result.setLevelIndex(0);
|
| | | result.setColorIndex(0);
|
| | | result.setWeight(0);
|
| | | result.setLineStyle(0);
|
| | |
|
| | | return result;
|
| | | }
|
| | | }
|
| | |
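| | | // A short sketch of feeding the factory above with JTS geometries (illustrative
| | | // coordinates, not part of this changeset):
| | | // GeometryFactory gf = new GeometryFactory();
| | | // LineString line = gf.createLineString(new Coordinate[]{
| | | //         new Coordinate(0, 0), new Coordinate(10, 0), new Coordinate(10, 5)});
| | | // Element single = ElementFactory.createLineString(line);   // one LINESTRING element
| | | // Element chain = ElementFactory.createComplexChain(line);  // split into <= 100-point segments
| | |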
| | | | 23 Circular Truncated Cone | |
| | | | 24 B-Spline Surface (complex) | |
| | | | 25 B-Spline Surface boundary | |
| | | | 26 B-Spline Knot Record | |
| | | | 26 B-Spline Knot ElementRecord | |
| | | | 27 B-Spline Curve (complex) | |
| | | | 28 B-Spline Weight Factor | |
| | | | 33 Dimension Record | |
| | | | 34 Shared Cell Definition Record | |
| | | | 35 Shared Cell Record | |
| | | | 36 Multiline Record | |
| | | | 37 Attribute Record | |
| | | | 33 Dimension ElementRecord | |
| | | | 34 Shared Cell Definition ElementRecord | |
| | | | 35 Shared Cell ElementRecord | |
| | | | 36 Multiline ElementRecord | |
| | | | 37 Attribute ElementRecord | |
| | | | 38 DgnStore Component | |
| | | | 39 DgnStore Header | |
| | | | 66 MicroStation Application | |
| | |
| | | | 88 Raster Component | |
| | | | 90 Raster Reference Attachment | |
| | | | 91 Raster Reference Component | |
| | | | 92 Raster Hierarchy Record | |
| | | | 92 Raster Hierarchy ElementRecord | |
| | | | 93 Raster Hierarchy Component | |
| | | | 94 Raster Frame Record | |
| | | | 95 Table Entry Record | |
| | | | 96 Table Header Record | |
| | | | 97 View Group Record | |
| | | | 98 View Record | |
| | | | 99 Level Mask Record | |
| | | | 100 Reference Attach Record | |
| | | | 94 Raster Frame ElementRecord | |
| | | | 95 Table Entry ElementRecord | |
| | | | 96 Table Header ElementRecord | |
| | | | 97 View Group ElementRecord | |
| | | | 98 View ElementRecord | |
| | | | 99 Level Mask ElementRecord | |
| | | | 100 Reference Attach ElementRecord | |
| | | | 101 Matrix Header | |
| | | | 102 Matrix Int Data | |
| | | | 103 Matrix Double Data | |
| | | | 105 Mesh Header | |
| | | | 106 Extended Record (graphic) (complex) | |
| | | | 107 Extended Record (non-graphic) (complex) | |
| | | | 108 Reference Override Record | |
| | | | 106 Extended ElementRecord (graphic) (complex) | |
| | | | 107 Extended ElementRecord (non-graphic) (complex) | |
| | | | 108 Reference Override ElementRecord | |
| | | | 110 Named Group Header | |
| | | | 111 Named Group Component | |
| | | | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/17 01:26:49 PM |
| | | */ |
| | | public final class ElementType { |
| | | /** |
| | |
| | | public class EllipseElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(EllipseElement.class); |
| | | |
| | | public EllipseElement(byte[] raw) { |
| | | EllipseElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 06:36:55 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class FrammeAttributeData extends UserAttributeData { |
| | | public FrammeAttributeData(short id) { |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 11:38:57 AM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public interface GeometryConverter { |
| | | public Geometry toGeometry(GeometryFactory factory); |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/17 01:50:26 PM |
| | | */ |
| | | public interface IElementHandler { |
| | | public ElementType getElementType(); |
| | | |
| | | public Object read(ByteBuffer buffer, short signature, int length); |
| | | public Element read(ByteBuffer buffer, short signature, int length); |
| | | |
| | | public void write(ByteBuffer buffer, Object element); |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 11:34:59 AM |
| | | */ |
| | | public class LineElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(LineElement.class); |
| | | |
| | | public LineElement(byte[] raw) { |
| | | LineElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 02:48:58 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class LineStringElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(LineStringElement.class); |
| | | |
| | | public LineStringElement(byte[] raw) { |
| | | LineStringElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | return raw[18] & 0x0000ffff; |
| | | } |
| | | |
| | | public void setVerticeSize(int size) { |
| | | raw[18] = (short) (size & 0x0000ffff); |
| | | } |
| | | |
| | | public double getLength() { |
| | | double result = 0.0; |
| | | Coordinate[] vset = getVertices(); |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 10:27:24 AM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class Lock { |
| | | Logger logger = LogManager.getLogger("com.ximple.io.dgn7"); |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 03:08:43 PM |
| | | */ |
| | | public class ShapeElement extends LineStringElement implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(ShapeElement.class); |
| | | |
| | | public ShapeElement(byte[] raw) { |
| | | ShapeElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 10:31:08 AM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class StreamLogging { |
| | | private static final Logger LOGGER = LogManager.getLogger("com.ximple.io.dgn7"); |
| | |
| | | public class TagElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(TagElement.class); |
| | | |
| | | public TagElement(byte[] raw) { |
| | | TagElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 05:03:46 PM |
| | | */ |
| | | public class TcbElement extends Element { |
| | | private static final Logger logger = Logger.getLogger(TcbElement.class); |
| | | |
| | | public TcbElement(byte[] raw) { |
| | | TcbElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | package com.ximple.io.dgn7; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.CoordinateList; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.ximple.util.DgnUtility; |
| | | import org.apache.log4j.Logger; |
| | | |
| | | import java.awt.geom.AffineTransform; |
| | | import java.nio.ByteBuffer; |
| | | import java.nio.CharBuffer; |
| | | import java.nio.charset.CharacterCodingException; |
| | | import java.nio.charset.Charset; |
| | | import java.nio.charset.CharsetDecoder; |
| | | import java.util.ArrayList; |
| | | |
| | | import org.apache.log4j.Logger; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.CoordinateList; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.util.DgnUtility; |
| | | |
| | | /** |
| | | * TextElement |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 11:45:29 AM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class TextElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(TextElement.class); |
| | |
| | | public static final int TXTJUST_RD = 24; /* Right Descender */ |
| | | public static final int TXTJUST_NONE = 127; /* no justification */ |
| | | |
| | | public TextElement(byte[] raw) { |
| | | TextElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | public boolean isChinese() { |
| | | if (raw.length < 31) return false; |
| | | int isChinese = raw[30] & 0x0000ffff; |
| | | |
| | | return (isChinese == 0xfdff); |
| | |
| | | return ""; |
| | | } |
| | | |
| | | if(30+num/2 > raw.length) |
| | | { |
| | | logger.warn("getTextLength() too long." ); |
| | | if (30 + num / 2 > raw.length) { |
| | | logger.warn("getTextLength() too long."); |
| | | return ""; |
| | | } |
| | | |
| | |
| | | |
| | | val.append(temp[i]); |
| | | } |
| | | } |
| | | else |
| | | { |
| | | } else { |
| | | byte[] strRaw = new byte[num * 2]; |
| | | ArrayList byteResult = new ArrayList(); |
| | | for (int i = 0; i < num; i++) |
| | | { |
| | | for (int i = 0; i < num; i++) { |
| | | short charValue = raw[i + 31]; |
| | | byte hi = (byte) (charValue >>> 8); |
| | | byte lo = (byte) charValue; |
| | | strRaw[i * 2] = hi; |
| | | strRaw[i * 2 + 1] = lo; |
| | | |
| | | } |
| | | |
| | | try { |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 04:02:58 PM |
| | | */ |
| | | public class TextNodeElement extends Element implements ComplexElement, GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(TextNodeElement.class); |
| | | |
| | | private ArrayList<Element> list = new ArrayList<Element>(); |
| | | |
| | | public TextNodeElement(byte[] raw) { |
| | | TextNodeElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 02:29:29 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class UserAttributeData { |
| | | protected short[] _src; |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 01:33:00 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public final class DgnUtility { |
| | | private static final Logger logger = Logger.getLogger(DgnUtility.class); |
| | |
| | | * Dgn7OracleReaderTest |
| | | * User: Ulysses |
| | | * Date: 2007/10/24 |
| | | * Time: 10:49:54 AM |
| | | */ |
| | | public class Dgn7OracleReaderTest { |
| | | @BeforeTest |
| | |
| | | |
| | | } |
| | | |
| | | @Test |
| | | // @Test |
| | | public void testOracleReader() throws SQLException, IOException { |
| | | OracleConnection connection = OracleTarget.getInstance().getOracleConnection(); |
| | | // String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" ORDER BY ROWID"; |
| | |
| | | * Dgn7TextElementReaderTest |
| | | * User: Ulysses |
| | | * Date: 2008/1/10 |
| | | * Time: 12:19:14 AM |
| | | */ |
| | | public class Dgn7TextElementReaderTest { |
| | | private final static Logger logger = Logger.getLogger(Dgn7TextElementReaderTest.class); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | count++; |
| | | } |
| | | |
| | | logger.info("ElementRecord Count=" + count); |
| | | logger.info("FileRecord Count=" + count); |
| | | } |
| | | } |
| | |
| | | * Dgn7fileReaderTest |
| | | * User: Ulysses |
| | | * Date: 2007/10/24 |
| | | * Time: 01:43:41 AM |
| | | * To change this template use File | Settings | File Templates. |
| | | */ |
| | | public class Dgn7fileReaderTest { |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | count++; |
| | | } |
| | | |
| | | logger.info("ElementRecord Count=" + count); |
| | | logger.info("FileRecord Count=" + count); |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.io.dgn7;
|
| | |
|
| | | import org.apache.commons.io.FileUtils;
|
| | | import org.apache.log4j.Logger;
|
| | | import org.geotools.TestData;
|
| | | import org.testng.Assert;
|
| | | import org.testng.annotations.BeforeTest;
|
| | | import org.testng.annotations.Test;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.FileInputStream;
|
| | | import java.io.FileOutputStream;
|
| | | import java.io.IOException;
|
| | | import java.io.RandomAccessFile;
|
| | | import java.nio.channels.FileChannel;
|
| | |
|
| | | public class Dgn7fileWriterTest {
|
| | | private final static Logger logger = Logger.getLogger(Dgn7fileWriterTest.class);
|
| | |
|
| | | // private final static String testFilePath = "test-data\\testHV.dgn";
|
| | | private final static String testFilePath = "testHV.dgn";
|
| | | private FileInputStream _fs;
|
| | |
|
| | | @BeforeTest
|
| | | public void setUp() throws IOException {
|
| | | File dataFile = TestData.file(this, testFilePath);
|
| | | if (!dataFile.exists()) {
|
| | | return;
|
| | | }
|
| | |
|
| | | _fs = new FileInputStream(dataFile);
|
| | | }
|
| | | |
| | | @Test
|
| | | public void testWrite() {
|
| | | }
|
| | |
|
| | | @Test
|
| | | public void testCopy() throws Dgn7fileException, IOException {
|
| | | File target = TestData.temp(this, "testdgn2d.dgn");
|
| | | FileUtils.copyFile(TestData.file(this, "dgnseed2d.dgn"), target);
|
| | | RandomAccessFile targetStream = new RandomAccessFile(target, "rw");
|
| | | FileChannel fctarget = targetStream.getChannel();
|
| | | Lock lock = new Lock();
|
| | |
|
| | | Dgn7fileReader targetReader = new Dgn7fileReader(fctarget, new Lock());
|
| | | while (targetReader.hasNext()) {
|
| | | targetReader.nextElement();
|
| | | }
|
| | |
|
| | | Dgn7fileWriter writer = new Dgn7fileWriter(fctarget, lock);
|
| | |
|
| | | FileChannel fc = _fs.getChannel();
|
| | | Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock());
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | | while (reader.hasNext()) {
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | | boolean completed = false;
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | // @todo add processing here
|
| | | processCompleteElement(lastComplex, writer);
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | // @todo add processing here
|
| | | processCompleteElement(element, writer);
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | } else {
|
| | | logger.warn("wong." + element.toString());
|
| | | Assert.fail("Component Element cannot found parent.");
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | // @todo add processing here
|
| | | processCompleteElement(lastComplex, writer);
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | // writer.writeElement(element);
|
| | | }
|
| | | count++;
|
| | | }
|
| | | writer.writeEOF();
|
| | | writer.close();
|
| | |
|
| | | // FileUtils.copyFile(target, new File("G://target.dgn"));
|
| | | }
|
| | |
|
| | | private boolean processCompleteElement(Element element, Dgn7fileWriter writer) throws IOException {
|
| | | writer.writeElement(element);
|
| | | return true;
|
| | | }
|
| | | }
|
New file |
| | |
| | | package com.ximple.io.dgn7;
|
| | |
|
| | | import com.vividsolutions.jts.geom.CoordinateList;
|
| | | import com.vividsolutions.jts.geom.GeometryFactory;
|
| | | import com.vividsolutions.jts.geom.LineString;
|
| | | import com.vividsolutions.jts.geom.MultiLineString;
|
| | | import org.apache.commons.io.FileUtils;
|
| | | import org.apache.log4j.Logger;
|
| | | import org.geotools.TestData;
|
| | | import org.testng.Assert;
|
| | | import org.testng.annotations.BeforeTest;
|
| | | import org.testng.annotations.Test;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.FileInputStream;
|
| | | import java.io.IOException;
|
| | | import java.io.RandomAccessFile;
|
| | | import java.nio.channels.FileChannel;
|
| | |
|
| | | /**
|
| | | * Created by IntelliJ IDEA.
|
| | | * User: Ulysses
|
| | | * Date: 2010/4/21
|
| | | */
|
| | | public class ElementFactoryTest {
|
| | | private final static Logger logger = Logger.getLogger(ElementFactoryTest.class);
|
| | |
|
| | | // private final static String testFilePath = "test-data\\testHV.dgn";
|
| | | private final static String testFilePath = "testHV.dgn";
|
| | |
|
| | | private static GeometryFactory factory = new GeometryFactory();
|
| | |
|
| | | private FileInputStream _fs;
|
| | | private LineStringElement _testLineString = null;
|
| | | private ComplexChainElement _testComplexChain = null;
|
| | |
|
| | | @BeforeTest
|
| | | public void setUp() throws IOException, Dgn7fileException {
|
| | | File dataFile = TestData.file(this, testFilePath);
|
| | | if (!dataFile.exists()) {
|
| | | return;
|
| | | }
|
| | |
|
| | | _fs = new FileInputStream(dataFile);
|
| | |
|
| | | fetchTestElement(_fs);
|
| | | _fs.close();
|
| | | }
|
| | |
|
| | | private void fetchTestElement(FileInputStream fs) throws Dgn7fileException, IOException {
|
| | | FileChannel fc = _fs.getChannel();
|
| | | Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock());
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | | while (reader.hasNext()) {
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | | boolean completed = false;
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | // @todo add processing here
|
| | | if (!processCompleteElement(lastComplex)) break;
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | // @todo add processing here
|
| | | if (!processCompleteElement(element)) break;
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | } else {
|
| | | logger.warn("wong." + element.toString());
|
| | | Assert.fail("Component Element cannot found parent.");
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | // @todo add processing here
|
| | | if (!processCompleteElement(lastComplex)) break;
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | }
|
| | | count++;
|
| | | }
|
| | | }
|
| | |
|
| | | private boolean processCompleteElement(Element element) throws IOException {
|
| | | if (element instanceof LineStringElement) {
|
| | | _testLineString = (LineStringElement) element;
|
| | | }
|
| | |
|
| | | if (element instanceof ComplexChainElement) {
|
| | | _testComplexChain = (ComplexChainElement) element;
|
| | | }
|
| | |
|
| | | if ((_testLineString != null) && (_testComplexChain != null)) return false;
|
| | | return true;
|
| | | }
|
| | |
|
| | | @Test
|
| | | public void testCreateLineString() throws IOException, Dgn7fileException {
|
| | | Assert.assertNotNull(_testLineString);
|
| | | LineStringElement originElement = _testLineString;
|
| | |
|
| | | LineString geometry = (LineString) originElement.toGeometry(factory);
|
| | | Element cloneElement = ElementFactory.createLineString(geometry);
|
| | | Assert.assertTrue(cloneElement instanceof LineStringElement);
|
| | | cloneElement.setLevelIndex(originElement.getLevelIndex());
|
| | | cloneElement.setColorIndex(originElement.getColorIndex());
|
| | | cloneElement.setWeight(originElement.getWeight());
|
| | | cloneElement.setLineStyle(originElement.getLineStyle());
|
| | | cloneElement.setRange(geometry.getEnvelopeInternal());
|
| | |
|
| | | int len = originElement.raw.length;
|
| | | int lenClone = cloneElement.raw.length;
|
| | | // Assert.assertEquals(lenClone, len - originElement.getUserAttributeDataOffset() + 1);
|
| | | if (originElement.hasUserAttributeData()) {
|
| | | Assert.assertEquals(lenClone, originElement.getUserAttributeDataOffset());
|
| | | }
|
| | | System.out.println("origin size=(" + len + ")-:- Clone size=(" + lenClone + ")");
|
| | | int headerSize = ElementFactory.MINIMAL_ELEMLENGTH / 2;
|
| | | for (int i = 0; i <= headerSize; i++) {
|
| | | if (originElement.raw[i] != cloneElement.raw[i]) {
|
| | | System.out.print("different index=" + i + ":");
|
| | | System.out.println("origin[" + Integer.toHexString(originElement.raw[i])
|
| | | + "]-clone:[" + Integer.toHexString(cloneElement.raw[i]) + "]");
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | @Test
|
| | | public void testCreateComplexChain() throws IOException, Dgn7fileException {
|
| | | Assert.assertNotNull(_testComplexChain);
|
| | | ComplexChainElement originElement = _testComplexChain;
|
| | |
|
| | | System.out.print("len=" + originElement.raw.length);
|
| | | for (Element elm : originElement) {
|
| | | System.out.print(":" + elm.raw.length);
|
| | | }
|
| | | System.out.println();
|
| | |
|
| | | MultiLineString geometries = (MultiLineString) originElement.toGeometry(factory);
|
| | | LineString geometry = factory.createLineString(geometries.getCoordinates());
|
| | |
|
| | | ComplexChainElement cloneElement = (ComplexChainElement) ElementFactory.createComplexChain(geometry);
|
| | | Assert.assertTrue(cloneElement instanceof ComplexChainElement);
|
| | | cloneElement.setLevelIndex(originElement.getLevelIndex());
|
| | | cloneElement.setColorIndex(originElement.getColorIndex());
|
| | | cloneElement.setWeight(originElement.getWeight());
|
| | | cloneElement.setLineStyle(originElement.getLineStyle());
|
| | | cloneElement.setRange(geometry.getEnvelopeInternal());
|
| | |
|
| | | int len = originElement.raw.length;
|
| | | int lenClone = cloneElement.raw.length;
|
| | | // Assert.assertEquals(lenClone, len - originElement.getUserAttributeDataOffset() + 1);
|
| | | System.out.print("clonelen=" + cloneElement.raw.length);
|
| | | for (Element elm : cloneElement) {
|
| | | System.out.print(":" + elm.raw.length);
|
| | | }
|
| | | System.out.println();
|
| | |
|
| | | if (originElement.hasUserAttributeData()) {
|
| | | Assert.assertEquals(lenClone, originElement.getUserAttributeDataOffset() + 4);
|
| | | }
|
| | | System.out.println("origin size=(" + len + ")-:- Clone size=(" + lenClone + ")");
|
| | | int headerSize = ElementFactory.MINIMAL_ELEMLENGTH / 2 + 6;
|
| | | for (int i = 0; i < headerSize; i++) {
|
| | | if (originElement.raw[i] != cloneElement.raw[i]) {
|
| | | System.out.print("different index=" + i + ":");
|
| | | System.out.println("origin[" + Integer.toHexString(originElement.raw[i])
|
| | | + "]-clone:[" + Integer.toHexString(cloneElement.raw[i]) + "]");
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
| | | private static final String ORACLE_URL = "jdbc:oracle:thin:@"; |
| | | private static final String _propUsrKey = "user"; |
| | | private static final String _propPassKey = "password"; |
| | | private static String _oracleHost = "192.168.11.200"; |
| | | private static String _oracleInstance = "NNTPC"; |
| | | private static String _oracleHost = "10.10.1.7"; |
| | | private static String _oracleInstance = "ORCL"; |
| | | private static String _oraclePort = "1521"; |
| | | |
| | | static { |
| | |
| | | |
| | | private OracleTarget() { |
| | | properties = new Properties(); |
| | | properties.put(_propUsrKey, "SPATIALDB"); |
| | | properties.put(_propPassKey, "SPATIALDB000"); |
| | | properties.put(_propUsrKey, "system"); |
| | | properties.put(_propPassKey, "simple000"); |
| | | } |
| | | |
| | | public static String getOracleHost() { |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | </parent> |
| | | |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-elmparser</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | <packaging>jar</packaging> |
| | | <name>ximple-elmparser</name> |
| | | <url>http://maven.apache.org</url> |
| | | |
| | | <properties> |
| | | <xdgnio.version>1.0.1</xdgnio.version> |
| | | <xdgnio.version>1.2.2</xdgnio.version> |
| | | </properties> |
| | | |
| | | <scm> |
| | |
| | | <url>http://www.ximple.com.tw</url> |
| | | </organization> |
| | | |
| | | <inceptionYear>2008</inceptionYear> |
| | | <inceptionYear>2010</inceptionYear> |
| | | |
| | | <developers> |
| | | <developer> |
| | |
| | | <!-- =========================================================== --> |
| | | <dependencies> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz</artifactId> |
| | | <groupId>opensymphony</groupId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | |
| | | |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-shapefile</artifactId> |
| | | <artifactId>gt-shapefile</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-sample-data</artifactId> |
| | | <artifactId>gt-sample-data</artifactId> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-data</artifactId> |
| | | <artifactId>gt-data</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-jdbc</artifactId> |
| | | <artifactId>gt-jdbc</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-postgis</artifactId> |
| | | <artifactId>gt-postgis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-oracle-spatial</artifactId> |
| | | <artifactId>gt-oracle-spatial</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-mysql</artifactId> |
| | | <artifactId>gt-mysql</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- because main and sample-data depend on referencing we need a tie breaker --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-referencing</artifactId> |
| | | <artifactId>gt-referencing</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- We need this to make the referencing module useful --> |
| | | <dependency> |
| | | <artifactId>gt2-epsg-hsql</artifactId> |
| | | <artifactId>gt-epsg-hsql</artifactId> |
| | | <groupId>org.geotools</groupId> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <artifactId>jdom</artifactId> |
| | | <groupId>org.jdom</groupId> |
| | | <artifactId>jdom</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | |
| | | |
| | | <!-- Ximple Library --> |
| | | <dependency> |
| | | <artifactId>ximple-dgnio</artifactId> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <version>${xdgnio.version}</version> |
| | | <artifactId>ximple-dgnio</artifactId> |
| | | <version>${project.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <artifactId>ximple-spatialjob</artifactId> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <version>${xdgnio.version}</version> |
| | | <artifactId>ximple-spatialjob</artifactId> |
| | | <version>${project.version}</version> |
| | | </dependency> |
| | | </dependencies> |
| | | |
| | |
| | | Use maven from the command line: |
| | | mvn exec:java -Dexec.mainClass="com.ximple.eofms.XElementParser" |
| | | --> |
| | | <artifactId>exec-maven-plugin</artifactId> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>exec-maven-plugin</artifactId> |
| | | <version>1.2.1</version> |
| | | <!-- |
| | | <executions> |
| | | <execution> |
| | |
| | | import org.geotools.feature.Feature; |
| | | import org.geotools.feature.FeatureType; |
| | | import org.geotools.feature.SimpleFeature; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.FeatureCollection; |
| | | import org.geotools.feature.FeatureCollections; |
| | | import org.geotools.data.FeatureWriter; |
| | |
| | | return; |
| | | } |
| | | |
| | | // Check whether the element matches the condition |
| | | Feature feature = elementDispatcher.execute(element, false); |
| | | if (feature == null) |
| | | { |
New file |
| | |
| | | /target/ |
| | | /xjobcarrier.log |
New file |
| | |
| | | # Create stdout appender
|
| | | log4j.rootLogger=info, logfile, stdout
|
| | |
|
| | | # Configure the stdout appender to go to the Console
|
| | | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
|
| | |
|
| | | # Configure stdout appender to use the PatternLayout
|
| | | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
| | |
|
| | | # Pattern to output the caller's file name and line number
|
| | | log4j.appender.stdout.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n
|
| | | #log4j.appender.stdout.encoding=UTF-8
|
| | |
|
| | | log4j.appender.logfile=org.apache.log4j.FileAppender
|
| | | log4j.appender.logfile.file=xjobcarrier.log
|
| | | log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
|
| | | log4j.appender.logfile.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n
|
| | | #log4j.appender.logfile.encoding=UTF-8
|
| | |
|
| | | log4j.appender.remoteout=com.holub.log4j.RemoteAppender
|
| | | log4j.appender.remoteout.Port=8011
|
| | | log4j.appender.remoteout.layout=org.apache.log4j.PatternLayout
|
| | | log4j.appender.remoteout.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n
|
| | | #log4j.appender.remoteout.encoding=UTF-8
|
| | |
|
| | | # Print messages of level INFO or above for examples
|
| | | log4j.logger.org.cavaness.quartzbook=INFO
|
| | | log4j.logger.org.quartz=DEBUG
|
| | | log4j.logger.com.ximple.eofms=DEBUG |
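| | | # These settings are normally picked up from the classpath at startup; a caller
| | | # can also load them explicitly (a sketch, assuming the log4j 1.x API already used
| | | # throughout this changeset):
| | | #   PropertyConfigurator.configure("log4j.properties");
| | | #   Logger logger = Logger.getLogger(XQuartzJobCarrier.class);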
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | </parent> |
| | | |
| | | |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-jobcarrier</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | <packaging>jar</packaging> |
| | | <name>ximple-jobcarrier</name> |
| | | <url>http://maven.apache.org</url> |
| | | |
| | | <properties> |
| | | <xdgnio.version>1.0.1</xdgnio.version> |
| | | <xdgnio.version>2.1.2</xdgnio.version> |
| | | </properties> |
| | | |
| | | <scm> |
| | |
| | | <url>http://www.ximple.com.tw</url> |
| | | </organization> |
| | | |
| | | <inceptionYear>2008</inceptionYear> |
| | | <inceptionYear>2012</inceptionYear> |
| | | |
| | | <developers> |
| | | <developer> |
| | |
| | | <contributors> |
| | | </contributors> |
| | | |
| | | <profiles> |
| | | <profile> |
| | | <id>qa</id> |
| | | <build> |
| | | <plugins> |
| | | <plugin> |
| | | <artifactId>maven-dependency-plugin</artifactId> |
| | | <executions> |
| | | <execution> |
| | | <phase>install</phase> |
| | | <goals> |
| | | <goal>copy-dependencies</goal> |
| | | </goals> |
| | | <configuration> |
| | | <outputDirectory>${project.build.directory}/lib</outputDirectory> |
| | | </configuration> |
| | | </execution> |
| | | </executions> |
| | | </plugin> |
| | | </plugins> |
| | | </build> |
| | | </profile> |
| | | </profiles> |
| | | |
| | | <!-- =========================================================== --> |
| | | <!-- Dependencies to be inherited by all modules. --> |
| | | <!-- =========================================================== --> |
| | | <dependencies> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz</artifactId> |
| | | <groupId>opensymphony</groupId> |
| | | <exclusions> |
| | | <exclusion> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | </exclusion> |
| | | </exclusions> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz-jobs</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-log4j12</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>jcl-over-slf4j</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | |
| | | |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-shapefile</artifactId> |
| | | <artifactId>gt-shapefile</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-sample-data</artifactId> |
| | | <artifactId>gt-sample-data</artifactId> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-data</artifactId> |
| | | <artifactId>gt-data</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-jdbc</artifactId> |
| | | <artifactId>gt-opengis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-postgis</artifactId> |
| | | <artifactId>gt-jdbc</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-postgis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-oracle</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-mysql</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-oracle-spatial</artifactId> |
| | | <artifactId>gt-opengis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-mysql</artifactId> |
| | | <artifactId>gt-metadata</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- because main and sample-data depend on referencing we need a tie breaker --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-referencing</artifactId> |
| | | <artifactId>gt-referencing</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- We need this to make the referencing module useful --> |
| | | <dependency> |
| | | <artifactId>gt2-epsg-hsql</artifactId> |
| | | <groupId>org.geotools</groupId> |
| | | <scope>test</scope> |
| | | <artifactId>gt-epsg-wkt</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | |
| | | <artifactId>sdoutl</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>postgresql</groupId> |
| | | <artifactId>postgresql</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.postgis</groupId> |
| | | <artifactId>postgis-driver</artifactId> |
| | | </dependency> |
| | | </dependency--> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>mysql</groupId> |
| | | <artifactId>mysql-connector-java</artifactId> |
| | | </dependency> |
| | | </dependency--> |
| | | |
| | | <!-- AWL --> |
| | | <dependency> |
| | | <groupId>org.awl</groupId> |
| | | <artifactId>awl</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>xml-apis</groupId> |
| | | <artifactId>xml-apis</artifactId> |
| | | <version>1.4.01</version> |
| | | </dependency> |
| | | |
| | | <!-- Ximple Library --> |
| | |
| | | <dependency> |
| | | <artifactId>ximple-spatialjob</artifactId> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <version>${xdgnio.version}</version> |
| | | <version>${project.version}</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>commons-net</groupId> |
| | | <artifactId>commons-net</artifactId> |
| | | |
| | | </dependency> |
| | | |
| | | </dependencies> |
| | | |
| | | <build> |
| | |
| | | Use maven from the command line: |
| | | mvn exec:java -Dexec.mainClass="com.ximple.eofms.XQuartzJobCarrier" |
| | | --> |
| | | <artifactId>exec-maven-plugin</artifactId> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <!-- |
| | | <artifactId>exec-maven-plugin</artifactId> |
| | | <version>1.2.1</version> |
| | | <executions> |
| | | <execution> |
| | | <phase>package</phase> |
| | | <!--<phase>package</phase> --> |
| | | <goals> |
| | | <goal>run</goal> |
| | | <goal>java</goal> |
| | | </goals> |
| | | </execution> |
| | | </executions> |
| | | --> |
| | | <configuration> |
| | | <mainClass>com.ximple.eofms.XQuartzJobCarrier</mainClass> |
| | | <!-- |
| | | <commandlineArgs>-wizard</commandlineArgs> |
| | | <commandlineArgs>-Dlog4j.configuration=log4j.properties</commandlineArgs> |
| | | --> |
| | | <systemProperties> |
| | | <systemProperty> |
| | | <key>log4j.configuration</key> |
| | | <value>log4j.properties</value> |
| | | </systemProperty> |
| | | </systemProperties> |
| | | </configuration> |
| | | <!-- |
| | | <dependencies> |
| | |
| | | <plugin> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>native2ascii-maven-plugin</artifactId> |
| | | <version>1.0-alpha-1</version> |
| | | <version>1.0-beta-1</version> |
| | | <configuration> |
| | | <dest>target/classes/com/ximple/eofms</dest> |
| | | <src>src/main/resources/com/ximple/eofms</src> |
| | |
| | | </goals> |
| | | <configuration> |
| | | <encoding>UTF8</encoding> |
| | | <includes>XQuartzJobWizard_zh*.properties</includes> |
| | | <includes> |
| | | <include>XQuartzJobWizard_zh*.properties</include> |
| | | </includes> |
| | | </configuration> |
| | | </execution> |
| | | </executions> |
| | |
| | | --> |
| | | <resource> |
| | | <directory>src/main/resources</directory> |
| | | <!-- |
| | | <includes> |
| | | <include>log4j.properties</include> |
| | | <include>quartz.properties</include> |
| | | <include>quartz_jobs.xml</include> |
| | | <include>quartz_jobs_edb.xml</include> |
| | | <include>quartz_jobs_sharpefiles.xml</include> |
| | | </includes> |
| | | --> |
| | | <!-- |
| | | <excludes> |
| | | <exclude>log4j.properties</exclude> |
| | | <exclude>quartz_jobs.xml</exclude> |
| | | </excludes> |
| | | --> |
| | | <!-- |
| | | <excludes> |
| | | <exclude>XQuartzJobWizard*.properties</exclude> |
New file |
| | |
| | | #===============================================================
|
| | | #Configure Main Scheduler Properties
|
| | | #===============================================================
|
| | | org.quartz.scheduler.instanceName = QuartzScheduler
|
| | | org.quartz.scheduler.instanceId = AUTO
|
| | |
|
| | | #===============================================================
|
| | | #Configure ThreadPool
|
| | | #===============================================================
|
| | | org.quartz.threadPool.threadCount = 5
|
| | | org.quartz.threadPool.threadPriority = 5
|
| | | org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool
|
| | |
|
| | | #===============================================================
|
| | | #Configure JobStore
|
| | | #===============================================================
|
| | | org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore
|
| | | org.quartz.jobStore.misfireThreshold = 60000
|
| | |
|
| | | #===============================================================
|
| | | #Configure Plugins
|
| | | #===============================================================
|
| | | org.quartz.plugin.triggHistory.class = org.quartz.plugins.history.LoggingJobHistoryPlugin
|
| | |
|
| | | org.quartz.plugin.jobInitializer.class = org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin
|
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_dmmsroadfee.xml
|
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs.xml
|
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_inc.xml
|
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_edb.xml
|
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_colowner.xml
|
| | | org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_resetview.xml
|
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_dmmsroadfee.xml
|
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_nddjpb.xml
|
| | |
|
| | | org.quartz.plugin.jobInitializer.failOnFileNotFound = true
|
| | | org.quartz.plugin.jobInitializer.scanInterval = 10
|
| | | org.quartz.plugin.jobInitializer.wrapInUserTransaction = false
|
| | |
|
| | | org.quartz.plugin.shutdownhook.class = org.quartz.plugins.management.ShutdownHookPlugin
|
| | | org.quartz.plugin.shutdownhook.cleanShutdown = true |
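| | | # A sketch of bootstrapping a scheduler from this file (assuming the standard
| | | # Quartz API; not part of this changeset):
| | | #   Scheduler scheduler = new StdSchedulerFactory("quartz.properties").getScheduler();
| | | #   scheduler.start();  # the jobInitializer plugin then loads quartz_jobs_resetview.xml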
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/triggers in the scheduler with the same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/triggers in the scheduler with the same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that convert dgn to postgis</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <!--value>/Users/Shared/Public/Projects/XGeoDMMS/xjobrun/nstpcjobs/jobdata</value--> |
| | | <value>/mnt/hdisk/home.data/private/projects/xdcad/xjobrun/nntpcjobs/jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>192.168.11.99</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgNNTPC</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.7</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://192.168.11.99:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
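| | | |
| | | The XML job definition above maps one-to-one onto Quartz 2 builder calls; the following is a hedged sketch of the programmatic equivalent (class and field names are illustrative, and only one job-data entry is shown). |
| | | |
| | | import java.util.Date; |
| | | |
| | | import org.quartz.JobBuilder; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.SimpleScheduleBuilder; |
| | | import org.quartz.Trigger; |
| | | import org.quartz.TriggerBuilder; |
| | | |
| | | import com.ximple.eofms.jobs.GeoserverIntegrateConfigJob; |
| | | |
| | | public class ResetViewJobSketch { |
| | |     // <job> plus <job-data-map>: one entry shown, the rest follow the same pattern |
| | |     static final JobDetail JOB = JobBuilder.newJob(GeoserverIntegrateConfigJob.class) |
| | |             .withIdentity("ConvertDMMS2PostGisWithGeoserver", "DEFAULT") |
| | |             .usingJobData("PGHOST", "192.168.11.99") |
| | |             .build(); |
| | | |
| | |     // <repeat-count>0</repeat-count> fires the job once at start time; |
| | |     // <repeat-interval>500</repeat-interval> (ms) only matters if repeats are added |
| | |     static final Trigger TRIGGER = TriggerBuilder.newTrigger() |
| | |             .withIdentity("convertTrigger", "DEFAULT") |
| | |             .startAt(new Date()) // the XML pins 2013-03-01T18:00:00; "now" keeps the sketch runnable |
| | |             .withSchedule(SimpleScheduleBuilder.simpleSchedule() |
| | |                     .withIntervalInMilliseconds(500) |
| | |                     .withRepeatCount(0)) |
| | |             .build(); |
| | | } |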
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertPowerThemesIntoPostGISJob</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <job-class>com.ximple.eofms.jobs.OracleConvertThemes2PostGISJob</job-class> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>/Users/ulysseskao/Projects/XGeoDMMS/xjobrun/nntpcjobs/jobdata</value> |
| | | <!--value>/mnt/hdisk/home.data/private/projects/xdcad/xjobrun/nntpcjobs/jobdata</value--> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>SYSTEM000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://192.168.11.99:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>true</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertPowerThemesIntoPostGISJob</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
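| | | |
| | | At run time the entries above arrive as the job's JobDataMap. The skeleton below is hypothetical (it is not the real OracleConvertThemes2PostGISJob); it only shows the typical read pattern for these string-valued keys. |
| | | |
| | | import org.quartz.Job; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | public class DataMapReadingJob implements Job { |
| | |     public void execute(JobExecutionContext context) throws JobExecutionException { |
| | |         // merges job-level and trigger-level entries |
| | |         JobDataMap dataMap = context.getMergedJobDataMap(); |
| | |         String pgHost = dataMap.getString("PGHOST"); |
| | |         int pgPort = Integer.parseInt(dataMap.getString("PGPORT")); |
| | |         boolean testMode = Boolean.parseBoolean(dataMap.getString("TESTMODE")); |
| | |         // connection setup and conversion branching would go here |
| | |     } |
| | | } |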
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <!--<job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class>--> |
| | | <job-class>com.ximple.eofms.jobs.DMMSRoadfeeCalculateJob</job-class> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwnerJob</job-class--> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwner2CSVJob</job-class--> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>c:/tmp/</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>SYSTEM000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.19:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertIncrementDMMS2PostGis</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <job-class>com.ximple.eofms.jobs.OracleIncrementDgn2PostGISJob</job-class> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <!--value>/home/ulysseskao/projects/xgeodmms/xjobrun/nstpcjobs/jobdata</value--> |
| | | <value>/Users/ulysseskao/Projects/XGeoDMMS/xjobrun/nstpcjobs/jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.16.17.14</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>tpc000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.16.17.14</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>nntpc</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>manager</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.7:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertIncrementDMMS2PostGis</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <!--<job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class>--> |
| | | <job-class>com.ximple.eofms.jobs.DMMSNddUpdateJob</job-class> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwnerJob</job-class--> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwner2CSVJob</job-class--> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>C:/tmp/</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>ndd</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | |
| | | <entry> |
| | | <key>ftpurl</key> |
| | | <value>ftp://10.10.1.19:21/</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ftpdir</key> |
| | | <value>/tcdaas/ndddash/</value> |
| | | </entry> |
| | | |
| | | <entry> |
| | | <key>ftpuid</key> |
| | | <value>Administrator</value> |
| | | </entry> |
| | | |
| | | <entry> |
| | | <key>ftppwd</key> |
| | | <value>simple@000</value> |
| | | </entry> |
| | | |
| | | |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.19:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwnerJob</job-class--> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwner2CSVJob</job-class--> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>/Users/ulysseskao/Projects/XGeoDMMS/xjobrun/nntpcjobs/jobdata</value> |
| | | <!--value>/home/ulysseskao/projects/xgeodmms/xjobrun/nntpcjobs/jobdata</value--> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>SYSTEM000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.7:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
| | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.awl.Wizard; |
| | | import org.quartz.DateBuilder; |
| | | import org.quartz.JobBuilder; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.Scheduler; |
| | | import org.quartz.SchedulerException; |
| | | import org.quartz.Trigger; |
| | | import org.quartz.TriggerBuilder; |
| | | import org.quartz.TriggerUtils; |
| | | import org.quartz.impl.JobDetailImpl; |
| | | import org.quartz.impl.StdSchedulerFactory; |
| | | |
| | | import com.ximple.eofms.jobs.OracleConvertDgn2ShpJob; |
| | |
| | | static Log logger = LogFactory.getLog(XQuartzJobCarrier.class); |
| | | static Options options = new Options(); |
| | | |
| | | private static final String VERSION = "1.3.1"; |
| | | |
| | | public static void main(String[] args) { |
| | | XQuartzJobCarrier instance = new XQuartzJobCarrier(); |
| | |
| | | |
| | | } catch (SchedulerException ex) { |
| | | // deal with any exceptions |
| | | logger.error(ex, ex); |
| | | shutdown = true; |
| | | } catch (Throwable throwable) { |
| | | logger.error(throwable.getMessage(), throwable); |
| | |
| | | private void scheduleJob(Scheduler scheduler) throws SchedulerException { |
| | | |
| | | // Create a JobDetail for the Job and configure the directory to scan |
| | | JobDetail jobDetail = JobBuilder.newJob(OracleConvertDgn2ShpJob.class) |
| | | .withIdentity("ScanDirectory", Scheduler.DEFAULT_GROUP) |
| | | .usingJobData("SCAN_DIR", "c:\\quartz-book\\input") |
| | | .build(); |
| | | |
| | | // Create a trigger that fires once, 10 seconds from now |
| | | Trigger trigger = TriggerBuilder.newTrigger() |
| | | .withIdentity("scanTrigger") |
| | | .startAt(DateBuilder.futureDate(10, DateBuilder.IntervalUnit.SECOND)) |
| | | .build(); |
| | | |
| | | // Associate the trigger with the job in the scheduler |
| | | scheduler.scheduleJob(jobDetail, trigger); |
| | |
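| | | |
| | | A usage sketch for the scheduleJob(...) fragment above; the call-site wiring is an assumption (only the Quartz calls are standard API), shown as it might appear in main(). |
| | | |
| | | // inside main(), after the carrier instance is created |
| | | Scheduler scheduler = new StdSchedulerFactory().getScheduler(); |
| | | scheduler.start();               // triggers only fire on a started scheduler |
| | | instance.scheduleJob(scheduler); // registers ScanDirectory with its trigger |
| | | |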
| | | title=Ximple Quartz Job Wizard |
| | | |
| | | first.title=空間資料轉換 |
| | | first.description=空間資料轉換將會讀取 OMS/CMMS 空間資料庫內容轉換至 PostGIS 空間資料庫 |
| | | first.label.text=<html>This wizard will not install anything on your computer.<br/>It only demonstrates <b>Awl</b> functionalities.</html> |
| | | |
| | | second.title=版權聲明 |
| | | second.description=Accept the license of this software |
| | | second.licenseAccepted=I accept the terms of this license agreement |
| | | second.licenseRefused=I do not accept the terms of this license agreement |
| | | second.messageContent=Accept the terms of the license |
| | | |
| | | third.title=組態設定位置 |
| | | third.description=Select the location where to install this software |
| | | third.label.text=Install location |
| | | third.fileChooser.selectLabel=Select |
| | | third.fileChooser.description=Select |
| | | third.messageContent=You must choose a valid location |
| | | |
| | | fourth.title=開始轉檔... |
| | | fourth.description=Installation progress |
| | | fourth.inProgress.text=Installation in progress... |
| | | fourth.finished.text=Installation finished |
| | |
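| | | |
| | | These wizard strings are plain java.util resource-bundle entries; a hedged loading sketch follows (the bundle base name is inferred from this module's resource layout, and the Locale choice is an assumption). |
| | | |
| | | import java.util.Locale; |
| | | import java.util.ResourceBundle; |
| | | |
| | | // fragment: load the zh_TW wizard strings |
| | | ResourceBundle bundle = ResourceBundle.getBundle( |
| | |         "com.ximple.eofms.XQuartzJobWizard", Locale.TRADITIONAL_CHINESE); |
| | | String title = bundle.getString("first.title"); // "空間資料轉換" ("spatial data conversion") |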
| | | |
| | | # Print messages of level INFO or above for examples |
| | | log4j.logger.org.cavaness.quartzbook=INFO |
| | | log4j.logger.com.ximple.eofms=INFO |
| | | |
| | | log4j.logger.it.geosolutions.geoserver=INFO |
| | |
| | | #Configure JobStore |
| | | #=============================================================== |
| | | org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore |
| | | org.quartz.jobStore.misfireThreshold = 60000 |
| | | |
| | | #=============================================================== |
| | | #Configure Plugins |
| | | #=============================================================== |
| | | org.quartz.plugin.triggHistory.class = org.quartz.plugins.history.LoggingJobHistoryPlugin |
| | | |
| | | org.quartz.plugin.jobInitializer.class = org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin |
| | | |
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs.xml |
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_inc.xml |
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_edb.xml |
| | | org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_colowner.xml |
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_dmmsroadfee.xml |
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_nddjpb.xml |
| | | |
| | | org.quartz.plugin.jobInitializer.failOnFileNotFound = true |
| | | org.quartz.plugin.jobInitializer.validating=false |
| | | org.quartz.plugin.jobInitializer.scanInterval = 10 |
| | | org.quartz.plugin.jobInitializer.wrapInUserTransaction = false |
| | | |
| | | org.quartz.plugin.shutdownhook.class = org.quartz.plugins.management.ShutdownHookPlugin |
| | | org.quartz.plugin.shutdownhook.cleanShutdown = true |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwnerJob</job-class--> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwner2CSVJob</job-class--> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>/home/ulysseskao/projects/xgeodmms/xjobrun/nntpcjobs/jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.7</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.7</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.7:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertPowerThemesIntoPostGISJob</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <job-class>com.ximple.eofms.jobs.OracleConvertThemes2PostGISJob</job-class> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>/Users/ulysseskao/Projects/XGeoDMMS/xjobrun/nntpcjobs/jobdata</value> |
| | | <!--value>/mnt/hdisk/home.data/private/projects/xdcad/xjobrun/nntpcjobs/jobdata</value--> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.19</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>SYSTEM000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://192.168.11.99:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>true</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertPowerThemesIntoPostGISJob</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <!--<job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class>--> |
| | | <job-class>com.ximple.eofms.jobs.DMMSRoadfeeCalculateJob</job-class> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwnerJob</job-class--> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwner2CSVJob</job-class--> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>/Users/Shared/Public/Projects/XGeoDMMS/xjobrun/tctpcjobs/jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.7</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.7</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.7:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDgn2GeoSpatialIntoEdb</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to edb spatial</description> |
| | | <job-class>com.ximple.eofms.jobs.OracleConvertDgn2EdbGeoJob</job-class> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>C:\Usr\Projects\XDCAD\nstpcjobs\jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBHOST</key> |
| | | <value>192.168.11.99</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBDATBASE</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBPORT</key> |
| | | <value>5444</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>192.168.11.200</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>nntpc</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>spatialdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>spatialdb000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDgn2GeoSpatialIntoEdb</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2008-03-01T18:10:00</start-time> |
| | | <!-- fire once at start-time (repeat-count 0); interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | </job-scheduling-data> |
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/triggers in the scheduler with the same names (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/triggers in the scheduler with the same names (as in this file) and overwrite is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertIncrementDMMS2PostGis</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <job-class>com.ximple.eofms.jobs.OracleIncrementDgn2PostGISJob</job-class> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>/home/ulysseskao/projects/xgeodmms/xjobrun/nstpcjobs/jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.9</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.9</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>SYSTEM000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.7:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertIncrementDMMS2PostGis</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fires once (repeat-count 0); repeat-interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
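As a cross-check on the <simple> trigger semantics used in these files, the following is a sketch of the equivalent programmatic trigger under the Quartz 1.x API; the multi-argument SimpleTrigger constructor shown here is real but was deprecated in Quartz 2.x in favor of TriggerBuilder, and the class name TriggerSketch is illustrative only.

    import java.util.Date;

    import org.quartz.SimpleTrigger;

    // Illustrative sketch of the <simple> trigger above (Quartz 1.x API).
    public class TriggerSketch {
        public static SimpleTrigger convertTrigger() {
            return new SimpleTrigger(
                    "convertTrigger", "DEFAULT",               // trigger name / group
                    "ConvertIncrementDMMS2PostGis", "DEFAULT", // job name / group
                    new Date(), null,                          // start now, no end time
                    0,                                         // repeat-count 0: fire once
                    500L);                                     // repeat-interval in ms
        }
    }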
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/triggers in the scheduler with the same names (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/triggers in the scheduler with the same names (as in this file) and overwrite is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <!--<job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class>--> |
| | | <job-class>com.ximple.eofms.jobs.DMMSNddUpdateJob</job-class> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwnerJob</job-class--> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwner2CSVJob</job-class--> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>C:/tmp/</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>10.10.1.9</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>ndd</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | |
| | | <entry> |
| | | <key>ftpurl</key> |
| | | <value>ftp://20.20.1.3:21/</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ftpdir</key> |
| | | <value>/tcdaas/ndddash/</value> |
| | | </entry> |
| | | |
| | | <entry> |
| | | <key>ftpuid</key> |
| | | <value>DMMS</value> |
| | | </entry> |
| | | |
| | | <entry> |
| | | <key>ftppwd</key> |
| | | <value>DMMS000</value> |
| | | </entry> |
| | | |
| | | |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.7</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.7:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fires once (repeat-count 0); repeat-interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
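A hedged sketch of how such a quartz_jobs.xml file gets loaded, assuming Quartz 1.8+ where org.quartz.xml.XMLSchedulingDataProcessor understands the job_scheduling_data_1_8 schema declared above. In a deployed job carrier this step is typically performed by the XMLSchedulingDataProcessorPlugin configured through quartz.properties rather than by hand.

    import org.quartz.Scheduler;
    import org.quartz.impl.StdSchedulerFactory;
    import org.quartz.simpl.CascadingClassLoadHelper;
    import org.quartz.xml.XMLSchedulingDataProcessor;

    // Sketch: load and schedule the jobs declared in quartz_jobs.xml.
    public class LoadJobsSketch {
        public static void main(String[] args) throws Exception {
            Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
            CascadingClassLoadHelper loadHelper = new CascadingClassLoadHelper();
            loadHelper.initialize();
            // Honors the pre-processing-commands and processing-directives above.
            new XMLSchedulingDataProcessor(loadHelper)
                    .processFileAndScheduleJobs("quartz_jobs.xml", scheduler);
            scheduler.start();
        }
    }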
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | </parent> |
| | | |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-spatialjob</artifactId> |
| | | <version>1.0.1</version> |
| | | <version>2.1.2</version> |
| | | <packaging>jar</packaging> |
| | | <name>ximple-spatialjob</name> |
| | | <url>http://www.ximple.com.tw</url> |
| | | |
| | | <properties> |
| | | <xdgnio.version>1.0.1</xdgnio.version> |
| | | <xdgnio.version>2.1.2</xdgnio.version> |
| | | </properties> |
| | | |
| | | <description> |
| | |
| | | <url>http://www.ximple.com.tw</url> |
| | | </organization> |
| | | |
| | | <inceptionYear>2008</inceptionYear> |
| | | <inceptionYear>2010</inceptionYear> |
| | | |
| | | <developers> |
| | | <developer> |
| | |
| | | <!-- =========================================================== --> |
| | | <dependencies> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz</artifactId> |
| | | <groupId>opensymphony</groupId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz-jobs</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-log4j12</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>jcl-over-slf4j</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-shapefile</artifactId> |
| | | <artifactId>gt-shapefile</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-sample-data</artifactId> |
| | | <artifactId>gt-sample-data</artifactId> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-data</artifactId> |
| | | <artifactId>gt-data</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-jdbc</artifactId> |
| | | <artifactId>gt-jdbc</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-postgis</artifactId> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-postgis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-oracle-spatial</artifactId> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-oracle</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-mysql</artifactId> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-mysql</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- because main and sample-data depend on referencing we need a tie breaker --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt2-referencing</artifactId> |
| | | <artifactId>gt-referencing</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- We need this to make the referencing module useful --> |
| | | <dependency> |
| | | <artifactId>gt2-epsg-hsql</artifactId> |
| | | <groupId>org.geotools</groupId> |
| | | <scope>test</scope> |
| | | <artifactId>gt-epsg-wkt</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | |
| | | <artifactId>sdoutl</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>postgresql</groupId> |
| | | <artifactId>postgresql</artifactId> |
| | | </dependency> |
| | | </dependency--> |
| | | <dependency> |
| | | <groupId>org.postgis</groupId> |
| | | <artifactId>postgis-driver</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>mysql</groupId> |
| | | <artifactId>mysql-connector-java</artifactId> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>xedb-gt-geospatial</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- Ximple Library --> |
| | | <!--dependency> |
| | | <groupId>mysql</groupId> |
| | | <artifactId>mysql-connector-java</artifactId> |
| | | </dependency--> |
| | | |
| | | <dependency> |
| | | <groupId>net.sf.opencsv</groupId> |
| | | <artifactId>opencsv</artifactId> |
| | | <version>2.3</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>commons-net</groupId> |
| | | <artifactId>commons-net</artifactId> |
| | | |
| | | </dependency> |
| | | |
| | | <!-- Ximple Library --> |
| | | <dependency> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnio</artifactId> |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.LinkedList; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.LinkedList; |
| | | |
| | | public abstract class AbstractDispatchableFilter implements ElementDispatchableFilter { |
| | | private String name; |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import java.util.List; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | import java.util.List; |
| | | |
| | | public abstract class AbstractFLinkageDispatchableFilter extends AbstractDispatchableFilter { |
| | | public static FrammeAttributeData getFeatureLinkage(Element element) { |
| | | if (!element.hasUserAttributeData()) |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.Feature; |
| | | import org.geotools.feature.FeatureType; |
| | | import org.geotools.feature.FeatureTypeBuilder; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.*; |
| | | import com.ximple.io.dgn7.ArcElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | public class CreateArcLineStringStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateArcLineStringStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | TreeMap<String, FeatureTypeBuilder> typeBuilders = new TreeMap<String, FeatureTypeBuilder>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public FeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createArcFeatureTypeBuilder(featureName); |
| | | typeBuilders.put(featureName, typeBuilder); |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createArcFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, featureType)); |
| | | |
| | | } |
| | | return typeBuilders.get(featureName).getFeatureType(); |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public Feature createFeature(FeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | Feature feature = null; |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof ArcElement) { |
| | | ArcElement lineStringElement = (ArcElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineStringElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineStringElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineStringElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineStringElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(lineStringElement.getColorIndex()), |
| | | (short) lineStringElement.getWeight(), |
| | | (short) lineStringElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } |
| | | return feature; |
| | | } |
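The strategy diffs above and below all follow the same migration: from the retired GeoTools Feature/FeatureType/FeatureTypeBuilder API to SimpleFeature/SimpleFeatureType built through SimpleFeatureTypeBuilder and SimpleFeatureBuilder. A minimal self-contained sketch of the new pattern follows; the type name DEMO and its attributes are illustrative, not taken from FeatureTypeBuilderUtil.

    import com.vividsolutions.jts.geom.Coordinate;
    import com.vividsolutions.jts.geom.GeometryFactory;
    import com.vividsolutions.jts.geom.LineString;
    import org.geotools.feature.simple.SimpleFeatureBuilder;
    import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
    import org.opengis.feature.simple.SimpleFeature;
    import org.opengis.feature.simple.SimpleFeatureType;

    // Illustrative sketch of the SimpleFeature API the strategies migrate to.
    public class SimpleFeatureSketch {
        public static SimpleFeature example() {
            // Build the type once (the strategies cache these in a TreeMap).
            SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();
            tb.setName("DEMO");
            tb.add("geom", LineString.class);   // geometry attribute
            tb.add("fsc", Integer.class);       // plain attribute
            SimpleFeatureType type = tb.buildFeatureType();

            // The old featureType.create(Object[]) call becomes
            // SimpleFeatureBuilder.build(type, values, fid); a null fid
            // lets GeoTools generate the feature id, as the converted
            // strategies do.
            GeometryFactory gf = new GeometryFactory();
            LineString geom = gf.createLineString(new Coordinate[]{
                    new Coordinate(0, 0), new Coordinate(1, 1)});
            return SimpleFeatureBuilder.build(type, new Object[]{geom, 100}, null);
        }
    }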
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.vividsolutions.jts.geom.LineString; |
| | | import com.ximple.eofms.util.*; |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.Feature; |
| | | import org.geotools.feature.FeatureType; |
| | | import org.geotools.feature.FeatureTypeBuilder; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.vividsolutions.jts.geom.LineString; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.io.dgn7.ArcElement; |
| | | import com.ximple.io.dgn7.ComplexChainElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.LineElement; |
| | | import com.ximple.io.dgn7.LineStringElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class CreateComplexChainStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateComplexChainStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | TreeMap<String, FeatureTypeBuilder> typeBuilders = new TreeMap<String, FeatureTypeBuilder>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public FeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createMultiLineFeatureTypeBuilder(featureName); |
| | | typeBuilders.put(featureName, typeBuilder); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, typeBuilder.getFeatureType())); |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createMultiLineFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, featureType)); |
| | | } |
| | | return typeBuilders.get(featureName).getFeatureType(); |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public Feature createFeature(FeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | Feature feature = null; |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof LineStringElement) { |
| | | LineStringElement lineStringElement = (LineStringElement) element; |
| | | Geometry gobj; |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineStringElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineStringElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | convertDecorator.setConverter(lineStringElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineStringElement.toGeometry(geometryFactory); |
| | | } |
| | |
| | | gobj = geometryFactory.createMultiLineString(new LineString[]{(LineString) gobj}); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(lineStringElement.getColorIndex()), |
| | | (short) lineStringElement.getWeight(), |
| | | (short) lineStringElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } else if (element instanceof ComplexChainElement) { |
| | | ComplexChainElement complexChain = (ComplexChainElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(complexChain); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(complexChain); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(complexChain); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else |
| | | gobj = complexChain.toGeometry(geometryFactory); |
| | | if (gobj instanceof LineString) { |
| | | gobj = geometryFactory.createMultiLineString(new LineString[]{(LineString) gobj}); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(complexChain.getColorIndex()), |
| | | (short) complexChain.getWeight(), |
| | | (short) complexChain.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } else if (element instanceof LineElement) { |
| | | LineElement lineElement = (LineElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineElement.toGeometry(geometryFactory); |
| | | } |
| | |
| | | gobj = geometryFactory.createMultiLineString(new LineString[]{(LineString) gobj}); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(lineElement.getColorIndex()), |
| | | (short) lineElement.getWeight(), |
| | | (short) lineElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | return feature; |
| | | } else if (element instanceof ArcElement) { |
| | | ArcElement arcElement = (ArcElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(arcElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(arcElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(arcElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = arcElement.toGeometry(geometryFactory); |
| | | } |
| | |
| | | gobj = geometryFactory.createMultiLineString(new LineString[]{(LineString) gobj}); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | (short) arcElement.getWeight(), |
| | | (short) arcElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } |
| | | |
| | | return feature; |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.Feature; |
| | | import org.geotools.feature.FeatureType; |
| | | import org.geotools.feature.FeatureTypeBuilder; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.*; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.EllipseElement; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | public class CreateEllipseShapeStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateEllipseShapeStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | TreeMap<String, FeatureTypeBuilder> typeBuilders = new TreeMap<String, FeatureTypeBuilder>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public FeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createEllipseFeatureTypeBuilder(featureName); |
| | | typeBuilders.put(featureName, typeBuilder); |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createEllipseFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | } |
| | | return typeBuilders.get(featureName).getFeatureType(); |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public Feature createFeature(FeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | Feature feature = null; |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof EllipseElement) { |
| | | EllipseElement ellipseElement = (EllipseElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(ellipseElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(ellipseElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(ellipseElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = ellipseElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(ellipseElement.getColorIndex()), |
| | | (short) ellipseElement.getWeight(), |
| | | (short) ellipseElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } |
| | | return feature; |
| | | } |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import org.geotools.feature.Feature; |
| | | import org.geotools.feature.FeatureType; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public interface CreateFeatureTypeStrategy { |
| | | public FeatureType createFeatureElement(String featureName) throws SchemaException; |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException; |
| | | |
| | | public Feature createFeature(FeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException; |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException; |
| | | |
| | | public void addCreateFeatureTypeEventListener(CreateFeatureTypeEventListener listener); |
| | | |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.Feature; |
| | | import org.geotools.feature.FeatureType; |
| | | import org.geotools.feature.FeatureTypeBuilder; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.CoordinateList; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.vividsolutions.jts.geom.MultiLineString; |
| | | import com.ximple.eofms.util.*; |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.io.dgn7.ArcElement; |
| | | import com.ximple.io.dgn7.ComplexChainElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.LineElement; |
| | | import com.ximple.io.dgn7.LineStringElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | public class CreateLineStringStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateLineStringStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | TreeMap<String, FeatureTypeBuilder> typeBuilders = new TreeMap<String, FeatureTypeBuilder>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public FeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createLineFeatureTypeBuilder(featureName); |
| | | typeBuilders.put(featureName, typeBuilder); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, typeBuilder.getFeatureType())); |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createLineFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, featureType)); |
| | | } |
| | | return typeBuilders.get(featureName).getFeatureType(); |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public Feature createFeature(FeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | Feature feature = null; |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof LineStringElement) { |
| | | LineStringElement lineStringElement = (LineStringElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineStringElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineStringElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineStringElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineStringElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(lineStringElement.getColorIndex()), |
| | | (short) lineStringElement.getWeight(), |
| | | (short) lineStringElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } else if (element instanceof ComplexChainElement) { |
| | | ComplexChainElement complexChain = (ComplexChainElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(complexChain); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(complexChain); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(complexChain); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = complexChain.toGeometry(geometryFactory); |
| | | } |
| | |
| | | gobj = geometryFactory.createLineString(coordinateList.toCoordinateArray()); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(complexChain.getColorIndex()), |
| | | (short) complexChain.getWeight(), |
| | | (short) complexChain.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } else if (element instanceof LineElement) { |
| | | LineElement lineElement = (LineElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(lineElement.getColorIndex()), |
| | | (short) lineElement.getWeight(), |
| | | (short) lineElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | return feature; |
| | | } else if (element instanceof ArcElement) { |
| | | ArcElement arcElement = (ArcElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(arcElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(arcElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(arcElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = arcElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | (short) arcElement.getWeight(), |
| | | (short) arcElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } |
| | | |
| | | return feature; |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import com.vividsolutions.jts.geom.*; |
| | | import com.ximple.eofms.util.*; |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.Feature; |
| | | import org.geotools.feature.FeatureType; |
| | | import org.geotools.feature.FeatureTypeBuilder; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.CoordinateList; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.vividsolutions.jts.geom.LineString; |
| | | import com.vividsolutions.jts.geom.MultiLineString; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.TWDDatumConverter; |
| | | import com.ximple.io.dgn7.ComplexChainElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.LineElement; |
| | | import com.ximple.io.dgn7.LineStringElement; |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class CreateLineTextStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateLineTextStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | TreeMap<String, FeatureTypeBuilder> typeBuilders = new TreeMap<String, FeatureTypeBuilder>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public FeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createLineFeatureTypeBuilder(featureName); |
| | | typeBuilders.put(featureName, typeBuilder); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, typeBuilder.getFeatureType())); |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createLineFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, featureType)); |
| | | } |
| | | return typeBuilders.get(featureName).getFeatureType(); |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public Feature createFeature(FeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | Feature feature = null; |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof LineStringElement) { |
| | | LineStringElement lineStringElement = (LineStringElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineStringElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineStringElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineStringElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineStringElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(lineStringElement.getColorIndex()), |
| | | (short) lineStringElement.getWeight(), |
| | | (short) lineStringElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } else if (element instanceof TextElement) { |
| | | TextElement txtElement = (TextElement) element; |
| | | Coordinate ptOrigin = txtElement.getUserOrigin(); |
| | | Coordinate ptEnd = new Coordinate(); |
| | | ptEnd.x = ptOrigin.x; |
| | | ptEnd.y = ptOrigin.y + txtElement.getTextHeight(); |
| | | ptEnd.y = ptOrigin.y - txtElement.getTextHeight(); |
| | | Coordinate[] vect = new Coordinate[2]; |
| | | if (useTransform) { |
| | | vect[0] = useEPSG3826 ? |
| | | vect[0] = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) ? |
| | | TWDDatumConverter.fromTM2ToEPSG3826(ptOrigin) : |
| | | TWDDatumConverter.fromTM2ToEPSG3825(ptOrigin); |
| | | vect[1] = useEPSG3826 ? |
| | | vect[1] = FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | TWDDatumConverter.fromTM2ToEPSG3826(ptEnd) : |
| | | TWDDatumConverter.fromTM2ToEPSG3825(ptEnd); |
| | | } else { |
| | |
| | | |
| | | txtElement.getRotationAngle(); |
| | | |
| | | feature = featureType.create(new Object[]{ |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | line, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(txtElement.getColorIndex()), |
| | | (short) txtElement.getWeight(), |
| | | (short) txtElement.getLineStyle() |
| | | }); |
| | | }, null); |
| | | } else if (element instanceof ComplexChainElement) { |
| | | ComplexChainElement complexChain = (ComplexChainElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(complexChain); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(complexChain); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(complexChain); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = complexChain.toGeometry(geometryFactory); |
| | | } |
| | |
| | | gobj = geometryFactory.createLineString(coordinateList.toCoordinateArray()); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(complexChain.getColorIndex()), |
| | | (short) complexChain.getWeight(), |
| | | (short) complexChain.getLineStyle() |
| | | }, null); |
| | | } else if (element instanceof LineElement) { |
| | | LineElement lineElement = (LineElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineElement.toGeometry(geometryFactory); |
| | | } |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(lineElement.getColorIndex()), |
| | | (short) lineElement.getWeight(), |
| | | (short) lineElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | |
| | | return feature; |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.math.BigDecimal; |
| | | import java.math.RoundingMode; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.CoordinateList; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.vividsolutions.jts.util.Assert; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.GeometryConverter; |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.io.dgn7.TextNodeElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
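| | | /** |
| | |  * Strategy that turns DGN text and text-node elements into one symbol feature |
| | |  * per character, keyed by the character's octal code and font index. |
| | |  */ |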
| | | public class CreateMultiSymbolStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateMultiSymbolStrategy.class); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createMultiSymbolFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, featureType)); |
| | | } |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof TextElement) { |
| | | TextElement txtElement = (TextElement) element; |
| | |
| | | String content = txtElement.getText().trim(); |
| | | if (content.length() == 0) { |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + |
| | | "to Feature - getText() is empty."); |
| | | "to Feature - getText() is empty."); |
| | | return null; |
| | | } |
| | | StringBuilder sb = new StringBuilder(); |
| | |
| | | |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = txtElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | (float) txtElement.getTextWidth(), |
| | | (float) angle, |
| | | sb.toString() |
| | | }, null); |
| | | } else if (element instanceof TextNodeElement) { |
| | | TextNodeElement nodeElement = (TextNodeElement) element; |
| | | |
| | | |
| | | double angle = nodeElement.getRotationAngle(); |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | if (nodeElement.size() == 0) { |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + |
| | | "to Feature - getText() is empty."); |
| | | logger.info("CreateMultiSymbolStrategy cannot convert " + element.toString() + |
| | | "to Feature - getText() is empty."); |
| | | return null; |
| | | } |
| | | |
| | | Iterator<Element> txtElement = nodeElement.iterator(); |
| | | Iterator<Element> txtElement = nodeElement.iterator(); |
| | | |
| | | while(txtElement.hasNext()) |
| | | { |
| | | TextElement txtChildElement = (TextElement) element; |
| | | |
| | | char[] charArray = txtChildElement.getText().toCharArray(); |
| | | while (txtElement.hasNext()) { |
| | | if (txtElement instanceof TextElement) { |
| | | TextElement txtChildElement = (TextElement) element; |
| | | char[] charArray = txtChildElement.getText().toCharArray(); |
| | | |
| | | |
| | | if (charArray.length == 0) { |
| | | logger.info("CreateMultiSymbolStrategy cannot convert " + element.toString() + |
| | | " to Feature - getText() is empty."); |
| | | continue; |
| | | } |
| | | |
| | | |
| | | for(int i = 0 ; i < charArray.length ; i++) |
| | | { |
| | | for (int i = 0; i < charArray.length; i++) { |
| | | |
| | | StringBuilder sb = new StringBuilder(); |
| | | sb.append("OCT"); |
| | | char id = charArray[i]; |
| | | sb.append(Integer.toOctalString((int) id)); |
| | | sb.append("-"); |
| | | sb.append(txtChildElement.getFontIndex()); |
| | | StringBuilder sb = new StringBuilder(); |
| | | sb.append("OCT"); |
| | | char id = charArray[i]; |
| | | sb.append(Integer.toOctalString((int) id)); |
| | | sb.append("-"); |
| | | sb.append(txtChildElement.getFontIndex()); |
| | | |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtChildElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = txtChildElement.toGeometry(geometryFactory); |
| | | } |
| | | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | | fLinkage.getOccID(), |
| | | (short) txtChildElement.getLevelIndex(), |
| | | colorTable.getColorCode(nodeElement.getColorIndex()), |
| | | (short) txtChildElement.getWeight(), |
| | | (short) txtChildElement.getLineStyle(), |
| | | (short) txtChildElement.getJustification(), |
| | | (float) txtChildElement.getTextHeight(), |
| | | (float) txtChildElement.getTextWidth(), |
| | | (float) angle, |
| | | sb.toString() |
| | | }, null); |
| | | } |
| | | |
| | | break; |
| | | } else if (childElement instanceof TextNodeElement) { |
| | | // logger.info("CreateMultiSymbolStrategy cannot convert " + element.toString() + "to Feature"); |
| | | continue; |
| | | } else { |
| | | // TODO: Ximple |
| | | // logger.info("CreateMultiSymbolStrategy cannot convert " + element.toString() + "to Feature"); |
| | | continue; |
| | | } |
| | | } |
| | | } else { |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + "to Feature"); |
| | | logger.info("CreateMultiSymbolStrategy cannot convert " + element.toString() + "to Feature"); |
| | | return null; |
| | | } |
| | | return feature; |
| | | } |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.io.dgn7.ComplexShapeElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.ShapeElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
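| | | /** |
| | |  * Strategy that converts DGN shape and complex-shape elements into polygon features. |
| | |  */ |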
| | | public class CreateShapeStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateShapeStrategy.class); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createPolygonFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, featureType)); |
| | | } |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof ShapeElement) { |
| | | ShapeElement shapeElement = (ShapeElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(shapeElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = shapeElement.toGeometry(geometryFactory); |
| | | } |
| | | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(shapeElement.getColorIndex()), |
| | | (short) shapeElement.getWeight(), |
| | | (short) shapeElement.getLineStyle() |
| | | }, null); |
| | | } else if (element instanceof ComplexShapeElement) { |
| | | ComplexShapeElement complexShape = (ComplexShapeElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(complexShape); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = complexShape.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | colorTable.getColorCode(complexShape.getColorIndex()), |
| | | (short) complexShape.getWeight(), |
| | | (short) complexShape.getLineStyle() |
| | | }, null); |
| | | } |
| | | return feature; |
| | | } |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.awt.geom.AffineTransform; |
| | | import java.math.BigDecimal; |
| | | import java.math.RoundingMode; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.Envelope; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.vividsolutions.jts.geom.Point; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
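| | | /** |
| | |  * Strategy that converts DGN text elements into symbol features, expanding each |
| | |  * text origin into a rotated boundary polygon sized from the text metrics. |
| | |  */ |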
| | | public class CreateSymbolStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateSymbolStrategy.class); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createSymbolFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, featureType)); |
| | | } |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof TextElement) { |
| | | TextElement txtElement = (TextElement) element; |
| | |
| | | sb.append("-"); |
| | | sb.append(txtElement.getFontIndex()); |
| | | |
| | | Geometry gobj, geomOrigin = null; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = txtElement.toGeometry(geometryFactory); |
| | | } |
| | | |
| | | if (gobj instanceof Point) { |
| | | geomOrigin = gobj; |
| | | Coordinate originPt = gobj.getCoordinate(); |
| | | gobj = buildBoundaryPolygon(originPt, // txtElement.getOrigin() |
| | | txtElement.getTextWidth(), txtElement.getTextHeight(), |
| | | angle, txtElement.getJustification(), gobj.getSRID()); |
| | | } else { |
| | | gobj = null; |
| | | } |
| | | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | (float) txtElement.getTextHeight(), |
| | | (float) txtElement.getTextWidth(), |
| | | (float) angle, |
| | | sb.toString().trim(), |
| | | geomOrigin |
| | | }, null); |
| | | } else { |
| | | logger.info("CreateSymbolStrategy cannot conver " + element.toString() + "to Feature"); |
| | | } |
| | |
| | | } |
| | | } |
| | | } |
| | | |
| | | private Geometry buildBoundaryPolygon(Coordinate p, double symbolWidth, double symbolHeight, double rotationAngle, |
| | | int justification, int srid) { |
| | | double angle = Math.toRadians(rotationAngle); |
| | | |
| | | AffineTransform at = new AffineTransform(); |
| | | // at.translate(width, height); |
| | | at.setToRotation(angle, p.x, p.y); |
| | | at.scale(1, 1); |
| | | |
| | | double width = symbolWidth; |
| | | switch (justification) { |
| | | case TextElement.TXTJUST_LT: |
| | | case TextElement.TXTJUST_LC: |
| | | case TextElement.TXTJUST_LB: |
| | | width = width / 2; |
| | | break; |
| | | |
| | | case TextElement.TXTJUST_CT: |
| | | case TextElement.TXTJUST_CC: |
| | | case TextElement.TXTJUST_CB: |
| | | width = 0; |
| | | break; |
| | | |
| | | case TextElement.TXTJUST_RT: |
| | | case TextElement.TXTJUST_RC: |
| | | case TextElement.TXTJUST_RB: |
| | | width = -(width / 2); |
| | | break; |
| | | } |
| | | |
| | | double height = symbolHeight; |
| | | switch (justification) { |
| | | case TextElement.TXTJUST_LB: |
| | | case TextElement.TXTJUST_CB: |
| | | case TextElement.TXTJUST_RB: // bottom |
| | | height = height / 2; |
| | | break; |
| | | |
| | | case TextElement.TXTJUST_LC: |
| | | case TextElement.TXTJUST_CC: |
| | | case TextElement.TXTJUST_RC: // center |
| | | height = 0; |
| | | break; |
| | | |
| | | case TextElement.TXTJUST_LT: |
| | | case TextElement.TXTJUST_CT: |
| | | case TextElement.TXTJUST_RT: // top |
| | | height = -(height / 2); |
| | | break; |
| | | } |
| | | |
| | | |
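| | | // Build an axis-aligned box centered on the justification-adjusted anchor, |
| | | // then rotate its four corners about the text origin via the transform above. |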
| | | Envelope envelope = new Envelope(new Coordinate(p.x + width, p.y + height)); |
| | | envelope.expandBy(symbolWidth / 2, symbolHeight / 2); |
| | | |
| | | double[] srcPt = new double[8]; |
| | | double[] dstPt = new double[8]; |
| | | srcPt[0] = envelope.getMinX(); |
| | | srcPt[1] = envelope.getMinY(); |
| | | srcPt[2] = envelope.getMinX(); |
| | | srcPt[3] = envelope.getMaxY(); |
| | | srcPt[4] = envelope.getMaxX(); |
| | | srcPt[5] = envelope.getMaxY(); |
| | | srcPt[6] = envelope.getMaxX(); |
| | | srcPt[7] = envelope.getMinY(); |
| | | |
| | | at.transform(srcPt, 0, dstPt, 0, 4); |
| | | |
| | | Coordinate[] coords = new Coordinate[5]; |
| | | for (int i = 0; i < 4; i++) { |
| | | coords[i] = new Coordinate(dstPt[i*2], dstPt[i*2+1]); |
| | | } |
| | | coords[4] = new Coordinate(dstPt[0], dstPt[1]); |
| | | |
| | | Geometry geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(coords), null); |
| | | if (geom.getSRID() != srid) { |
| | | geom.setSRID(srid); |
| | | } |
| | | return geom; |
| | | } |
| | | } |
| | | |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.math.BigDecimal; |
| | | import java.math.RoundingMode; |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.io.dgn7.TextNodeElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
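| | | /** |
| | |  * Strategy that converts DGN text and text-node elements into point features |
| | |  * carrying the text content and rotation angle as attributes. |
| | |  */ |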
| | | public class CreateTextStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateTextStrategy.class); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | return null; |
| | | } |
| | | |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (!typeBuilders.containsKey(featureName)) { |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createPointFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | typeBuilders.put(featureName, featureType); |
| | | fireFeatureTypeEvent(new FeatureTypeEvent(this, featureType)); |
| | | } |
| | | return typeBuilders.get(featureName); |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | | if (fLinkage == null) return null; |
| | | if (element instanceof TextElement) { |
| | | TextElement txtElement = (TextElement) element; |
| | |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | String content = txtElement.getText(); |
| | | content = content.replace('\u0000', ' '); |
| | | content = content.trim(); |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = txtElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | (float) txtElement.getTextWidth(), |
| | | (float) angle, |
| | | content |
| | | }, null); |
| | | } else if (element instanceof TextNodeElement) { |
| | | TextNodeElement nodeElement = (TextNodeElement) element; |
| | | String[] texts = nodeElement.getTextArray(); |
| | |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(nodeElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = nodeElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | (float) nodeElement.getTextNodeLength(), |
| | | (float) angle, |
| | | sb.toString() |
| | | }, null); |
| | | } |
| | | return feature; |
| | | } |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | |
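| | | /** |
| | |  * A dispatch rule: decides whether it handles a given DGN element and, if so, |
| | |  * converts that element into a SimpleFeature. |
| | |  */ |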
| | | public interface ElementDispatchableFilter { |
| | | public boolean isDispatchable(Element element); |
| | | |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform); |
| | | |
| | | void setUseLongName(boolean useLongName); |
| | | |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.util.LinkedList; |
| | | |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | |
| | | public class ElementDispatcher implements CreateFeatureTypeEventListener { |
| | | private LinkedList<ElementDispatchableFilter> rules; |
| | |
| | | } |
| | | } |
| | | |
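| | | // Rules are evaluated in list order; the first filter that accepts the element produces the feature. |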
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform) { |
| | | for (ElementDispatchableFilter rule : rules) { |
| | | if (rule.isDispatchable(element)) { |
| | | return rule.execute(element, distId, useTransform); |
| | | } |
| | | } |
| | | return null; |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import java.util.ArrayList; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | |
| | | public class ElementLevelCriterion implements Comparable { |
| | | private int elementLevel; |
| | | private ArrayList<Integer> elementLevelArray; |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import java.util.ArrayList; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | |
| | | public class ElementTypeCriterion implements Comparable { |
| | | private int elementType; |
| | | private ArrayList<Integer> elementTypeArray; |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import java.util.EventObject; |
| | | |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public class FeatureTypeEvent extends EventObject { |
| | | private SimpleFeatureType featureType; |
| | | |
| | | /** |
| | | * Constructs a prototypical Event. |
| | |
| | | * @param featureType featureType |
| | | * @throws IllegalArgumentException if source is null. |
| | | */ |
| | | public FeatureTypeEvent(Object source, SimpleFeatureType featureType) { |
| | | super(source); |
| | | this.featureType = featureType; |
| | | } |
| | | |
| | | public SimpleFeatureType getFeatureType() { |
| | | return featureType; |
| | | } |
| | | } |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import com.ximple.eofms.util.StringUtils; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | |
| | | public class TypeCompIdDispatchableFilter extends AbstractFLinkageDispatchableFilter |
| | | implements CreateFeatureTypeEventListener { |
| | |
| | | this.useLongName = useLongName; |
| | | } |
| | | |
| | | // Determine whether the element matches this filter's criteria |
| | | public boolean isDispatchable(Element element) { |
| | | FrammeAttributeData featureLinkage = getFeatureLinkage(element); |
| | | return featureLinkage != null && tid == featureLinkage.getFsc() && |
| | |
| | | (compareType(element) == 0); |
| | | } |
| | | |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform) { |
| | | try { |
| | | String ftName = getFeatureTypeName(element); |
| | | SimpleFeatureType ftype = createStrategy.createFeatureElement(ftName); |
| | | return createStrategy.createFeature(ftype, element, distId, useTransform); |
| | | } catch (SchemaException e) { |
| | | logger.error(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import com.ximple.eofms.util.StringUtils; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | |
| | | public class TypeCompLevelIdDispatchableFilter extends AbstractFLinkageDispatchableFilter |
| | | implements CreateFeatureTypeEventListener { |
| | |
| | | (lid == element.getLevelIndex()) && (compareLevel(element) == 0); |
| | | } |
| | | |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform) { |
| | | try { |
| | | String ftName = getFeatureTypeName(element); |
| | | SimpleFeatureType ftype = createStrategy.createFeatureElement(ftName); |
| | | return createStrategy.createFeature(ftype, element, distId, useTransform); |
| | | } catch (SchemaException e) { |
| | | logger.error(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import com.ximple.eofms.util.StringUtils; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | |
| | | public class TypeIdDispatchableFilter extends AbstractFLinkageDispatchableFilter implements CreateFeatureTypeEventListener { |
| | | private int tid; |
| | |
| | | (compareType(element) == 0); |
| | | } |
| | | |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform) { |
| | | try { |
| | | String ftName = getFeatureTypeName(element); |
| | | SimpleFeatureType ftype = createStrategy.createFeatureElement(ftName); |
| | | return createStrategy.createFeature(ftype, element, distId, useTransform); |
| | | } catch (SchemaException e) { |
| | | logger.error(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
New file |
| | |
| | | package com.ximple.eofms.geoserver.config; |
| | | |
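| | | /** |
| | |  * Value bean for one xgeos data-config entry: a grouping key (PG) plus the |
| | |  * FSC/COMP/LEV/WEIGHT selectors and the feature-type name (FTYPE) they map to. |
| | |  */ |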
| | | public class XGeosDataConfig { |
| | | private String PG; |
| | | private short FSC; |
| | | private short COMP; |
| | | private short LEV; |
| | | private short WEIGHT; |
| | | private String FTYPE; |
| | | |
| | | public XGeosDataConfig() { |
| | | } |
| | | |
| | | public String getPG() { |
| | | return PG; |
| | | } |
| | | |
| | | public void setPG(String PG) { |
| | | this.PG = PG; |
| | | } |
| | | |
| | | public short getFSC() { |
| | | return FSC; |
| | | } |
| | | |
| | | public void setFSC(short FSC) { |
| | | this.FSC = FSC; |
| | | } |
| | | |
| | | public short getCOMP() { |
| | | return COMP; |
| | | } |
| | | |
| | | public void setCOMP(short COMP) { |
| | | this.COMP = COMP; |
| | | } |
| | | |
| | | public short getLEV() { |
| | | return LEV; |
| | | } |
| | | |
| | | public void setLEV(short LEV) { |
| | | this.LEV = LEV; |
| | | } |
| | | |
| | | public short getWEIGHT() { |
| | | return WEIGHT; |
| | | } |
| | | |
| | | public void setWEIGHT(short WEIGHT) { |
| | | this.WEIGHT = WEIGHT; |
| | | } |
| | | |
| | | public String getFTYPE() { |
| | | return FTYPE; |
| | | } |
| | | |
| | | public void setFTYPE(String FTYPE) { |
| | | this.FTYPE = FTYPE; |
| | | } |
| | | |
| | | public String toString() { |
| | | return "XGeosDataConfig{" + |
| | | "PG='" + (PG != null ? PG : "null") + '\'' + |
| | | ", FSC=" + FSC + |
| | | ", COMP=" + COMP + |
| | | ", LEV=" + LEV + |
| | | ", WEIGHT=" + WEIGHT + |
| | | ", FTYPE='" + (FTYPE != null ? FTYPE : "null") + '\'' + |
| | | '}'; |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.geoserver.config; |
| | | |
| | | import org.apache.commons.collections.MultiMap; |
| | | import org.apache.commons.collections.map.MultiValueMap; |
| | | |
| | | public class XGeosDataConfigMapping { |
| | | private MultiValueMap mappings; |
| | | |
| | | public XGeosDataConfigMapping() { |
| | | mappings = new MultiValueMap(); |
| | | } |
| | | |
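| | | // A MultiValueMap keeps every config registered under the same PG key, |
| | | // so one key can map to several XGeosDataConfig entries. |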
| | | public void addConfig(XGeosDataConfig config) { |
| | | mappings.put(config.getPG(), config); |
| | | } |
| | | |
| | | public MultiMap getMapping() { |
| | | return mappings; |
| | | } |
| | | } |
| | |
| | | import java.io.IOException; |
| | | import java.io.InputStream; |
| | | import java.nio.BufferOverflowException; |
| | | import java.nio.ByteBuffer; |
| | | import java.sql.Connection; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.ArrayList; |
| | | import java.util.Map; |
| | | import java.util.StringTokenizer; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.oracle.OracleNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.Job; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import com.vividsolutions.jts.util.Assert; |
| | | |
| | | import oracle.sql.BLOB; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | |
| | | public abstract class AbstractOracleDatabaseJob implements Job { |
| | | /** |
| | |
| | | private static final String CONVERTDB = "CONVERTDB"; |
| | | private static final String CONVERTFILE = "CONVERTFILE"; |
| | | private static final String CONVERTELEMIN = "CONVERTELEMIN"; |
| | | private static final String CONVERTPWTHEMES = "CONVERTPWTHEMES"; |
| | | private static final String CREATEDUMMY = "CREATEDUMMY"; |
| | | private static final String ELEMLOG = "ELEMLOG"; |
| | | private static final String ORAHOST = "ORAHOST"; |
| | |
| | | private static final String TESTCOUNT = "TESTCOUNT"; |
| | | private static final String COPYCONNECTIVITYMODE = "COPYCONNECTIVITYMODE"; |
| | | private static final String PROFILEMODE = "PROFILEMODE"; |
| | | private static final String USEEPSG3826 = "USEEPSG3826"; |
| | | private static final String USEZONE121 = "USEZONE121"; |
| | | private static final String IGNORE_DBETL = "IGNORE_DBETL"; |
| | | |
| | | protected static OracleNGDataStoreFactory dataStoreFactory = new OracleNGDataStoreFactory(); |
| | | |
| | | protected String _dataPath; |
| | | protected String _filterPath; |
| | |
| | | protected String _convertDB; |
| | | protected String _convertFile; |
| | | protected String _convertElementIn; |
| | | protected String _convertPWThemes; |
| | | protected String _elementLogging; |
| | | protected String _createDummy; |
| | | protected ArrayList<String> _orgSchema = new ArrayList<String>(); |
| | | protected boolean _testMode = false; |
| | | protected boolean _copyConnectivityMode = false; |
| | | protected boolean _profileMode = false; |
| | | protected boolean _useEPSG3826 = true; |
| | | protected boolean _useZone121 = true; |
| | | protected boolean _useTransform = true; |
| | | protected boolean _ignoreDBETL = false; |
| | | |
| | | protected int _testCount = -1; |
| | | protected JDBCDataStore sourceDataStore; |
| | | private boolean driverFound = true; |
| | | |
| | | private long _processTime; |
| | |
| | | _convertDB = dataMap.getString(CONVERTDB); |
| | | _convertFile = dataMap.getString(CONVERTFILE); |
| | | _convertElementIn = dataMap.getString(CONVERTELEMIN); |
| | | _convertPWThemes = dataMap.getString(CONVERTPWTHEMES); |
| | | _elementLogging = dataMap.getString(ELEMLOG); |
| | | _createDummy = dataMap.getString(CREATEDUMMY); |
| | | |
| | |
| | | _testCount = dataMap.getIntFromString(TESTCOUNT); |
| | | _copyConnectivityMode = dataMap.getBooleanFromString(COPYCONNECTIVITYMODE); |
| | | _profileMode = dataMap.getBooleanFromString(PROFILEMODE); |
| | | _useEPSG3826 = dataMap.getBooleanFromString(USEEPSG3826); |
| | | _useZone121 = dataMap.getBooleanFromString(USEZONE121); |
| | | _ignoreDBETL = dataMap.getBooleanFromString(IGNORE_DBETL); |
| | | |
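| | | // EPSG:3826 is TWD97 / TM2 zone 121 (Taiwan proper); EPSG:3825 is TWD97 / TM2 zone 119 (Penghu, Kinmen and Matsu). |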
| | | if (_useZone121) { |
| | | FeatureTypeBuilderUtil.setDefaultFeatureSRID(3826); |
| | | } else { |
| | | FeatureTypeBuilderUtil.setDefaultFeatureSRID(3825); |
| | | } |
| | | |
| | | // Validate the required input |
| | | if (_dataPath == null) { |
| | |
| | | } |
| | | |
| | | protected abstract AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, boolean useTransform); |
| | | |
| | | protected byte[] getBytesFromBLOB(BLOB blob) throws SQLException, BufferOverflowException { |
| | | byte[] raw = null; |
| | |
| | | !_convertElementIn.equalsIgnoreCase("no") && !_convertElementIn.equalsIgnoreCase("0"); |
| | | } |
| | | |
| | | public boolean checkConvertPWThemes() { |
| | | return _convertPWThemes != null && !_convertPWThemes.equalsIgnoreCase("false") && |
| | | !_convertPWThemes.equalsIgnoreCase("no") && !_convertPWThemes.equalsIgnoreCase("0"); |
| | | } |
| | | |
| | | public String getElementLogging() { |
| | | return _elementLogging; |
| | | } |
| | |
| | | return _useTransform; |
| | | } |
| | | |
| | | public boolean isZone121() { |
| | | return _useZone121; |
| | | } |
| | | |
| | | public boolean isIgnoreDBETL() { |
| | | return _ignoreDBETL; |
| | | } |
| | | |
| | | public void set_ignoreDBETL(boolean _ignoreDBETL) { |
| | | this._ignoreDBETL = _ignoreDBETL; |
| | | } |
| | | |
| | | public DataStore getSourceDataStore() { |
| | |
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); |
| | | } |
| | | Map<String, String> map = new TreeMap<String, String>(); |
| | | map.put("host", _oracleHost); |
| | | map.put("port", _oraclePort); |
| | | map.put("instance", _oracleInstance); |
| | | map.put("user", _username); |
| | | map.put("passwd", _password); |
| | | map.put("dbtype", "oracle"); |
| | | map.put("alias", _oracleInstance); |
| | | map.put("namespace", null); |
| | | if (!map.containsKey(OracleDataStoreFactory.MAXCONN.key)) { |
| | | map.put(OracleDataStoreFactory.MAXCONN.key, "5"); |
| | | map.put(OracleNGDataStoreFactory.HOST.key, _oracleHost); |
| | | map.put(OracleNGDataStoreFactory.PORT.key, _oraclePort); |
| | | map.put(OracleNGDataStoreFactory.DATABASE.key, _oracleInstance); |
| | | map.put(OracleNGDataStoreFactory.USER.key, _username); |
| | | map.put(OracleNGDataStoreFactory.PASSWD.key, _password); |
| | | map.put(OracleNGDataStoreFactory.DBTYPE.key, "oracle"); |
| | | map.put(OracleNGDataStoreFactory.NAMESPACE.key, null); |
| | | |
| | | if (!map.containsKey(OracleNGDataStoreFactory.MAXCONN.key)) { |
| | | map.put(OracleNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | if (!map.containsKey(OracleDataStoreFactory.MINCONN.key)) { |
| | | map.put(OracleDataStoreFactory.MINCONN.key, "1"); |
| | | if (!map.containsKey(OracleNGDataStoreFactory.MINCONN.key)) { |
| | | map.put(OracleNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!dataStoreFactory.canProcess(map)) { |
| | |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | sourceDataStore = dataStoreFactory.createDataStore(map); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | |
| | | public final void resetUpdateTime() { |
| | | _updateTime = 0; |
| | | } |
| | | |
| | | /** |
| | | * Copy connectivity data into the CONNECTIVITY_WEBCHECK working table. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws java.sql.SQLException sql exception |
| | | */ |
| | | protected void copyConnectivity(AbstractOracleJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | ResultSet rsMeta = connection.getMetaData().getTables(null, "BASEDB", |
| | | AbstractOracleJobContext.CONNECTIVITY_WEBCHECK_NAME + "%", |
| | | new String[]{"TABLE"}); |
| | | |
| | | boolean found = false; |
| | | try { |
| | | while (rsMeta.next()) { |
| | | String tablename = rsMeta.getString(3); |
| | | if (AbstractOracleJobContext.CONNECTIVITY_WEBCHECK_NAME.equalsIgnoreCase(tablename)) { |
| | | found = true; |
| | | break; |
| | | } |
| | | } |
| | | // } catch (SQLException e) |
| | | } finally { |
| | | if (rsMeta != null) { |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | } |
| | | } |
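| | | // Reuse (truncate) the web-check table when it already exists; otherwise |
| | | // create it with its indexes before copying the connectivity rows. |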
| | | Statement stmt = connection.createStatement(); |
| | | if (found) { |
| | | stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | } else { |
| | | getLogger().info("Create CONNECTIVITY_WEBCHECK table."); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_1); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_2); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_3); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_4); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_5); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_6); |
| | | stmt.execute(AbstractOracleJobContext.ALTER_CONNECTIVITY_WEBCHECK_1); |
| | | stmt.execute(AbstractOracleJobContext.ALTER_CONNECTIVITY_WEBCHECK_2); |
| | | } |
| | | |
| | | stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | stmt.close(); |
| | | } |
| | | |
| | | protected void fetchTPData(AbstractOracleJobContext jobContext) { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | try { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = stmt.executeQuery(AbstractOracleJobContext.FETCH_TPDATA); |
| | | if (rs.next()) { |
| | | short disId = rs.getShort(1); |
| | | jobContext.setDistId(disId); |
| | | |
| | | String distName = rs.getString(2); |
| | | jobContext.setDistName(distName); |
| | | } |
| | | rs.close(); |
| | | stmt.close(); |
| | | } catch (SQLException e) { |
| | | getLogger().warn("Fetch TPDATA Error.", e); |
| | | } |
| | | } |
| | | |
| | | protected void createHibernateSequence(AbstractOracleJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | try { |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_HIBERNATE_SEQUENCE); |
| | | stmt.close(); |
| | | } catch (SQLException e) { |
| | | getLogger().warn("HIBERNATE_SEQUENCE is already exist."); |
| | | } |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.FeatureDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.GeneralDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.IndexDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.*; |
| | | import com.ximple.io.dgn7.*; |
| | | import com.ximple.util.PrintfFormat; |
| | | import oracle.jdbc.OracleConnection; |
| | | import oracle.jdbc.OracleResultSet; |
| | | import oracle.sql.ARRAY; |
| | | import oracle.sql.BLOB; |
| | | import org.apache.commons.collections.OrderedMap; |
| | | import org.apache.commons.collections.OrderedMapIterator; |
| | | import org.apache.commons.collections.map.LinkedMap; |
| | | import org.apache.commons.dbcp.DelegatingConnection; |
| | | import org.apache.commons.io.output.ByteArrayOutputStream; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.net.ftp.FTP; |
| | | import org.apache.commons.net.ftp.FTPClient; |
| | | import org.apache.commons.net.ftp.FTPReply; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.postgresql.PGConnection; |
| | | import org.postgresql.copy.CopyManager; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import java.io.*; |
| | | import java.math.BigDecimal; |
| | | import java.net.MalformedURLException; |
| | | import java.net.URL; |
| | | import java.nio.BufferOverflowException; |
| | | import java.nio.ByteBuffer; |
| | | import java.nio.ByteOrder; |
| | | import java.nio.channels.FileChannel; |
| | | import java.sql.*; |
| | | import java.util.*; |
| | | import java.util.Date; |
| | | /** |
| | | * Created by Alchemist on 2014/4/7. |
| | | */ |
| | | public class DMMSNddUpdateJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(DMMSNddUpdateJob.class); |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; // (sic) renaming this key would break existing job definitions
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final boolean useTpclidText = false; |
| | | |
| | | private static final int FETCHSIZE = 30; |
| | | private static final int COMMITSIZE = 100; |
| | | private static final String INDEXPATHNAME = "index"; |
| | | private static final String OTHERPATHNAME = "other"; |
| | | public static final String FORWARDFLOW_MARK = "shape://ccarrow"; |
| | | public static final String BACKFLOW_MARK = "shape://rccarrow"; |
| | | public static final String UNFLOW_MARK = "shape://backslash"; |
| | | public static final String NONFLOW_MARK = "shape://slash"; |
| | | |
| | | private static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1, DIR FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | private static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR ORDER BY TAG_SFSC"; |
| | | |
| | | private static String CREATE_OWNERTABLE = "CREATE TABLE %s (tid smallint not null, oid int not null, owner smallint not null)";
| | | private static String CREATE_COLORTABLE = "CREATE TABLE %s (tid smallint not null, oid int not null, dyncolor varchar(10) not null)";
| | | |
| | | public static final String FDYNCOLOR_SUFFIX = "_fdyncolor"; |
| | | public static final String FOWNER_SUFFIX = "_fowner"; |
| | | |
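| | | // A minimal key/value holder: in this job, getBlobStorageList fills
| | | // Pair.first with the SD$SPACENODES space-table name, while
| | | // getRawFormatStorageList fills Pair.second with the raw-format table name
| | | // for the same node id.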
| | | protected static class Pair { |
| | | Object first; |
| | | Object second; |
| | | |
| | | public Pair(Object first, Object second) { |
| | | this.first = first; |
| | | this.second = second; |
| | | } |
| | | } |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | // protected OracleConvertEdbGeoJobContext oracleJobContext; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | /*
| | | logger.info("PGHOST=" + _pgHost);
| | | logger.info("PGDATBASE=" + _pgDatabase);
| | | logger.info("PGPORT=" + _pgPort);
| | | logger.info("PGSCHEMA=" + _pgSchema);
| | | logger.info("PGUSER=" + _pgUsername);
| | | logger.info("PGPASS=" + _pgPassword);
| | | logger.info("USEWKB=" + _pgUseWKB);
| | | */
| | | |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) {
| | | logger.warn("PGUSER is null");
| | | throw new JobExecutionException("Unknown PostGIS username.");
| | | }
| | | if (_pgPassword == null) {
| | | logger.warn("PGPASS is null");
| | | throw new JobExecutionException("Unknown PostGIS password.");
| | | }
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | |
| | | String temp = dataMap.getString("ftpurl");
| | | if (temp == null) {
| | | logger.warn("ftpurl is not configured; defaulting to ftp://127.0.0.1:21/");
| | | temp = "ftp://127.0.0.1:21/";
| | | }
| | | remote.put("ftpurl", temp);
| | | 
| | | temp = dataMap.getString("ftpuid");
| | | if (temp == null) {
| | | temp = "anonymous";
| | | }
| | | remote.put("ftpuid", temp);
| | | 
| | | temp = dataMap.getString("ftppwd");
| | | if (temp == null) {
| | | temp = "";
| | | }
| | | remote.put("ftppwd", temp);
| | | 
| | | temp = dataMap.getString("ftpdir");
| | | if (temp == null) {
| | | temp = "tcdaas/featureImg";
| | | }
| | | remote.put("ftpdir", temp);
| | | pgProperties = remote;
| | | } |
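| | | 
| | | // JobDataMap keys read above (all from the quartz job definition): PGHOST,
| | | // PGDATBASE, PGPORT, PGSCHEMA, PGUSER, PGPASS, USEWKB, plus the optional
| | | // ftpurl / ftpuid / ftppwd / ftpdir entries, which default to
| | | // ftp://127.0.0.1:21/, "anonymous", "" and "tcdaas/featureImg" respectively.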
| | | |
| | | |
| | | |
| | | private List<String[]> sqlExecQuery(Connection connection, String strSQLIn, String[] params) throws SQLException {
| | | // Expand positional placeholders %s1..%sN with the given parameters.
| | | String strSQL = strSQLIn;
| | | for (int i = 0; i < params.length; i++) {
| | | if (params[i] == null) params[i] = "";
| | | strSQL = strSQL.replace("%s" + String.valueOf(i + 1), params[i]);
| | | }
| | | List<String[]> result = new ArrayList<String[]>();
| | | List<String> temp = new ArrayList<String>();
| | | Statement stmt = null;
| | | ResultSet rs = null;
| | | 
| | | try {
| | | stmt = connection.createStatement();
| | | rs = stmt.executeQuery(strSQL);
| | | 
| | | ResultSetMetaData rsmd = rs.getMetaData();
| | | int numOfCol = rsmd.getColumnCount();
| | | 
| | | // Collect every row as a String[] of its column values.
| | | while (rs.next()) {
| | | for (int idx = 0; idx < numOfCol; idx++) {
| | | temp.add(rs.getString(idx + 1));
| | | }
| | | result.add(temp.toArray(new String[0]));
| | | temp.clear();
| | | }
| | | return result;
| | | } finally {
| | | JDBCUtils.close(rs);
| | | JDBCUtils.close(stmt);
| | | }
| | | }
| | | private void sqlExec(Connection connection, String strSQLIn, String[] params) throws SQLException {
| | | // Expand positional placeholders %s1..%sN with the given parameters.
| | | String strSQL = strSQLIn;
| | | for (int i = 0; i < params.length; i++) {
| | | if (params[i] == null) params[i] = "";
| | | strSQL = strSQL.replace("%s" + String.valueOf(i + 1), params[i]);
| | | }
| | | Statement stmt = null;
| | | try {
| | | stmt = connection.createStatement();
| | | stmt.execute(strSQL);
| | | } finally {
| | | JDBCUtils.close(stmt);
| | | }
| | | }
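| | | 
| | | // Both helpers expand %s1..%sN by plain string substitution, e.g.
| | | // (hypothetical values) sqlExecQuery(conn,
| | | //   "SELECT * FROM ndd.currdata WHERE sr=%s1", new String[]{"1"})
| | | // runs "SELECT * FROM ndd.currdata WHERE sr=1". These are not bind
| | | // variables, so the parameters are assumed to be trusted internal values.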
| | | |
| | | private String findValue(String strSource, String findTag)
| | | {
| | | int idx = strSource.indexOf(findTag);
| | | if (idx < 0) return "";
| | | int iStart = strSource.indexOf("\"", idx);
| | | int iEnd = strSource.indexOf("\"", iStart + 1);
| | | return strSource.substring(iStart + 1, iEnd);
| | | }
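| | | 
| | | // findValue pulls the first double-quoted value that follows findTag, e.g.
| | | // (hypothetical input) findValue("<neighbor ufid=\"123\">", "neighbor ufid")
| | | // returns "123"; it returns "" when the tag is absent.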
| | | |
| | | |
| | | |
| | | |
| | | private void doJob(Connection postsql, String[] info) throws SQLException |
| | | { |
| | | // Double switch: the job only runs when the matching schedule row in the
| | | // database is enabled (checked via jobOnLine below).
| | | Date dtnow = new Date();
| | | // Fetch all XML files; each carries dept, county, district, neighbor,
| | | // y/m/d/t and affected-customer counts.
| | | String typhoonName=""; |
| | | String typhoonID=""; |
| | | String department=""; |
| | | String county=""; |
| | | String district=""; |
| | | String neighbor=""; |
| | | String affectCustomers=""; |
| | | String affectCustomersEver=""; |
| | | String[] tmpArray; |
| | | String sTemp; |
| | | List<String> arraySQLVals = new ArrayList<String>();
| | | boolean bActiveCheckDBSchedule = true;
| | | if (!jobOnLine(postsql, "nddcanton") && bActiveCheckDBSchedule) {
| | | return;
| | | }
| | | logger.info("begin nddxml to postsql"); |
| | | logger.info("getftpfile..."); |
| | | String[] xmls= getNDDStrings(info, "neighbor_affect_customers.xml") ; |
| | | |
| | | logger.info(String.format("total %d file(s)",xmls.length)); |
| | | for(int iRow=0;iRow<xmls.length;iRow++) |
| | | { |
| | | arraySQLVals.clear(); |
| | | tmpArray= xmls[iRow].split("\n"); |
| | | for(int iLine=0;iLine<tmpArray.length;iLine++) |
| | | { |
| | | sTemp= findValue(tmpArray[iLine],"typhoonName"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | typhoonName= sTemp; |
| | | typhoonID= getTyphoonIDByName(postsql,typhoonName); |
| | | // |
| | | sTemp= findValue(tmpArray[iLine],"Department id"); |
| | | department=sTemp; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"county ufid"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | county=sTemp; |
| | | } |
| | | sTemp= findValue(tmpArray[iLine],"district ufid"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | district=sTemp; |
| | | } |
| | | sTemp= findValue(tmpArray[iLine],"neighbor ufid"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | neighbor=sTemp; |
| | | sTemp= findValue(tmpArray[iLine],"affectCustomers"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | affectCustomers=sTemp; |
| | | } |
| | | else |
| | | { |
| | | affectCustomers="0"; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"affectCustomersEver"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | affectCustomersEver=sTemp; |
| | | } |
| | | else |
| | | { |
| | | affectCustomersEver="0"; |
| | | } |
| | | arraySQLVals.add(String.format("(%s,%s,%s,%s,%s,%s,%s",typhoonID,department,county,district,neighbor,affectCustomers,affectCustomersEver)); |
| | | // insert into nddcanton_history (project_id,department_id,county_id,district_id,neighbor_id,affectcustomers,affectcustomersever |
| | | //yy,mm,dd,tt |
| | | } |
| | | |
| | | } |
| | | //!! |
| | | String yy="0000"+String.valueOf( dtnow.getYear()+1900); |
| | | String mm="00"+String.valueOf( dtnow.getMonth()+1); |
| | | String dd="00"+String.valueOf( dtnow.getDate()); |
| | | String t0="00"+ String.valueOf( dtnow.getHours()); |
| | | String t1="00"+ String.valueOf( dtnow.getMinutes()); |
| | | yy= yy.substring(yy.length()-4); |
| | | mm= mm.substring(mm.length()-2); |
| | | dd= dd.substring(dd.length()-2); |
| | | t0= t0.substring(t0.length()-2); |
| | | t1= t1.substring(t1.length()-2); |
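| | | // The five fields above are zero-padded by prepending zeros and keeping the
| | | // rightmost digits (e.g. month 7 becomes "07"). Each entry pushed into
| | | // arraySQLVals earlier was deliberately left without its closing
| | | // parenthesis; the INSERT loop below appends these timestamp fields and
| | | // closes it.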
| | | String insertDBSQL=" insert into ndd.nddcanton_history (project_id,department_id,county_id,district_id,neighbor_id,affectcustomers,affectcustomersever"+ |
| | | ",yy,mm,dd,tt,ts_ser) values "; |
| | | for(int j=0;j<arraySQLVals.size();j++) |
| | | { |
| | | sqlExec(postsql,insertDBSQL + arraySQLVals.get(j)+ |
| | | String.format(",%s,%s,%s,'%s%s',%s)", |
| | | yy,mm,dd,t0,t1, |
| | | yy+mm+dd+"."+t0+t1 |
| | | ) , |
| | | new String[]{}); |
| | | |
| | | } |
| | | |
| | | String strSQLUpdateCurr="update ndd.currdata set yy='%s',mm='%s',dd='%s',tt='%s%s' where sr=1"; |
| | | sqlExec(postsql, |
| | | String.format(strSQLUpdateCurr, |
| | | yy,mm,dd,t0,t1 |
| | | ) , |
| | | new String[]{}); |
| | | logger.info("next xml");
| | | }
| | | logger.info("done");
| | | }
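| | | 
| | | // doJob2 below mirrors doJob, but parses feeder_affect_customers.xml into
| | | // ndd.nddfeeder_history at three levels (substation, main transformer,
| | | // feeder) and stamps row sr=2 of ndd.currdata instead of sr=1.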
| | | private void doJob2(Connection postsql, String[] info) throws SQLException |
| | | { |
| | | // Double switch: the job only runs when the matching schedule row in the
| | | // database is enabled (checked via jobOnLine below).
| | | Date dtnow = new Date();
| | | // Fetch all XML files; each carries dept, substation, transformer, feeder,
| | | // y/m/d/t and affected-customer counts.
| | | String typhoonName=""; |
| | | String typhoonID=""; |
| | | String department=""; |
| | | String department_id=""; |
| | | String substation=""; |
| | | String substation_ufid=""; |
| | | String substation_affectCustomers=""; |
| | | String substation_nopower=""; |
| | | |
| | | String mxfmr_name=""; |
| | | String mxfmr_ufid=""; |
| | | String mxfmr_affectCustomers=""; |
| | | String mxfmr_nopower=""; |
| | | |
| | | String feeder_name=""; |
| | | String feeder_id=""; |
| | | String feeder_affectCustomers=""; |
| | | String feeder_nopower=""; |
| | | |
| | | String[] tmpArray; |
| | | String sTemp; |
| | | List<String> arraySQLVals= new ArrayList<String>(); |
| | | |
| | | if (!jobOnLine(postsql, "nddfeeder")) {
| | | return;
| | | }
| | | |
| | | String yy="0000"+String.valueOf( dtnow.getYear()+1900); |
| | | String mm="00"+String.valueOf( dtnow.getMonth()+1); |
| | | String dd="00"+String.valueOf( dtnow.getDate()); |
| | | String t0="00"+ String.valueOf( dtnow.getHours()); |
| | | String t1="00"+ String.valueOf( dtnow.getMinutes()); |
| | | yy= yy.substring(yy.length()-4); |
| | | mm= mm.substring(mm.length()-2); |
| | | dd= dd.substring(dd.length()-2); |
| | | t0= t0.substring(t0.length()-2); |
| | | t1= t1.substring(t1.length()-2); |
| | | |
| | | logger.info("begin nddxml(feeder) to postsql"); |
| | | logger.info("getftpfile..."); |
| | | String[] xmls= getNDDStrings(info, "feeder_affect_customers.xml") ; |
| | | logger.info(String.format("total %d file(s)",xmls.length)); |
| | | for(int iRow=0;iRow<xmls.length;iRow++) |
| | | { |
| | | arraySQLVals.clear(); |
| | | tmpArray= xmls[iRow].split("\n"); |
| | | for(int iLine=0;iLine<tmpArray.length;iLine++) |
| | | { |
| | | sTemp= findValue(tmpArray[iLine],"typhoonName"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | typhoonName= sTemp; |
| | | typhoonID= getTyphoonIDByName(postsql,typhoonName); |
| | | // |
| | | sTemp= findValue(tmpArray[iLine],"Department id"); |
| | | department_id=sTemp; |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"name"); |
| | | department=sTemp; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"Substation name"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | substation=sTemp; |
| | | sTemp= findValue(tmpArray[iLine],"ufid"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | substation_ufid=sTemp; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"affectCustomers"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | substation_affectCustomers=sTemp; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"noPowerAll"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | substation_nopower=sTemp; |
| | | } |
| | | // |
| | | arraySQLVals.add(String.format("(%s,%s,%s,%s,%s,'%s','%s','%s','%s',%s,%s,%s,%s,%s,'%s%s')", |
| | | typhoonID, |
| | | department_id,substation_ufid,"-1","-1", |
| | | department,substation," "," ", |
| | | substation_affectCustomers,substation_nopower, |
| | | yy,mm,dd,t0,t1)); |
| | | } |
| | | |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"Mxfmr name"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | mxfmr_name=sTemp; |
| | | sTemp= findValue(tmpArray[iLine],"ufid"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | mxfmr_ufid=sTemp; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"affectCustomers"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | mxfmr_affectCustomers=sTemp; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"noPowerAll"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | mxfmr_nopower=sTemp; |
| | | } |
| | | arraySQLVals.add(String.format("(%s,%s,%s,%s,%s,'%s','%s','%s','%s',%s,%s,%s,%s,%s,'%s%s')", |
| | | typhoonID, |
| | | department_id,substation_ufid,mxfmr_ufid,"-1", |
| | | department,substation,mxfmr_name," ", |
| | | mxfmr_affectCustomers,mxfmr_nopower, |
| | | yy,mm,dd,t0,t1)); |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"Feeder name"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | feeder_name=sTemp; |
| | | sTemp= findValue(tmpArray[iLine],"id"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | feeder_id=sTemp; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"affectCustomers"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | feeder_affectCustomers=sTemp; |
| | | } |
| | | |
| | | sTemp= findValue(tmpArray[iLine],"noPowerAll"); |
| | | if(sTemp.length()>0) |
| | | { |
| | | feeder_nopower=sTemp; |
| | | } |
| | | arraySQLVals.add(String.format("(%s,%s,%s,%s,%s,'%s','%s','%s','%s',%s,%s,%s,%s,%s,'%s%s')", |
| | | typhoonID, |
| | | department_id,substation_ufid,mxfmr_ufid,feeder_id, |
| | | department,substation,mxfmr_name,feeder_name, |
| | | feeder_affectCustomers,feeder_nopower, |
| | | yy,mm,dd,t0,t1)); |
| | | } |
| | | // insert into nddcanton_history (project_id,department_id,county_id,district_id,neighbor_id,affectcustomers,affectcustomersever |
| | | //yy,mm,dd,tt |
| | | |
| | | |
| | | } |
| | | //!! |
| | | |
| | | String insertDBSQL= |
| | | " insert into ndd.nddfeeder_history (project_id,department_id,substation_id,mxfmr_id,feeder_id,"+ |
| | | "department_name,substation_name,mxfmr_name,feeder_name,"+ |
| | | "affectCustomers,nopower"+ |
| | | ",yy,mm,dd,tt) values "; |
| | | for(int j=0;j<arraySQLVals.size();j++) |
| | | { |
| | | sqlExec(postsql,insertDBSQL + arraySQLVals.get(j) |
| | | , |
| | | new String[]{}); |
| | | |
| | | } |
| | | |
| | | String strSQLUpdateCurr="update ndd.currdata set yy='%s',mm='%s',dd='%s',tt='%s%s' where sr=2"; |
| | | sqlExec(postsql, |
| | | String.format(strSQLUpdateCurr, |
| | | yy,mm,dd,t0,t1 |
| | | ) , |
| | | new String[]{}); |
| | | logger.info("next xml");
| | | }
| | | logger.info("done");
| | | }
| | | |
| | | /* |
| | | private void doJob(Connection postsql,Connection orcl) throws SQLException |
| | | { |
| | | String strSQLGetTask="select proc_id,procname,datastore,name,step,src,dest,txtsql from roadfee_proc where rowstatus=1 and procname like 'STEP%' order by procname,step" ; |
| | | List<String[]> joblist=null; |
| | | Connection inConnection; |
| | | int idOfJob=0; |
| | | |
| | | List<String[]> nodata= new ArrayList<String[]>(); |
| | | List<String[]> lista= new ArrayList<String[]>(); |
| | | List<String[]> list1= new ArrayList<String[]>(); |
| | | List<String[]> listIn= new ArrayList<String[]>(); |
| | | List<String[]> temp;//= new ArrayList<String[]>(); |
| | | nodata.add(new String[]{""}); |
| | | // proc_id[0],procname[1],datastore[2\,name[3],step[4], src[5],des[6]t,txtsql[7] |
| | | try{ |
| | | logger.info("getJoblist"); |
| | | joblist=sqlExecQuery(postsql, strSQLGetTask, new String[]{}); |
| | | |
| | | for ( idOfJob=0;idOfJob<joblist.size();idOfJob++) |
| | | { |
| | | logger.info("begin "+joblist.get(idOfJob)[1]+"-"+joblist.get(idOfJob)[3]+"("+joblist.get(idOfJob)[0]+")"); |
| | | if(joblist.get(idOfJob)[5].equals("nodata")) |
| | | { |
| | | listIn=nodata; |
| | | } |
| | | else if(joblist.get(idOfJob)[5].equals("list1")) |
| | | { |
| | | listIn=list1; |
| | | } |
| | | else if(joblist.get(idOfJob)[5].equals("lista")) |
| | | { |
| | | listIn=lista; |
| | | } |
| | | |
| | | if(joblist.get(idOfJob)[2].equals("psql")) |
| | | { |
| | | inConnection= postsql; |
| | | } |
| | | else if(joblist.get(idOfJob)[2].equals("orcl")) |
| | | { |
| | | inConnection= orcl; |
| | | } |
| | | else |
| | | return ; //connection failed |
| | | |
| | | if( joblist.get(idOfJob)[6].equals("list1")) list1.clear(); |
| | | if( joblist.get(idOfJob)[6].equals("lista")) lista.clear(); |
| | | //runsql |
| | | logger.info("process data count: "+String.valueOf(listIn.size())); |
| | | |
| | | for( int idxOfListIn=0;idxOfListIn< listIn.size();idxOfListIn++) |
| | | { |
| | | |
| | | if( joblist.get(idOfJob)[6].equals("nodata")) |
| | | { |
| | | sqlExec(inConnection, joblist.get(idOfJob)[7], listIn.get(idxOfListIn)); |
| | | //logger.info("finish "+joblist.get(idOfJob)[1]+"-"+joblist.get(idOfJob)[3]+"("+joblist.get(idOfJob)[0]+")") |
| | | |
| | | continue; |
| | | }else |
| | | { |
| | | temp=sqlExecQuery(inConnection, joblist.get(idOfJob)[7], listIn.get(idxOfListIn)); |
| | | |
| | | } |
| | | |
| | | |
| | | for(int j=0;j<temp.size();j++) |
| | | { |
| | | if( joblist.get(idOfJob)[6].equals("list1")) |
| | | { |
| | | list1.add(temp.get(j)); |
| | | } |
| | | else if( joblist.get(idOfJob)[6].equals("lista")) |
| | | { |
| | | lista.add(temp.get(j)); |
| | | } |
| | | } |
| | | } |
| | | |
| | | |
| | | } |
| | | |
| | | }catch(SQLException sqlex) |
| | | { |
| | | logger.warn("ERROR@ID:"+String.valueOf( joblist.get(idOfJob)[0])); |
| | | throw sqlex; |
| | | } |
| | | |
| | | |
| | | } |
| | | */ |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | if (isIgnoreDBETL()) { |
| | | return; |
| | | } |
| | | |
| | | //createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | /* |
| | | if (getSourceDataStore() == null) { |
| | | logger.warn("Cannot connect source oracle database."); |
| | | throw new JobExecutionException("Cannot connect source oracle database."); |
| | | } |
| | | */ |
| | | if (getTargetDataStore() == null) {
| | | logger.warn("Cannot connect to target PostgreSQL database.");
| | | throw new JobExecutionException("Cannot connect to target PostgreSQL database.");
| | | }
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName, targetThemeTable; |
| | | try { |
| | | //logger.info("-- step:clearOutputDatabase --"); |
| | | doJob(targetDataStore.getConnection(Transaction.AUTO_COMMIT),new String[]{ |
| | | pgProperties.get("ftpurl"), |
| | | pgProperties.get("ftpuid"), |
| | | pgProperties.get("ftppwd"), |
| | | pgProperties.get("ftpdir") |
| | | }); |
| | | doJob2 (targetDataStore.getConnection(Transaction.AUTO_COMMIT),new String[]{ |
| | | pgProperties.get("ftpurl"), |
| | | pgProperties.get("ftpuid"), |
| | | pgProperties.get("ftppwd"), |
| | | pgProperties.get("ftpdir") |
| | | }); |
| | | // doJob( targetDataStore.getConnection(Transaction.AUTO_COMMIT),sourceDataStore.getConnection(Transaction.AUTO_COMMIT) ); |
| | | |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | |
| | | }finally { |
| | | disconnect(); |
| | | } |
| | | logger.info(jobName + " end at " + new Date());
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | private void executeConvert(OracleConvertPostGISJobContext jobContext,
| | | String querySchema, String targetSchemaName) throws SQLException { |
| | | int order = 0; |
| | | OrderedMap map = getBlobStorageList(jobContext.getOracleConnection(), |
| | | querySchema, "SD$SPACENODES", null); |
| | | |
| | | logger.info("begin convert job:[" + map.size() + "]:testmode=" + _testMode); |
| | | |
| | | int total = map.size(); //spacenodes count |
| | | int step = total / 100; |
| | | int current = 0; |
| | | |
| | | if (total == 0) { |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is zero."); |
| | | return; |
| | | } |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size()); |
| | | |
| | | //jobContext.startTransaction(); |
| | | jobContext.setCurrentSchema(querySchema); |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", 0); |
| | | for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext(); ) { |
| | | it.next(); |
| | | |
| | | Pair pair = (Pair) it.getValue(); |
| | | String tableSrc = (String) pair.first; |
| | | |
| | | logger.info("begin convert:[" + order + "]-" + tableSrc); |
| | | queryIgsetElement(jobContext, querySchema, tableSrc); |
| | | |
| | | order++; |
| | | |
| | | if (_testMode) { |
| | | if ((_testCount < 0) || (order >= _testCount)) |
| | | break; |
| | | } |
| | | |
| | | if ((order % COMMITSIZE) == 0) { |
| | | // OracleConnection connection = jobContext.getOracleConnection(); |
| | | // connection.commitTransaction(); |
| | | jobContext.commitTransaction(); |
| | | //jobContext.startTransaction(); |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } |
| | | |
| | | if (step != 0) { |
| | | int now = order % step; |
| | | if (now != current) { |
| | | current = now; |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", current); |
| | | |
| | | } |
| | | } else { |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", current); |
| | | current++; |
| | | } |
| | | } |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", 100); |
| | | |
| | | jobContext.commitTransaction(); |
| | | jobContext.resetFeatureContext(); |
| | | 
| | | |
| | | logger.info("end convert job:[" + order + "]"); |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } |
| | | |
| | | protected OrderedMap getBlobStorageList(Connection connection, String schemaSrc, String tableSrc, |
| | | OrderedMap orderedMap) throws SQLException { |
| | | if (orderedMap == null) |
| | | orderedMap = new LinkedMap(99); |
| | | String fetchStmtFmt = "SELECT SNID, SPACETABLE FROM \"%s\".\"%s\""; |
| | | PrintfFormat spf = new PrintfFormat(fetchStmtFmt); |
| | | String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc}); |
| | | Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | ResultSet rs = null; |
| | | |
| | | stmt.setFetchSize(FETCHSIZE); |
| | | try { |
| | | rs = stmt.executeQuery(fetchStmt); |
| | | int size = rs.getMetaData().getColumnCount(); |
| | | |
| | | while (rs.next()) { |
| | | Object[] values = new Object[size]; |
| | | |
| | | for (int i = 0; i < size; i++) { |
| | | values[i] = rs.getObject(i + 1); |
| | | } |
| | | |
| | | Integer key = ((BigDecimal) values[0]).intValue(); |
| | | String name = (String) values[1]; |
| | | |
| | | Pair pair = (Pair) orderedMap.get(key); |
| | | if (pair == null) |
| | | orderedMap.put(key, new Pair(name, null)); |
| | | else |
| | | pair.first = name; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.error(e.toString(), e); |
| | | logger.error("stmt=" + fetchStmt); |
| | | throw e; |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | |
| | | return orderedMap; |
| | | } |
| | | |
| | | protected OrderedMap getRawFormatStorageList(OracleConnection connection, String schemaSrc, String tableSrc, |
| | | OrderedMap orderedMap) throws SQLException { |
| | | if (orderedMap == null) |
| | | orderedMap = new LinkedMap(99); |
| | | String fetchStmtFmt = "SELECT RNID, SPACETABLE FROM \"%s\".\"%s\""; |
| | | PrintfFormat spf = new PrintfFormat(fetchStmtFmt); |
| | | String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc}); |
| | | Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | |
| | | stmt.setFetchSize(FETCHSIZE); |
| | | ResultSet rs = stmt.executeQuery(fetchStmt); |
| | | try { |
| | | int size = rs.getMetaData().getColumnCount(); |
| | | while (rs.next()) { |
| | | Object[] values = new Object[size]; |
| | | |
| | | for (int i = 0; i < size; i++) { |
| | | values[i] = rs.getObject(i + 1); |
| | | } |
| | | |
| | | Integer key = ((BigDecimal) values[0]).intValue(); |
| | | String name = (String) values[1]; |
| | | |
| | | Pair pair = (Pair) orderedMap.get(key); |
| | | if (pair == null) |
| | | orderedMap.put(key, new Pair(null, name)); |
| | | else |
| | | pair.second = name; |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | return orderedMap; |
| | | } |
| | | |
| | | protected void queryIgsetElement(OracleConvertPostGISJobContext jobContext, |
| | | String srcschema, String srctable) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" ORDER BY ROWID"; |
| | | //String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" WHERE TAG_SFSC = 423 AND TAG_LUFID = 21612065 ORDER BY ROWID"; |
| | | PrintfFormat spf = new PrintfFormat(fetchSrcStmtFmt); |
| | | String fetchSrcStmt = spf.sprintf(new Object[]{srcschema, srctable}); |
| | | Statement stmtSrc = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | |
| | | stmtSrc.setFetchSize(FETCHSIZE); |
| | | ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt); |
| | | int igdsMetaType = rsSrc.getMetaData().getColumnType(1); |
| | | while (rsSrc.next()) { |
| | | if (isProfileMode()) { |
| | | markQueryTime(); |
| | | } |
| | | |
| | | byte[] raw = null; |
| | | if (igdsMetaType == Types.BLOB) { |
| | | BLOB blob = (BLOB) rsSrc.getBlob(1); |
| | | |
| | | try { |
| | | raw = getBytesFromBLOB(blob); |
| | | } catch (BufferOverflowException e) { |
| | | logger.warn("Wrong Element Structure-", e); |
| | | } finally { |
| | | // blob.close(); |
| | | } |
| | | } else { |
| | | raw = rsSrc.getBytes(1); |
| | | } |
| | | |
| | | try { |
| | | if (raw != null) { |
| | | Element element = fetchBinaryElement(raw); |
| | | if (isProfileMode()) { |
| | | accumulateQueryTime(); |
| | | } |
| | | jobContext.putFeatureCollection(element); |
| | | } else { |
| | | if (isProfileMode()) { |
| | | accumulateQueryTime(); |
| | | } |
| | | } |
| | | } catch (Dgn7fileException e) { |
| | | logger.warn("Dgn7Exception", e); |
| | | } |
| | | } |
| | | |
| | | JDBCUtils.close(rsSrc); |
| | | JDBCUtils.close(stmtSrc); |
| | | } |
| | | |
| | | protected void queryRawElement(OracleConvertPostGISJobContext jobContext, |
| | | String srcschema, String srctable) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | String fetchDestStmtFmt = "SELECT ELEMENT FROM \"%s\".\"%s\" ORDER BY ROWID"; |
| | | PrintfFormat spf = new PrintfFormat(fetchDestStmtFmt); |
| | | String fetchDestStmt = spf.sprintf(new Object[]{srcschema, srctable}); |
| | | Statement stmtDest = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | |
| | | stmtDest.setFetchSize(FETCHSIZE); |
| | | ResultSet rsDest = stmtDest.executeQuery(fetchDestStmt); |
| | | |
| | | try { |
| | | while (rsDest.next()) { |
| | | ARRAY rawsValue = ((OracleResultSet) rsDest).getARRAY(1); |
| | | long[] rawData = rawsValue.getLongArray(); |
| | | byte[] compressedValue;
| | | 
| | | /*
| | | if (dataMode == TransferTask.DataMode.Normal)
| | | {
| | | compressedValue = BinConverter.unmarshalByteArray(rawData, true);
| | | } else
| | | {
| | | compressedValue = BinConverter.unmarshalCompactByteArray(rawData);
| | | }
| | | */
| | | compressedValue = BinConverter.unmarshalByteArray(rawData, true);
| | | 
| | | byte[] rawDest = ByteArrayCompressor.decompressByteArray(compressedValue);
| | | |
| | | try { |
| | | Element element = fetchBinaryElement(rawDest); |
| | | jobContext.putFeatureCollection(element); |
| | | } catch (Dgn7fileException e) { |
| | | logger.warn("Dgn7Exception:" + e.getMessage(), e); |
| | | } |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rsDest); |
| | | JDBCUtils.close(stmtDest); |
| | | } |
| | | } |
| | | |
| | | // Binary to Element |
| | | private Element fetchBinaryElement(byte[] raws) throws Dgn7fileException { |
| | | ByteBuffer buffer = ByteBuffer.wrap(raws); |
| | | buffer.order(ByteOrder.LITTLE_ENDIAN); |
| | | short signature = buffer.getShort(); |
| | | |
| | | // the element type lives in the high byte of the signature (bits 8-14)
| | | byte type = (byte) ((signature >>> 8) & 0x007f);
| | | 
| | | // Bentley stores contentLength in 2-byte words, while ByteBuffer works in
| | | // raw bytes; add 4 bytes for the two 16-bit header words to get the
| | | // on-disk record length.
| | | int elementLength = (buffer.getShort() * 2) + 4;
| | | ElementType recordType = ElementType.forID(type); |
| | | IElementHandler handler; |
| | | |
| | | handler = recordType.getElementHandler(); |
| | | |
| | | Element dgnElement = (Element) handler.read(buffer, signature, elementLength); |
| | | if (recordType.isComplexElement() && (elementLength < raws.length)) { |
| | | int offset = elementLength; |
| | | while (offset < (raws.length - 4)) { |
| | | buffer.position(offset); |
| | | signature = buffer.getShort(); |
| | | type = (byte) ((signature >>> 8) & 0x007f); |
| | | elementLength = (buffer.getShort() * 2) + 4; |
| | | if (raws.length < (offset + elementLength)) { |
| | | logger.debug("Length not match:" + offset + ":" + buffer.position() + ":" + buffer.limit()); |
| | | break; |
| | | } |
| | | recordType = ElementType.forID(type); |
| | | handler = recordType.getElementHandler(); |
| | | if (handler != null) { |
| | | Element subElement = (Element) handler.read(buffer, signature, elementLength); |
| | | ((ComplexElement) dgnElement).add(subElement); |
| | | offset += elementLength; |
| | | } else { |
| | | byte[] remain = new byte[buffer.remaining()]; |
| | | System.arraycopy(raws, offset, remain, 0, buffer.remaining()); |
| | | for (int i = 0; i < remain.length; i++) { |
| | | if (remain[i] != 0) { |
| | | logger.info("fetch element has some error. index=" + (offset + i) + ":value=" + remain[i]); |
| | | } |
| | | } |
| | | break; |
| | | } |
| | | } |
| | | } |
| | | |
| | | return dgnElement; |
| | | } |
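| | | 
| | | // DGN7 record layout recap: the first 16-bit word packs the element type in
| | | // bits 8-14, hence (signature >>> 8) & 0x7f; the second word holds the
| | | // content length in 2-byte words, so a record occupies words * 2 + 4 bytes
| | | // once the two header words are counted. A signature of 0x1108 would thus
| | | // decode to type 17 (an illustrative value, not one taken from a real file).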
| | | |
| | | /** |
| | | * Convert the index DGN design files (under the "index" data directory).
| | | *
| | | * @param context job execution context
| | | * @param targetSchemaName target schema name
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | | private void convertIndexDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException { |
| | | File indexDir = new File(getDataPath(), INDEXPATHNAME); |
| | | if (!indexDir.exists()) { |
| | | logger.info("index dir=" + indexDir + " not exist."); |
| | | return; |
| | | } |
| | | |
| | | if (!indexDir.isDirectory()) { |
| | | logger.info("index dir=" + indexDir + " is not a directory."); |
| | | } |
| | | |
| | | List<File> dgnFiles = FileUtils.recurseDir(indexDir, new FileFilter() { |
| | | public boolean accept(File pathname) { |
| | | return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn"); |
| | | } |
| | | }); |
| | | |
| | | for (File dgnFile : dgnFiles) { |
| | | if (dgnFile.isDirectory()) continue; |
| | | IndexDgnConvertPostGISJobContext convertContext = |
| | | new IndexDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start index dgnfile-" + dgnFile.toString() + " ---"); |
| | | FileInputStream fs = null; |
| | | FileChannel fc = null; |
| | | Dgn7fileReader reader = null; |
| | | try { |
| | | convertContext.clearOutputDatabase(); |
| | | convertContext.setExecutionContext(context); |
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator); |
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]); |
| | | convertContext.startTransaction(); |
| | | |
| | | fs = new FileInputStream(dgnFile); |
| | | fc = fs.getChannel(); |
| | | reader = new Dgn7fileReader(fc, new Lock()); |
| | | convertContext.setReader(reader); |
| | | |
| | | scanIndexDgnElement(convertContext); |
| | | |
| | | convertContext.commitTransaction(); |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } catch (FileNotFoundException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (Dgn7fileException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (SchemaException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | if (reader != null) { |
| | | try { |
| | | reader.close(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | if (fs != null) { |
| | | try { |
| | | fs.close(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | logger.warn("Profile-Current convertContext Process Cost-" + |
| | | ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " + |
| | | (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current convertContext Update Cost-" + |
| | | ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " + |
| | | (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
| | | protected void scanIndexDgnElement(IndexDgnConvertPostGISJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | | int count = 0; |
| | | Element lastComplex = null; |
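| | | 
| | | // Complex-element assembly: component elements are appended to the pending
| | | // complex element, and any standalone element first flushes the pending
| | | // complex before being processed itself. The same pattern is reused in
| | | // scanOtherDgnElement and scanFeatureDgnElement below.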
| | | |
| | | while (reader.hasNext()) { |
| | | if (isProfileMode()) markProcessTime(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | | |
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) { |
| | | if (lastComplex != null) { |
| | | processIndexElement(lastComplex, convertContext); |
| | | lastComplex = null; |
| | | } |
| | | |
| | | processIndexElement(element, convertContext); |
| | | } else if (element.isComponentElement()) { |
| | | if (lastComplex != null) { |
| | | ((ComplexElement) lastComplex).add(element); |
| | | } |
| | | } else if (type.isComplexElement()) { |
| | | if (lastComplex != null) { |
| | | processIndexElement(lastComplex, convertContext); |
| | | } |
| | | lastComplex = element; |
| | | } |
| | | } |
| | | count++; |
| | | } |
| | | |
| | | if (lastComplex != null) { |
| | | processIndexElement(lastComplex, convertContext); |
| | | } |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processIndexElement(Element element, IndexDgnConvertPostGISJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException { |
| | | //if (useTpclidText) { |
| | | // if (element instanceof TextElement) { |
| | | // convertContext.putFeatureCollection(element); |
| | | // } |
| | | //} else { |
| | | // if (element instanceof ShapeElement) { |
| | | convertContext.putFeatureCollection(element); |
| | | // } |
| | | //} |
| | | } |
| | | |
| | | |
| | | /** |
| | | * Convert the other (general) DGN design files (under the "other" data directory).
| | | *
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | | private void convertOtherDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException { |
| | | File otherDir = new File(getDataPath(), OTHERPATHNAME); |
| | | if (!otherDir.exists()) { |
| | | logger.info("other dir=" + otherDir + " not exist."); |
| | | return; |
| | | } |
| | | |
| | | if (!otherDir.isDirectory()) { |
| | | logger.info("other dir=" + otherDir + " is not a directory."); |
| | | } |
| | | |
| | | List<File> dgnFiles = FileUtils.recurseDir(otherDir, new FileFilter() { |
| | | public boolean accept(File pathname) { |
| | | return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn"); |
| | | } |
| | | }); |
| | | |
| | | for (File dgnFile : dgnFiles) { |
| | | if (dgnFile.isDirectory()) continue; |
| | | |
| | | GeneralDgnConvertPostGISJobContext convertContext = |
| | | new GeneralDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start other dgnfile-" + dgnFile.toString() + " ---"); |
| | | FileInputStream fs = null; |
| | | FileChannel fc; |
| | | Dgn7fileReader reader = null; |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator); |
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]); |
| | | convertContext.startTransaction(); |
| | | |
| | | fs = new FileInputStream(dgnFile); |
| | | fc = fs.getChannel(); |
| | | reader = new Dgn7fileReader(fc, new Lock()); |
| | | convertContext.setReader(reader); |
| | | |
| | | scanOtherDgnElement(convertContext); |
| | | |
| | | convertContext.commitTransaction(); |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } catch (FileNotFoundException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (Dgn7fileException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (SchemaException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | if (reader != null) { |
| | | try { |
| | | reader.close(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | if (fs != null) { |
| | | try { |
| | | fs.close(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | logger.warn("Profile-Current convertContext Process Cost-" + |
| | | ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " + |
| | | (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current convertContext Update Cost-" + |
| | | ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " + |
| | | (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
| | | public void scanOtherDgnElement(GeneralDgnConvertPostGISJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | | |
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) { |
| | | if (lastComplex != null) { |
| | | processOtherElement(lastComplex, convertContext); |
| | | lastComplex = null; |
| | | } |
| | | |
| | | processOtherElement(element, convertContext); |
| | | } else if (element.isComponentElement()) { |
| | | if (lastComplex != null) { |
| | | ((ComplexElement) lastComplex).add(element); |
| | | } |
| | | } else if (type.isComplexElement()) { |
| | | if (lastComplex != null) { |
| | | processOtherElement(lastComplex, convertContext); |
| | | } |
| | | lastComplex = element; |
| | | } |
| | | } |
| | | count++; |
| | | } |
| | | |
| | | if (lastComplex != null) { |
| | | processOtherElement(lastComplex, convertContext); |
| | | } |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processOtherElement(Element element, GeneralDgnConvertPostGISJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException { |
| | | convertContext.putFeatureCollection(element); |
| | | } |
| | | |
| | | private void clearOutputDatabase() { |
| | | /* |
| | | File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | | } |
| | | outDataPath = new File(getDataPath(), IndexDgnConvertShpJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | | } |
| | | outDataPath = new File(getDataPath(), GeneralDgnConvertShpJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | | } |
| | | */ |
| | | } |
| | | |
| | | private void deleteFilesInPath(File outDataPath) { |
| | | deleteFilesInPath(outDataPath, true); |
| | | } |
| | | |
| | | private void deleteFilesInPath(File outDataPath, boolean removeSubDir) { |
| | | if (!outDataPath.isDirectory()) { |
| | | return; |
| | | } |
| | | File[] files = outDataPath.listFiles(); |
| | | for (File file : files) { |
| | | if (file.isFile()) { |
| | | if (!file.delete()) { |
| | | logger.info("Cannot delete file-" + file.toString()); |
| | | } |
| | | } else if (file.isDirectory()) { |
| | | deleteFilesInPath(file, removeSubDir); |
| | | if (removeSubDir) { |
| | | if (!file.delete()) {
| | | logger.info("Cannot delete dir-" + file.toString());
| | | } |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
| | | private void convertFeatureDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException { |
| | | File elminDir = new File(getDataPath(), "elmin"); |
| | | if (!elminDir.exists()) { |
| | | logger.info("elmin dir=" + elminDir + " not exist."); |
| | | return; |
| | | } |
| | | |
| | | if (!elminDir.isDirectory()) { |
| | | logger.info("elmin dir=" + elminDir + " is not a directory."); |
| | | } |
| | | |
| | | File[] dgnFiles = elminDir.listFiles(new FilenameFilter() { |
| | | public boolean accept(File dir, String name) { |
| | | return name.toLowerCase().endsWith(".dgn"); |
| | | } |
| | | }); |
| | | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertPostGISJobContext convertContext = |
| | | new FeatureDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator); |
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]); |
| | | convertContext.startTransaction(); |
| | | |
| | | FileInputStream fs = new FileInputStream(dgnFile); |
| | | FileChannel fc = fs.getChannel(); |
| | | Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock()); |
| | | convertContext.setReader(reader); |
| | | |
| | | scanFeatureDgnElement(convertContext); |
| | | |
| | | convertContext.commitTransaction(); |
| | | convertContext.closeFeatureWriter(); |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } catch (FileNotFoundException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (Dgn7fileException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (SchemaException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | } |
| | | } |
| | | } |
| | | |
| | | public void scanFeatureDgnElement(FeatureDgnConvertPostGISJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | | |
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) { |
| | | if (lastComplex != null) { |
| | | processFeatureElement(lastComplex, convertContext); |
| | | lastComplex = null; |
| | | } |
| | | |
| | | processFeatureElement(element, convertContext); |
| | | } else if (element.isComponentElement()) { |
| | | if (lastComplex != null) { |
| | | ((ComplexElement) lastComplex).add(element); |
| | | } |
| | | } else if (type.isComplexElement()) { |
| | | if (lastComplex != null) { |
| | | processFeatureElement(lastComplex, convertContext); |
| | | } |
| | | lastComplex = element; |
| | | } |
| | | } |
| | | count++; |
| | | } |
| | | |
| | | if (lastComplex != null) { |
| | | processFeatureElement(lastComplex, convertContext); |
| | | } |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processFeatureElement(Element element, FeatureDgnConvertPostGISJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException { |
| | | convertContext.putFeatureCollection(element); |
| | | } |
| | | |
| | | private void createDummyFeatureFile(JobExecutionContext context) throws JobExecutionException { |
| | | /* |
| | | DummyFeatureConvertShpJobContext convertContext = new DummyFeatureConvertShpJobContext(getDataPath(), _filterPath); |
| | | try { |
| | | convertContext.startTransaction(); |
| | | convertContext.commitTransaction(); |
| | | convertContext.closeFeatureWriter(); |
| | | } catch (IOException e) |
| | | { |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | */ |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | /* |
| | | if (!isDriverFound()) |
| | | { |
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); |
| | | } |
| | | */ |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.WKBENABLED.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
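| | | // Version-schema rotation: the XGVERSION table lists the candidate schemas, and the row
| | | // flagged VSSTATUS_USING is the one currently being served. This method picks the next
| | | // schema in vsid order (wrapping back to the first), marks it VSSTATUS_COVERT to signal
| | | // a conversion in progress, and returns it as the target for this ETL run.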
| | | private String determineTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XGVERSIONTABLE_NAME |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) |
| | | createXGeosVersionTable(connection, _pgSchema); |
| | | rs.close(); |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current == -1) { |
| | | Object[] values = tmpSchemas.get(0); |
| | | targetSchema = (String) values[0]; |
| | | } else if (current < (tmpSchemas.size() - 1)) { |
| | | Object[] values = tmpSchemas.get(current + 1); |
| | | targetSchema = (String) values[0]; |
| | | } else { |
| | | Object[] values = tmpSchemas.get(0); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | |
| | | sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vsstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT); |
| | | sbSQL.append(" WHERE vsschema = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
| | | private String determineTargetThemeTableName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetTable = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XPTVERSIONTABLE_NAME |
| | | needCreate = false; |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) |
| | | createXPWThemeVersionTable(connection, _pgSchema); |
| | | rs.close(); |
| | | |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vptname, vptstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vptid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpTablenames = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vptname"); |
| | | values[1] = rs.getShort("vptstatus"); |
| | | tmpTablenames.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current == -1) { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } else if (current < (tmpTablenames.size() - 1)) { |
| | | Object[] values = tmpTablenames.get(current + 1); |
| | | targetTable = (String) values[0]; |
| | | } else { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } |
| | | |
| | | sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT); |
| | | sbSQL.append(" WHERE vptname = '"); |
| | | sbSQL.append(targetTable).append("'"); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetTable + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetTable; |
| | | } |
| | | |
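| | | // Quotes identifiers for PostgreSQL; with illustrative names, ("repo", "xgversion")
| | | // yields "repo"."xgversion", and a null schema yields just the quoted table name.
| | | // Names are quoted, not escaped, so they must not contain double quotes themselves.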
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | if (schemaName == null) |
| | | return "\"" + tableName + "\""; |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | private void createXGeosVersionTable(Connection connection, String pgSchema) throws SQLException { |
| | | Statement stmt = null; |
| | | StringBuilder sql = new StringBuilder("CREATE TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" ( vsid serial PRIMARY KEY, "); |
| | | sql.append(" vsschema character varying(64) NOT NULL, "); |
| | | sql.append(" vsstatus smallint NOT NULL, "); |
| | | sql.append(" vstimestamp timestamp with time zone ) "); |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" TO public"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | for (String schemaName : DataReposVersionManager.DEFAULTXGVERSIONSCHEMA_NAMES) { |
| | | sql = new StringBuilder("INSERT INTO "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" (vsschema, vsstatus) VALUES ('"); |
| | | sql.append(schemaName).append("', "); |
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | createIfNotExistNewSchema(connection, schemaName); |
| | | } |
| | | |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void createXPWThemeVersionTable(Connection connection, String pgSchema) throws SQLException { |
| | | Statement stmt = null; |
| | | StringBuilder sql = new StringBuilder("CREATE TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" ( vptid serial PRIMARY KEY, "); |
| | | sql.append(" vptname character varying(64) NOT NULL, "); |
| | | sql.append(" vptstatus smallint NOT NULL, "); |
| | | sql.append(" vpttimestamp timestamp with time zone ) "); |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" TO public"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | for (String schemaName : DataReposVersionManager.DEFAULTXPTVERSIONTABLE_NAMES) { |
| | | sql = new StringBuilder("INSERT INTO "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" (vptname, vptstatus) VALUES ('"); |
| | | sql.append(schemaName).append("', "); |
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | } |
| | | |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void updateRepoStatusToReady(String targetSchema) { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vsstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY); |
| | | sbSQL.append(" , vstimestamp = CURRENT_TIMESTAMP WHERE vsschema = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void updatePWThemeStatusToReady(String targetSchema) { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY); |
| | | sbSQL.append(" , vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createIfNotExistNewSchema(Connection connection, String s) throws SQLException { |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | /* |
| | | rs = connection.getMetaData().getSchemas(null, s); |
| | | if (rs.next()) return; |
| | | rs.close(); |
| | | rs = null; |
| | | */ |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("CREATE SCHEMA "); |
| | | sbSQL.append(s).append(' '); |
| | | sbSQL.append("AUTHORIZATION ").append(_pgUsername); |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | |
| | | sbSQL = new StringBuilder("GRANT ALL ON SCHEMA "); |
| | | sbSQL.append(s).append(' '); |
| | | sbSQL.append("TO public"); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | } catch (SQLException e) { |
| | | logger.info("create schema:" + s + " has exception."); |
| | | logger.info(e.getMessage(), e); |
| | | } finally { |
| | | if (rs != null) rs.close(); |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
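| | | // Profiling helpers: callers bracket a source fetch with markQueryTime() before and
| | | // accumulateQueryTime() after, so queryTime sums only the time spent in source queries.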
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void convertDynamicColorTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, dyncolor) VALUES (?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setString(3, colorText); |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void convertPowerOwnerTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, fowner, flow) VALUES (?, ?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setShort(3, (short) ownerId); |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://ccarrow"); |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://rccarrow"); |
| | | } else { |
| | | pstmt.setString(4, "shape://backslash"); |
| | | } |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createOrClearTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(_pgSchema, tableName) + "CASCADE"); |
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TABLE " + encodeSchemaTableName(_pgSchema, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndex(Connection connection, String tableName) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
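| | | // COPY-based variant of convertDynamicColorTheme: instead of batched INSERTs it streams
| | | // CSV rows through PostgreSQL's CopyManager, which is typically much faster for bulk
| | | // loads. The loop buffers up to MAX_BATCHSIZE rows in a StringBuilder, pushes them into
| | | // a PushbackReader, and hands that reader to copyIn(); the delegate unwrapping below is
| | | // needed because the pooled connection must first be narrowed to a raw PGConnection.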
| | | private boolean convertDynamicColorThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(colorText).append("\n"); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | private boolean convertPowerOwnerThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | String flowMark = null; |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | flowMark = FORWARDFLOW_MARK; |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | flowMark = BACKFLOW_MARK; |
| | | } else if (ConnectivityDirectionEnum.Nondeterminate == dir) { |
| | | flowMark = NONFLOW_MARK; |
| | | } else { |
| | | flowMark = UNFLOW_MARK; |
| | | } |
| | | |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(ownerId).append(','); |
| | | sb.append(flowMark).append('\n'); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | private void createOrClearTempTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, null, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(null, tableName) + "CASCADE"); |
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TEMP TABLE " + encodeSchemaTableName(null, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndexAndDropTemp(Connection connection, String tableName, String tempTable) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | stmt.execute("CREATE TABLE " + tableName +" AS SELECT * FROM " + tempTable); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | stmt.execute("DROP TABLE " + tempTable); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
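| | | 
| | | // Minimal sketch (not called by this job) of the CopyManager pattern the two
| | | // *WithCopyAPI methods above rely on; copyCsvSketch and its arguments are illustrative only.
| | | private static void copyCsvSketch(PGConnection pg, String table, String csvRows)
| | | throws SQLException, IOException {
| | | CopyManager mgr = pg.getCopyAPI();
| | | // copyIn consumes the reader until EOF; each CSV line must match the table's column order
| | | mgr.copyIn("COPY " + table + " FROM STDIN WITH CSV", new StringReader(csvRows));
| | | }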
| | | // NOTE: getNDDList/getNDDDash read info[3] (the remote base path), so callers must pass
| | | // a four-element array; this default only covers host and credentials.
| | | String[] siteInfo = new String[]{"ftp://10.10.1.9:21", "DMMS", "DMMS000"};
| | | |
| | | private String[] getNDDList(String[] info){ |
| | | String url=info[3]; |
| | | String ftp=info[0]; |
| | | String uid=info[1]; |
| | | String pwd=info[2]; |
| | | // List<MapItemValue> tmp= dmmsSite.getFtpList(site); |
| | | //for(int i=0;i<tmp.size();i++) |
| | | //{ |
| | | // if(tmp.get(i).getGroupName().equals("featureimg")) |
| | | // { |
| | | // url="/tcdaas/ndddash/"; |
| | | String [] fileNow=getFileList(ftp,uid,pwd,url,""); |
| | | return fileNow ; |
| | | // } |
| | | //} |
| | | //return new String[]{}; |
| | | } |
| | | |
| | | private byte[] getNDDDash(String[] info, String dirname, String filename) { |
| | | String url="";//info[3]; |
| | | String ftp=info[0]; |
| | | String uid=info[1]; |
| | | String pwd=info[2]; |
| | | |
| | | dirname = dirname.replace(".", "_"); // sanitize against path tricks; the old "[.]" literal never matched
| | | filename = filename.replace("/", "_"); // sanitize against path tricks; the old "[/]" literal never matched
| | | // List<MapItemValue> tmp= dmmsSite.getFtpList(site); |
| | | String[] temp=dirname.split("/"); |
| | | dirname= temp[temp.length-1]; |
| | | |
| | | // for(int i=0;i<tmp.size();i++) |
| | | // { |
| | | // if(tmp.get(i).getGroupName().equals("featureimg")) |
| | | // { |
| | | url=info[3]+dirname+"/"; |
| | | |
| | | |
| | | byte[] bytes= getFile(ftp,uid,pwd,url,filename); |
| | | return bytes; |
| | | // return new FileTransfer(filename, "application/octet-stream",bytes); |
| | | // } |
| | | // } |
| | | // return null; |
| | | } |
| | | |
| | | private String[] getFileList(String urlString,String ftpUser,String ftpPwd,String filePath, String filter){ |
| | | // new FTPClient() does not throw; the old try/catch(Throwable) around it was dead code
| | | FTPClient ftpClient = new FTPClient();
| | | |
| | | URL url; |
| | | |
| | | // /tcdaas/dsbncard -- for feature D |
| | | // /tcdaas/mhole -- for feature D |
| | | // /tcdaas/featureimg -- for feature U/D attached and LIST |
| | | try{ |
| | | url = new URL(urlString); // e.g. "ftp://20.20.1.3:21/"
| | | ftpClient.connect( |
| | | url.getHost(),url.getPort() |
| | | ); |
| | | |
| | | if (!ftpClient.login(ftpUser, ftpPwd))
| | | { |
| | | return null; |
| | | } |
| | | int reply = ftpClient.getReplyCode(); |
| | | //FTPReply stores a set of constants for FTP reply codes. |
| | | |
| | | if (!FTPReply.isPositiveCompletion(reply)) |
| | | { |
| | | ftpClient.disconnect(); |
| | | return null; |
| | | } |
| | | ftpClient.setFileType(FTP.BINARY_FILE_TYPE); |
| | | //enter passive mode |
| | | ftpClient.enterLocalPassiveMode(); |
| | | |
| | | String[] filelist=ftpClient.listNames(filePath + (filter == null ? "" : filter)) ; |
| | | ftpClient.disconnect(); |
| | | return filelist; |
| | | } catch (MalformedURLException urlex) {
| | | logger.warn("malformed FTP url:" + urlString, urlex);
| | | } catch (Exception ex) {
| | | logger.warn(ex.getMessage(), ex);
| | | }
| | | return new String[]{}; |
| | | } |
| | | |
| | | private byte[] getFile(String urlString,String ftpUser,String ftpPwd,String filePath,String fileName){ |
| | | FTPClient ftpClient= new FTPClient(); |
| | | |
| | | URL url; |
| | | byte[] result; |
| | | // /tcdaas/dsbncard -- for feature D |
| | | // /tcdaas/mhole -- for feature D |
| | | // /tcdaas/featureimg -- for feature U/D attached and LIST |
| | | try{ |
| | | url = new URL(urlString); // e.g. "ftp://20.20.1.3:21/"
| | | ftpClient.connect( |
| | | url.getHost(),url.getPort() |
| | | ); |
| | | |
| | | if (!ftpClient.login(ftpUser, ftpPwd))
| | | { |
| | | return null; |
| | | } |
| | | int reply = ftpClient.getReplyCode(); |
| | | //FTPReply stores a set of constants for FTP reply codes. |
| | | |
| | | if (!FTPReply.isPositiveCompletion(reply)) |
| | | { |
| | | ftpClient.disconnect(); |
| | | return null; |
| | | } |
| | | ftpClient.setFileType(FTP.BINARY_FILE_TYPE); |
| | | //enter passive mode |
| | | ftpClient.enterLocalPassiveMode(); |
| | | |
| | | String[] filelist = ftpClient.listNames(filePath + fileName);
| | | ByteArrayOutputStream bos = new ByteArrayOutputStream();
| | | if (filelist != null && filelist.length > 0) // listNames returns null on error; guard against NPE
| | | { |
| | | if(ftpClient.retrieveFile(filePath+fileName,bos)) |
| | | { |
| | | result = bos.toByteArray(); // bytes are signed in Java; read as unsigned via (b & 0xff) when needed
| | | bos.close(); |
| | | } |
| | | else |
| | | { |
| | | result = null;
| | | try {
| | | bos.close();
| | | } catch (Exception ex) {
| | | // closing a ByteArrayOutputStream cannot fail; ignore
| | | }
| | | } |
| | | } |
| | | else |
| | | { |
| | | result=null; |
| | | } |
| | | |
| | | ftpClient.disconnect(); |
| | | |
| | | } catch (MalformedURLException urlex) {
| | | logger.warn("malformed FTP url:" + urlString, urlex);
| | | result = null;
| | | } catch (Exception ex) {
| | | logger.warn(ex.getMessage(), ex);
| | | result = null;
| | | }
| | | return result; |
| | | } |
| | | public String[] getNDDStrings(String[] info, String filename) |
| | | { |
| | | byte[] temp; |
| | | // String stie=getUserDept(); |
| | | |
| | | // String[] list=dmmsFtpClient.getNDDList(stie) ; |
| | | String[] list = getNDDList(info);
| | | if (list == null) return new String[]{}; // FTP listing failed
| | | |
| | | List<String> lstXML= new ArrayList<String>(); |
| | | for(int i=0;i<list.length;i++) |
| | | { |
| | | temp=getNDDDash(info, list[i], filename) ; |
| | | try{ |
| | | if(temp!=null) lstXML.add(new String(temp,"UTF-8")); |
| | | } catch (UnsupportedEncodingException ex) { |
| | | // this should never happen because "UTF-8" is hard-coded. |
| | | throw new IllegalStateException(ex); |
| | | } |
| | | } |
| | | if (lstXML.size() > 0)
| | | return lstXML.toArray(new String[0]);
| | | |
| | | return new String[]{}; |
| | | } |
| | | |
| | | |
| | | private static Map<String, String> ditTyphoon = new HashMap<String, String>(); |
| | | |
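| | | // Simple memo cache: typhoon name -> id. Lookups hit the static map first and fall back
| | | // to readOrCreateTyphoonByName, which inserts a new ndd.typhoonproject row on a miss;
| | | // addDict is synchronized so concurrent jobs register each name only once.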
| | | public String getTyphoonIDByName(Connection postsql,String typhoonName) throws SQLException |
| | | { |
| | | if (ditTyphoon.containsKey(typhoonName)) {
| | | return ditTyphoon.get(typhoonName);
| | | } else {
| | | return readOrCreateTyphoonByName(postsql, typhoonName);
| | | }
| | | } |
| | | public String readOrCreateTyphoonByName(Connection postsql,String typhoonName) throws SQLException |
| | | { |
| | | // NOTE: typhoonName is interpolated into the SQL unescaped, so callers must pass trusted names.
| | | // TODO: row_created is set to now(); adjust if the actual event time is required.
| | | List<String[]> listDict; |
| | | String strSQLSelectProject=String.format( "select typhoon_id,typhoon_name from ndd.typhoonproject where typhoon_name='%s'",typhoonName); |
| | | String strSQLInsertProject=String.format( "insert into ndd.typhoonproject (typhoon_name,row_created) values ('%s',now())",typhoonName); |
| | | |
| | | listDict= sqlExecQuery(postsql,strSQLSelectProject,new String[]{}); |
| | | if (listDict != null && listDict.size() > 0) {
| | | // already present: cache the id and return it
| | | return addDict(listDict.get(0)[0], listDict.get(0)[1]);
| | | }
| | | // not found: insert a new row, then recurse to read back the generated id
| | | logger.info(String.format("new project:%s",typhoonName)); |
| | | sqlExec(postsql,strSQLInsertProject,new String[]{}); |
| | | return readOrCreateTyphoonByName(postsql, typhoonName);
| | | } |
| | | private synchronized static String addDict(String id,String typhoon) |
| | | { |
| | | if (ditTyphoon.containsKey(typhoon))
| | | return ditTyphoon.get(typhoon);
| | | ditTyphoon.put(typhoon, id);
| | | return id;
| | | } |
| | | |
| | | public boolean jobOnLine(Connection postsql, String jobname) throws SQLException
| | | {
| | | // returns true only when ndd.schedule has a row for this name with enabled = 1
| | | List<String[]> listDict; |
| | | String strSQLSelectSchedule=String.format( "select enabled from ndd.schedule where name='%s'",jobname); |
| | | |
| | | listDict= sqlExecQuery(postsql,strSQLSelectSchedule,new String[]{}); |
| | | if (listDict.size() == 0) return false; // no schedule row: treat the job as disabled
| | | return listDict.get(0)[0].equals("1"); |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.FeatureDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.GeneralDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.IndexDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.*; |
| | | import com.ximple.io.dgn7.*; |
| | | import com.ximple.util.PrintfFormat; |
| | | import oracle.jdbc.OracleConnection; |
| | | import oracle.jdbc.OracleResultSet; |
| | | import oracle.sql.ARRAY; |
| | | import oracle.sql.BLOB; |
| | | import org.apache.commons.collections.OrderedMap; |
| | | import org.apache.commons.collections.OrderedMapIterator; |
| | | import org.apache.commons.collections.map.LinkedMap; |
| | | import org.apache.commons.dbcp.DelegatingConnection; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.postgresql.PGConnection; |
| | | import org.postgresql.copy.CopyManager; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import java.io.*; |
| | | import java.math.BigDecimal; |
| | | import java.nio.BufferOverflowException; |
| | | import java.nio.ByteBuffer; |
| | | import java.nio.ByteOrder; |
| | | import java.nio.channels.FileChannel; |
| | | import java.sql.*; |
| | | import java.util.*; |
| | | import java.util.Date; |
| | | |
| | | public class DMMSRoadfeeCalculateJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(DMMSRoadfeeCalculateJob.class); |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final boolean useTpclidText = false; |
| | | |
| | | private static final int FETCHSIZE = 30; |
| | | private static final int COMMITSIZE = 100; |
| | | private static final String INDEXPATHNAME = "index"; |
| | | private static final String OTHERPATHNAME = "other"; |
| | | public static final String FORWARDFLOW_MARK = "shape://ccarrow"; |
| | | public static final String BACKFLOW_MARK = "shape://rccarrow"; |
| | | public static final String UNFLOW_MARK = "shape://backslash"; |
| | | public static final String NONFLOW_MARK = "shape://slash"; |
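| | | // the shape:// names above are GeoTools/SLD well-known marks; they are stored per feature
| | | // so the styling layer can render connectivity flow direction without re-querying Oracle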
| | | |
| | | private static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1, DIR FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | private static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR ORDER BY TAG_SFSC"; |
| | | |
| | | private static String CREATE_OWNERTABLE = "CREATE TABLE s (tid smallint not null, oid int not null, owner smallint not null)"; |
| | | private static String CREATE_COLORTABLE = "CREATE TABLE s (tid smallint not null, oid int not null, dyncolor varchar(10) not null)"; |
| | | |
| | | public static final String FDYNCOLOR_SUFFIX = "_fdyncolor"; |
| | | public static final String FOWNER_SUFFIX = "_fowner"; |
| | | |
| | | protected static class Pair { |
| | | Object first; |
| | | Object second; |
| | | |
| | | public Pair(Object first, Object second) { |
| | | this.first = first; |
| | | this.second = second; |
| | | } |
| | | } |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | // protected OracleConvertEdbGeoJobContext oracleJobContext; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("PGHOST=" + _myHost); |
| | | logger.info("PGDATBASE=" + _myDatabase); |
| | | logger.info("PGPORT=" + _myPort); |
| | | logger.info("PGSCHEMA=" + _mySchema); |
| | | logger.info("PGUSER=" + _myUsername); |
| | | logger.info("PGPASS=" + _myPassword); |
| | | logger.info("USEWKB=" + _myUseWKB); |
| | | */ |
| | | |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
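| | | 
| | | // For reference, a quartz_jobs.xml job-data-map for this job would carry entries such as
| | | // (illustrative values): PGHOST=10.0.0.5, PGPORT=5432, PGDATBASE=gisdb, PGSCHEMA=public,
| | | // PGUSER=dmms, PGPASS=secret. Note the key "PGDATBASE" is spelled without the second "A".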
| | | |
| | | |
| | | |
| | | private List<String[]> sqlExecQuery(Connection connection,String strSQLIn,String[] params) throws SQLException { |
| | | |
| | | String strSQL=strSQLIn; |
| | | for(int i=0;i<params.length;i++) |
| | | { |
| | | if(params[i]==null)params[i]=""; |
| | | strSQL=strSQL.replace("%s"+String.valueOf(i+1),params[i]); |
| | | } |
| | | List<String[]> result = new ArrayList<String[]>();
| | | List<String> temp = new ArrayList<String>();
| | | String strTemp = "";
| | | Statement stmt = null;
| | | ResultSet rs = null;
| | | |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(strSQL);
| | | |
| | | ResultSetMetaData rsmd = rs.getMetaData(); |
| | | int NumOfCol = rsmd.getColumnCount(); |
| | | |
| | | while (rs.next()) { |
| | | for (int idx = 0; idx < NumOfCol; idx++) { |
| | | strTemp = rs.getString(idx + 1); |
| | | temp.add(strTemp); |
| | | } |
| | | result.add(temp.toArray(new String[0])); |
| | | temp.clear(); |
| | | } |
| | | return result; |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | private void sqlExec(Connection connection, String strSQLIn, String[] params) throws SQLException {
| | | String strSQL = strSQLIn;
| | | for (int i = 0; i < params.length; i++) {
| | | if (params[i] == null) params[i] = "";
| | | strSQL = strSQL.replace("%s" + String.valueOf(i + 1), params[i]);
| | | }
| | | Statement stmt = null;
| | | try {
| | | stmt = connection.createStatement();
| | | stmt.execute(strSQL);
| | | } finally {
| | | JDBCUtils.close(stmt);
| | | }
| | | }
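| | | 
| | | // Both helpers substitute positional placeholders %s1, %s2, ... with the given params
| | | // before executing, e.g. (illustrative call):
| | | // sqlExecQuery(conn, "select typhoon_id from ndd.typhoonproject where typhoon_name='%s1'",
| | | // new String[]{"MORAKOT"});
| | | // Values are spliced in as plain text, not bound parameters, so they must be trusted input.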
| | | |
| | | |
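| | | // doJob drives a small table-driven pipeline: each roadfee_proc row names a step, the
| | | // connection to run it on (datastore = "psql" or "orcl"), where its input rows come from
| | | // (src = "nodata" | "list1" | "lista") and where its output rows go (dest likewise).
| | | // Steps that write to list1/lista feed later steps, so multi-stage calculations are
| | | // expressed entirely as ordered rows in roadfee_proc.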
| | | private void doJob(Connection postsql,Connection orcl) throws SQLException |
| | | { |
| | | String strSQLGetTask="select proc_id,procname,datastore,name,step,src,dest,txtsql from roadfee_proc where rowstatus=1 and procname like 'STEP%' order by procname,step" ; |
| | | List<String[]> joblist=null; |
| | | Connection inConnection; |
| | | int idOfJob=0; |
| | | |
| | | List<String[]> nodata= new ArrayList<String[]>(); |
| | | List<String[]> lista= new ArrayList<String[]>(); |
| | | List<String[]> list1= new ArrayList<String[]>(); |
| | | List<String[]> listIn= new ArrayList<String[]>(); |
| | | List<String[]> temp;//= new ArrayList<String[]>(); |
| | | nodata.add(new String[]{""}); |
| | | // proc_id[0], procname[1], datastore[2], name[3], step[4], src[5], dest[6], txtsql[7]
| | | try{ |
| | | logger.info("getJoblist"); |
| | | joblist=sqlExecQuery(postsql, strSQLGetTask, new String[]{}); |
| | | |
| | | for ( idOfJob=0;idOfJob<joblist.size();idOfJob++) |
| | | { |
| | | logger.info("begin "+joblist.get(idOfJob)[1]+"-"+joblist.get(idOfJob)[3]+"("+joblist.get(idOfJob)[0]+")"); |
| | | if(joblist.get(idOfJob)[5].equals("nodata")) |
| | | { |
| | | listIn=nodata; |
| | | } |
| | | else if(joblist.get(idOfJob)[5].equals("list1")) |
| | | { |
| | | listIn=list1; |
| | | } |
| | | else if(joblist.get(idOfJob)[5].equals("lista")) |
| | | { |
| | | listIn=lista; |
| | | } |
| | | |
| | | if(joblist.get(idOfJob)[2].equals("psql")) |
| | | { |
| | | inConnection= postsql; |
| | | } |
| | | else if(joblist.get(idOfJob)[2].equals("orcl")) |
| | | { |
| | | inConnection= orcl; |
| | | } |
| | | else |
| | | return; // unknown datastore tag: neither "psql" nor "orcl", abort the run
| | | |
| | | if( joblist.get(idOfJob)[6].equals("list1")) list1.clear(); |
| | | if( joblist.get(idOfJob)[6].equals("lista")) lista.clear(); |
| | | //runsql |
| | | logger.info("process data count: "+String.valueOf(listIn.size())); |
| | | |
| | | for( int idxOfListIn=0;idxOfListIn< listIn.size();idxOfListIn++) |
| | | { |
| | | |
| | | if( joblist.get(idOfJob)[6].equals("nodata")) |
| | | { |
| | | sqlExec(inConnection, joblist.get(idOfJob)[7], listIn.get(idxOfListIn)); |
| | | //logger.info("finish "+joblist.get(idOfJob)[1]+"-"+joblist.get(idOfJob)[3]+"("+joblist.get(idOfJob)[0]+")") |
| | | |
| | | continue; |
| | | }else |
| | | { |
| | | temp=sqlExecQuery(inConnection, joblist.get(idOfJob)[7], listIn.get(idxOfListIn)); |
| | | |
| | | } |
| | | |
| | | |
| | | for(int j=0;j<temp.size();j++) |
| | | { |
| | | if( joblist.get(idOfJob)[6].equals("list1")) |
| | | { |
| | | list1.add(temp.get(j)); |
| | | } |
| | | else if( joblist.get(idOfJob)[6].equals("lista")) |
| | | { |
| | | lista.add(temp.get(j)); |
| | | } |
| | | } |
| | | } |
| | | |
| | | |
| | | } |
| | | |
| | | } catch (SQLException sqlex) {
| | | // joblist is null when the initial task query itself fails
| | | if (joblist != null && idOfJob < joblist.size())
| | | logger.warn("ERROR@ID:" + joblist.get(idOfJob)[0]);
| | | throw sqlex;
| | | } |
| | | |
| | | |
| | | } |
| | | |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | if (isIgnoreDBETL()) { |
| | | return; |
| | | } |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) {
| | | logger.warn("Cannot connect to the source Oracle database.");
| | | throw new JobExecutionException("Cannot connect to the source Oracle database.");
| | | }
| | | 
| | | if (getTargetDataStore() == null) {
| | | logger.warn("Cannot connect to the target PostgreSQL database.");
| | | throw new JobExecutionException("Cannot connect to the target PostgreSQL database.");
| | | }
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName, targetThemeTable; |
| | | try { |
| | | //logger.info("-- step:clearOutputDatabase --"); |
| | | |
| | | doJob( targetDataStore.getConnection(Transaction.AUTO_COMMIT),sourceDataStore.getConnection(Transaction.AUTO_COMMIT) ); |
| | | |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | private void exetcuteConvert(OracleConvertPostGISJobContext jobContext, |
| | | String querySchema, String targetSchemaName) throws SQLException { |
| | | int order = 0; |
| | | OrderedMap map = getBlobStorageList(jobContext.getOracleConnection(), |
| | | querySchema, "SD$SPACENODES", null); |
| | | |
| | | logger.info("begin convert job:[" + map.size() + "]:testmode=" + _testMode); |
| | | |
| | | int total = map.size(); //spacenodes count |
| | | int step = total / 100; |
| | | int current = 0; |
| | | |
| | | if (total == 0) { |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is zero."); |
| | | return; |
| | | } |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size()); |
| | | |
| | | //jobContext.startTransaction(); |
| | | jobContext.setCurrentSchema(querySchema); |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", 0); |
| | | for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext(); ) { |
| | | it.next(); |
| | | |
| | | Pair pair = (Pair) it.getValue(); |
| | | String tableSrc = (String) pair.first; |
| | | |
| | | logger.info("begin convert:[" + order + "]-" + tableSrc); |
| | | queryIgsetElement(jobContext, querySchema, tableSrc); |
| | | |
| | | order++; |
| | | |
| | | if (_testMode) { |
| | | if ((_testCount < 0) || (order >= _testCount)) |
| | | break; |
| | | } |
| | | |
| | | if ((order % COMMITSIZE) == 0) { |
| | | // OracleConnection connection = jobContext.getOracleConnection(); |
| | | // connection.commitTransaction(); |
| | | jobContext.commitTransaction(); |
| | | //jobContext.startTransaction(); |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } |
| | | |
| | | if (step != 0) { |
| | | int now = order % step; |
| | | if (now != current) { |
| | | current = now; |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", current); |
| | | |
| | | } |
| | | } else { |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", current); |
| | | current++; |
| | | } |
| | | } |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", 100); |
| | | |
| | | jobContext.commitTransaction(); |
| | | jobContext.resetFeatureContext(); |
| | | |
| | | if (isProfileMode()) { |
| | | |
| | | } |
| | | |
| | | logger.info("end convert job:[" + order + "]"); |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } |
| | | |
| | | protected OrderedMap getBlobStorageList(Connection connection, String schemaSrc, String tableSrc, |
| | | OrderedMap orderedMap) throws SQLException { |
| | | if (orderedMap == null) |
| | | orderedMap = new LinkedMap(99); |
| | | String fetchStmtFmt = "SELECT SNID, SPACETABLE FROM \"%s\".\"%s\""; |
| | | PrintfFormat spf = new PrintfFormat(fetchStmtFmt); |
| | | String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc}); |
| | | Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | ResultSet rs = null; |
| | | |
| | | stmt.setFetchSize(FETCHSIZE); |
| | | try { |
| | | rs = stmt.executeQuery(fetchStmt); |
| | | int size = rs.getMetaData().getColumnCount(); |
| | | |
| | | while (rs.next()) { |
| | | Object[] values = new Object[size]; |
| | | |
| | | for (int i = 0; i < size; i++) { |
| | | values[i] = rs.getObject(i + 1); |
| | | } |
| | | |
| | | Integer key = ((BigDecimal) values[0]).intValue(); |
| | | String name = (String) values[1]; |
| | | |
| | | Pair pair = (Pair) orderedMap.get(key); |
| | | if (pair == null) |
| | | orderedMap.put(key, new Pair(name, null)); |
| | | else |
| | | pair.first = name; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.error(e.toString(), e); |
| | | logger.error("stmt=" + fetchStmt); |
| | | throw e; |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | |
| | | return orderedMap; |
| | | } |
| | | |
| | | protected OrderedMap getRawFormatStorageList(OracleConnection connection, String schemaSrc, String tableSrc, |
| | | OrderedMap orderedMap) throws SQLException { |
| | | if (orderedMap == null) |
| | | orderedMap = new LinkedMap(99); |
| | | String fetchStmtFmt = "SELECT RNID, SPACETABLE FROM \"%s\".\"%s\""; |
| | | PrintfFormat spf = new PrintfFormat(fetchStmtFmt); |
| | | String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc}); |
| | | Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | |
| | | stmt.setFetchSize(FETCHSIZE); |
| | | ResultSet rs = stmt.executeQuery(fetchStmt); |
| | | try { |
| | | int size = rs.getMetaData().getColumnCount(); |
| | | while (rs.next()) { |
| | | Object[] values = new Object[size]; |
| | | |
| | | for (int i = 0; i < size; i++) { |
| | | values[i] = rs.getObject(i + 1); |
| | | } |
| | | |
| | | Integer key = ((BigDecimal) values[0]).intValue(); |
| | | String name = (String) values[1]; |
| | | |
| | | Pair pair = (Pair) orderedMap.get(key); |
| | | if (pair == null) |
| | | orderedMap.put(key, new Pair(null, name)); |
| | | else |
| | | pair.second = name; |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | return orderedMap; |
| | | } |
| | | |
| | | protected void queryIgsetElement(OracleConvertPostGISJobContext jobContext, |
| | | String srcschema, String srctable) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" ORDER BY ROWID"; |
| | | //String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" WHERE TAG_SFSC = 423 AND TAG_LUFID = 21612065 ORDER BY ROWID"; |
| | | PrintfFormat spf = new PrintfFormat(fetchSrcStmtFmt); |
| | | String fetchSrcStmt = spf.sprintf(new Object[]{srcschema, srctable}); |
| | | Statement stmtSrc = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | |
| | | stmtSrc.setFetchSize(FETCHSIZE); |
| | | ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt); |
| | | try {
| | | int igdsMetaType = rsSrc.getMetaData().getColumnType(1);
| | | while (rsSrc.next()) {
| | | if (isProfileMode()) {
| | | markQueryTime();
| | | }
| | |
| | | byte[] raw = null;
| | | if (igdsMetaType == Types.BLOB) {
| | | BLOB blob = (BLOB) rsSrc.getBlob(1);
| | |
| | | try {
| | | raw = getBytesFromBLOB(blob);
| | | } catch (BufferOverflowException e) {
| | | logger.warn("Wrong Element Structure-", e);
| | | } finally {
| | | // blob.close();
| | | }
| | | } else {
| | | raw = rsSrc.getBytes(1);
| | | }
| | |
| | | try {
| | | if (raw != null) {
| | | Element element = fetchBinaryElement(raw);
| | | if (isProfileMode()) {
| | | accumulateQueryTime();
| | | }
| | | jobContext.putFeatureCollection(element);
| | | } else {
| | | if (isProfileMode()) {
| | | accumulateQueryTime();
| | | }
| | | }
| | | } catch (Dgn7fileException e) {
| | | logger.warn("Dgn7Exception", e);
| | | }
| | | }
| | | } finally {
| | | // close the cursor and statement even if an element fails to parse,
| | | // mirroring the try/finally pattern used in queryRawElement below
| | | JDBCUtils.close(rsSrc);
| | | JDBCUtils.close(stmtSrc);
| | | }
| | | } |
| | | |
| | | protected void queryRawElement(OracleConvertPostGISJobContext jobContext, |
| | | String srcschema, String srctable) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | String fetchDestStmtFmt = "SELECT ELEMENT FROM \"%s\".\"%s\" ORDER BY ROWID"; |
| | | PrintfFormat spf = new PrintfFormat(fetchDestStmtFmt); |
| | | String fetchDestStmt = spf.sprintf(new Object[]{srcschema, srctable}); |
| | | Statement stmtDest = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | |
| | | stmtDest.setFetchSize(FETCHSIZE); |
| | | ResultSet rsDest = stmtDest.executeQuery(fetchDestStmt); |
| | | |
| | | try { |
| | | while (rsDest.next()) { |
| | | ARRAY rawsValue = ((OracleResultSet) rsDest).getARRAY(1); |
| | | long[] rawData = rawsValue.getLongArray(); |
| | | byte[] compressedValue;
| | |
| | | /*
| | | if (dataMode == TransferTask.DataMode.Normal)
| | | {
| | | compressedValue = BinConverter.unmarshalByteArray(rawData, true);
| | | } else
| | | {
| | | compressedValue = BinConverter.unmarshalCompactByteArray(rawData);
| | | }
| | | */
| | | compressedValue = BinConverter.unmarshalByteArray(rawData, true);
| | |
| | | byte[] rawDest = ByteArrayCompressor.decompressByteArray(compressedValue);
| | | |
| | | try { |
| | | Element element = fetchBinaryElement(rawDest); |
| | | jobContext.putFeatureCollection(element); |
| | | } catch (Dgn7fileException e) { |
| | | logger.warn("Dgn7Exception:" + e.getMessage(), e); |
| | | } |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rsDest); |
| | | JDBCUtils.close(stmtDest); |
| | | } |
| | | } |
| | | |
| | | // Binary to Element |
| | | private Element fetchBinaryElement(byte[] raws) throws Dgn7fileException { |
| | | ByteBuffer buffer = ByteBuffer.wrap(raws); |
| | | buffer.order(ByteOrder.LITTLE_ENDIAN); |
| | | short signature = buffer.getShort(); |
| | | |
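| | | // The element type lives in the upper byte of the first header word,
| | | // with the high bit (0x80) flagging a complex element - hence the
| | | // 8-bit shift and 0x7f mask below.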
| | | // byte type = (byte) (buffer.get() & 0x7f); |
| | | byte type = (byte) ((signature >>> 8) & 0x007f); |
| | | |
| | | // Bentley stores the content length in 2-byte words, while ByteBuffer
| | | // works in raw bytes: total record size = (length words * 2) plus the
| | | // 4-byte header (signature word + length word).
| | | int elementLength = (buffer.getShort() * 2) + 4; |
| | | ElementType recordType = ElementType.forID(type); |
| | | IElementHandler handler; |
| | | |
| | | handler = recordType.getElementHandler(); |
| | | |
| | | Element dgnElement = (Element) handler.read(buffer, signature, elementLength); |
| | | if (recordType.isComplexElement() && (elementLength < raws.length)) { |
| | | int offset = elementLength; |
| | | while (offset < (raws.length - 4)) { |
| | | buffer.position(offset); |
| | | signature = buffer.getShort(); |
| | | type = (byte) ((signature >>> 8) & 0x007f); |
| | | elementLength = (buffer.getShort() * 2) + 4; |
| | | if (raws.length < (offset + elementLength)) { |
| | | logger.debug("Length mismatch:" + offset + ":" + buffer.position() + ":" + buffer.limit());
| | | break; |
| | | } |
| | | recordType = ElementType.forID(type); |
| | | handler = recordType.getElementHandler(); |
| | | if (handler != null) { |
| | | Element subElement = (Element) handler.read(buffer, signature, elementLength); |
| | | ((ComplexElement) dgnElement).add(subElement); |
| | | offset += elementLength; |
| | | } else { |
| | | byte[] remain = new byte[buffer.remaining()]; |
| | | System.arraycopy(raws, offset, remain, 0, buffer.remaining()); |
| | | for (int i = 0; i < remain.length; i++) { |
| | | if (remain[i] != 0) { |
| | | logger.info("fetched element has unexpected trailing data. index=" + (offset + i) + ":value=" + remain[i]);
| | | } |
| | | } |
| | | break; |
| | | } |
| | | } |
| | | } |
| | | |
| | | return dgnElement; |
| | | } |
| | | |
| | | /**
| | | * Performs the conversion of the index design (DGN) files.
| | | *
| | | * @param context the job execution context
| | | * @throws org.quartz.JobExecutionException
| | | * exception
| | | */
| | | private void convertIndexDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException { |
| | | File indexDir = new File(getDataPath(), INDEXPATHNAME); |
| | | if (!indexDir.exists()) { |
| | | logger.info("index dir=" + indexDir + " does not exist.");
| | | return; |
| | | } |
| | | |
| | | if (!indexDir.isDirectory()) {
| | | logger.info("index dir=" + indexDir + " is not a directory.");
| | | return;
| | | }
| | | |
| | | List<File> dgnFiles = FileUtils.recurseDir(indexDir, new FileFilter() { |
| | | public boolean accept(File pathname) { |
| | | return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn"); |
| | | } |
| | | }); |
| | | |
| | | for (File dgnFile : dgnFiles) { |
| | | if (dgnFile.isDirectory()) continue; |
| | | IndexDgnConvertPostGISJobContext convertContext = |
| | | new IndexDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start index dgnfile-" + dgnFile.toString() + " ---"); |
| | | FileInputStream fs = null; |
| | | FileChannel fc = null; |
| | | Dgn7fileReader reader = null; |
| | | try { |
| | | convertContext.clearOutputDatabase(); |
| | | convertContext.setExecutionContext(context); |
| | | String[] dgnPaths = StringUtils.splitToArray(dgnFile.toString(), File.separator);
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]); |
| | | convertContext.startTransaction(); |
| | | |
| | | fs = new FileInputStream(dgnFile); |
| | | fc = fs.getChannel(); |
| | | reader = new Dgn7fileReader(fc, new Lock()); |
| | | convertContext.setReader(reader); |
| | | |
| | | scanIndexDgnElement(convertContext); |
| | | |
| | | convertContext.commitTransaction(); |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } catch (FileNotFoundException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (Dgn7fileException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (SchemaException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | if (reader != null) { |
| | | try { |
| | | reader.close(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | if (fs != null) { |
| | | try { |
| | | fs.close(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | logger.warn("Profile-Current convertContext Process Cost-" + |
| | | ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " + |
| | | (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current convertContext Update Cost-" + |
| | | ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " + |
| | | (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
| | | protected void scanIndexDgnElement(IndexDgnConvertPostGISJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | |
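| | | // Complex elements arrive as a header record followed by component
| | | // records: keep the header in lastComplex, attach components to it, and
| | | // flush it when the next stand-alone element (or end of file) is reached.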
| | | while (reader.hasNext()) { |
| | | if (isProfileMode()) markProcessTime(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | | |
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) { |
| | | if (lastComplex != null) { |
| | | processIndexElement(lastComplex, convertContext); |
| | | lastComplex = null; |
| | | } |
| | | |
| | | processIndexElement(element, convertContext); |
| | | } else if (element.isComponentElement()) { |
| | | if (lastComplex != null) { |
| | | ((ComplexElement) lastComplex).add(element); |
| | | } |
| | | } else if (type.isComplexElement()) { |
| | | if (lastComplex != null) { |
| | | processIndexElement(lastComplex, convertContext); |
| | | } |
| | | lastComplex = element; |
| | | } |
| | | } |
| | | count++; |
| | | } |
| | | |
| | | if (lastComplex != null) { |
| | | processIndexElement(lastComplex, convertContext); |
| | | } |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processIndexElement(Element element, IndexDgnConvertPostGISJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException { |
| | | //if (useTpclidText) { |
| | | // if (element instanceof TextElement) { |
| | | // convertContext.putFeatureCollection(element); |
| | | // } |
| | | //} else { |
| | | // if (element instanceof ShapeElement) { |
| | | convertContext.putFeatureCollection(element); |
| | | // } |
| | | //} |
| | | } |
| | | |
| | | |
| | | /** |
| | | * Performs the conversion of the other general design (DGN) files.
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | | private void convertOtherDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException { |
| | | File otherDir = new File(getDataPath(), OTHERPATHNAME); |
| | | if (!otherDir.exists()) { |
| | | logger.info("other dir=" + otherDir + " does not exist.");
| | | return; |
| | | } |
| | | |
| | | if (!otherDir.isDirectory()) {
| | | logger.info("other dir=" + otherDir + " is not a directory.");
| | | return;
| | | }
| | | |
| | | List<File> dgnFiles = FileUtils.recurseDir(otherDir, new FileFilter() { |
| | | public boolean accept(File pathname) { |
| | | return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn"); |
| | | } |
| | | }); |
| | | |
| | | for (File dgnFile : dgnFiles) { |
| | | if (dgnFile.isDirectory()) continue; |
| | | |
| | | GeneralDgnConvertPostGISJobContext convertContext = |
| | | new GeneralDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start other dgnfile-" + dgnFile.toString() + " ---"); |
| | | FileInputStream fs = null; |
| | | FileChannel fc; |
| | | Dgn7fileReader reader = null; |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | | String[] dgnPaths = StringUtils.splitToArray(dgnFile.toString(), File.separator);
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]); |
| | | convertContext.startTransaction(); |
| | | |
| | | fs = new FileInputStream(dgnFile); |
| | | fc = fs.getChannel(); |
| | | reader = new Dgn7fileReader(fc, new Lock()); |
| | | convertContext.setReader(reader); |
| | | |
| | | scanOtherDgnElement(convertContext); |
| | | |
| | | convertContext.commitTransaction(); |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } catch (FileNotFoundException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (Dgn7fileException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (SchemaException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | if (reader != null) { |
| | | try { |
| | | reader.close(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | if (fs != null) { |
| | | try { |
| | | fs.close(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | logger.warn("Profile-Current convertContext Process Cost-" + |
| | | ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " + |
| | | (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current convertContext Update Cost-" + |
| | | ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " + |
| | | (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
| | | public void scanOtherDgnElement(GeneralDgnConvertPostGISJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | | int count = 0; |
| | | Element lastComplex = null; |
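| | | // Same complex-element accumulation pattern as scanIndexDgnElement().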
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | | |
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) { |
| | | if (lastComplex != null) { |
| | | processOtherElement(lastComplex, convertContext); |
| | | lastComplex = null; |
| | | } |
| | | |
| | | processOtherElement(element, convertContext); |
| | | } else if (element.isComponentElement()) { |
| | | if (lastComplex != null) { |
| | | ((ComplexElement) lastComplex).add(element); |
| | | } |
| | | } else if (type.isComplexElement()) { |
| | | if (lastComplex != null) { |
| | | processOtherElement(lastComplex, convertContext); |
| | | } |
| | | lastComplex = element; |
| | | } |
| | | } |
| | | count++; |
| | | } |
| | | |
| | | if (lastComplex != null) { |
| | | processOtherElement(lastComplex, convertContext); |
| | | } |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processOtherElement(Element element, GeneralDgnConvertPostGISJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException { |
| | | convertContext.putFeatureCollection(element); |
| | | } |
| | | |
| | | private void clearOutputDatabase() { |
| | | /* |
| | | File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | | } |
| | | outDataPath = new File(getDataPath(), IndexDgnConvertShpJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | | } |
| | | outDataPath = new File(getDataPath(), GeneralDgnConvertShpJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | | } |
| | | */ |
| | | } |
| | | |
| | | private void deleteFilesInPath(File outDataPath) { |
| | | deleteFilesInPath(outDataPath, true); |
| | | } |
| | | |
| | | private void deleteFilesInPath(File outDataPath, boolean removeSubDir) { |
| | | if (!outDataPath.isDirectory()) { |
| | | return; |
| | | } |
| | | File[] files = outDataPath.listFiles(); |
| | | for (File file : files) { |
| | | if (file.isFile()) { |
| | | if (!file.delete()) { |
| | | logger.info("Cannot delete file-" + file.toString()); |
| | | } |
| | | } else if (file.isDirectory()) { |
| | | deleteFilesInPath(file, removeSubDir); |
| | | if (removeSubDir) { |
| | | if (!file.delete()) {
| | | logger.info("Cannot delete dir-" + file.toString()); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
| | | private void convertFeatureDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException { |
| | | File elminDir = new File(getDataPath(), "elmin"); |
| | | if (!elminDir.exists()) { |
| | | logger.info("elmin dir=" + elminDir + " does not exist.");
| | | return; |
| | | } |
| | | |
| | | if (!elminDir.isDirectory()) {
| | | logger.info("elmin dir=" + elminDir + " is not a directory.");
| | | return;
| | | }
| | | |
| | | File[] dgnFiles = elminDir.listFiles(new FilenameFilter() { |
| | | public boolean accept(File dir, String name) { |
| | | return name.toLowerCase().endsWith(".dgn"); |
| | | } |
| | | }); |
| | | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertPostGISJobContext convertContext = |
| | | new FeatureDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | | String[] dgnPaths = StringUtils.splitToArray(dgnFile.toString(), File.separator);
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]); |
| | | convertContext.startTransaction(); |
| | | |
| | | FileInputStream fs = new FileInputStream(dgnFile); |
| | | FileChannel fc = fs.getChannel(); |
| | | Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock()); |
| | | convertContext.setReader(reader); |
| | | |
| | | scanFeatureDgnElement(convertContext); |
| | | |
| | | convertContext.commitTransaction(); |
| | | convertContext.closeFeatureWriter(); |
| | | System.gc(); |
| | | System.runFinalization(); |
| | | } catch (FileNotFoundException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (Dgn7fileException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } catch (SchemaException e) { |
| | | convertContext.rollbackTransaction(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | } |
| | | } |
| | | } |
| | | |
| | | public void scanFeatureDgnElement(FeatureDgnConvertPostGISJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | | int count = 0; |
| | | Element lastComplex = null; |
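| | | // Same complex-element accumulation pattern as scanIndexDgnElement().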
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | | |
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) { |
| | | if (lastComplex != null) { |
| | | processFeatureElement(lastComplex, convertContext); |
| | | lastComplex = null; |
| | | } |
| | | |
| | | processFeatureElement(element, convertContext); |
| | | } else if (element.isComponentElement()) { |
| | | if (lastComplex != null) { |
| | | ((ComplexElement) lastComplex).add(element); |
| | | } |
| | | } else if (type.isComplexElement()) { |
| | | if (lastComplex != null) { |
| | | processFeatureElement(lastComplex, convertContext); |
| | | } |
| | | lastComplex = element; |
| | | } |
| | | } |
| | | count++; |
| | | } |
| | | |
| | | if (lastComplex != null) { |
| | | processFeatureElement(lastComplex, convertContext); |
| | | } |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processFeatureElement(Element element, FeatureDgnConvertPostGISJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException { |
| | | convertContext.putFeatureCollection(element); |
| | | } |
| | | |
| | | private void createDummyFeatureFile(JobExecutionContext context) throws JobExecutionException { |
| | | /* |
| | | DummyFeatureConvertShpJobContext convertContext = new DummyFeatureConvertShpJobContext(getDataPath(), _filterPath); |
| | | try { |
| | | convertContext.startTransaction(); |
| | | convertContext.commitTransaction(); |
| | | convertContext.closeFeatureWriter(); |
| | | } catch (IOException e) |
| | | { |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | */ |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
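| | | // pgProperties is expected to carry the usual GeoTools
| | | // PostgisNGDataStoreFactory parameters (illustrative values only):
| | | // dbtype=postgis, host=..., port=5432, database=..., schema=...,
| | | // user=..., passwd=...; the max/min connection keys are defaulted below.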
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | /* |
| | | if (!isDriverFound()) |
| | | { |
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); |
| | | } |
| | | */ |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.WKBENABLED.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process the PostGIS datastore properties");
| | | throw new JobExecutionException("cannot process the PostGIS datastore properties");
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
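| | | /**
| | | * Chooses the target version schema: the entry after the one currently
| | | * marked VSSTATUS_USING in the version table (wrapping around to the
| | | * first), then marks the chosen schema VSSTATUS_COVERT.
| | | */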
| | | private String determineTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XGVERSIONTABLE_NAME |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) |
| | | createXGeosVersionTable(connection, _pgSchema); |
| | | rs.close(); |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current == -1) { |
| | | Object[] values = tmpSchemas.get(0); |
| | | targetSchema = (String) values[0]; |
| | | } else if (current < (tmpSchemas.size() - 1)) { |
| | | Object[] values = tmpSchemas.get(current + 1); |
| | | targetSchema = (String) values[0]; |
| | | } else { |
| | | Object[] values = tmpSchemas.get(0); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | |
| | | sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vsstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT); |
| | | sbSQL.append(" WHERE vsschema = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
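| | | // Same rotation scheme as determineTargetSchemaName(), applied to the
| | | // power-theme version table names.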
| | | private String determineTargetThemeTableName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetTable = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XPTVERSIONTABLE_NAME |
| | | needCreate = false; |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) |
| | | createXPWThemeVersionTable(connection, _pgSchema); |
| | | rs.close(); |
| | | |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vptname, vptstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vptid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpTablenames = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vptname"); |
| | | values[1] = rs.getShort("vptstatus"); |
| | | tmpTablenames.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current == -1) { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } else if (current < (tmpTablenames.size() - 1)) { |
| | | Object[] values = tmpTablenames.get(current + 1); |
| | | targetTable = (String) values[0]; |
| | | } else { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } |
| | | |
| | | sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT); |
| | | sbSQL.append(" WHERE vptname = '"); |
| | | sbSQL.append(targetTable).append("'"); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetTable + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetTable; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | if (schemaName == null) |
| | | return "\"" + tableName + "\""; |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | private void createXGeosVersionTable(Connection connection, String pgSchema) throws SQLException { |
| | | Statement stmt = null; |
| | | StringBuilder sql = new StringBuilder("CREATE TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" ( vsid serial PRIMARY KEY, "); |
| | | sql.append(" vsschema character varying(64) NOT NULL, "); |
| | | sql.append(" vsstatus smallint NOT NULL, "); |
| | | sql.append(" vstimestamp timestamp with time zone ) "); |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" TO public"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | for (String schemaName : DataReposVersionManager.DEFAULTXGVERSIONSCHEMA_NAMES) { |
| | | sql = new StringBuilder("INSERT INTO "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" (vsschema, vsstatus) VALUES ('"); |
| | | sql.append(schemaName).append("', "); |
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | createIfNotExistNewSchema(connection, schemaName); |
| | | } |
| | | |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void createXPWThemeVersionTable(Connection connection, String pgSchema) throws SQLException { |
| | | Statement stmt = null; |
| | | StringBuilder sql = new StringBuilder("CREATE TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" ( vptid serial PRIMARY KEY, "); |
| | | sql.append(" vptname character varying(64) NOT NULL, "); |
| | | sql.append(" vptstatus smallint NOT NULL, "); |
| | | sql.append(" vpttimestamp timestamp with time zone ) "); |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" TO public"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | for (String schemaName : DataReposVersionManager.DEFAULTXPTVERSIONTABLE_NAMES) { |
| | | sql = new StringBuilder("INSERT INTO "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" (vptname, vptstatus) VALUES ('"); |
| | | sql.append(schemaName).append("', "); |
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | } |
| | | |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void updateRepoStatusToReady(String targetSchema) { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vsstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY); |
| | | sbSQL.append(" , vstimestamp = CURRENT_TIMESTAMP WHERE vsschema = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void updatePWThemeStatusToReady(String targetSchema) { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY); |
| | | sbSQL.append(" , vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createIfNotExistNewSchema(Connection connection, String s) throws SQLException { |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | /* |
| | | rs = connection.getMetaData().getSchemas(null, s); |
| | | if (rs.next()) return; |
| | | rs.close(); |
| | | rs = null; |
| | | */ |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("CREATE SCHEMA "); |
| | | sbSQL.append(s).append(' '); |
| | | sbSQL.append("AUTHORIZATION ").append(_pgUsername); |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | |
| | | sbSQL = new StringBuilder("GRANT ALL ON SCHEMA "); |
| | | sbSQL.append(s).append(' '); |
| | | sbSQL.append("TO public"); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | } catch (SQLException e) { |
| | | logger.info("create schema:" + s + " has exception."); |
| | | logger.info(e.getMessage(), e); |
| | | } finally { |
| | | if (rs != null) rs.close(); |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void convertDynamicColorTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, dyncolor) VALUES (?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
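| | | // Flush the JDBC batch every MAX_BATCHSIZE rows; the executeBatch()
| | | // after the loop writes out the remainder.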
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setString(3, colorText); |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void convertPowerOwnerTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, fowner, flow) VALUES (?, ?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setShort(3, (short) ownerId); |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://ccarrow"); |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://rccarrow"); |
| | | } else { |
| | | pstmt.setString(4, "shape://backslash"); |
| | | } |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createOrClearTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(_pgSchema, tableName) + " CASCADE");
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TABLE " + encodeSchemaTableName(_pgSchema, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndex(Connection connection, String tableName) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private boolean convertDynamicColorThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
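| | | // CSV rows are buffered in sb and pushed back into the reader with
| | | // unread(), then streamed to PostgreSQL through the COPY API; the
| | | // 10240-char pushback buffer must stay larger than any flushed chunk.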
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(colorText).append("\n"); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | private boolean convertPowerOwnerThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
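| | | // Same buffered COPY technique as convertDynamicColorThemeWithCopyAPI above.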
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | String flowMark = null; |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | flowMark = FORWARDFLOW_MARK; |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | flowMark = BACKFLOW_MARK; |
| | | } else if (ConnectivityDirectionEnum.Nondeterminate == dir) { |
| | | flowMark = NONFLOW_MARK; |
| | | } else { |
| | | flowMark = UNFLOW_MARK; |
| | | } |
| | | |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(ownerId).append(','); |
| | | sb.append(flowMark).append('\n'); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | private void createOrClearTempTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, null, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(null, tableName) + " CASCADE");
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TEMP TABLE " + encodeSchemaTableName(null, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndexAndDropTemp(Connection connection, String tableName, String tempTable) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | stmt.execute("CREATE TABLE " + encodeSchemaTableName(_pgSchema, tableName) + " AS SELECT * FROM " + tempTable);
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | stmt.execute("DROP TABLE " + tempTable); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | } |
| | |
| | | "gisrepo1", "gisrepo2" |
| | | }; |
| | | |
| | | public static final String XPTVERSIONTABLE_NAME = "xpwthemes_vsversion"; |
| | | public static final String[] DEFAULTXPTVERSIONTABLE_NAMES = new String[]{ |
| | | "xpwtheme1", "xpwtheme2" |
| | | }; |
| | | |
| | | public static final short VSSTATUS_AVAILABLE = 0x0000; |
| | | public static final short VSSTATUS_USING = 0x0100; |
| | | public static final short VSSTATUS_CONFIG = 0x0020; |
| | |
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import java.util.TimeZone; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.eofms.jobs.context.AbstractDgnFileJobContext; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public class DummyFeatureConvertJobContext extends AbstractDgnFileJobContext { |
| | | static final Log logger = LogFactory.getLog(DummyFeatureConvertJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final String SHPOUTPATH = "shpout"; |
| | | |
| | | private String dataOut = null; |
| | | |
| | | private HashMap<String, ArrayList<SimpleFeature>> featuresContext = new HashMap<String, ArrayList<SimpleFeature>>();
| | | private HashMap<String, FeatureWriter> featuresWriterContext = new HashMap<String, FeatureWriter>(); |
| | | |
| | | private PessimisticMapWrapper txFeaturesContext; |
| | |
| | | private String _filterConfig; |
| | | |
| | | public DummyFeatureConvertJobContext(String dataPath, String filterConfig, boolean profileMode,
| | | boolean useTransform) {
| | | super(dataPath, profileMode, useTransform);
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | return; |
| | | } |
| | | |
| | | // Check whether the element satisfies the dispatch conditions
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed());
| | | if (feature == null) {
| | | FrammeAttributeData linkage =
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element);
| | | logger.warn("Unknown Element:" + element.getElementType().toString() +
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" +
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|UFID=" + linkage.getUfid() +
| | | "|COMPID=" + linkage.getComponentID())));
| | | |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | |
| | | try {
| | | while (it.hasNext()) {
| | | SimpleFeatureType featureType = (SimpleFeatureType) it.next();
| | | File sfile = new File(getDataOutPath() + File.separator + featureType.getTypeName());
| | | logger.debug("Begin Save shapefile:" + sfile.toURI());
| | | 
| | | /*
| | | ShapefileDataStore shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL());
| | | shapefileDataStore.setMemoryMapped(true);
| | | shapefileDataStore.setCharset(Charset.forName("UTF-8"));
| | | shapefileDataStore.setTimeZone(TimeZone.getDefault());
| | | shapefileDataStore.setIndexed(true);
| | | shapefileDataStore.setIndexCreationEnabled(true);
| | | */
| | | FeatureWriter writer;
| | | if (!sfile.exists()) {
| | | ShapefileDataStore shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(),
| | | null, true, true, IndexedShapefileDataStore.TREE_QIX, Charset.forName("UTF-8"));
| | | shapefileDataStore.createSchema(featureType);
| | | writer = shapefileDataStore.getFeatureWriter(featureType.getTypeName(),
| | | Transaction.AUTO_COMMIT);
| | | } else {
| | | ShapefileDataStore shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(),
| | | null, true, true, IndexedShapefileDataStore.TREE_QIX, Charset.forName("UTF-8"));
| | | writer = shapefileDataStore.getFeatureWriterAppend(featureType.getTypeName(),
| | | Transaction.AUTO_COMMIT);
| | | }
| | | featuresWriterContext.put(featureType.getTypeName(), writer);
| | | 
| | | ArrayList<SimpleFeature> features = featuresContext.get(featureType.getTypeName());
| | | Iterator itFeature = features.iterator();
| | | while (itFeature.hasNext()) {
| | | SimpleFeature feature = (SimpleFeature) itFeature.next();
| | | ((SimpleFeature) writer.next()).setAttributes(feature.getAttributes());
| | | }
| | | //writer.close();
| | | logger.debug("End Save shapefile:" + sfile.toURI());
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import com.vividsolutions.jts.geom.LineString; |
| | | import com.vividsolutions.jts.geom.LinearRing; |
| | | import com.vividsolutions.jts.geom.MultiLineString; |
| | | import com.vividsolutions.jts.geom.MultiPoint; |
| | | import com.vividsolutions.jts.geom.MultiPolygon; |
| | | import com.vividsolutions.jts.geom.Point; |
| | | import com.vividsolutions.jts.geom.Polygon; |
| | | import com.ximple.eofms.geoserver.config.XGeosDataConfig; |
| | | import com.ximple.eofms.geoserver.config.XGeosDataConfigMapping; |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.util.PrintfFormat; |
| | | import com.ximple.eofms.util.XGeosConfigDigesterUtils; |
| | | import org.apache.commons.collections.MultiMap; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.opengis.feature.type.FeatureType; |
| | | import org.opengis.feature.type.GeometryDescriptor; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import java.io.IOException; |
| | | import java.net.URL; |
| | | import java.sql.Connection; |
| | | import java.sql.DatabaseMetaData; |
| | | import java.sql.PreparedStatement; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.sql.Timestamp; |
| | | import java.util.ArrayList; |
| | | import java.util.Arrays; |
| | | import java.util.HashMap; |
| | | import java.util.List; |
| | | import java.util.Set; |
| | | import java.util.TreeSet; |
| | | |
| | | public class GeoserverIntegrateConfigJob extends OracleConvertDgn2PostGISJob { |
| | | final static Log logger = LogFactory.getLog(GeoserverIntegrateConfigJob.class); |
| | | |
| | | private static final String SKIPCONFIGJOB = "SKIPCONFIGJOB"; |
| | | private static final String MASTERMODE = "MASTERMODE"; |
| | | private static final String EPSG = "EPSG:"; |
| | | private static final String DEFAULT_NAMESPACE = "xtpc"; |
| | | private static final String XGEOSDATACONFIG_PATH = "xgeosdataconfig.xml"; |
| | | private static final String GEOSERVER_BASEURL = "GEOSERVER_URL"; |
| | | private static final String GEOSERVER_USER = "GEOSERVER_USER"; |
| | | private static final String GEOSERVER_PASS = "GEOSERVER_PASS"; |
| | | |
| | | // private static final int MAGIC_BLOCKSIZE = (64 * 1024 * 1024) - (32 * 1024); |
| | | |
| | | private static final String QUERY_VIEWDEFSQL = "SELECT table_name, view_definition FROM information_schema.views " + |
| | | "WHERE table_schema = ? AND table_name LIKE "; |
| | | |
| | | private static final String CREATE_VIEWSQL = "CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\".\"%s\""; |
| | | private static final String EXTRAWHERE_VIEWSQL = " WHERE \"%s\".level = %s AND \"%s\".symweight = %s"; |
| | | |
| | | private static final String ALTER_VIEWSQL = "ALTER TABLE \"%s\" OWNER TO "; |
| | | // private static final String GRANT_VIEWSQL = "GRANT SELECT ON TABLE \"%s\" TO public"; |
| | | private static final int SRSID_TWD97_ZONE119 = 3825; |
| | | private static final int SRSID_TWD97_ZONE121 = 3826; |
| | | public static final String DEFAULT_STORENAME = "pgDMMS"; |
| | | public static final String DEFAULT_GEODMMS_NAMESPACE = "http://tpc.ximple.com.tw/geodmms"; |
| | | |
| | | private static XGeosDataConfigMapping xgeosDataConfigMapping = null; |
| | | |
| | | protected String _geoServerURL; |
| | | protected String _geoServerUser; |
| | | protected String _geoServerPass; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform) { |
| | | return super.prepareJobContext(targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _geoServerURL = dataMap.getString(GEOSERVER_BASEURL); |
| | | _geoServerUser = dataMap.getString(GEOSERVER_USER); |
| | | _geoServerPass = dataMap.getString(GEOSERVER_PASS); |
| | | |
| | | if (_geoServerURL == null) { |
| | | logger.warn("GEOSERVER_URL is null"); |
| | | throw new JobExecutionException("Unknown GEOSERVER_URL."); |
| | | } |
| | | if (_geoServerUser == null) { |
| | | logger.warn("GEOSERVER_USER is null"); |
| | | throw new JobExecutionException("Unknown GEOSERVER_USER."); |
| | | } |
| | | if (_geoServerPass == null) { |
| | | logger.warn("GEOSERVER_PASS is null"); |
| | | throw new JobExecutionException("Unknown GEOSERVER_PASS."); |
| | | } |
| | | } |
| | | |
| | | protected XGeosDataConfigMapping getConfigMapping() { |
| | | if (xgeosDataConfigMapping == null) { |
| | | Digester digester = XGeosConfigDigesterUtils.getXGeosConfigDigester(); |
| | | final URL configDataURL = XGeosDataConfigMapping.class.getResource(XGEOSDATACONFIG_PATH); |
| | | try { |
| | | xgeosDataConfigMapping = (XGeosDataConfigMapping) digester.parse(configDataURL); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SAXException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | |
| | | } |
| | | return xgeosDataConfigMapping; |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException { |
| | | |
| | | super.execute(jobExecutionContext); |
| | | |
| | | createTargetDataStore(); |
| | | |
| | | if (getTargetDataStore() == null) { |
| | | logger.warn("Cannot connect source postgreSQL database."); |
| | | throw new JobExecutionException("Cannot connect source postgreSQL database."); |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName; |
| | | |
| | | try { |
| | | logger.info("-- step:resetPostgisViewMapping --"); |
| | | long tStep = System.currentTimeMillis(); |
| | | resetPostgisViewMapping(jobExecutionContext); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-resetPostgisViewMapping", tStep, tStepEnd); |
| | | } |
| | | logger.info("-- step:resetGeoServerConfig --"); |
| | | tStep = System.currentTimeMillis(); |
| | | // resetGeoServerConfig(jobExecutionContext); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-resetGeoServerConfig", tStep, tStepEnd); |
| | | } |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | } |
| | | |
| | | /**
| | | * Rebuilds all database views in PostGIS for the schema currently marked ready.
| | | *
| | | * @param executionContext the batch job execution context
| | | */
| | | private void resetPostgisViewMapping(JobExecutionContext executionContext) { |
| | | assert executionContext != null; |
| | | Connection connection = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | String ownerName = _pgUsername; |
| | | String currentTargetSchema = retrieveCurrentSchemaName(connection, |
| | | DataReposVersionManager.VSSTATUS_READY); |
| | | if (currentTargetSchema == null) { |
| | | logger.info("Cannot found schema that status is VSSTATUS_READY[" + |
| | | DataReposVersionManager.VSSTATUS_READY + "]"); |
| | | return; |
| | | } |
| | | |
| | | ArrayList<String> realTableNames = new ArrayList<String>(); |
| | | retrieveAllRealTableName(connection, currentTargetSchema, realTableNames); |
| | | |
| | | HashMap<String, String> viewDefs = retrieveViewDef(connection, "public", "fsc%"); |
| | | HashMap<String, String> tempViewDefs = retrieveViewDef(connection, "public", "indexshape%"); |
| | | viewDefs.putAll(tempViewDefs); |
| | | tempViewDefs = retrieveViewDef(connection, "public", "lndtpc%");
| | | viewDefs.putAll(tempViewDefs); |
| | | |
| | | for (String tableName : realTableNames) { |
| | | resetPostgisDataView(connection, viewDefs, ownerName, currentTargetSchema, tableName); |
| | | } |
| | | |
| | | resetExtraPostgisDataView(connection, ownerName, currentTargetSchema, realTableNames); |
| | | |
| | | updateCurrentRepositoryStatus(connection, currentTargetSchema, |
| | | DataReposVersionManager.VSSTATUS_LINKVIEW); |
| | | |
| | | String currentTargetThemesName = retrieveCurrentThemeName(connection, |
| | | DataReposVersionManager.VSSTATUS_READY); |
| | | if (currentTargetThemesName == null) { |
| | | logger.info("Cannot found themes that status is VSSTATUS_READY[" + |
| | | DataReposVersionManager.VSSTATUS_READY + "]"); |
| | | return; |
| | | } |
| | | |
| | | resetThemesBaseView(connection, ownerName, currentTargetThemesName); |
| | | |
| | | XGeosDataConfigMapping configMapping = getConfigMapping(); |
| | | String[] allView = retrieveTargetStoreAllViewNames(connection); |
| | | TreeSet<String> allViewNames = new TreeSet<String>(); |
| | | if (allView != null) { |
| | | allViewNames.addAll(Arrays.asList(allView)); |
| | | } |
| | | List values = (List) configMapping.getMapping().get("pgOMS"); |
| | | for (Object value : values) { |
| | | XGeosDataConfig xgeosConfig = (XGeosDataConfig) value; |
| | | short tid = xgeosConfig.getFSC(); |
| | | short cid = xgeosConfig.getCOMP(); |
| | | StringBuilder sbTable = new StringBuilder("fsc-"); |
| | | sbTable.append(tid).append("-c-"); |
| | | sbTable.append(cid); |
| | | |
| | | int index = realTableNames.indexOf(sbTable.toString()); |
| | | if (index == -1) { |
| | | logger.debug("pgOMS LayerView Cannot found-" + xgeosConfig.toString()); |
| | | continue; |
| | | } |
| | | |
| | | StringBuilder sbView = new StringBuilder("fsc-"); |
| | | sbView.append(tid).append("-c"); |
| | | sbView.append(cid).append("-l"); |
| | | sbView.append(xgeosConfig.getLEV()).append("-w"); |
| | | sbView.append(xgeosConfig.getWEIGHT()); |
| | | String viewName = sbView.toString(); |
| | | if (allViewNames.contains(viewName)) { |
| | | resetThemesPostgisDataView(connection, ownerName, null, viewName); |
| | | if (tid == 106) { |
| | | resetFlowThemesPostgisDataView(connection, ownerName, null, viewName); |
| | | } |
| | | } |
| | | } |
| | | |
| | | updateCurrentThemeStatus(connection, currentTargetThemesName, |
| | | DataReposVersionManager.VSSTATUS_LINKVIEW); |
| | | |
| | | // String[] featureNames = dataStore.getTypeNames(); |
| | | // logger.info("featureNames[] size = " + featureNames.length); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | if (connection != null) |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | // if (dataStore != null) dataStore.dispose(); |
| | | } |
| | | } |
| | | |
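| | | /**
| | | * Collects the names of the physical feature tables (fsc%, index% and lndtpc%)
| | | * in the target schema via JDBC metadata.
| | | */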
| | | private void retrieveAllRealTableName(Connection connection, String targetSchema, |
| | | ArrayList<String> realTableNames) throws SQLException { |
| | | ResultSet rsMeta = null; |
| | | try { |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "fsc%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "index%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "lndtpc%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | } finally { |
| | | if (rsMeta != null) rsMeta.close(); |
| | | } |
| | | } |
| | | |
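| | | /**
| | | * Creates or refreshes the public view in front of a real table. The view name is
| | | * derived from the table name ("fsc-N-c-M" becomes "fsc-N-cM"; underscore-separated
| | | * names are concatenated), and the view is only recreated when its current
| | | * definition points at a different schema.
| | | */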
| | | private void resetPostgisDataView(Connection connection, HashMap<String, String> viewDefs, |
| | | String ownerName, String schemaName, String tableName) throws SQLException { |
| | | String[] splits = tableName.split("-"); |
| | | if (splits.length > 3) { |
| | | // feature table |
| | | |
| | | StringBuilder viewBuilder = new StringBuilder(); |
| | | viewBuilder.append(splits[0]); |
| | | viewBuilder.append('-'); |
| | | viewBuilder.append(splits[1]); |
| | | viewBuilder.append('-'); |
| | | viewBuilder.append(splits[2]); |
| | | viewBuilder.append(splits[3]); |
| | | String viewName = viewBuilder.toString(); |
| | | if (viewDefs.containsKey(viewName)) { |
| | | String viewDef = viewDefs.get(viewName); |
| | | int pos = viewDef.indexOf("FROM"); |
| | | String subView = viewDef.substring(pos + 4); |
| | | // String[] viewSources = subView.split("\\."); |
| | | String[] viewSources = subView.split("(\\.\"|\")"); |
| | | if (!viewSources[0].equalsIgnoreCase(schemaName)) { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } else { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | |
| | | } else { |
| | | |
| | | splits = tableName.split("_"); |
| | | if (splits.length > 0) { |
| | | StringBuilder viewBuilder = new StringBuilder(); |
| | | viewBuilder.append(splits[0]); |
| | | if (splits.length > 1) viewBuilder.append(splits[1]); |
| | | if (splits.length > 2) viewBuilder.append(splits[2]); |
| | | String viewName = viewBuilder.toString(); |
| | | if (viewDefs.containsKey(viewName)) { |
| | | String viewDef = viewDefs.get(viewName); |
| | | int pos = viewDef.indexOf("FROM"); |
| | | String subView = viewDef.substring(pos + 4); |
| | | String[] viewSources = subView.split("(\\.\"|\")"); |
| | | if (!viewSources[0].equalsIgnoreCase(schemaName)) { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } else { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
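| | | /**
| | | * Builds the extra per-level/per-weight views ("fsc-N-cM-lL-wW") declared in the
| | | * xgeos configuration mapping, skipping entries whose base table does not exist.
| | | */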
| | | private void resetExtraPostgisDataView(Connection connection, String ownerName, String currentSchema, |
| | | ArrayList<String> realTableNames) { |
| | | try { |
| | | // ArrayList<String> extraViewNames = new ArrayList<String>(); |
| | | XGeosDataConfigMapping configMapping = getConfigMapping(); |
| | | MultiMap configMultiMap = configMapping.getMapping(); |
| | | for (Object key : configMultiMap.keySet()) { |
| | | List values = (List) configMultiMap.get(key); |
| | | for (Object value : values) { |
| | | XGeosDataConfig xgeosConfig = (XGeosDataConfig) value; |
| | | short tid = xgeosConfig.getFSC(); |
| | | short cid = xgeosConfig.getCOMP(); |
| | | StringBuilder sbTable = new StringBuilder("fsc-"); |
| | | sbTable.append(tid).append("-c-"); |
| | | sbTable.append(cid); |
| | | int index = realTableNames.indexOf(sbTable.toString()); |
| | | if (index == -1) { |
| | | logger.debug("Cannot found-" + xgeosConfig.toString()); |
| | | continue; |
| | | } |
| | | StringBuilder sbView = new StringBuilder("fsc-"); |
| | | sbView.append(tid).append("-c"); |
| | | sbView.append(cid).append("-l"); |
| | | sbView.append(xgeosConfig.getLEV()).append("-w"); |
| | | sbView.append(xgeosConfig.getWEIGHT()); |
| | | // extraViewNames.add(sbView.toString()); |
| | | |
| | | createOrReplaceExtraView(connection, currentSchema, sbTable.toString(), sbView.toString(), |
| | | ownerName, xgeosConfig); |
| | | } |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
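| | | /**
| | | * Re-points the fixed "xpwtheme" dyncolor and fowner views at the currently
| | | * ready theme tables and re-assigns their ownership.
| | | */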
| | | private void resetThemesBaseView(Connection connection, String ownerName, String currentThemesName) |
| | | throws SQLException { |
| | | String viewName = "xpwtheme" + FDYNCOLOR_SUFFIX; |
| | | String tableName = currentThemesName + FDYNCOLOR_SUFFIX; |
| | | PrintfFormat pf = new PrintfFormat("CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\""); |
| | | String sql = pf.sprintf(new Object[]{viewName, tableName}); |
| | | Statement stmt = connection.createStatement(); |
| | | try { |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | |
| | | viewName = "xpwtheme" + FOWNER_SUFFIX; |
| | | tableName = currentThemesName + FOWNER_SUFFIX; |
| | | pf = new PrintfFormat("CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\""); |
| | | sql = pf.sprintf(new Object[]{viewName, tableName}); |
| | | |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | } catch (SQLException e) { |
| | | // logger.warn(e.getMessage(), e); |
| | | logger.info(sql == null ? "SQL=NULL" : "SQL=" + sql); |
| | | throw e; |
| | | } finally { |
| | | stmt.close(); |
| | | } |
| | | } |
| | | |
| | | |
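| | | /**
| | | * Creates the "<view>-oms" theme view: all columns of the base view joined with
| | | * the xpwtheme dyncolor and fowner views on (tid, oid).
| | | */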
| | | private void resetThemesPostgisDataView(Connection connection, String ownerName, |
| | | String currentSchema, String viewName) throws SQLException { |
| | | String themeViewName = viewName + "-oms"; |
| | | // PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | // String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | ResultSet rs = null; |
| | | Statement stmt = connection.createStatement(); |
| | | |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("CREATE OR REPLACE VIEW \""); |
| | | sbSQL.append(themeViewName).append("\" AS SELECT "); |
| | | |
| | | rs = connection.getMetaData().getColumns(null, currentSchema, viewName, "%"); |
| | | while (rs.next()) { |
| | | String fieldName = rs.getString("COLUMN_NAME"); |
| | | sbSQL.append("t." + fieldName).append(", "); |
| | | } |
| | | sbSQL.append("fc.dyncolor, fo.fowner FROM "); |
| | | if (currentSchema != null) |
| | | sbSQL.append("\"").append(currentSchema).append("\".\"").append(viewName).append("\" AS t,"); |
| | | else |
| | | sbSQL.append("\"").append(viewName).append("\" AS t,"); |
| | | sbSQL.append("xpwtheme").append(FDYNCOLOR_SUFFIX).append(" AS fc,"); |
| | | sbSQL.append("xpwtheme").append(FOWNER_SUFFIX).append(" AS fo WHERE "); |
| | | sbSQL.append("t.tid = fc.tid AND t.oid = fc.oid AND "); |
| | | sbSQL.append("t.tid = fo.tid AND t.oid = fo.oid"); |
| | | |
| | | // sbSQL.delete(sbSQL.length() - 2, sbSQL.length()); |
| | | String sql = sbSQL.toString(); |
| | | stmt.execute(sql); |
| | | sbSQL.delete(0, sbSQL.length()); |
| | | |
| | | PrintfFormat pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(themeViewName); |
| | | stmt.execute(sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
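| | | /**
| | | * Flow variant of resetThemesPostgisDataView: the "<view>-flow-oms" view also
| | | * exposes the fo.flow column.
| | | */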
| | | private void resetFlowThemesPostgisDataView(Connection connection, String ownerName, |
| | | String currentSchema, String viewName) throws SQLException { |
| | | String themeViewName = viewName + "-flow-oms"; |
| | | ResultSet rs = null; |
| | | Statement stmt = connection.createStatement(); |
| | | |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("CREATE OR REPLACE VIEW \""); |
| | | sbSQL.append(themeViewName).append("\" AS SELECT "); |
| | | |
| | | rs = connection.getMetaData().getColumns(null, currentSchema, viewName, "%"); |
| | | while (rs.next()) { |
| | | String fieldName = rs.getString("COLUMN_NAME"); |
| | | sbSQL.append("t." + fieldName).append(", "); |
| | | } |
| | | sbSQL.append("fc.dyncolor, fo.fowner, fo.flow FROM "); |
| | | if (currentSchema != null) |
| | | sbSQL.append("\"").append(currentSchema).append("\".\"").append(viewName).append("\" AS t,"); |
| | | else |
| | | sbSQL.append("\"").append(viewName).append("\" AS t,"); |
| | | sbSQL.append("xpwtheme").append(FDYNCOLOR_SUFFIX).append(" AS fc,"); |
| | | sbSQL.append("xpwtheme").append(FOWNER_SUFFIX).append(" AS fo WHERE "); |
| | | sbSQL.append("t.tid = fc.tid AND t.oid = fc.oid AND "); |
| | | sbSQL.append("t.tid = fo.tid AND t.oid = fo.oid"); |
| | | |
| | | // sbSQL.delete(sbSQL.length() - 2, sbSQL.length()); |
| | | String sql = sbSQL.toString(); |
| | | stmt.execute(sql); |
| | | sbSQL.delete(0, sbSQL.length()); |
| | | |
| | | PrintfFormat pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(themeViewName); |
| | | stmt.execute(sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
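| | | /**
| | | * Returns a map from view name to view definition for every view in the given
| | | * schema whose name matches the LIKE pattern.
| | | */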
| | | private HashMap<String, String> retrieveViewDef(Connection connection, String schemaName, String tablePattern) throws SQLException { |
| | | PreparedStatement stmt = connection.prepareStatement(QUERY_VIEWDEFSQL + "'" + tablePattern + "'"); |
| | | stmt.setString(1, schemaName); |
| | | // stmt.setString(2, tablePattern); |
| | | HashMap<String, String> result = new HashMap<String, String>(); |
| | | ResultSet rs = stmt.executeQuery(); |
| | | while (rs.next()) { |
| | | String tableName = rs.getString(1); |
| | | String viewDef = rs.getString(2); |
| | | result.put(tableName, viewDef); |
| | | } |
| | | rs.close(); |
| | | stmt.close(); |
| | | return result; |
| | | } |
| | | |
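| | | /**
| | | * Issues CREATE OR REPLACE VIEW over the given table and transfers ownership;
| | | * the failing SQL statement is logged before the exception is rethrown.
| | | */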
| | | private void createOrReplaceView(Connection connection, String schemaName, String tableName, String viewName, |
| | | String ownerName) throws SQLException { |
| | | PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | Statement stmt = connection.createStatement(); |
| | | try { |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | } catch (SQLException e) { |
| | | // logger.warn(e.getMessage(), e); |
| | | logger.info(sql == null ? "SQL=NULL" : "SQL=" + sql); |
| | | throw e; |
| | | } finally { |
| | | stmt.close(); |
| | | } |
| | | // connection.commit(); |
| | | } |
| | | |
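| | | /**
| | | * Like createOrReplaceView, but constrains the view with a WHERE clause on the
| | | * level and symweight values taken from the xgeos configuration entry.
| | | */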
| | | private void createOrReplaceExtraView(Connection connection, String schemaName, String tableName, String viewName, |
| | | String ownerName, XGeosDataConfig xgeosConfig) throws SQLException { |
| | | PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | |
| | | PrintfFormat pfWhere = new PrintfFormat(EXTRAWHERE_VIEWSQL); |
| | | sql += pfWhere.sprintf(new String[]{tableName, Short.toString(xgeosConfig.getLEV()), |
| | | tableName, Short.toString(xgeosConfig.getWEIGHT())}); |
| | | |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(sql); |
| | | |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | stmt.close(); |
| | | // connection.commit(); |
| | | } |
| | | |
| | | private Timestamp retrieveCurrentSchemaTimestamp(Connection connection, short status) throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("SELECT vstimestamp, vsschema, vsstatus FROM "); |
| | | sbSQL.append(DataReposVersionManager.XGVERSIONTABLE_NAME); |
| | | sbSQL.append(" WHERE vsstatus = "); |
| | | sbSQL.append(status); |
| | | sbSQL.append(" ORDER BY vsid"); |
| | | |
| | | Timestamp result = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | // get first result |
| | | if (rs.next()) { |
| | | result = rs.getTimestamp(1); |
| | | } |
| | | return result; |
| | | } finally { |
| | | if (rs != null) rs.close(); |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void updateCurrentRepositoryStatus(Connection connection, String schemaName, short newStatus) |
| | | throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(DataReposVersionManager.XGVERSIONTABLE_NAME).append(' '); |
| | | sbSQL.append(" SET vsstatus = "); |
| | | sbSQL.append(newStatus); |
| | | sbSQL.append(", vstimestamp = CURRENT_TIMESTAMP WHERE vsschema = '"); |
| | | sbSQL.append(schemaName).append("'"); |
| | | |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private boolean checkCurrentRepositoryStatus(Connection connection, short status) { |
| | | try { |
| | | return (retrieveCurrentSchemaName(connection, status) != null); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return false; |
| | | } |
| | | } |
| | | |
| | | private String retrieveCurrentSchemaName(Connection connection, short status) throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("SELECT vsschema, vstimestamp, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sbSQL.append(" WHERE vsstatus = "); |
| | | sbSQL.append(status); |
| | | sbSQL.append(" ORDER BY vsid"); |
| | | |
| | | String result = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | // get first result |
| | | if (rs.next()) { |
| | | result = rs.getString(1); |
| | | } |
| | | return result; |
| | | } finally { |
| | | if (rs != null) rs.close(); |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void updateCurrentThemeStatus(Connection connection, String themeTableName, short newStatus) |
| | | throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(DataReposVersionManager.XPTVERSIONTABLE_NAME).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(newStatus); |
| | | sbSQL.append(", vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = '"); |
| | | sbSQL.append(themeTableName).append("'"); |
| | | |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | |
| | | private boolean checkCurrentThemeStatus(Connection connection, short status) { |
| | | try { |
| | | return (retrieveCurrentThemeName(connection, status) != null); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return false; |
| | | } |
| | | } |
| | | |
| | | |
| | | private String retrieveCurrentThemeName(Connection connection, short status) throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vptname, vpttimestamp, vptstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sbSQL.append(" WHERE vptstatus = "); |
| | | sbSQL.append(status); |
| | | sbSQL.append("ORDER BY vptid"); |
| | | |
| | | String result = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | // get first result |
| | | if (rs.next()) { |
| | | result = rs.getString(1); |
| | | } |
| | | return result; |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
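| | | /**
| | | * Moves the schema currently in vsstatusBefore to vsstatusAfter; in exclusive mode
| | | * the schema previously holding vsstatusAfter is demoted to VSSTATUS_AVAILABLE.
| | | */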
| | | protected void transferXGeosVersionStatus(Connection connection, |
| | | short vsstatusBefore, short vsstatusAfter, boolean exclusive) throws JobExecutionException { |
| | | |
| | | try { |
| | | String currentTargetSchema = retrieveCurrentSchemaName(connection, vsstatusBefore); |
| | | if (currentTargetSchema == null) { |
| | | logger.info("Cannot found target schema in dataStore. status=" + vsstatusBefore); |
| | | return; |
| | | } |
| | | String existTargetSchema = null; |
| | | if (exclusive) |
| | | existTargetSchema = retrieveCurrentSchemaName(connection, vsstatusAfter); |
| | | |
| | | |
| | | updateCurrentRepositoryStatus(connection, currentTargetSchema, vsstatusAfter); |
| | | if ((exclusive) && (existTargetSchema != null)) { |
| | | updateCurrentRepositoryStatus(connection, existTargetSchema, |
| | | DataReposVersionManager.VSSTATUS_AVAILABLE); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Update " + DataReposVersionManager.XGVERSIONTABLE_NAME + |
| | | " has error-", e); |
| | | } |
| | | } |
| | | |
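| | | /**
| | | * Theme counterpart of transferXGeosVersionStatus, driven by the
| | | * XPTVERSIONTABLE_NAME version table.
| | | */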
| | | protected void transferThemesVersionStatus(Connection connection, |
| | | short vsstatusBefore, short vsstatusAfter, boolean exclusive) throws JobExecutionException { |
| | | |
| | | try { |
| | | String currentTargetTheme = retrieveCurrentThemeName(connection, vsstatusBefore); |
| | | if (currentTargetTheme == null) { |
| | | logger.info("Cannot found target schema in dataStore. status=" + vsstatusBefore); |
| | | return; |
| | | } |
| | | String existTargetSchema = null; |
| | | if (exclusive) |
| | | existTargetSchema = retrieveCurrentThemeName(connection, vsstatusAfter); |
| | | |
| | | |
| | | updateCurrentThemeStatus(connection, currentTargetTheme, vsstatusAfter); |
| | | if ((exclusive) && (existTargetSchema != null)) { |
| | | updateCurrentThemeStatus(connection, existTargetSchema, |
| | | DataReposVersionManager.VSSTATUS_AVAILABLE); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Update " + DataReposVersionManager.XPTVERSIONTABLE_NAME + |
| | | " has error-", e); |
| | | } |
| | | } |
| | | |
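| | | /**
| | | * Maps every configured view name ("fsc-N-cM-lL-wW") to its FTYPE style name,
| | | * logging entries that define conflicting styles for the same view.
| | | */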
| | | private HashMap<String, String> buildDefaultStylesMapping(XGeosDataConfigMapping configMapping) { |
| | | HashMap<String, String> result = new HashMap<String, String>(); |
| | | |
| | | for (Object key : configMapping.getMapping().keySet()) { |
| | | List xgeosConfigs = (List) configMapping.getMapping().get(key); |
| | | for (Object value : xgeosConfigs) { |
| | | XGeosDataConfig xgeosConfig = (XGeosDataConfig) value; |
| | | |
| | | StringBuilder sbView = new StringBuilder("fsc-"); |
| | | sbView.append(xgeosConfig.getFSC()).append("-c"); |
| | | sbView.append(xgeosConfig.getCOMP()).append("-l"); |
| | | sbView.append(xgeosConfig.getLEV()).append("-w"); |
| | | sbView.append(xgeosConfig.getWEIGHT()); |
| | | |
| | | String viewName = sbView.toString(); |
| | | if (!result.containsKey(viewName)) { |
| | | result.put(viewName, xgeosConfig.getFTYPE()); |
| | | } else { |
| | | if (xgeosConfig.getFTYPE() != null) { |
| | | if (!result.get(viewName).equals(xgeosConfig.getFTYPE())) |
| | | logger.info("Style Define Diff:" + result.get(viewName) + " - " + xgeosConfig.getFTYPE()); |
| | | } else { |
| | | logger.warn("xgeosConfig getFTYPE() is null - " + xgeosConfig.toString()); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | return result; |
| | | } |
| | | |
| | | protected String[] retrieveTargetStoreAllViewNames(Connection connection) { |
| | | try { |
| | | final int TABLE_NAME_COL = 3; |
| | | List list = new ArrayList(); |
| | | |
| | | DatabaseMetaData meta = connection.getMetaData(); |
| | | // String[] tableType = { "TABLE", "VIEW" }; |
| | | String[] tableType = { "VIEW" }; |
| | | ResultSet tables = meta.getTables(null, _pgSchema, "%", tableType); |
| | | |
| | | while (tables.next()) { |
| | | String tableName = tables.getString(TABLE_NAME_COL); |
| | | list.add(tableName); |
| | | /* |
| | | if (allowTable(tableName)) { |
| | | list.add(tableName); |
| | | } |
| | | */ |
| | | } |
| | | tables.close(); |
| | | return (String[]) list.toArray(new String[list.size()]); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | return null; |
| | | } |
| | | |
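| | | /**
| | | * Picks a default style id for a feature type: the configured FTYPE style when
| | | * available, otherwise a fallback chosen from the geometry binding and naming
| | | * hints (fsc, indexshape, lnd, symbol).
| | | */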
| | | protected String getDefaultFeatureTypeStyleId(Set<String> styles, HashMap<String, String> defaultStyles, FeatureType featureType) { |
| | | String ftName = featureType.getName().getLocalPart(); |
| | | boolean isNormalFeature = false; |
| | | boolean isLandBased = false; |
| | | boolean isIndex = false; |
| | | boolean isSmallIndex = false; |
| | | boolean isSymbol = false; |
| | | GeometryDescriptor geomAttrType = featureType.getGeometryDescriptor(); |
| | | Class geomType = geomAttrType.getType().getBinding(); |
| | | if (defaultStyles.containsKey(ftName)) { |
| | | String defaultStyleName = defaultStyles.get(ftName); |
| | | String styleName = retrieveDefaultStyle(styles, defaultStyleName, "unknown"); |
| | | if (!styleName.equals("unknown")) { |
| | | return styleName; |
| | | } |
| | | } |
| | | |
| | | if (ftName.indexOf("fsc") != -1) { |
| | | isNormalFeature = true; |
| | | } |
| | | if (ftName.indexOf("indexshape") != -1) { |
| | | isIndex = true; |
| | | } |
| | | if (ftName.indexOf("indexshapes") != -1) { |
| | | isSmallIndex = true; |
| | | } |
| | | if (ftName.indexOf("lnd") != -1) { |
| | | isLandBased = true; |
| | | } |
| | | if (featureType.getDescriptor("symbol") != null) { |
| | | isSymbol = true; |
| | | } |
| | | |
| | | if (Point.class.equals(geomType)) { |
| | | if (isSymbol) { |
| | | return retrieveDefaultStyle(styles, "xtpc-symbol", "point"); |
| | | } else if (isIndex) { |
| | | return retrieveDefaultStyle(styles, "xtpc-text2", "point"); |
| | | } else { |
| | | return retrieveDefaultStyle(styles, "xtpc-text", "point"); |
| | | } |
| | | } else if (LineString.class.equals(geomType)) { |
| | | if ((!isIndex) && (!isLandBased)) { |
| | | return retrieveDefaultStyle(styles, "xtpc-conductor", "line"); |
| | | } else if (isIndex) { |
| | | if (isSmallIndex) |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshapes", "line"); |
| | | |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshape", "line"); |
| | | } else if (isLandBased) { |
| | | return retrieveDefaultStyle(styles, "xtpc-lndcityLine", "line"); |
| | | } |
| | | } else if (MultiPoint.class.equals(geomType)) { |
| | | if (isSymbol) { |
| | | return retrieveDefaultStyle(styles, "xtpc-symbol", "point"); |
| | | } else { |
| | | return retrieveDefaultStyle(styles, "xtpc-text", "point"); |
| | | } |
| | | } else if (Polygon.class.equals(geomType)) { |
| | | if (isSymbol) { |
| | | return retrieveDefaultStyle(styles, "xtpc-symbol", "polygon"); |
| | | } else if ((!isIndex) && (!isLandBased)) { |
| | | return retrieveDefaultStyle(styles, "polygon", "polygon"); |
| | | } else if (isIndex) { |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshape", "polygon"); |
| | | } else if (isLandBased) { |
| | | return retrieveDefaultStyle(styles, "xtpc-lndcityPolygon", "polygon"); |
| | | } |
| | | } else if (LinearRing.class.equals(geomType)) { |
| | | if (!isIndex) { |
| | | return retrieveDefaultStyle(styles, "polygon", "polygon"); |
| | | } else { |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshape", "polygon"); |
| | | } |
| | | } else if (MultiLineString.class.equals(geomType)) { |
| | | if ((!isIndex) && (!isLandBased)) { |
| | | return retrieveDefaultStyle(styles, "xtpc-conductor", "line"); |
| | | } else if (isLandBased) { |
| | | return retrieveDefaultStyle(styles, "xtpc-lndcityLine", "line"); |
| | | } else { |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshape", "line"); |
| | | } |
| | | } else if (MultiPolygon.class.equals(geomType)) { |
| | | return "polygon"; |
| | | } |
| | | |
| | | return "xtpc-symbol"; |
| | | } |
| | | |
| | | private static String retrieveDefaultStyle(Set styles, String styleName, String defaultStyleName) { |
| | | if (styles.contains(styleName)) { |
| | | return styleName; |
| | | } else |
| | | return defaultStyleName; |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import java.io.IOException; |
| | | import java.sql.Connection; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.ArrayList; |
| | | import java.util.Date; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | public class OracleClearExchangeJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleClearExchangeJob.class); |
| | | |
| | | public static String FETCH_TPDATA = "SELECT TPID, TPNAME FROM BASEDB.TPDATA"; |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final boolean useTpclidText = false; |
| | | |
| | | private static final int FETCHSIZE = 100; |
| | | private static final int COMMITSIZE = 100; |
| | | |
| | | protected static class Pair { |
| | | Object first; |
| | | Object second; |
| | | |
| | | public Pair(Object first, Object second) { |
| | | this.first = first; |
| | | this.second = second; |
| | | } |
| | | } |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("PGHOST=" + _myHost); |
| | | logger.info("PGDATBASE=" + _myDatabase); |
| | | logger.info("PGPORT=" + _myPort); |
| | | logger.info("PGSCHEMA=" + _mySchema); |
| | | logger.info("PGUSER=" + _myUsername); |
| | | logger.info("PGPASS=" + _myPassword); |
| | | logger.info("USEWKB=" + _myUseWKB); |
| | | */ |
| | | |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
| | | @Override |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | createSourceDataStore(); |
| | | if (getSourceDataStore() == null) { |
| | | logger.warn("Cannot connect source oracle database."); |
| | | throw new JobExecutionException("Cannot connect source oracle database."); |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName; |
| | | try { |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | clearOutputDatabase(); |
| | | |
| | | logger.info("-- step:transformOracleDMMSDB --"); |
| | | targetSchemaName = ""; |
| | | |
| | | OracleConvertPostGISJobContext jobContext = |
| | | (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | clearExchangeData(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge Connectivity Owner", tStep, tStepEnd); |
| | | } |
| | | |
| | | tStep = System.currentTimeMillis(); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge ColorTable", tStep, tStepEnd); |
| | | } |
| | | |
| | | jobContext.closeOracleConnection(); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; |
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
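| | | /**
| | | * Deletes already-exchanged rows (ISEXCHANGE=1) from CMMS_POSTDB.GEO_EXCHANGE,
| | | * after first verifying through JDBC metadata that the table exists.
| | | */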
| | | private void clearExchangeData(OracleConvertPostGISJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | ResultSet rsMeta = connection.getMetaData().getTables(null, "CMMS_POSTDB", "GEO_EXCHANGE", |
| | | new String[]{"TABLE"}); |
| | | |
| | | boolean found = false; |
| | | try { |
| | | while (rsMeta.next()) { |
| | | found = true; |
| | | break; |
| | | } |
| | | // } catch (SQLException e) |
| | | } finally { |
| | | if (rsMeta != null) { |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | } |
| | | } |
| | | |
| | | if (!found) { |
| | | logger.info("Cannot Found GEO_EXCHANGE in CMMS_POSTDB."); |
| | | return; |
| | | } |
| | | |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate("DELETE FROM \"CMMS_POSTDB\".\"GEO_EXCHANGE\" WHERE ISEXCHANGE=1"); |
| | | logger.info("DELETE GEO_EXCHANGE UPDATE SIZE=" + count); |
| | | } finally { |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | @Override |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
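| | | /**
| | | * Scans the version table and returns the vsschema currently flagged
| | | * VSSTATUS_USING, or null when no schema is in use.
| | | */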
| | | private String determineTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) { |
| | | throw new IOException("cannot found " + DataReposVersionManager.XGVERSIONTABLE_NAME); |
| | | } |
| | | rs.close(); |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current != -1) { |
| | | Object[] values = tmpSchemas.get(current); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void clearOutputDatabase() { |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.FileFilter;
|
| | | import java.io.FileInputStream;
|
| | | import java.io.FileNotFoundException;
|
| | | import java.io.FilenameFilter;
|
| | | import java.io.IOException;
|
| | | import java.math.BigDecimal;
|
| | | import java.nio.BufferOverflowException;
|
| | | import java.nio.ByteBuffer;
|
| | | import java.nio.ByteOrder;
|
| | | import java.nio.channels.FileChannel;
|
| | | import java.sql.Connection;
|
| | | import java.sql.ResultSet;
|
| | | import java.sql.SQLException;
|
| | | import java.sql.Statement;
|
| | | import java.sql.Types;
|
| | | import java.util.ArrayList;
|
| | | import java.util.Date;
|
| | | import java.util.List;
|
| | | import java.util.Map;
|
| | | import java.util.TreeMap;
|
| | |
|
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext;
|
| | | import com.ximple.eofms.jobs.context.edbgeo.FeatureDgnConvertEdbGeoJobContext;
|
| | | import com.ximple.eofms.jobs.context.edbgeo.GeneralDgnConvertEdbGeoJobContext;
|
| | | import com.ximple.eofms.jobs.context.edbgeo.IndexDgnConvertEdbGeoJobContext;
|
| | | import com.ximple.eofms.jobs.context.edbgeo.OracleConvertEdbGeoJobContext;
|
| | | import com.ximple.eofms.util.BinConverter;
|
| | | import com.ximple.eofms.util.ByteArrayCompressor;
|
| | | import com.ximple.eofms.util.FileUtils;
|
| | | import com.ximple.eofms.util.StringUtils;
|
| | | import com.ximple.io.dgn7.ComplexElement;
|
| | | import com.ximple.io.dgn7.Dgn7fileException;
|
| | | import com.ximple.io.dgn7.Dgn7fileReader;
|
| | | import com.ximple.io.dgn7.Element;
|
| | | import com.ximple.io.dgn7.ElementType;
|
| | | import com.ximple.io.dgn7.IElementHandler;
|
| | | import com.ximple.io.dgn7.Lock;
|
| | | import com.ximple.util.PrintfFormat;
|
| | | import oracle.jdbc.OracleConnection;
|
| | | import oracle.jdbc.OracleResultSet;
|
| | | import oracle.sql.ARRAY;
|
| | | import oracle.sql.BLOB;
|
| | | import org.apache.commons.collections.OrderedMap;
|
| | | import org.apache.commons.collections.OrderedMapIterator;
|
| | | import org.apache.commons.collections.map.LinkedMap;
|
| | | import org.apache.commons.logging.Log;
|
| | | import org.apache.commons.logging.LogFactory;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.Transaction;
|
| | | import org.geotools.data.edbgeo.PostgisDataStoreFactory;
|
| | | import org.geotools.data.jdbc.JDBCUtils;
|
| | | import org.geotools.feature.SchemaException;
|
| | | import org.geotools.jdbc.JDBCDataStore;
|
| | | import org.opengis.feature.IllegalAttributeException;
|
| | | import org.quartz.JobDataMap;
|
| | | import org.quartz.JobDetail;
|
| | | import org.quartz.JobExecutionContext;
|
| | | import org.quartz.JobExecutionException;
|
| | |
|
| | | public class OracleConvertDgn2EdbGeoJob extends AbstractOracleDatabaseJob {
|
| | | final static Log logger = LogFactory.getLog(OracleConvertDgn2EdbGeoJob.class);
|
| | |
|
| | | private static final String EDBHOST = "EDBHOST";
|
| | | private static final String EDBDATBASE = "EDBDATBASE";
|
| | | private static final String EDBPORT = "EDBPORT";
|
| | | private static final String EDBSCHEMA = "EDBSCHEMA";
|
| | | private static final String EDBUSER = "EDBUSER";
|
| | | private static final String EDBPASS = "EDBPASS";
|
| | | private static final String USEWKB = "USEWKB";
|
| | |
|
| | | private static final boolean useTpclidText = false;
|
| | |
|
| | | private static final int FETCHSIZE = 30;
|
| | | private static final int COMMITSIZE = 100;
|
| | | private static final String INDEXPATHNAME = "index";
|
| | | private static final String OTHERPATHNAME = "other";
|
| | |
|
| | | protected static class Pair {
|
| | | Object first;
|
| | | Object second;
|
| | |
|
| | | public Pair(Object first, Object second) {
|
| | | this.first = first;
|
| | | this.second = second;
|
| | | }
|
| | | }
|
| | |
|
| | | protected static PostgisDataStoreFactory dataStoreFactory = new PostgisDataStoreFactory();
|
| | |
|
| | | protected String _edbHost;
|
| | | protected String _edbDatabase;
|
| | | protected String _edbPort;
|
| | | protected String _edbSchema;
|
| | | protected String _edbUsername;
|
| | | protected String _edbPassword;
|
| | | protected String _edbUseWKB;
|
| | |
|
| | | protected Map<String, String> edbProperties;
|
| | | protected JDBCDataStore targetDataStore;
|
| | | // protected OracleConvertEdbGeoJobContext oracleJobContext;
|
| | |
|
| | | private long queryTime = 0;
|
| | | private long queryTimeStart = 0;
|
| | |
|
| | | public Log getLogger() {
|
| | | return logger;
|
| | | }
|
| | |
|
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath,
|
| | | boolean profileMode,
|
| | | boolean useTransform) {
|
| | | return new OracleConvertEdbGeoJobContext(getDataPath(),
|
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform);
|
| | | }
|
| | |
|
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException {
|
| | | super.extractJobConfiguration(jobDetail);
|
| | | JobDataMap dataMap = jobDetail.getJobDataMap();
|
| | | _edbHost = dataMap.getString(EDBHOST);
|
| | | _edbDatabase = dataMap.getString(EDBDATBASE);
|
| | | _edbPort = dataMap.getString(EDBPORT);
|
| | | _edbSchema = dataMap.getString(EDBSCHEMA);
|
| | | _edbUsername = dataMap.getString(EDBUSER);
|
| | | _edbPassword = dataMap.getString(EDBPASS);
|
| | | _edbUseWKB = dataMap.getString(USEWKB);
|
| | |
|
| | | Log logger = getLogger();
|
| | | /*
|
| | | logger.info("EDBHOST=" + _myHost);
|
| | | logger.info("EDBDATBASE=" + _myDatabase);
|
| | | logger.info("EDBPORT=" + _myPort);
|
| | | logger.info("EDBSCHEMA=" + _mySchema);
|
| | | logger.info("EDBUSER=" + _myUsername);
|
| | | logger.info("EDBPASS=" + _myPassword);
|
| | | logger.info("USEWKB=" + _myUseWKB);
|
| | | */
|
| | |
|
| | | if (_edbHost == null) {
|
| | | logger.warn("EDBHOST is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial host.");
|
| | | }
|
| | | if (_edbDatabase == null) {
|
| | | logger.warn("PGDATABASE is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial database.");
|
| | | }
|
| | | if (_edbPort == null) {
|
| | | logger.warn("EDBPORT is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial port.");
|
| | | }
|
| | | if (_edbSchema == null) {
|
| | | logger.warn("EDBSCHEMA is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial schema.");
|
| | | }
|
| | | if (_edbUsername == null) {
|
| | | logger.warn("PGUSERNAME is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial username.");
|
| | | }
|
| | | if (_edbPassword == null) {
|
| | | logger.warn("PGPASSWORD is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial password.");
|
| | | }
|
| | |
|
| | | Map<String, String> remote = new TreeMap<String, String>();
|
| | | remote.put("dbtype", "edbgeo");
|
| | | remote.put("charset", "UTF-8");
|
| | | remote.put("host", _edbHost);
|
| | | remote.put("port", _edbPort);
|
| | | remote.put("database", _edbDatabase);
|
| | | remote.put("user", _edbUsername);
|
| | | remote.put("passwd", _edbPassword);
|
| | | remote.put("namespace", null);
|
| | | edbProperties = remote;
|
| | | }
|
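A job definition that drives extractJobConfiguration() above carries the EDB keys in its Quartz JobDataMap. A minimal wiring sketch, assuming the Quartz 2.x JobBuilder API; every value below is a placeholder, not a setting from this repository:

    // Sketch: populating the JobDataMap read by extractJobConfiguration().
    // Keys match the constants defined above; values are placeholders.
    JobDetail detail = org.quartz.JobBuilder.newJob(OracleConvertDgn2EdbGeoJob.class)
            .withIdentity("convertDgn2EdbGeo")
            .build();
    JobDataMap dataMap = detail.getJobDataMap();
    dataMap.put("EDBHOST", "127.0.0.1");
    dataMap.put("EDBDATBASE", "xgeos");   // key intentionally matches the EDBDATBASE constant spelling
    dataMap.put("EDBPORT", "5444");
    dataMap.put("EDBSCHEMA", "public");
    dataMap.put("EDBUSER", "enterprisedb");
    dataMap.put("EDBPASS", "secret");
    dataMap.put("USEWKB", "true");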
| | |
|
| | | public void execute(JobExecutionContext context) throws JobExecutionException {
|
| | | // Every job has its own job detail
|
| | | JobDetail jobDetail = context.getJobDetail();
|
| | |
|
| | | // The name is defined in the job definition
|
| | | String jobName = jobDetail.getKey().getName();
|
| | |
|
| | | // Log the time the job started
|
| | | logger.info(jobName + " fired at " + new Date());
|
| | | extractJobConfiguration(jobDetail);
|
| | | createSourceDataStore();
|
| | | createTargetDataStore();
|
| | | if (getSourceDataStore() == null) {
|
| | | logger.warn("Cannot connect source oracle database.");
|
| | | throw new JobExecutionException("Cannot connect source oracle database.");
|
| | | }
|
| | |
|
| | | if (getTargetDataStore() == null) {
|
| | | logger.warn("Cannot connect source postgreSQL database.");
|
| | | throw new JobExecutionException("Cannot connect source postgreSQL database.");
|
| | | }
|
| | |
|
| | | if (isProfileMode()) {
|
| | | queryTime = 0;
|
| | | }
|
| | |
|
| | | long t1 = System.currentTimeMillis();
|
| | | String targetSchemaName;
|
| | | try {
|
| | | logger.info("-- step:clearOutputDatabase --");
|
| | | clearOutputDatabase();
|
| | | targetSchemaName = determineTargetSchemaName();
|
| | |
|
| | | if (checkConvertFile()) {
|
| | | logger.info("-- step:convertIndexDesignFile --");
|
| | | long tStep = System.currentTimeMillis();
|
| | | convertIndexDesignFile(context, targetSchemaName);
|
| | | if (isProfileMode()) {
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logTimeDiff("Profile-convertIndexDesignFile", tStep, tStepEnd);
|
| | | }
|
| | |
|
| | | logger.info("-- step:convertOtherDesignFile --");
|
| | | tStep = System.currentTimeMillis();
|
| | | convertOtherDesignFile(context, targetSchemaName);
|
| | | if (isProfileMode()) {
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logTimeDiff("Profile-convertOtherDesignFile", tStep, tStepEnd);
|
| | | }
|
| | |
|
| | | }
|
| | |
|
| | | if (checkConvertDB()) {
|
| | | logger.info("-- step:convertOracleDB --");
|
| | |
|
| | | OracleConvertEdbGeoJobContext jobContext =
|
| | | (OracleConvertEdbGeoJobContext) prepareJobContext(targetSchemaName, _filterPath,
|
| | | isProfileMode(), isTransformed());
|
| | | jobContext.setSourceDataStore(getSourceDataStore());
|
| | | // jobContext.setConvertElementIn(_convertElementIn);
|
| | | jobContext.setElementLogging(checkElementLogging());
|
| | | jobContext.setExecutionContext(context);
|
| | |
|
| | | createHibernateSequence(jobContext);
|
| | | fetchTPData(jobContext);
|
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" +
|
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName()));
|
| | |
|
| | | long tStep = System.currentTimeMillis();
|
| | |
|
| | | if (isCopyConnectivityMode()) {
|
| | | copyConnectivity(jobContext);
|
| | | }
|
| | |
|
| | | if (isProfileMode()) {
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logTimeDiff("Profile-Copy Connectivity", tStep, tStepEnd);
|
| | | }
|
| | |
|
| | | for (String orgSchema : _orgSchema) {
|
| | | logger.info("----- start schema:" + orgSchema + " -----");
|
| | | if (isProfileMode()) {
|
| | | jobContext.resetProcessTime();
|
| | | jobContext.resetUpdateTime();
|
| | | }
|
| | | tStep = System.currentTimeMillis();
|
| | | executeConvert(jobContext, orgSchema, _dataPath);
|
| | |
|
| | | //close all open filewriter instance
|
| | | jobContext.closeFeatureWriter();
|
| | |
|
| | | if (isProfileMode()) {
|
| | | logger.warn("Profile-Current Query Oracle Cost-" +
|
| | | ((int) ((getQueryTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((getQueryTime()) % 60000.0)) / 1000) + " sec");
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logger.warn("Profile-Current Process Cost-" +
|
| | | ((int) ((getProcessTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((getProcessTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current Update Cost-" +
|
| | | ((int) ((getUpdateTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((getUpdateTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current JobContext Process Cost-" +
|
| | | ((int) ((jobContext.getProcessTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((jobContext.getProcessTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current JobContext Update Cost-" +
|
| | | ((int) ((jobContext.getUpdateTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((jobContext.getUpdateTime()) % 60000.0)) / 1000) + " sec");
|
| | | logTimeDiff("Profile-Convert[" + orgSchema + "]", tStep, tStepEnd);
|
| | |
|
| | | resetQueryTime();
|
| | | resetProcessTime();
|
| | | resetUpdateTime();
|
| | | }
|
| | | }
|
| | |
|
| | | jobContext.closeOracleConnection();
|
| | | }
|
| | |
|
| | | if (checkConvertElementIn()) {
|
| | | logger.info("-- step:convertFeatureDesignFile --");
|
| | | long tStep = System.currentTimeMillis();
|
| | | convertFeatureDesignFile(context, targetSchemaName);
|
| | | if (isProfileMode()) {
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd);
|
| | | }
|
| | | }
|
| | |
|
| | | if (checkCreateDummy()) {
|
| | | logger.info("-- step:createDummyFeatureFile --");
|
| | | createDummyFeatureFile(context);
|
| | | }
|
| | |
|
| | | long t2 = System.currentTimeMillis();
|
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss";
|
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW);
|
| | | logTimeDiff("Total ", t1, t2);
|
| | |
|
| | | updateRepoStatusToReady(targetSchemaName);
|
| | |
|
| | | } catch (SQLException e) {
|
| | | disconnect();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e);
|
| | | } catch (IOException ex) {
|
| | | disconnect();
|
| | | logger.warn(ex.getMessage(), ex);
|
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex);
|
| | | } finally {
|
| | | disconnect();
|
| | | }
|
| | | logger.warn(jobName + " end at " + new Date());
|
| | | }
|
| | |
|
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) {
|
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " +
|
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec");
|
| | | }
|
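The minute/second breakdown in logTimeDiff() can be cross-checked against java.util.concurrent.TimeUnit; this equivalent sketch is only an illustration of the arithmetic, not code from the repository:

    // Equivalent elapsed-time formatting using TimeUnit.
    long elapsed = tCurrent - tBefore;
    long minutes = java.util.concurrent.TimeUnit.MILLISECONDS.toMinutes(elapsed);
    long seconds = java.util.concurrent.TimeUnit.MILLISECONDS.toSeconds(elapsed) % 60;
    logger.warn(message + ":use time = " + minutes + " min - " + seconds + " sec");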
| | |
|
| | | private void executeConvert(OracleConvertEdbGeoJobContext jobContext,
|
| | | String querySchema, String targetSchemaName) throws SQLException {
|
| | | int order = 0;
|
| | | OrderedMap map = getBlobStorageList(jobContext.getOracleConnection(),
|
| | | querySchema, "SD$SPACENODES", null);
|
| | |
|
| | | logger.info("begin convert job:[" + map.size() + "]:testmode=" + _testMode);
|
| | |
|
| | | int total = map.size(); //spacenodes count
|
| | | int step = total / 100;
|
| | | int current = 0;
|
| | |
|
| | | if (total == 0) {
|
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is zero.");
|
| | | return;
|
| | | }
|
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size());
|
| | |
|
| | | //jobContext.startTransaction();
|
| | | jobContext.setCurrentSchema(querySchema);
|
| | | jobContext.getExecutionContext().put("ConvertDgn2EdbGeoJobProgress", 0);
|
| | | for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext(); ) {
|
| | | it.next();
|
| | |
|
| | | Pair pair = (Pair) it.getValue();
|
| | | String tableSrc = (String) pair.first;
|
| | |
|
| | | logger.info("begin convert:[" + order + "]-" + tableSrc);
|
| | | queryIgsetElement(jobContext, querySchema, tableSrc);
|
| | |
|
| | |
|
| | | order++;
|
| | |
|
| | | if (_testMode) {
|
| | | if ((_testCount < 0) || (order >= _testCount))
|
| | | break;
|
| | | }
|
| | |
|
| | | if ((order % COMMITSIZE) == 0) {
|
| | | // OracleConnection connection = jobContext.getOracleConnection();
|
| | | // connection.commitTransaction();
|
| | | jobContext.commitTransaction();
|
| | | //jobContext.startTransaction();
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | }
|
| | |
|
| | | if (step != 0) {
|
| | | int now = order / step;
|
| | | if (now != current) {
|
| | | current = now;
|
| | | jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", current);
|
| | |
|
| | | }
|
| | | } else {
|
| | | jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", current);
|
| | | current++;
|
| | | }
|
| | | }
|
| | | jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", 100);
|
| | |
|
| | | jobContext.commitTransaction();
|
| | | jobContext.resetFeatureContext();
|
| | |
|
| | | if (isProfileMode()) {
|
| | |
|
| | | }
|
| | |
|
| | | logger.info("end convert job:[" + order + "]");
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | }
|
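executeConvert() walks the SD$SPACENODES map, commits once every COMMITSIZE (100) tables, and publishes a percentage through the execution context. With step = total / 100, the progress value behaves like this sketch (total is a hypothetical count; the key string matches the one used above):

    // Progress reporting sketch: order/step approximates percent complete.
    int total = 250;                     // hypothetical space-node count
    int step = total / 100;              // 2 in this example; the real loop guards step == 0 for small totals
    for (int order = 1; order <= total; order++) {
        int percent = Math.min(order / step, 100);
        // jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", percent);
    }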
| | |
|
| | | protected OrderedMap getBlobStorageList(Connection connection, String schemaSrc, String tableSrc,
|
| | | OrderedMap orderedMap) throws SQLException {
|
| | | if (orderedMap == null)
|
| | | orderedMap = new LinkedMap(99);
|
| | | String fetchStmtFmt = "SELECT SNID, SPACETABLE FROM \"%s\".\"%s\"";
|
| | | PrintfFormat spf = new PrintfFormat(fetchStmtFmt);
|
| | | String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc});
|
| | | Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
|
| | | ResultSet rs = null;
|
| | |
|
| | | stmt.setFetchSize(FETCHSIZE);
|
| | | try {
|
| | | rs = stmt.executeQuery(fetchStmt);
|
| | | int size = rs.getMetaData().getColumnCount();
|
| | |
|
| | | while (rs.next()) {
|
| | | Object[] values = new Object[size];
|
| | |
|
| | | for (int i = 0; i < size; i++) {
|
| | | values[i] = rs.getObject(i + 1);
|
| | | }
|
| | |
|
| | | Integer key = ((BigDecimal) values[0]).intValue();
|
| | | String name = (String) values[1];
|
| | |
|
| | | Pair pair = (Pair) orderedMap.get(key);
|
| | | if (pair == null)
|
| | | orderedMap.put(key, new Pair(name, null));
|
| | | else
|
| | | pair.first = name;
|
| | | }
|
| | | } catch (SQLException e) {
|
| | | logger.error(e.toString(), e);
|
| | | logger.error("stmt=" + fetchStmt);
|
| | | throw e;
|
| | | } finally {
|
| | | JDBCUtils.close(rs);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | |
|
| | | return orderedMap;
|
| | | }
|
| | |
|
| | | protected OrderedMap getRawFormatStorageList(OracleConnection connection, String schemaSrc, String tableSrc,
|
| | | OrderedMap orderedMap) throws SQLException {
|
| | | if (orderedMap == null)
|
| | | orderedMap = new LinkedMap(99);
|
| | | String fetchStmtFmt = "SELECT RNID, SPACETABLE FROM \"%s\".\"%s\"";
|
| | | PrintfFormat spf = new PrintfFormat(fetchStmtFmt);
|
| | | String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc});
|
| | | Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
|
| | |
|
| | | stmt.setFetchSize(FETCHSIZE);
|
| | | ResultSet rs = stmt.executeQuery(fetchStmt);
|
| | | try {
|
| | | int size = rs.getMetaData().getColumnCount();
|
| | | while (rs.next()) {
|
| | | Object[] values = new Object[size];
|
| | |
|
| | | for (int i = 0; i < size; i++) {
|
| | | values[i] = rs.getObject(i + 1);
|
| | | }
|
| | |
|
| | | Integer key = ((BigDecimal) values[0]).intValue();
|
| | | String name = (String) values[1];
|
| | |
|
| | | Pair pair = (Pair) orderedMap.get(key);
|
| | | if (pair == null)
|
| | | orderedMap.put(key, new Pair(null, name));
|
| | | else
|
| | | pair.second = name;
|
| | | }
|
| | | } finally {
|
| | | JDBCUtils.close(rs);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | return orderedMap;
|
| | | }
|
| | |
|
| | | protected void queryIgsetElement(OracleConvertEdbGeoJobContext jobContext,
|
| | | String srcschema, String srctable) throws SQLException {
|
| | | Connection connection = jobContext.getOracleConnection();
|
| | | String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" ORDER BY ROWID";
|
| | | //String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" WHERE TAG_SFSC = 423 AND TAG_LUFID = 21612065 ORDER BY ROWID";
|
| | | PrintfFormat spf = new PrintfFormat(fetchSrcStmtFmt);
|
| | | String fetchSrcStmt = spf.sprintf(new Object[]{srcschema, srctable});
|
| | | Statement stmtSrc = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
|
| | |
|
| | | stmtSrc.setFetchSize(FETCHSIZE);
|
| | | ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt);
|
| | | int igdsMetaType = rsSrc.getMetaData().getColumnType(1);
|
| | | while (rsSrc.next()) {
|
| | | if (isProfileMode()) {
|
| | | markQueryTime();
|
| | | }
|
| | |
|
| | | byte[] raw = null;
|
| | | if (igdsMetaType == Types.BLOB) {
|
| | | BLOB blob = (BLOB) rsSrc.getBlob(1);
|
| | |
|
| | | try {
|
| | | raw = getBytesFromBLOB(blob);
|
| | | } catch (BufferOverflowException e) {
|
| | | logger.warn("Wrong Element Structure-", e);
|
| | | } finally {
|
| | | // blob.close();
|
| | | }
|
| | | } else {
|
| | | raw = rsSrc.getBytes(1);
|
| | | }
|
| | |
|
| | | try {
|
| | | if (raw != null) {
|
| | | Element element = fetchBinaryElement(raw);
|
| | | if (isProfileMode()) {
|
| | | accumulateQueryTime();
|
| | | }
|
| | | jobContext.putFeatureCollection(element);
|
| | | } else {
|
| | | if (isProfileMode()) {
|
| | | accumulateQueryTime();
|
| | | }
|
| | | }
|
| | | } catch (Dgn7fileException e) {
|
| | | logger.warn("Dgn7Exception", e);
|
| | | }
|
| | | }
|
| | |
|
| | | JDBCUtils.close(rsSrc);
|
| | | JDBCUtils.close(stmtSrc);
|
| | | }
|
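getBytesFromBLOB() is inherited from AbstractOracleDatabaseJob and is not shown in this file. As a rough sketch of what such a helper usually does with the JDBC Blob API (an assumption about its behaviour, not the actual implementation):

    // Hypothetical BLOB-to-byte[] helper; the real getBytesFromBLOB() lives in the superclass.
    private static byte[] blobToBytes(java.sql.Blob blob) throws java.sql.SQLException {
        int length = (int) blob.length();
        return blob.getBytes(1L, length);   // Blob offsets are 1-based
    }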
| | |
|
| | | protected void queryRawElement(OracleConvertEdbGeoJobContext jobContext,
|
| | | String srcschema, String srctable) throws SQLException {
|
| | | Connection connection = jobContext.getOracleConnection();
|
| | | String fetchDestStmtFmt = "SELECT ELEMENT FROM \"%s\".\"%s\" ORDER BY ROWID";
|
| | | PrintfFormat spf = new PrintfFormat(fetchDestStmtFmt);
|
| | | String fetchDestStmt = spf.sprintf(new Object[]{srcschema, srctable});
|
| | | Statement stmtDest = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
|
| | |
|
| | | stmtDest.setFetchSize(FETCHSIZE);
|
| | | ResultSet rsDest = stmtDest.executeQuery(fetchDestStmt);
|
| | |
|
| | | try {
|
| | | while (rsDest.next()) {
|
| | | ARRAY rawsValue = ((OracleResultSet) rsDest).getARRAY(1);
|
| | | long[] rawData = rawsValue.getLongArray();
|
| | | byte[] compressedValue;
|
| | |
|
| | | /*
|
| | | if (dataMode == TransferTask.DataMode.Normal)
|
| | | {
|
| | | compressedValue = BinConverter.unmarshalByteArray(rawData, true);
|
| | | } else
|
| | | {
|
| | | compressedValue = BinConverter.unmarshalCompactByteArray(rawData);
|
| | | }
|
| | | */
|
| | | compressedValue = BinConverter.unmarshalByteArray(rawData, true);
|
| | |
|
| | | byte[] rawDest = ByteArrayCompressor.decompressByteArray(compressedValue);
|
| | |
|
| | | try {
|
| | | Element element = fetchBinaryElement(rawDest);
|
| | | jobContext.putFeatureCollection(element);
|
| | | } catch (Dgn7fileException e) {
|
| | | logger.warn("Dgn7Exception:" + e.getMessage(), e);
|
| | | }
|
| | | }
|
| | | } finally {
|
| | | JDBCUtils.close(rsDest);
|
| | | JDBCUtils.close(stmtDest);
|
| | | }
|
| | | }
|
| | |
|
| | | // Binary to Element
|
| | | private Element fetchBinaryElement(byte[] raws) throws Dgn7fileException {
|
| | | ByteBuffer buffer = ByteBuffer.wrap(raws);
|
| | | buffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | | short signature = buffer.getShort();
|
| | |
|
| | | // byte type = (byte) (buffer.get() & 0x7f);
|
| | | byte type = (byte) ((signature >>> 8) & 0x007f);
|
| | |
|
| | | // Bentley stores contentLength in 2-byte words,
|
| | | // while ByteBuffer positions are counted in bytes.
|
| | | // track the record location
|
| | | int elementLength = (buffer.getShort() * 2) + 4;
|
| | | ElementType recordType = ElementType.forID(type);
|
| | | IElementHandler handler;
|
| | |
|
| | | handler = recordType.getElementHandler();
|
| | |
|
| | | Element dgnElement = (Element) handler.read(buffer, signature, elementLength);
|
| | | if (recordType.isComplexElement() && (elementLength < raws.length)) {
|
| | | int offset = elementLength;
|
| | | while (offset < (raws.length - 4)) {
|
| | | buffer.position(offset);
|
| | | signature = buffer.getShort();
|
| | | type = (byte) ((signature >>> 8) & 0x007f);
|
| | | elementLength = (buffer.getShort() * 2) + 4;
|
| | | if (raws.length < (offset + elementLength)) {
|
| | | logger.debug("Length not match:" + offset + ":" + buffer.position() + ":" + buffer.limit());
|
| | | break;
|
| | | }
|
| | | recordType = ElementType.forID(type);
|
| | | handler = recordType.getElementHandler();
|
| | | if (handler != null) {
|
| | | Element subElement = (Element) handler.read(buffer, signature, elementLength);
|
| | | ((ComplexElement) dgnElement).add(subElement);
|
| | | offset += elementLength;
|
| | | } else {
|
| | | byte[] remain = new byte[buffer.remaining()];
|
| | | System.arraycopy(raws, offset, remain, 0, buffer.remaining());
|
| | | for (int i = 0; i < remain.length; i++) {
|
| | | if (remain[i] != 0) {
|
| | | logger.info("fetch element has some error. index=" + (offset + i) + ":value=" + remain[i]);
|
| | | }
|
| | | }
|
| | | break;
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | return dgnElement;
|
| | | }
|
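fetchBinaryElement() decodes the Intergraph DGN7 record header: a little-endian 2-byte signature whose high byte carries the element type (with the complex bit masked off), followed by a 2-byte length counted in 2-byte words. The header decode in isolation:

    // DGN7 record header decode (same logic as the method above).
    ByteBuffer buf = ByteBuffer.wrap(raw).order(ByteOrder.LITTLE_ENDIAN);
    short signature = buf.getShort();
    byte type = (byte) ((signature >>> 8) & 0x007f);   // type lives in the high byte
    int lengthInBytes = (buf.getShort() * 2) + 4;      // words -> bytes, plus the 4-byte header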
| | |
|
| | | /**
|
| | | * Executes the job of converting index design files.
|
| | | *
|
| | | * @param context the job execution context
|
| | | * @throws org.quartz.JobExecutionException
|
| | | * exception
|
| | | */
|
| | | private void convertIndexDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException {
|
| | | File indexDir = new File(getDataPath(), INDEXPATHNAME);
|
| | | if (!indexDir.exists()) {
|
| | | logger.info("index dir=" + indexDir + " not exist.");
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!indexDir.isDirectory()) {
|
| | | logger.info("index dir=" + indexDir + " is not a directory.");
|
| | | }
|
| | |
|
| | | List<File> dgnFiles = FileUtils.recurseDir(indexDir, new FileFilter() {
|
| | | public boolean accept(File pathname) {
|
| | | return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn");
|
| | | }
|
| | | });
|
| | |
|
| | | for (File dgnFile : dgnFiles) {
|
| | | if (dgnFile.isDirectory()) continue;
|
| | | IndexDgnConvertEdbGeoJobContext convertContext =
|
| | | new IndexDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName,
|
| | | isProfileMode(), isTransformed());
|
| | | logger.info("--- start index dgnfile-" + dgnFile.toString() + " ---");
|
| | | FileInputStream fs = null;
|
| | | FileChannel fc = null;
|
| | | Dgn7fileReader reader = null;
|
| | | try {
|
| | | convertContext.clearOutputDatabase();
|
| | | convertContext.setExecutionContext(context);
|
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator);
|
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]);
|
| | | convertContext.startTransaction();
|
| | |
|
| | | fs = new FileInputStream(dgnFile);
|
| | | fc = fs.getChannel();
|
| | | reader = new Dgn7fileReader(fc, new Lock());
|
| | | convertContext.setReader(reader);
|
| | |
|
| | | scanIndexDgnElement(convertContext);
|
| | |
|
| | | convertContext.commitTransaction();
|
| | | convertContext.closeFeatureWriter();
|
| | |
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | } catch (FileNotFoundException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (Dgn7fileException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IllegalAttributeException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (SchemaException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } finally {
|
| | | convertContext.closeFeatureWriter();
|
| | |
|
| | | if (reader != null) {
|
| | | try {
|
| | | reader.close();
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | if (fs != null) {
|
| | | try {
|
| | | fs.close();
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | if (isProfileMode()) {
|
| | | logger.warn("Profile-Current convertContext Process Cost-" +
|
| | | ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current convertContext Update Cost-" +
|
| | | ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec");
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | protected void scanIndexDgnElement(IndexDgnConvertEdbGeoJobContext convertContext)
|
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException {
|
| | | Dgn7fileReader reader = convertContext.getReader();
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | |
|
| | | while (reader.hasNext()) {
|
| | | if (isProfileMode()) markProcessTime();
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | |
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | processIndexElement(lastComplex, convertContext);
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | processIndexElement(element, convertContext);
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | processIndexElement(lastComplex, convertContext);
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | }
|
| | | count++;
|
| | | }
|
| | |
|
| | | if (lastComplex != null) {
|
| | | processIndexElement(lastComplex, convertContext);
|
| | | }
|
| | | logger.debug("ElementRecord Count=" + count);
|
| | | }
|
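The scan loop above is a small state machine over the element stream: a standalone element flushes any pending complex element and is processed directly; a component element is appended to the pending complex element; a new complex header flushes the previous one and becomes pending. scanOtherDgnElement and scanFeatureDgnElement below repeat the identical pattern. Condensed (flush steps are written out; process is a placeholder name for the processIndexElement call):

    // Condensed accumulation pattern shared by all three scan methods.
    if (!type.isComplexElement() && !element.isComponentElement()) {
        if (lastComplex != null) { process(lastComplex); lastComplex = null; }
        process(element);
    } else if (element.isComponentElement()) {
        if (lastComplex != null) ((ComplexElement) lastComplex).add(element);
    } else {                                   // a new complex element begins
        if (lastComplex != null) process(lastComplex);
        lastComplex = element;
    }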
| | |
|
| | | private void processIndexElement(Element element, IndexDgnConvertEdbGeoJobContext convertContext)
|
| | | throws IllegalAttributeException, SchemaException {
|
| | | //if (useTpclidText) {
|
| | | // if (element instanceof TextElement) {
|
| | | // convertContext.putFeatureCollection(element);
|
| | | // }
|
| | | //} else {
|
| | | // if (element instanceof ShapeElement) {
|
| | | convertContext.putFeatureCollection(element);
|
| | | // }
|
| | | //}
|
| | | }
|
| | |
|
| | |
|
| | | /**
|
| | | * Executes the job of converting other design files.
|
| | | *
|
| | | * @param context jobContext
|
| | | * @throws org.quartz.JobExecutionException
|
| | | * exception
|
| | | */
|
| | | private void convertOtherDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException {
|
| | | File otherDir = new File(getDataPath(), OTHERPATHNAME);
|
| | | if (!otherDir.exists()) {
|
| | | logger.info("other dir=" + otherDir + " not exist.");
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!otherDir.isDirectory()) {
|
| | | logger.info("other dir=" + otherDir + " is not a directory.");
|
| | | }
|
| | |
|
| | | List<File> dgnFiles = FileUtils.recurseDir(otherDir, new FileFilter() {
|
| | | public boolean accept(File pathname) {
|
| | | return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn");
|
| | | }
|
| | | });
|
| | |
|
| | | for (File dgnFile : dgnFiles) {
|
| | | if (dgnFile.isDirectory()) continue;
|
| | |
|
| | | GeneralDgnConvertEdbGeoJobContext convertContext =
|
| | | new GeneralDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName,
|
| | | isProfileMode(), isTransformed());
|
| | | logger.info("--- start other dgnfile-" + dgnFile.toString() + " ---");
|
| | | FileInputStream fs = null;
|
| | | FileChannel fc;
|
| | | Dgn7fileReader reader = null;
|
| | | try {
|
| | | convertContext.setExecutionContext(context);
|
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator);
|
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]);
|
| | | convertContext.startTransaction();
|
| | |
|
| | | fs = new FileInputStream(dgnFile);
|
| | | fc = fs.getChannel();
|
| | | reader = new Dgn7fileReader(fc, new Lock());
|
| | | convertContext.setReader(reader);
|
| | |
|
| | | scanOtherDgnElement(convertContext);
|
| | |
|
| | | convertContext.commitTransaction();
|
| | | convertContext.closeFeatureWriter();
|
| | |
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | } catch (FileNotFoundException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (Dgn7fileException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IllegalAttributeException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (SchemaException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } finally {
|
| | | convertContext.closeFeatureWriter();
|
| | |
|
| | | if (reader != null) {
|
| | | try {
|
| | | reader.close();
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | if (fs != null) {
|
| | | try {
|
| | | fs.close();
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | if (isProfileMode()) {
|
| | | logger.warn("Profile-Current convertContext Process Cost-" +
|
| | | ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current convertContext Update Cost-" +
|
| | | ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec");
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | public void scanOtherDgnElement(GeneralDgnConvertEdbGeoJobContext convertContext)
|
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException {
|
| | | Dgn7fileReader reader = convertContext.getReader();
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | | while (reader.hasNext()) {
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | |
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | processOtherElement(lastComplex, convertContext);
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | processOtherElement(element, convertContext);
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | processOtherElement(lastComplex, convertContext);
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | }
|
| | | count++;
|
| | | }
|
| | |
|
| | | if (lastComplex != null) {
|
| | | processOtherElement(lastComplex, convertContext);
|
| | | }
|
| | | logger.debug("ElementRecord Count=" + count);
|
| | | }
|
| | |
|
| | | private void processOtherElement(Element element, GeneralDgnConvertEdbGeoJobContext convertContext)
|
| | | throws IllegalAttributeException, SchemaException {
|
| | | convertContext.putFeatureCollection(element);
|
| | | }
|
| | |
|
| | | private void clearOutputDatabase() {
|
| | | /*
|
| | | File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH);
|
| | | if (outDataPath.exists() && outDataPath.isDirectory())
|
| | | {
|
| | | deleteFilesInPath(outDataPath);
|
| | | }
|
| | | outDataPath = new File(getDataPath(), IndexDgnConvertShpJobContext.SHPOUTPATH);
|
| | | if (outDataPath.exists() && outDataPath.isDirectory())
|
| | | {
|
| | | deleteFilesInPath(outDataPath);
|
| | | }
|
| | | outDataPath = new File(getDataPath(), GeneralDgnConvertShpJobContext.SHPOUTPATH);
|
| | | if (outDataPath.exists() && outDataPath.isDirectory())
|
| | | {
|
| | | deleteFilesInPath(outDataPath);
|
| | | }
|
| | | */
|
| | | }
|
| | |
|
| | | private void deleteFilesInPath(File outDataPath) {
|
| | | deleteFilesInPath(outDataPath, true);
|
| | | }
|
| | |
|
| | | private void deleteFilesInPath(File outDataPath, boolean removeSubDir) {
|
| | | if (!outDataPath.isDirectory()) {
|
| | | return;
|
| | | }
|
| | | File[] files = outDataPath.listFiles();
|
| | | for (File file : files) {
|
| | | if (file.isFile()) {
|
| | | if (!file.delete()) {
|
| | | logger.info("Cannot delete file-" + file.toString());
|
| | | }
|
| | | } else if (file.isDirectory()) {
|
| | | deleteFilesInPath(file, removeSubDir);
|
| | | if (removeSubDir) {
|
| | | if (!file.delete()) {
|
| | | logger.info("Cannot delete dir-" + file.toString());
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | private void convertFeatureDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException {
|
| | | File elminDir = new File(getDataPath(), "elmin");
|
| | | if (!elminDir.exists()) {
|
| | | logger.info("elmin dir=" + elminDir + " not exist.");
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!elminDir.isDirectory()) {
|
| | | logger.info("elmin dir=" + elminDir + " is not a directory.");
|
| | | }
|
| | |
|
| | | File[] dgnFiles = elminDir.listFiles(new FilenameFilter() {
|
| | | public boolean accept(File dir, String name) {
|
| | | return name.toLowerCase().endsWith(".dgn");
|
| | | }
|
| | | });
|
| | |
|
| | | for (File dgnFile : dgnFiles) {
|
| | | FeatureDgnConvertEdbGeoJobContext convertContext =
|
| | | new FeatureDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, _filterPath,
|
| | | isProfileMode(), isTransformed());
|
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
|
| | | try {
|
| | | convertContext.setExecutionContext(context);
|
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator);
|
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]);
|
| | | convertContext.startTransaction();
|
| | |
|
| | | FileInputStream fs = new FileInputStream(dgnFile);
|
| | | FileChannel fc = fs.getChannel();
|
| | | Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock());
|
| | | convertContext.setReader(reader);
|
| | |
|
| | | scanFeatureDgnElement(convertContext);
|
| | |
|
| | | convertContext.commitTransaction();
|
| | | convertContext.closeFeatureWriter();
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | } catch (FileNotFoundException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (Dgn7fileException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IllegalAttributeException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (SchemaException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } finally {
|
| | | convertContext.closeFeatureWriter();
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | public void scanFeatureDgnElement(FeatureDgnConvertEdbGeoJobContext convertContext)
|
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException {
|
| | | Dgn7fileReader reader = convertContext.getReader();
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | | while (reader.hasNext()) {
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | |
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | processFeatureElement(lastComplex, convertContext);
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | processFeatureElement(element, convertContext);
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | processFeatureElement(lastComplex, convertContext);
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | }
|
| | | count++;
|
| | | }
|
| | |
|
| | | if (lastComplex != null) {
|
| | | processFeatureElement(lastComplex, convertContext);
|
| | | }
|
| | | logger.debug("ElementRecord Count=" + count);
|
| | | }
|
| | |
|
| | | private void processFeatureElement(Element element, FeatureDgnConvertEdbGeoJobContext convertContext)
|
| | | throws IllegalAttributeException, SchemaException {
|
| | | convertContext.putFeatureCollection(element);
|
| | | }
|
| | |
|
| | | private void createDummyFeatureFile(JobExecutionContext context) throws JobExecutionException {
|
| | | /*
|
| | | DummyFeatureConvertShpJobContext convertContext = new DummyFeatureConvertShpJobContext(getDataPath(), _filterPath);
|
| | | try {
|
| | | convertContext.startTransaction();
|
| | | convertContext.commitTransaction();
|
| | | convertContext.closeFeatureWriter();
|
| | | } catch (IOException e)
|
| | | {
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | }
|
| | | */
|
| | | }
|
| | |
|
| | | public DataStore getTargetDataStore() {
|
| | | return targetDataStore;
|
| | | }
|
| | |
|
| | | protected void createTargetDataStore() throws JobExecutionException {
|
| | | if (targetDataStore != null) {
|
| | | targetDataStore.dispose();
|
| | | targetDataStore = null;
|
| | | }
|
| | |
|
| | | /*
|
| | | if (!isDriverFound())
|
| | | {
|
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER);
|
| | | }
|
| | | */
|
| | |
|
| | | if (!edbProperties.containsKey(PostgisDataStoreFactory.MAXCONN.key)) {
|
| | | edbProperties.put(PostgisDataStoreFactory.MAXCONN.key, "5");
|
| | | }
|
| | |
|
| | | if (!edbProperties.containsKey(PostgisDataStoreFactory.MINCONN.key)) {
|
| | | edbProperties.put(PostgisDataStoreFactory.MINCONN.key, "1");
|
| | | }
|
| | |
|
| | | if (!edbProperties.containsKey(PostgisDataStoreFactory.WKBENABLED.key)) {
|
| | | edbProperties.put(PostgisDataStoreFactory.WKBENABLED.key, "true");
|
| | | }
|
| | |
|
| | | if (!dataStoreFactory.canProcess(edbProperties)) {
|
| | | getLogger().warn("cannot process properties-");
|
| | | throw new JobExecutionException("cannot process properties-");
|
| | | }
|
| | | try {
|
| | | targetDataStore = (JDBCDataStore) dataStoreFactory.createDataStore(edbProperties);
|
| | | } catch (IOException e) {
|
| | | getLogger().warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | }
|
| | | }
|
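After the defaults are injected, the parameter map handed to PostgisDataStoreFactory.createDataStore() is effectively the following (connection values come from the job configuration; only the last three entries are the defaults added here):

    // Effective datastore parameters once createTargetDataStore() has run.
    Map<String, String> params = new TreeMap<String, String>();
    params.put("dbtype", "edbgeo");
    params.put("charset", "UTF-8");
    params.put("host", _edbHost);
    params.put("port", _edbPort);
    params.put("database", _edbDatabase);
    params.put("user", _edbUsername);
    params.put("passwd", _edbPassword);
    params.put(PostgisDataStoreFactory.MAXCONN.key, "5");       // default when not configured
    params.put(PostgisDataStoreFactory.MINCONN.key, "1");       // default when not configured
    params.put(PostgisDataStoreFactory.WKBENABLED.key, "true"); // default when not configured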
| | |
|
| | | protected void disconnect() {
|
| | | super.disconnect();
|
| | | if (targetDataStore != null) {
|
| | | targetDataStore.dispose();
|
| | | targetDataStore = null;
|
| | | }
|
| | | }
|
| | |
|
| | | private String determineTargetSchemaName() throws IOException {
|
| | | if (targetDataStore == null) return null;
|
| | | Connection connection = null;
|
| | | Statement stmt = null;
|
| | | ResultSet rs = null;
|
| | | String targetSchema = null;
|
| | | boolean needCreate = false;
|
| | | try {
|
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT);
|
| | | rs = connection.getMetaData().getTables(null, _edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"});
|
| | | if (!rs.next()) needCreate = true;
|
| | | rs.close();
|
| | | rs = null;
|
| | |
|
| | | stmt = connection.createStatement();
|
| | | stmt.execute("SET edb_redwood_date TO OFF");
|
| | | stmt.execute("SET edb_redwood_strings TO OFF");
|
| | | // stmt.execute("SET edb_stmt_level_tx TO OFF");
|
| | | stmt.close();
|
| | |
|
| | | if (needCreate)
|
| | | createXGeosVersionTable(connection, _edbSchema);
|
| | |
|
| | | StringBuilder sbSQL = new StringBuilder("SELECT ");
|
| | | sbSQL.append("vsschema, vsstatus FROM ");
|
| | | sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' ');
|
| | | sbSQL.append("ORDER BY vsid");
|
| | | stmt = connection.createStatement();
|
| | | rs = stmt.executeQuery(sbSQL.toString());
|
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>();
|
| | | int i = 0;
|
| | | int current = -1;
|
| | | while (rs.next()) {
|
| | | Object[] values = new Object[2];
|
| | | values[0] = rs.getString("vsschema");
|
| | | values[1] = rs.getShort("vsstatus");
|
| | | tmpSchemas.add(values);
|
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) {
|
| | | current = i;
|
| | | }
|
| | | i++;
|
| | | }
|
| | |
|
| | | if (current == -1) {
|
| | | Object[] values = tmpSchemas.get(0);
|
| | | targetSchema = (String) values[0];
|
| | | } else if (current < (tmpSchemas.size() - 1)) {
|
| | | Object[] values = tmpSchemas.get(current + 1);
|
| | | targetSchema = (String) values[0];
|
| | | } else {
|
| | | Object[] values = tmpSchemas.get(0);
|
| | | targetSchema = (String) values[0];
|
| | | }
|
| | |
|
| | | sbSQL = new StringBuilder("UPDATE ");
|
| | | sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' ');
|
| | | sbSQL.append(" SET vsstatus = ");
|
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT);
|
| | | sbSQL.append(" WHERE vsschema = '");
|
| | | sbSQL.append(targetSchema).append("'");
|
| | | int count = stmt.executeUpdate(sbSQL.toString());
|
| | | if (count != 1) {
|
| | | logger.info("update status for " + targetSchema + " update result count="
|
| | | + count);
|
| | | }
|
| | | } catch (SQLException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | } finally {
|
| | | JDBCUtils.close(rs);
|
| | | JDBCUtils.close(stmt);
|
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null);
|
| | | }
|
| | | return targetSchema;
|
| | | }
|
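determineTargetSchemaName() rotates through the rows of the version table in vsid order: it picks the schema after the one flagged VSSTATUS_USING, wrapping to the first row, and falls back to the first row when none is in use. The branch logic reduces to:

    // Round-robin schema selection (equivalent to the three branches above).
    int next = (current == -1 || current == tmpSchemas.size() - 1)
            ? 0             // nothing in use, or the last row is in use: wrap around
            : current + 1;  // otherwise advance to the next schema
    String targetSchema = (String) tmpSchemas.get(next)[0];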
| | |
|
| | | public String encodeSchemaTableName(String schemaName, String tableName) {
|
| | | return "\"" + schemaName + "\".\"" + tableName + "\"";
|
| | | }
|
| | |
|
| | | private void createXGeosVersionTable(Connection connection, String pgSchema) throws SQLException {
|
| | | Statement stmt = null;
|
| | | StringBuilder sql = new StringBuilder("CREATE TABLE ");
|
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME));
|
| | | sql.append(" ( vsid serial PRIMARY KEY, ");
|
| | | sql.append(" vsschema character varying(64) NOT NULL, ");
|
| | | sql.append(" vsstatus smallint NOT NULL, ");
|
| | | sql.append(" vstimestamp timestamp with time zone ) ");
|
| | | try {
|
| | | stmt = connection.createStatement();
|
| | | stmt.executeUpdate(sql.toString());
|
| | |
|
| | | sql = new StringBuilder("ALTER TABLE ");
|
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME));
|
| | | sql.append(" OWNER TO ").append(_edbUsername);
|
| | | stmt.executeUpdate(sql.toString());
|
| | |
|
| | | sql = new StringBuilder("GRANT ALL ON TABLE ");
|
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME));
|
| | | sql.append(" TO public");
|
| | | stmt.executeUpdate(sql.toString());
|
| | |
|
| | | for (String schemaName : DataReposVersionManager.DEFAULTXGVERSIONSCHEMA_NAMES) {
|
| | | sql = new StringBuilder("INSERT INTO ");
|
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME));
|
| | | sql.append(" (vsschema, vsstatus) VALUES ('");
|
| | | sql.append(schemaName).append("', ");
|
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )");
|
| | | stmt.executeUpdate(sql.toString());
|
| | |
|
| | | createIfNotExistNewSchema(connection, schemaName);
|
| | | }
|
| | |
|
| | | } finally {
|
| | | if (stmt != null) stmt.close();
|
| | | }
|
| | | }
|
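For a schema named public, the first statement assembled by createXGeosVersionTable() executes as the DDL below; <XGVERSIONTABLE_NAME> stands for the DataReposVersionManager constant, whose value is not shown in this file:

    CREATE TABLE "public"."<XGVERSIONTABLE_NAME>" (
        vsid serial PRIMARY KEY,
        vsschema character varying(64) NOT NULL,
        vsstatus smallint NOT NULL,
        vstimestamp timestamp with time zone )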
| | |
|
| | | private void updateRepoStatusToReady(String targetSchema) {
|
| | | if (targetDataStore == null) return;
|
| | | Connection connection = null;
|
| | | Statement stmt = null;
|
| | | ResultSet rs = null;
|
| | | boolean needCreate = false;
|
| | | try {
|
| | | StringBuilder sbSQL = new StringBuilder("UPDATE ");
|
| | | sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' ');
|
| | | sbSQL.append(" SET vsstatus = ");
|
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY);
|
| | | sbSQL.append(" , vstimestamp = CURRENT_TIMESTAMP WHERE vsschema = '");
|
| | | sbSQL.append(targetSchema).append("'");
|
| | |
|
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT);
|
| | | stmt = connection.createStatement();
|
| | | int count = stmt.executeUpdate(sbSQL.toString());
|
| | | if (count != 1) {
|
| | | logger.info("update status for " + targetSchema + " update result count="
|
| | | + count);
|
| | | }
|
| | | } catch (SQLException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | } finally {
|
| | | JDBCUtils.close(rs);
|
| | | JDBCUtils.close(stmt);
|
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null);
|
| | | }
|
| | | }
|
| | |
|
| | | private void createIfNotExistNewSchema(Connection connection, String s) throws SQLException {
|
| | | Statement stmt = null;
|
| | | ResultSet rs = null;
|
| | | try {
|
| | | /*
|
| | | rs = connection.getMetaData().getSchemas(null, s);
|
| | | if (rs.next()) return;
|
| | | rs.close();
|
| | | rs = null;
|
| | | */
|
| | |
|
| | | StringBuilder sbSQL = new StringBuilder("CREATE SCHEMA ");
|
| | | sbSQL.append(s).append(' ');
|
| | | sbSQL.append("AUTHORIZATION ").append(_edbUsername);
|
| | | stmt = connection.createStatement();
|
| | | stmt.executeUpdate(sbSQL.toString());
|
| | |
|
| | | sbSQL = new StringBuilder("GRANT ALL ON SCHEMA ");
|
| | | sbSQL.append(s).append(' ');
|
| | | sbSQL.append("TO public");
|
| | | stmt.executeUpdate(sbSQL.toString());
|
| | | } catch (SQLException e) {
|
| | | logger.info("create schema:" + s + " has exception.");
|
| | | logger.info(e.getMessage(), e);
|
| | | } finally {
|
| | | if (rs != null) rs.close();
|
| | | if (stmt != null) stmt.close();
|
| | | }
|
| | | }
|
| | |
|
| | | public final void accumulateQueryTime() {
|
| | | queryTime += System.currentTimeMillis() - queryTimeStart;
|
| | | }
|
| | |
|
| | | public long getQueryTime() {
|
| | | return queryTime;
|
| | | }
|
| | |
|
| | | public final void markQueryTime() {
|
| | | queryTimeStart = System.currentTimeMillis();
|
| | | }
|
| | |
|
| | | public final void resetQueryTime() {
|
| | | queryTime = 0;
|
| | | }
|
| | | }
|
| | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.mysql.MySQLDataStore; |
| | | import org.geotools.data.mysql.MySQLDataStoreFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import oracle.jdbc.OracleConnection; |
| | | import oracle.jdbc.OracleResultSet; |
| | |
| | | |
| | | protected static MySQLDataStoreFactory dataStoreFactory = new MySQLDataStoreFactory(); |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | protected String _myHost; |
| | | protected String _myDatabase; |
| | | protected String _myPort; |
| | |
| | | protected String _myUseWKB; |
| | | |
| | | protected Map<String, String> myProperties; |
| | | protected MySQLDataStore targetDataStore; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | boolean useTransform) { |
| | | return new OracleConvertMySQLJobContext(getDataPath(), getTargetDataStore(), filterPath, profileMode, |
| | | useTransform, useEPSG3826); |
| | | useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getName(); |
| | | String jobName = jobDetail.getKey().getName(); |
| | | String targetSchemaName = null; |
| | | |
| | | // Log the time the job started |
| | |
| | | for (String orgSchema : _orgSchema) { |
| | | OracleConvertMySQLJobContext jobContext = |
| | | (OracleConvertMySQLJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed(), isEPSG3826()); |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | // jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (bFirst) |
| | | copyConnectivity(jobContext); |
| | |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Copies a version of Connectivity; used when querying current direction to compare against the electrical connectivity (Connectivity) in the OMS database. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void copyConnectivity(OracleConvertMySQLJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | } |
| | | |
| | | private void executeConvert(OracleConvertMySQLJobContext jobContext, |
| | |
| | | } |
| | | |
| | | /** |
| | | * Executes the job of converting index design files. |
| | | * |
| | | * @param context the job execution context |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | IndexDgnConvertMySQLJobContext convertContext = |
| | | new IndexDgnConvertMySQLJobContext(getDataPath(), getTargetDataStore(), isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | |
| | | /** |
| | | * Executes the job of converting other design files. |
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | GeneralDgnConvertMySQLJobContext convertContext = |
| | | new GeneralDgnConvertMySQLJobContext(getDataPath(), getTargetDataStore(), isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | private void clearOutputDatabase() { |
| | | /* |
| | | File outDataPath = new File(getDataPath(), OracleConvertPostGISJobContext.SHPOUTPATH); |
| | | File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertMySQLJobContext convertContext = |
| | | new FeatureDgnConvertMySQLJobContext(getDataPath(), getTargetDataStore(), _filterPath, isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | } |
| | | */ |
| | | |
| | | if (!myProperties.containsKey("max connections" /*MySQLDataStoreFactory.MAXCONN.key */)) { |
| | | myProperties.put("max connections", "2"); |
| | | if (!myProperties.containsKey(MySQLDataStoreFactory.MAXCONN.key)) { |
| | | myProperties.put(MySQLDataStoreFactory.MAXCONN.key, "2"); |
| | | } |
| | | |
| | | if (!myProperties.containsKey("min connections" /* MySQLDataStoreFactory.MINCONN.key */)) { |
| | | myProperties.put("min connections", "1"); |
| | | if (!myProperties.containsKey(MySQLDataStoreFactory.MINCONN.key)) { |
| | | myProperties.put(MySQLDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | | if (!myProperties.containsKey(MySQLDataStoreFactory.WKBENABLED.key)) { |
| | | myProperties.put(MySQLDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(myProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(myProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.oracle.OracleNGDataStoreFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | |
| | | } |
| | | } |
| | | |
| | | protected static OracleNGDataStoreFactory dataStoreFactory = new OracleNGDataStoreFactory(); |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | protected String _sdoHost; |
| | | protected String _sdoDatabase; |
| | | protected String _sdoPort; |
| | |
| | | protected String _sdoUseWKB; |
| | | |
| | | protected Map<String, String> sdoProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), filterPath, profileMode, |
| | | useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put("dbtype", "OraSDO"); |
| | | remote.put("charset", "UTF-8"); |
| | | remote.put("host", _sdoHost); |
| | | remote.put("port", _sdoPort); |
| | | remote.put("database", _sdoDatabase); |
| | | remote.put("user", _sdoUsername); |
| | | remote.put("passwd", _sdoPassword); |
| | | remote.put("namespace", null); |
| | | remote.put(OracleNGDataStoreFactory.DBTYPE.key, "oracle"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(OracleNGDataStoreFactory.HOST.key, _sdoHost); |
| | | remote.put(OracleNGDataStoreFactory.PORT.key, _sdoPort); |
| | | remote.put(OracleNGDataStoreFactory.DATABASE.key, _sdoDatabase); |
| | | remote.put(OracleNGDataStoreFactory.USER.key, _sdoUsername); |
| | | remote.put(OracleNGDataStoreFactory.PASSWD.key, _sdoPassword); |
| | | // remote.put("namespace", null); |
| | | sdoProperties = remote; |
| | | } |
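| | | // Illustrative sketch, not from the original changeset: how a parameter map |
| | | // shaped like sdoProperties is resolved into a DataStore through GeoTools' |
| | | // SPI. DataStoreFinder returns the first factory whose canProcess() accepts |
| | | // the map, here the OracleNGDataStoreFactory. Assumes an additional import |
| | | // of org.geotools.data.DataStoreFinder; the method name is hypothetical. |
| | | protected DataStore lookupSdoDataStore() throws IOException { |
| | | DataStore store = DataStoreFinder.getDataStore(sdoProperties); |
| | | if (store == null) { |
| | | throw new IOException("no DataStore factory accepted sdoProperties"); |
| | | } |
| | | return store; |
| | | } |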
| | | |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | String targetSchemaName = null; |
| | | |
| | | // Log the time the job started |
| | |
| | | for (String orgSchema : _orgSchema) { |
| | | OracleConvertOraSDOJobContext jobContext = |
| | | (OracleConvertOraSDOJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | // jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (bFirst) |
| | | copyConnectivity(jobContext); |
| | |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Makes a copy of the Connectivity table; when querying current-flow direction it is used to cross-check the electrical connectivity (Connectivity) in the OMS database. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void copyConnectivity(OracleConvertOraSDOJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | JDBCUtils.close(stmt); |
| | | } |
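| | | // Note (illustrative): TRUNCATE_CONNECTIVITY_WEBCHECK empties the snapshot |
| | | // table and COPY_CONNECTIVITY_TO_WEBCHECK refills it, so later flow-direction |
| | | // queries compare against a point-in-time copy instead of the live table. |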
| | | |
| | | private void exetcuteConvert(OracleConvertOraSDOJobContext jobContext, |
| | |
| | | } |
| | | |
| | | /** |
| | | * Performs the job of converting the index design files. |
| | | * |
| | | * @param context the job execution context |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | IndexDgnConvertOraSDOJobContext convertContext = |
| | | new IndexDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), isProfileMode(), |
| | | isTransformed()); |
| | | logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | |
| | | /** |
| | | * Performs the job of converting the other design files. |
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | GeneralDgnConvertOraSDOJobContext convertContext = |
| | | new GeneralDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), isProfileMode(), |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertOraSDOJobContext convertContext = |
| | | new FeatureDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), _filterPath, isProfileMode(), |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | } |
| | | */ |
| | | |
| | | if (!sdoProperties.containsKey(OracleNGDataStoreFactory.MAXCONN.key)) { |
| | | sdoProperties.put(OracleNGDataStoreFactory.MAXCONN.key, "2"); |
| | | } |
| | | |
| | | if (!sdoProperties.containsKey(OracleNGDataStoreFactory.MINCONN.key)) { |
| | | sdoProperties.put(OracleNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(sdoProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | |
| | | import java.io.FileNotFoundException; |
| | | import java.io.FilenameFilter; |
| | | import java.io.IOException; |
| | | import java.io.PushbackReader; |
| | | import java.io.StringReader; |
| | | import java.math.BigDecimal; |
| | | import java.nio.BufferOverflowException; |
| | | import java.nio.ByteBuffer; |
| | | import java.nio.ByteOrder; |
| | | import java.nio.channels.FileChannel; |
| | | import java.sql.Connection; |
| | | import java.sql.PreparedStatement; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import com.ximple.eofms.util.*; |
| | | import org.apache.commons.collections.OrderedMap; |
| | | import org.apache.commons.collections.OrderedMapIterator; |
| | | import org.apache.commons.collections.map.LinkedMap; |
| | | import org.apache.commons.dbcp.DelegatingConnection; |
| | | import org.apache.commons.dbcp.PoolingConnection; |
| | | import org.apache.commons.dbcp.PoolingDataSource; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.postgresql.PGConnection; |
| | | import org.postgresql.copy.CopyManager; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import oracle.jdbc.OracleConnection; |
| | | import oracle.jdbc.OracleResultSet; |
| | |
| | | import com.ximple.eofms.jobs.context.postgis.GeneralDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.IndexDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.BinConverter; |
| | | import com.ximple.eofms.util.ByteArrayCompressor; |
| | | import com.ximple.eofms.util.FileUtils; |
| | | import com.ximple.eofms.util.StringUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Dgn7fileException; |
| | | import com.ximple.io.dgn7.Dgn7fileReader; |
| | |
| | | import com.ximple.io.dgn7.ElementType; |
| | | import com.ximple.io.dgn7.IElementHandler; |
| | | import com.ximple.io.dgn7.Lock; |
| | | import com.ximple.io.dgn7.ShapeElement; |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.util.PrintfFormat; |
| | | |
| | | public class OracleConvertDgn2PostGISJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleConvertDgn2PostGISJob.class); |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | |
| | | private static final int COMMITSIZE = 100; |
| | | private static final String INDEXPATHNAME = "index"; |
| | | private static final String OTHERPATHNAME = "other"; |
| | | public static final String FORWARDFLOW_MARK = "shape://ccarrow"; |
| | | public static final String BACKFLOW_MARK = "shape://rccarrow"; |
| | | public static final String UNFLOW_MARK = "shape://backslash"; |
| | | public static final String NONFLOW_MARK = "shape://slash"; |
| | | |
| | | private static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1, DIR FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | private static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR WHERE TAG_BCOMPID = 0 ORDER BY TAG_SFSC"; |
| | | |
| | | private static String CREATE_OWNERTABLE = "CREATE TABLE s (tid smallint not null, oid int not null, owner smallint not null)"; |
| | | private static String CREATE_COLORTABLE = "CREATE TABLE s (tid smallint not null, oid int not null, dyncolor varchar(10) not null)"; |
| | | |
| | | public static final String FDYNCOLOR_SUFFIX = "_fdyncolor"; |
| | | public static final String FOWNER_SUFFIX = "_fowner"; |
| | | |
| | | protected static class Pair { |
| | | Object first; |
| | |
| | | } |
| | | } |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | // protected OracleConvertEdbGeoJobContext oracleJobContext; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("PGHOST=" + _myHost); |
| | | logger.info("PGDDATBASE=" + _myDatabase); |
| | | logger.info("PGDATBASE=" + _myDatabase); |
| | | logger.info("PGPORT=" + _myPort); |
| | | logger.info("PGSCHEMA=" + _mySchema); |
| | | logger.info("PGUSER=" + _myUsername); |
| | |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put("dbtype", "postgis"); |
| | | remote.put("charset", "UTF-8"); |
| | | remote.put("host", _pgHost); |
| | | remote.put("port", _pgPort); |
| | | remote.put("database", _pgDatabase); |
| | | remote.put("user", _pgUsername); |
| | | remote.put("passwd", _pgPassword); |
| | | remote.put("namespace", null); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
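| | | // Illustrative sketch, not from the original changeset: the direct-factory |
| | | // path that createTargetDataStore() takes further down. Validate the |
| | | // parameter map with canProcess(), then ask the PostGIS NG factory for a |
| | | // JDBCDataStore. The method name is hypothetical. |
| | | protected JDBCDataStore connectTargetStore() throws IOException { |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | throw new IOException("cannot process pgProperties"); |
| | | } |
| | | return dataStoreFactory.createDataStore(pgProperties); |
| | | } |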
| | | |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | if (isIgnoreDBETL()) { |
| | | return; |
| | | } |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) { |
| | |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName, targetThemeTable; |
| | | try { |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | clearOutputDatabase(); |
| | | targetSchemaName = determineTargetSchemaName(); |
| | | targetThemeTable = determineTargetThemeTableName(); |
| | | |
| | | if (checkConvertFile()) { |
| | | logger.info("-- step:convertIndexDesignFile --"); |
| | |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-convertOtherDesignFile", tStep, tStepEnd); |
| | | } |
| | | |
| | | } |
| | | |
| | | OracleConvertPostGISJobContext jobContext = null; |
| | | if (checkConvertDB()) { |
| | | logger.info("-- step:convertOracleDB --"); |
| | | |
| | | jobContext = (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | // jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | createHibernateSequence(jobContext); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (isCopyConnectivityMode()) { |
| | | copyConnectivity(jobContext); |
| | | } |
| | | |
| | |
| | | } |
| | | } |
| | | |
| | | clearExchangeData(jobContext); |
| | | jobContext.closeOracleConnection(); |
| | | } |
| | | |
| | |
| | | createDummyFeatureFile(context); |
| | | } |
| | | |
| | | if (checkConvertPWThemes()) { |
| | | jobContext = (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | if (!convertPowerOwnerThemeWithCopyAPI(jobContext, targetThemeTable)) { |
| | | convertPowerOwnerTheme(jobContext, targetThemeTable); |
| | | } |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd); |
| | | } |
| | | tStep = System.currentTimeMillis(); |
| | | if (!convertDynamicColorThemeWithCopyAPI(jobContext, targetThemeTable)) |
| | | convertDynamicColorTheme(jobContext, targetThemeTable); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd); |
| | | } |
| | | jobContext.closeOracleConnection(); |
| | | } |
| | | |
| | | updatePWThemeStatusToReady(targetThemeTable); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; |
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | updateRepoStatusToReady(targetSchemaName); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | /** |
| | | * Makes a copy of the Connectivity table; when querying current-flow direction it is used to cross-check the electrical connectivity (Connectivity) in the OMS database. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void clearExchangeData(OracleConvertPostGISJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | ResultSet rsMeta = connection.getMetaData().getTables(null, "CMMS_POSTDB", "GEO_EXCHANGE", |
| | | new String[]{"TABLE"}); |
| | | |
| | | boolean found = false; |
| | | try { |
| | | while (rsMeta.next()) { |
| | | String tablename = rsMeta.getString(3); |
| | | found = true; |
| | | break; |
| | | } |
| | | // } catch (SQLException e) |
| | | } finally { |
| | |
| | | rsMeta = null; |
| | | } |
| | | } |
| | | if (!found) { |
| | | logger.info("Cannot find GEO_EXCHANGE in CMMS_POSTDB."); |
| | | return; |
| | | } |
| | | |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate("UPDATE \"CMMS_POSTDB\".\"GEO_EXCHANGE\" SET ISEXCHANGE=1 WHERE ISEXCHANGE=0"); |
| | | logger.info("GEO_EXCHANGE UPDATE SIZE=" + count); |
| | | } finally { |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | |
| | | private void createHibernateSequence(OracleConvertPostGISJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_HIBERNATE_SEQUENCE); |
| | | } catch (SQLException e) { |
| | | logger.warn("HIBERNATE_SEQUENCE already exists."); |
| | | } finally { |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
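| | | // Illustrative sketch, not from the original changeset: Oracle has no CREATE |
| | | // SEQUENCE IF NOT EXISTS, so the catch block above treats the failure as |
| | | // "sequence already exists". A stricter variant checks the vendor error code |
| | | // ORA-00955 ("name is already used by an existing object") instead of |
| | | // swallowing every SQLException. The DDL text below is an assumed example; |
| | | // the real statement lives in CREATE_HIBERNATE_SEQUENCE. |
| | | private static void createSequenceIfAbsent(Connection conn) throws SQLException { |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = conn.createStatement(); |
| | | stmt.execute("CREATE SEQUENCE HIBERNATE_SEQUENCE START WITH 1 INCREMENT BY 1"); |
| | | } catch (SQLException e) { |
| | | if (e.getErrorCode() != 955) { |
| | | throw e; // rethrow anything other than "object already exists" |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |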
| | | |
| | |
| | | String querySchema, String targetSchemaName) throws SQLException { |
| | | int order = 0; |
| | | OrderedMap map = getBlobStorageList(jobContext.getOracleConnection(), |
| | | querySchema, "SD$SPACENODES", null); |
| | | querySchema, "SD$SPACENODES", null); |
| | | |
| | | logger.info("begin convert job:[" + map.size() + "]:testmode=" + _testMode); |
| | | |
| | |
| | | int step = total / 100; |
| | | int current = 0; |
| | | |
| | | if (total == 0) { |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is zero."); |
| | | return; |
| | | } |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size() ); |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size()); |
| | | |
| | | //jobContext.startTransaction(); |
| | | jobContext.setCurrentSchema(querySchema); |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", 0); |
| | | for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext(); ) { |
| | | it.next(); |
| | | |
| | | Pair pair = (Pair) it.getValue(); |
| | |
| | | |
| | | logger.info("begin convert:[" + order + "]-" + tableSrc); |
| | | queryIgsetElement(jobContext, querySchema, tableSrc); |
| | | |
| | | |
| | | order++; |
| | | |
| | |
| | | System.runFinalization(); |
| | | } |
| | | |
| | | if (step != 0) { |
| | | int now = order % step; |
| | | if (now != current) { |
| | | current = now; |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", current); |
| | | |
| | | } |
| | | } else { |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", current); |
| | | current++; |
| | | } |
| | |
| | | } |
| | | |
| | | /** |
| | | * Performs the job of converting the index design files. |
| | | * |
| | | * @param context the job execution context |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | |
| | | if (dgnFile.isDirectory()) continue; |
| | | IndexDgnConvertPostGISJobContext convertContext = |
| | | new IndexDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start index dgnfile-" + dgnFile.toString() + " ---"); |
| | | FileInputStream fs = null; |
| | | FileChannel fc = null; |
| | |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | if (reader != null) { |
| | | try { |
| | | reader.close(); |
| | | } catch (IOException e) { |
| | |
| | | |
| | | while (reader.hasNext()) { |
| | | if (isProfileMode()) markProcessTime(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | // } |
| | | //} else { |
| | | // if (element instanceof ShapeElement) { |
| | | convertContext.putFeatureCollection(element); |
| | | // } |
| | | //} |
| | | } |
| | | |
| | | |
| | | /** |
| | | * Performs the job of converting the other design files. |
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | |
| | | |
| | | GeneralDgnConvertPostGISJobContext convertContext = |
| | | new GeneralDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start other dgnfile-" + dgnFile.toString() + " ---"); |
| | | FileInputStream fs = null; |
| | | FileChannel fc; |
| | |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | if (reader != null) { |
| | | try { |
| | | reader.close(); |
| | | } catch (IOException e) { |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | private void clearOutputDatabase() { |
| | | /* |
| | | File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertPostGISJobContext convertContext = |
| | | new FeatureDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | } |
| | | */ |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.WKBENABLED.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XGVERSIONTABLE_NAME |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) |
| | | createXGeosVersionTable(connection, _pgSchema); |
| | | rs.close(); |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | |
| | | return targetSchema; |
| | | } |
| | | |
| | | private String determineTargetThemeTableName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetTable = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XPTVERSIONTABLE_NAME |
| | | needCreate = false; |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) |
| | | createXPWThemeVersionTable(connection, _pgSchema); |
| | | rs.close(); |
| | | |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vptname, vptstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vptid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpTablenames = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vptname"); |
| | | values[1] = rs.getShort("vptstatus"); |
| | | tmpTablenames.add(values); |
| | | if (((Short) values[1]) >= DataReposVersionManager.VSSTATUS_LINKVIEW) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current == -1) { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } else if (current < (tmpTablenames.size() - 1)) { |
| | | Object[] values = tmpTablenames.get(current + 1); |
| | | targetTable = (String) values[0]; |
| | | } else { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } |
| | | |
| | | sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT); |
| | | sbSQL.append(" WHERE vptname = '"); |
| | | sbSQL.append(targetTable).append("'"); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetTable + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetTable; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | if (schemaName == null) |
| | | return "\"" + tableName + "\""; |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
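| | | // Usage note (illustrative, hypothetical names): double-quoting keeps |
| | | // mixed-case identifiers intact under PostgreSQL's lower-case folding. |
| | | // encodeSchemaTableName("pgschema1", "xpwtheme1") -> "pgschema1"."xpwtheme1" |
| | | // encodeSchemaTableName(null, "xpwtheme1") -> "xpwtheme1" |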
| | | |
| | |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO spatialdb"); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | createIfNotExistNewSchema(connection, schemaName); |
| | | } |
| | | |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void createXPWThemeVersionTable(Connection connection, String pgSchema) throws SQLException { |
| | | Statement stmt = null; |
| | | StringBuilder sql = new StringBuilder("CREATE TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" ( vptid serial PRIMARY KEY, "); |
| | | sql.append(" vptname character varying(64) NOT NULL, "); |
| | | sql.append(" vptstatus smallint NOT NULL, "); |
| | | sql.append(" vpttimestamp timestamp with time zone ) "); |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" TO public"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | for (String schemaName : DataReposVersionManager.DEFAULTXPTVERSIONTABLE_NAMES) { |
| | | sql = new StringBuilder("INSERT INTO "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" (vptname, vptstatus) VALUES ('"); |
| | | sql.append(schemaName).append("', "); |
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | } |
| | | |
| | | } finally { |
| | |
| | | } |
| | | } |
| | | |
| | | private void updatePWThemeStatusToReady(String targetSchema) { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY); |
| | | sbSQL.append(" , vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
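| | | // Illustrative sketch, not from the original changeset: the same status flip |
| | | // with the values bound as parameters instead of concatenated into the SQL |
| | | // string. Identifiers (schema, table) still cannot be bound, only values; |
| | | // assumes VSSTATUS_READY is an int constant and the method name is |
| | | // hypothetical. |
| | | private void markThemeReady(Connection conn, String targetName) throws SQLException { |
| | | String sql = "UPDATE " |
| | | + encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME) |
| | | + " SET vptstatus = ?, vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = ?"; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | pstmt = conn.prepareStatement(sql); |
| | | pstmt.setInt(1, DataReposVersionManager.VSSTATUS_READY); |
| | | pstmt.setString(2, targetName); |
| | | pstmt.executeUpdate(); |
| | | } finally { |
| | | JDBCUtils.close(pstmt); |
| | | } |
| | | } |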
| | | |
| | | private void createIfNotExistNewSchema(Connection connection, String s) throws SQLException { |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("CREATE SCHEMA "); |
| | | sbSQL.append(s).append(' '); |
| | | sbSQL.append("AUTHORIZATION spatialdb"); |
| | | sbSQL.append("AUTHORIZATION ").append(_pgUsername); |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | |
| | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void convertDynamicColorTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, dyncolor) VALUES (?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setString(3, colorText); |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
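| | | // Note (illustrative): the loop above flushes the JDBC batch every |
| | | // MAX_BATCHSIZE rows, and the extra executeBatch() after the loop writes the |
| | | // tail rows that never hit the modulus. |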
| | | |
| | | private void convertPowerOwnerTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, fowner, flow) VALUES (?, ?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setShort(3, (short) ownerId); |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://ccarrow"); |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://rccarrow"); |
| | | } else { |
| | | pstmt.setString(4, "shape://backslash"); |
| | | } |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createOrClearTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(_pgSchema, tableName) + "CASCADE"); |
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TABLE " + encodeSchemaTableName(_pgSchema, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
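| | | // Note (illustrative): dropping and recreating, rather than truncating, keeps |
| | | // the table in sync when the column layout passed in via `sql` changes |
| | | // between runs. |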
| | | |
| | | private void createTargetTableIndex(Connection connection, String tableName) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private boolean convertDynamicColorThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | if (cid > Short.MAX_VALUE) { |
| | | logger.info("Wrong Color Table:" + cid + "-" + oid); |
| | | continue; |
| | | } |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(colorText).append("\n"); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
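| | | // Illustrative sketch, not from the original changeset: the COPY path above |
| | | // in isolation. The table name and sample rows are placeholders; the real |
| | | // code derives them from targetTableBaseName and the Oracle result set. |
| | | private static long copySketch(Connection conn) throws SQLException, IOException { |
| | | // unwrap(PGConnection.class) replaces the DelegatingConnection walk above |
| | | // when the connection pool supports JDBC 4 unwrapping. |
| | | CopyManager copy = conn.unwrap(PGConnection.class).getCopyAPI(); |
| | | return copy.copyIn("COPY tmp_xxx_fdyncolor FROM STDIN WITH CSV", |
| | | new StringReader("1,100,#FF0000\n1,101,#00FF00\n")); |
| | | } |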
| | | |
| | | private boolean convertPowerOwnerThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | String flowMark = null; |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | flowMark = FORWARDFLOW_MARK; |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | flowMark = BACKFLOW_MARK; |
| | | } else if (ConnectivityDirectionEnum.Nondeterminate == dir) { |
| | | flowMark = NONFLOW_MARK; |
| | | } else { |
| | | flowMark = UNFLOW_MARK; |
| | | } |
| | | |
| | | if (cid > Short.MAX_VALUE) { |
| | | logger.info("Wrong Connectivity Table:" + cid + "-" + oid); |
| | | continue; |
| | | } |
| | | |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(ownerId).append(','); |
| | | sb.append(flowMark).append('\n'); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | private void createOrClearTempTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, null, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(null, tableName) + "CASCADE"); |
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TEMP TABLE " + encodeSchemaTableName(null, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndexAndDropTemp(Connection connection, String tableName, String tempTable) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | boolean found = false; |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | found = true; |
| | | } |
| | | JDBCUtils.close(rs); |
| | | |
| | | if (!found) { |
| | | stmt.execute("CREATE TABLE " + tableName +" AS SELECT * FROM " + tempTable); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | } else { |
| | | stmt.execute("TRUNCATE "+ tableName + " CASCADE"); |
| | | // stmt.execute("SELECT * INTO " + tableName + " FROM " + tempTable); |
| | | stmt.execute("INSERT INTO " + tableName + " SELECT * FROM " + tempTable); |
| | | } |
| | | stmt.execute("DROP TABLE " + tempTable); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
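| | | // Note (illustrative): the statement sequence above implements a staging |
| | | // swap: rows land in a TEMP table via COPY, then either CREATE TABLE ... AS |
| | | // SELECT on the first run or TRUNCATE plus INSERT ... SELECT on a refresh, so |
| | | // readers only ever see the target table replaced wholesale. |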
| | | } |
| | |
| | | import org.apache.commons.collections.map.LinkedMap; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | |
| | | } |
| | | } |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertShapefilesJobContext(filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | String targetSchemaName = null; |
| | | |
| | |
| | | |
| | | for (String orgSchema : _orgSchema) { |
| | | OracleConvertShapefilesJobContext jobContext = (OracleConvertShapefilesJobContext) |
| | | prepareJobContext(targetSchemaName, _filterPath, isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setDataPath(_dataPath); |
| | | jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (bFirst) { |
| | | copyConnectivity(jobContext); |
| | |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Makes a copy of Connectivity; when querying current-flow direction it is used to compare against the electrical connectivity data in the OMS database.
| | | * |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void copyConnectivity(OracleConvertShapefilesJobContext jobContext) throws SQLException {
| | | Connection connection = jobContext.getOracleConnection();
| | | Statement stmt = connection.createStatement();
| | | try {
| | | stmt.execute(OracleConvertShapefilesJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK);
| | | stmt.execute(OracleConvertShapefilesJobContext.COPY_CONNECTIVITY_TO_WEBCHECK);
| | | } finally {
| | | JDBCUtils.close(stmt);
| | | }
| | | }
| | | |
| | | private void executeConvert(OracleConvertShapefilesJobContext jobContext,
| | |
| | | } |
| | | |
| | | /**
| | | * Executes the job of converting index design (DGN) files.
| | | *
| | | * @param context the job execution context
| | | * @throws org.quartz.JobExecutionException
| | | * exception
| | | */
| | |
| | | |
| | | for (File dgnFile : dgnFiles) { |
| | | IndexDgnConvertShpJobContext convertContext = new IndexDgnConvertShpJobContext(getDataPath(), isProfileMode(),
| | | isTransformed());
| | | logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement();
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | |
| | | /** |
| | | * Executes the job of converting other design (DGN) files.
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | GeneralDgnConvertShpJobContext convertContext = new GeneralDgnConvertShpJobContext(getDataPath(),
| | | isProfileMode(),
| | | isTransformed());
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement();
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertShpJobContext convertContext = new FeatureDgnConvertShpJobContext(getDataPath(), _filterPath,
| | | isProfileMode(),
| | | isTransformed());
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement();
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import java.io.IOException; |
| | | import java.io.PushbackReader; |
| | | import java.io.StringReader; |
| | | import java.net.URL; |
| | | import java.sql.Connection; |
| | | import java.sql.DatabaseMetaData; |
| | | import java.sql.PreparedStatement; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.sql.Timestamp; |
| | | import java.util.ArrayList; |
| | | import java.util.Arrays; |
| | | import java.util.Date; |
| | | import java.util.HashMap; |
| | | import java.util.List; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | import java.util.TreeSet; |
| | | |
| | | import com.ximple.eofms.geoserver.config.XGeosDataConfig; |
| | | import com.ximple.eofms.geoserver.config.XGeosDataConfigMapping; |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.ConnectivityDirectionEnum; |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.PrintfFormat; |
| | | import com.ximple.eofms.util.XGeosConfigDigesterUtils; |
| | | import org.apache.commons.collections.MultiMap; |
| | | import org.apache.commons.dbcp.DelegatingConnection; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.postgresql.PGConnection; |
| | | import org.postgresql.copy.CopyManager; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | public class OracleConvertThemes2PostGISJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleConvertThemes2PostGISJob.class);
| | | |
| | | private static final String SKIPCONFIGJOB = "SKIPCONFIGJOB"; |
| | | private static final String MASTERMODE = "MASTERMODE"; |
| | | private static final String EPSG = "EPSG:"; |
| | | private static final String XGEOSDATACONFIG_PATH = "xgeosdataconfig.xml"; |
| | | |
| | | // private static final int MAGIC_BLOCKSIZE = (64 * 1024 * 1024) - (32 * 1024); |
| | | |
| | | private static final String QUERY_VIEWDEFSQL = "SELECT table_name, view_definition FROM information_schema.views " + |
| | | "WHERE table_schema = ? AND table_name LIKE "; |
| | | |
| | | private static final String CREATE_VIEWSQL = "CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\".\"%s\""; |
| | | private static final String EXTRAWHERE_VIEWSQL = " WHERE \"%s\".level = %s AND \"%s\".symweight = %s"; |
| | | |
| | | private static final String ALTER_VIEWSQL = "ALTER TABLE \"%s\" OWNER TO "; |
| | | // private static final String GRANT_VIEWSQL = "GRANT SELECT ON TABLE \"%s\" TO public"; |
| | | private static final int SRSID_TWD97_ZONE119 = 3825; |
| | | private static final int SRSID_TWD97_ZONE121 = 3826; |
| | | public static final String DEFAULT_STORENAME = "pgDMMS"; |
| | | public static final String DEFAULT_GEODMMS_NAMESPACE = "http://tpc.ximple.com.tw/geodmms"; |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final boolean useTpclidText = false; |
| | | |
| | | private static final int FETCHSIZE = 30; |
| | | private static final int COMMITSIZE = 100; |
| | | |
| | | public static final String FORWARDFLOW_MARK = "shape://ccarrow"; |
| | | public static final String BACKFLOW_MARK = "shape://rccarrow"; |
| | | public static final String UNFLOW_MARK = "shape://backslash"; |
| | | public static final String NONFLOW_MARK = "shape://slash"; |
| | | |
| | | private static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1, DIR FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | private static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR WHERE TAG_BCOMPID = 0 ORDER BY TAG_SFSC"; |
| | | |
| | | private static String CREATE_OWNERTABLE = "CREATE TABLE s (tid smallint not null, oid int not null, owner smallint not null)"; |
| | | private static String CREATE_COLORTABLE = "CREATE TABLE s (tid smallint not null, oid int not null, dyncolor varchar(10) not null)"; |
| | | |
| | | public static final String FDYNCOLOR_SUFFIX = "_fdyncolor"; |
| | | public static final String FOWNER_SUFFIX = "_fowner"; |
| | | |
| | | private static XGeosDataConfigMapping xgeosDataConfigMapping = null; |
| | | protected JDBCDataStore targetDataStore; |
| | | protected Map<String, String> pgProperties; |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | private String currentThemeTable = null; |
| | | private Short currentThemeStatus = (short) -1;
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
| | | protected XGeosDataConfigMapping getConfigMapping() { |
| | | if (xgeosDataConfigMapping == null) { |
| | | Digester digester = XGeosConfigDigesterUtils.getXGeosConfigDigester(); |
| | | final URL configDataURL = XGeosDataConfigMapping.class.getResource(XGEOSDATACONFIG_PATH); |
| | | try { |
| | | xgeosDataConfigMapping = (XGeosDataConfigMapping) digester.parse(configDataURL); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SAXException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | |
| | | } |
| | | return xgeosDataConfigMapping; |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | |
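| | | // Overall flow: extract the job configuration, open the Oracle source and PostGIS
| | | // target stores, convert the power-owner and dynamic-color themes (COPY API first,
| | | // falling back to JDBC batches), mark the theme version READY, then rebuild the
| | | // theme view mappings.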
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) {
| | | logger.warn("Cannot connect to the source Oracle database.");
| | | throw new JobExecutionException("Cannot connect to the source Oracle database.");
| | | }
| | | 
| | | if (getTargetDataStore() == null) {
| | | logger.warn("Cannot connect to the target PostgreSQL database.");
| | | throw new JobExecutionException("Cannot connect to the target PostgreSQL database.");
| | | }
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetThemeTable; |
| | | try { |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | |
| | | OracleConvertPostGISJobContext jobContext = null; |
| | | |
| | | if (checkConvertPWThemes()) { |
| | | targetThemeTable = determineTargetThemeTableName(); |
| | | jobContext = (OracleConvertPostGISJobContext) prepareJobContext("public", _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | if (!convertPowerOwnerThemeWithCopyAPI(jobContext, targetThemeTable)) { |
| | | convertPowerOwnerTheme(jobContext, targetThemeTable); |
| | | } |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd); |
| | | } |
| | | tStep = System.currentTimeMillis(); |
| | | if (!convertDynamicColorThemeWithCopyAPI(jobContext, targetThemeTable)) |
| | | convertDynamicColorTheme(jobContext, targetThemeTable); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd); |
| | | } |
| | | jobContext.closeOracleConnection(); |
| | | |
| | | updatePWThemeStatusToReady(targetThemeTable); |
| | | } |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; |
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | |
| | | |
| | | createTargetDataStore(); |
| | | if (getTargetDataStore() == null) {
| | | logger.warn("Cannot connect to the target PostgreSQL database.");
| | | throw new JobExecutionException("Cannot connect to the target PostgreSQL database.");
| | | }
| | | |
| | | try { |
| | | logger.info("-- step:resetThemesViewMapping --"); |
| | | long tStep = System.currentTimeMillis(); |
| | | resetThemesViewMapping(context); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-resetThemesViewMapping", tStep, tStepEnd); |
| | | } |
| | | logger.info("-- step:resetGeoServerConfig --"); |
| | | tStep = System.currentTimeMillis(); |
| | | // resetGeoServerConfig(jobExecutionContext); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-resetGeoServerConfig", tStep, tStepEnd); |
| | | } |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | } |
| | | |
| | | /**
| | | * Rebuilds all of the database views in PostGIS.
| | | *
| | | * @param executionContext the batch execution context
| | | */
| | | private void resetThemesViewMapping(JobExecutionContext executionContext) throws JobExecutionException { |
| | | assert executionContext != null; |
| | | Connection connection = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | String ownerName = _pgUsername; |
| | | |
| | | String currentTargetThemesName = retrieveCurrentThemeName(connection, |
| | | DataReposVersionManager.VSSTATUS_READY); |
| | | if (currentTargetThemesName == null) { |
| | | logger.info("Cannot found themes that status is VSSTATUS_READY[" + |
| | | DataReposVersionManager.VSSTATUS_READY + "]"); |
| | | return; |
| | | } |
| | | |
| | | ArrayList<String> realTableNames = new ArrayList<String>(); |
| | | retrieveAllRealTableName(connection, currentTargetThemesName, realTableNames); |
| | | |
| | | resetThemesBaseView(connection, ownerName, currentTargetThemesName); |
| | | |
| | | if (currentThemeTable == null) { |
| | | transferThemesVersionStatus(DataReposVersionManager.VSSTATUS_READY, |
| | | DataReposVersionManager.VSSTATUS_LINKVIEW, false); |
| | | } else { |
| | | transferThemesVersionStatus(DataReposVersionManager.VSSTATUS_READY, |
| | | currentThemeStatus, true); |
| | | } |
| | | |
| | | /* |
| | | updateCurrentThemeStatus(connection, currentTargetThemesName, |
| | | DataReposVersionManager.VSSTATUS_LINKVIEW); |
| | | */ |
| | | |
| | | // String[] featureNames = dataStore.getTypeNames(); |
| | | // logger.info("featureNames[] size = " + featureNames.length); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | if (connection != null) |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | // if (dataStore != null) dataStore.dispose(); |
| | | } |
| | | } |
| | | |
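| | | /**
| | | * Collects the names of all physical tables in the target schema matching the
| | | * fsc%, index% and lndtpc% naming patterns used for converted feature data.
| | | */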
| | | private void retrieveAllRealTableName(Connection connection, String targetSchema, |
| | | ArrayList<String> realTableNames) throws SQLException { |
| | | ResultSet rsMeta = null; |
| | | try { |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "fsc%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "index%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "lndtpc%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | } finally { |
| | | if (rsMeta != null) rsMeta.close(); |
| | | } |
| | | } |
| | | |
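| | | // Derives the public view name from the versioned table name (for illustration, a
| | | // table named "a-b-c-d" maps to view "a-b-cd", and "a_b_c" maps to "abc") and only
| | | // re-points the view at the current schema when its existing definition references
| | | // a different one.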
| | | private void resetPostgisDataView(Connection connection, HashMap<String, String> viewDefs, |
| | | String ownerName, String schemaName, String tableName) throws SQLException { |
| | | String[] splits = tableName.split("-"); |
| | | if (splits.length > 3) { |
| | | // feature table |
| | | |
| | | StringBuilder viewBuilder = new StringBuilder(); |
| | | viewBuilder.append(splits[0]); |
| | | viewBuilder.append('-'); |
| | | viewBuilder.append(splits[1]); |
| | | viewBuilder.append('-'); |
| | | viewBuilder.append(splits[2]); |
| | | viewBuilder.append(splits[3]); |
| | | String viewName = viewBuilder.toString(); |
| | | if (viewDefs.containsKey(viewName)) { |
| | | String viewDef = viewDefs.get(viewName); |
| | | int pos = viewDef.indexOf("FROM"); |
| | | String subView = viewDef.substring(pos + 4); |
| | | // String[] viewSources = subView.split("\\."); |
| | | String[] viewSources = subView.split("(\\.\"|\")"); |
| | | if (!viewSources[0].equalsIgnoreCase(schemaName)) { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } else { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | |
| | | } else { |
| | | |
| | | splits = tableName.split("_"); |
| | | if (splits.length > 0) { |
| | | StringBuilder viewBuilder = new StringBuilder(); |
| | | viewBuilder.append(splits[0]); |
| | | if (splits.length > 1) viewBuilder.append(splits[1]); |
| | | if (splits.length > 2) viewBuilder.append(splits[2]); |
| | | String viewName = viewBuilder.toString(); |
| | | if (viewDefs.containsKey(viewName)) { |
| | | String viewDef = viewDefs.get(viewName); |
| | | int pos = viewDef.indexOf("FROM"); |
| | | String subView = viewDef.substring(pos + 4); |
| | | String[] viewSources = subView.split("(\\.\"|\")"); |
| | | if (!viewSources[0].equalsIgnoreCase(schemaName)) { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } else { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
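| | | /**
| | | * Re-points the fixed base views "xpwtheme_fdyncolor" and "xpwtheme_fowner" at the
| | | * versioned theme tables of the given theme name, and assigns view ownership.
| | | */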
| | | private void resetThemesBaseView(Connection connection, String ownerName, String currentThemesName) |
| | | throws SQLException { |
| | | String viewName = "xpwtheme" + FDYNCOLOR_SUFFIX; |
| | | String tableName = currentThemesName + FDYNCOLOR_SUFFIX; |
| | | PrintfFormat pf = new PrintfFormat("CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\""); |
| | | String sql = pf.sprintf(new Object[]{viewName, tableName}); |
| | | Statement stmt = connection.createStatement(); |
| | | try { |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | |
| | | viewName = "xpwtheme" + FOWNER_SUFFIX; |
| | | tableName = currentThemesName + FOWNER_SUFFIX; |
| | | pf = new PrintfFormat("CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\""); |
| | | sql = pf.sprintf(new Object[]{viewName, tableName}); |
| | | |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | } catch (SQLException e) { |
| | | // logger.warn(e.getMessage(), e); |
| | | logger.info(sql == null ? "SQL=NULL" : "SQL=" + sql); |
| | | throw e; |
| | | } finally { |
| | | stmt.close(); |
| | | } |
| | | } |
| | | |
| | | |
| | | private void resetThemesPostgisDataView(Connection connection, String ownerName, |
| | | String currentSchema, String viewName) throws SQLException { |
| | | String themeViewName = viewName + "-oms"; |
| | | // PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | // String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | ResultSet rs = null; |
| | | Statement stmt = connection.createStatement(); |
| | | |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("CREATE OR REPLACE VIEW \""); |
| | | sbSQL.append(themeViewName).append("\" AS SELECT "); |
| | | |
| | | rs = connection.getMetaData().getColumns(null, currentSchema, viewName, "%"); |
| | | while (rs.next()) { |
| | | String fieldName = rs.getString("COLUMN_NAME"); |
| | | sbSQL.append("t." + fieldName).append(", "); |
| | | } |
| | | sbSQL.append("fc.dyncolor, fo.fowner FROM "); |
| | | if (currentSchema != null) |
| | | sbSQL.append("\"").append(currentSchema).append("\".\"").append(viewName).append("\" AS t,"); |
| | | else |
| | | sbSQL.append("\"").append(viewName).append("\" AS t,"); |
| | | sbSQL.append("xpwtheme").append(FDYNCOLOR_SUFFIX).append(" AS fc,"); |
| | | sbSQL.append("xpwtheme").append(FOWNER_SUFFIX).append(" AS fo WHERE "); |
| | | sbSQL.append("t.tid = fc.tid AND t.oid = fc.oid AND "); |
| | | sbSQL.append("t.tid = fo.tid AND t.oid = fo.oid"); |
| | | |
| | | // sbSQL.delete(sbSQL.length() - 2, sbSQL.length()); |
| | | String sql = sbSQL.toString(); |
| | | stmt.execute(sql); |
| | | sbSQL.delete(0, sbSQL.length()); |
| | | |
| | | PrintfFormat pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(themeViewName); |
| | | stmt.execute(sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void resetFlowThemesPostgisDataView(Connection connection, String ownerName, |
| | | String currentSchema, String viewName) throws SQLException { |
| | | String themeViewName = viewName + "-flow-oms"; |
| | | ResultSet rs = null; |
| | | Statement stmt = connection.createStatement(); |
| | | |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("CREATE OR REPLACE VIEW \""); |
| | | sbSQL.append(themeViewName).append("\" AS SELECT "); |
| | | |
| | | rs = connection.getMetaData().getColumns(null, currentSchema, viewName, "%"); |
| | | while (rs.next()) { |
| | | String fieldName = rs.getString("COLUMN_NAME"); |
| | | sbSQL.append("t." + fieldName).append(", "); |
| | | } |
| | | sbSQL.append("fc.dyncolor, fo.fowner, fo.flow FROM "); |
| | | if (currentSchema != null) |
| | | sbSQL.append("\"").append(currentSchema).append("\".\"").append(viewName).append("\" AS t,"); |
| | | else |
| | | sbSQL.append("\"").append(viewName).append("\" AS t,"); |
| | | sbSQL.append("xpwtheme").append(FDYNCOLOR_SUFFIX).append(" AS fc,"); |
| | | sbSQL.append("xpwtheme").append(FOWNER_SUFFIX).append(" AS fo WHERE "); |
| | | sbSQL.append("t.tid = fc.tid AND t.oid = fc.oid AND "); |
| | | sbSQL.append("t.tid = fo.tid AND t.oid = fo.oid"); |
| | | |
| | | // sbSQL.delete(sbSQL.length() - 2, sbSQL.length()); |
| | | String sql = sbSQL.toString(); |
| | | stmt.execute(sql); |
| | | sbSQL.delete(0, sbSQL.length()); |
| | | |
| | | PrintfFormat pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(themeViewName); |
| | | stmt.execute(sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
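| | | /**
| | | * Reads view definitions from information_schema.views for the given schema and
| | | * table-name pattern, keyed by view name.
| | | */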
| | | private HashMap<String, String> retrieveViewDef(Connection connection, String schemaName, String tablePattern) throws SQLException { |
| | | PreparedStatement stmt = connection.prepareStatement(QUERY_VIEWDEFSQL + "'" + tablePattern + "'"); |
| | | stmt.setString(1, schemaName); |
| | | // stmt.setString(2, tablePattern); |
| | | HashMap<String, String> result = new HashMap<String, String>(); |
| | | ResultSet rs = stmt.executeQuery(); |
| | | while (rs.next()) { |
| | | String tableName = rs.getString(1); |
| | | String viewDef = rs.getString(2); |
| | | result.put(tableName, viewDef); |
| | | } |
| | | rs.close(); |
| | | stmt.close(); |
| | | return result; |
| | | } |
| | | |
| | | private void createOrReplaceView(Connection connection, String schemaName, String tableName, String viewName, |
| | | String ownerName) throws SQLException { |
| | | PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | Statement stmt = connection.createStatement(); |
| | | try { |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | } catch (SQLException e) { |
| | | // logger.warn(e.getMessage(), e); |
| | | logger.info(sql == null ? "SQL=NULL" : "SQL=" + sql); |
| | | throw e; |
| | | } finally { |
| | | stmt.close(); |
| | | } |
| | | // connection.commit(); |
| | | } |
| | | |
| | | private void createOrReplaceExtraView(Connection connection, String schemaName, String tableName, String viewName, |
| | | String ownerName, XGeosDataConfig xgeosConfig) throws SQLException { |
| | | PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | |
| | | PrintfFormat pfWhere = new PrintfFormat(EXTRAWHERE_VIEWSQL); |
| | | sql += pfWhere.sprintf(new String[]{tableName, Short.toString(xgeosConfig.getLEV()), |
| | | tableName, Short.toString(xgeosConfig.getWEIGHT())}); |
| | | |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(sql); |
| | | |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | stmt.close(); |
| | | // connection.commit(); |
| | | } |
| | | |
| | | private Timestamp retrieveCurrentThemeTimestamp(Connection connection, short status) throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("SELECT vpttimestamp, vptname, vptstatus FROM "); |
| | | sbSQL.append(DataReposVersionManager.XPTVERSIONTABLE_NAME); |
| | | sbSQL.append(" WHERE vptstatus = "); |
| | | sbSQL.append(status); |
| | | sbSQL.append(" ORDER BY vptid"); |
| | | |
| | | Timestamp result = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | // get first result |
| | | if (rs.next()) { |
| | | result = rs.getTimestamp(1); |
| | | } |
| | | return result; |
| | | } finally { |
| | | if (rs != null) rs.close(); |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void updateCurrentThemeStatus(Connection connection, String themeTableName, short newStatus) |
| | | throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(DataReposVersionManager.XPTVERSIONTABLE_NAME).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(newStatus); |
| | | sbSQL.append(", vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = '"); |
| | | sbSQL.append(themeTableName).append("'"); |
| | | |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | |
| | | private boolean checkCurrentThemeStatus(Connection connection, short status) { |
| | | try { |
| | | return (retrieveCurrentThemeName(connection, status) != null); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return false; |
| | | } |
| | | } |
| | | |
| | | private String retrieveCurrentThemeName(Connection connection, short status) throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vptname, vpttimestamp, vptstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sbSQL.append(" WHERE vptstatus = "); |
| | | sbSQL.append(status); |
| | | sbSQL.append("ORDER BY vptid"); |
| | | |
| | | String result = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | // get first result |
| | | if (rs.next()) { |
| | | result = rs.getString(1); |
| | | } |
| | | return result; |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | protected String[] retrieveTargetStoreAllViewNames(Connection connection) { |
| | | try { |
| | | final int TABLE_NAME_COL = 3; |
| | | List<String> list = new ArrayList<String>(); |
| | | |
| | | DatabaseMetaData meta = connection.getMetaData(); |
| | | // String[] tableType = { "TABLE", "VIEW" }; |
| | | String[] tableType = { "VIEW" }; |
| | | ResultSet tables = meta.getTables(null, _pgSchema, "%", tableType); |
| | | |
| | | while (tables.next()) { |
| | | String tableName = tables.getString(TABLE_NAME_COL); |
| | | list.add(tableName); |
| | | /* |
| | | if (allowTable(tableName)) { |
| | | list.add(tableName); |
| | | } |
| | | */ |
| | | } |
| | | tables.close(); |
| | | return (String[]) list.toArray(new String[list.size()]); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | return null; |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | /* |
| | | if (!isDriverFound()) |
| | | { |
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); |
| | | } |
| | | */ |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) {
| | | getLogger().warn("Cannot process the PostGIS connection properties.");
| | | throw new JobExecutionException("Cannot process the PostGIS connection properties.");
| | | }
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
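| | | /**
| | | * Picks the next theme table to load, creating the version bookkeeping table on first
| | | * use. The candidate after the currently linked version is chosen (wrapping around to
| | | * the first entry) and its status is set to VSSTATUS_COVERT.
| | | */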
| | | private String determineTargetThemeTableName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetTable = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XPTVERSIONTABLE_NAME |
| | | needCreate = false; |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) { |
| | | createXPWThemeVersionTable(connection, _pgSchema); |
| | | } |
| | | rs.close(); |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vptname, vptstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vptid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpTablenames = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vptname"); |
| | | values[1] = rs.getShort("vptstatus"); |
| | | tmpTablenames.add(values); |
| | | if (((Short) values[1]) >= DataReposVersionManager.VSSTATUS_LINKVIEW) { |
| | | current = i; |
| | | currentThemeTable = (String) values[0]; |
| | | currentThemeStatus = (Short) values[1]; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current == -1) { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } else if (current < (tmpTablenames.size() - 1)) { |
| | | Object[] values = tmpTablenames.get(current + 1); |
| | | targetTable = (String) values[0]; |
| | | } else { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } |
| | | |
| | | sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT); |
| | | sbSQL.append(" WHERE vptname = '"); |
| | | sbSQL.append(targetTable).append("'"); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetTable + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetTable; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | if (schemaName == null) |
| | | return "\"" + tableName + "\""; |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
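| | | // For illustration: encodeSchemaTableName("public", "xpwtheme1_fowner") yields
| | | // "public"."xpwtheme1_fowner", while encodeSchemaTableName(null, "tmp_x") yields
| | | // "tmp_x" (quoted, without a schema prefix).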
| | | |
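| | | // Fast path: unwraps the pooled connection down to the native PGConnection and bulk-loads
| | | // rows through the PostgreSQL COPY API in CSV form, buffering rows in a PushbackReader and
| | | // flushing every MAX_BATCHSIZE rows. Returns false when no PGConnection is available so the
| | | // caller can fall back to the JDBC-batch variant.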
| | | private boolean convertDynamicColorThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | if (cid > Short.MAX_VALUE) { |
| | | logger.info("Wrong Color Table:" + cid + "-" + oid); |
| | | continue; |
| | | } |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(colorText).append("\n"); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName, "tid, oid, dyncolor"); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
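| | | // Same COPY-API pattern as the dynamic-color theme above, additionally deriving a
| | | // flow-direction mark (ccarrow / rccarrow / slash / backslash) from the connectivity
| | | // direction of each record.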
| | | private boolean convertPowerOwnerThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | String flowMark; |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | flowMark = FORWARDFLOW_MARK; |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | flowMark = BACKFLOW_MARK; |
| | | } else if (ConnectivityDirectionEnum.Nondeterminate == dir) { |
| | | flowMark = NONFLOW_MARK; |
| | | } else { |
| | | flowMark = UNFLOW_MARK; |
| | | } |
| | | |
| | | if (cid > Short.MAX_VALUE) { |
| | | logger.info("Wrong Connectivity Table:" + cid + "-" + oid); |
| | | continue; |
| | | } |
| | | |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(ownerId).append(','); |
| | | sb.append(flowMark).append('\n'); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName, "tid, oid, fowner, flow"); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
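| | | // JDBC-batch fallback used when the COPY API is unavailable: inserts the same rows
| | | // through a PreparedStatement, executing the batch every MAX_BATCHSIZE rows.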
| | | private void convertPowerOwnerTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, fowner, flow) VALUES (?, ?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setShort(3, (short) ownerId); |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://ccarrow"); |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://rccarrow"); |
| | | } else { |
| | | pstmt.setString(4, "shape://backslash"); |
| | | } |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createOrClearTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(_pgSchema, tableName) + "CASCADE"); |
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TABLE " + encodeSchemaTableName(_pgSchema, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndex(Connection connection, String tableName) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndexAndDropTemp(Connection connection, String tableName, String tempTable, |
| | | String fields) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | boolean found = false; |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | found = true; |
| | | } |
| | | JDBCUtils.close(rs); |
| | | |
| | | if (!found) { |
| | | stmt.execute("CREATE TABLE " + tableName +" AS SELECT * FROM " + tempTable); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | } else { |
| | | stmt.execute("TRUNCATE "+ tableName + " CASCADE"); |
| | | stmt.execute("INSERT INTO " + tableName + "(" + fields + ") SELECT " + |
| | | fields + " FROM " + tempTable); |
| | | /* |
| | | --insert into xpwtheme1_fdyncolor (tid, oid, dyncolor) select tid, oid, dyncolor from xpwtheme2_fdyncolor; |
| | | --reindex table xpwtheme1_fdyncolor; |
| | | --alter table xpwtheme1_fdyncolor drop constraint xpwtheme1_fdyncolor_pkey; |
| | | --alter table xpwtheme1_fdyncolor ADD PRIMARY KEY (tid, oid); |
| | | */ |
| | | } |
| | | stmt.execute("DROP TABLE " + tempTable); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createOrClearTempTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, null, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(null, tableName) + "CASCADE"); |
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TEMP TABLE " + encodeSchemaTableName(null, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void convertDynamicColorTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, dyncolor) VALUES (?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setString(3, colorText); |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void updatePWThemeStatusToReady(String targetSchema) { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY); |
| | | sbSQL.append(" , vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createXPWThemeVersionTable(Connection connection, String pgSchema) throws SQLException { |
| | | Statement stmt = null; |
| | | StringBuilder sql = new StringBuilder("CREATE TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" ( vptid serial PRIMARY KEY, "); |
| | | sql.append(" vptname character varying(64) NOT NULL, "); |
| | | sql.append(" vptstatus smallint NOT NULL, "); |
| | | sql.append(" vpttimestamp timestamp with time zone ) "); |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" TO public"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | for (String schemaName : DataReposVersionManager.DEFAULTXPTVERSIONTABLE_NAMES) { |
| | | sql = new StringBuilder("INSERT INTO "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" (vptname, vptstatus) VALUES ('"); |
| | | sql.append(schemaName).append("', "); |
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | } |
| | | |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
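| | | /**
| | | * Moves the theme currently in state vsstatusBefore into vsstatusAfter; when exclusive
| | | * is set, any theme already holding vsstatusAfter is demoted to VSSTATUS_AVAILABLE.
| | | */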
| | | protected void transferThemesVersionStatus(Connection connection, |
| | | short vsstatusBefore, short vsstatusAfter, boolean exclusive) throws JobExecutionException { |
| | | |
| | | try { |
| | | String currentTargetTheme = retrieveCurrentThemeName(connection, vsstatusBefore); |
| | | if (currentTargetTheme == null) { |
| | | logger.info("Cannot found target schema in dataStore. status=" + vsstatusBefore); |
| | | return; |
| | | } |
| | | String existTargetSchema = null; |
| | | if (exclusive) |
| | | existTargetSchema = retrieveCurrentThemeName(connection, vsstatusAfter); |
| | | |
| | | |
| | | updateCurrentThemeStatus(connection, currentTargetTheme, vsstatusAfter); |
| | | if ((exclusive) && (existTargetSchema != null)) { |
| | | updateCurrentThemeStatus(connection, existTargetSchema, |
| | | DataReposVersionManager.VSSTATUS_AVAILABLE); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Update " + DataReposVersionManager.XPTVERSIONTABLE_NAME + |
| | | " has error-", e); |
| | | } |
| | | } |
| | | |
| | | protected void transferThemesVersionStatus(short vsstatusBefore, short vsstatusAfter, boolean exclusive) throws JobExecutionException { |
| | | |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | transferThemesVersionStatus(connection, vsstatusBefore, vsstatusAfter, exclusive); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import java.io.IOException; |
| | | import java.nio.BufferOverflowException; |
| | | import java.nio.ByteBuffer; |
| | | import java.nio.ByteOrder; |
| | | import java.sql.Connection; |
| | | import java.sql.PreparedStatement; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.sql.Types; |
| | | import java.util.ArrayList; |
| | | import java.util.Date; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleIncrementPostGISJobContext; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Dgn7fileException; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.ElementType; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.IElementHandler; |
| | | import com.ximple.util.PrintfFormat; |
| | | import oracle.sql.BLOB; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import static com.ximple.eofms.jobs.context.postgis.OracleIncrementPostGISJobContext.*; |
| | | |
| | | public class OracleIncrementDgn2PostGISJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleIncrementDgn2PostGISJob.class); |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final int FETCHSIZE = 30; |
| | | private static final int COMMITSIZE = 10; |
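| | | // FETCHSIZE tunes the JDBC cursor fetch on the source query;
| | | // COMMITSIZE is the number of exchange rows processed between commits.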
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | @Override |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | @Override |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
| | | @Override |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, boolean useTransform) { |
| | | return new OracleIncrementPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | /* |
| | | if (!isDriverFound()) |
| | | { |
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); |
| | | } |
| | | */ |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.WKBENABLED.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | @Override |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) {
| | | logger.warn("Cannot connect to the source Oracle database.");
| | | throw new JobExecutionException("Cannot connect to the source Oracle database.");
| | | }
| | | 
| | | if (getTargetDataStore() == null) {
| | | logger.warn("Cannot connect to the target PostgreSQL database.");
| | | throw new JobExecutionException("Cannot connect to the target PostgreSQL database.");
| | | }
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName;
| | | |
| | | try { |
| | | logger.info("-- step:incrementConvertOracleDB --"); |
| | | targetSchemaName = determineCurrentTargetSchemaName(); |
| | | if (targetSchemaName == null) return; |
| | | |
| | | OracleIncrementPostGISJobContext jobContext = null; |
| | | |
| | | jobContext = (OracleIncrementPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Copy Connectivity", tStep, tStepEnd); |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | jobContext.resetProcessTime(); |
| | | jobContext.resetUpdateTime(); |
| | | } |
| | | tStep = System.currentTimeMillis(); |
| | | executeIncrementConvert(jobContext, _dataPath);
| | | |
| | | // close all open FeatureWriter instances
| | | jobContext.closeFeatureWriter(); |
| | | |
| | | if (isProfileMode()) { |
| | | logger.warn("Profile-Current Query Oracle Cost-" + |
| | | ((int) ((getQueryTime()) / 60000.0)) + " min - " + |
| | | (((int) ((getQueryTime()) % 60000.0)) / 1000) + " sec"); |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logger.warn("Profile-Current Process Cost-" + |
| | | ((int) ((getProcessTime()) / 60000.0)) + " min - " + |
| | | (((int) ((getProcessTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current Update Cost-" + |
| | | ((int) ((getUpdateTime()) / 60000.0)) + " min - " + |
| | | (((int) ((getUpdateTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current JobContext Process Cost-" + |
| | | ((int) ((jobContext.getProcessTime()) / 60000.0)) + " min - " + |
| | | (((int) ((jobContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current JobContext Update Cost-" + |
| | | ((int) ((jobContext.getUpdateTime()) / 60000.0)) + " min - " + |
| | | (((int) ((jobContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); |
| | | logTimeDiff("Profile-Convert[ Increment ]", tStep, tStepEnd); |
| | | |
| | | resetQueryTime(); |
| | | resetProcessTime(); |
| | | resetUpdateTime(); |
| | | } |
| | | |
| | | jobContext.closeOracleConnection(); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | private String determineCurrentTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XGVERSIONTABLE_NAME |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | rs.close(); |
| | | if (needCreate) return null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
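| | | // vsstatus is treated as a bit field; VSSTATUS_USING marks the schema currently in service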
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current != -1) { |
| | | Object[] values = tmpSchemas.get(current); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
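| | | /**
| | | * Quotes identifiers for PostgreSQL, e.g. ("s", "t") becomes "s"."t".
| | | * Assumes the names contain no embedded double quotes.
| | | */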
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | if (schemaName == null) |
| | | return "\"" + tableName + "\""; |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | /** |
| | | * CREATE TABLE CMMS_POSTDB.GEO_EXCHANGE |
| | | * ( |
| | | * ID NUMBER NOT NULL, |
| | | * TAG_LUFID NUMBER(10) NOT NULL, |
| | | * TAG_SFSC NUMBER(5) NOT NULL, |
| | | * TAG_BCOMPID NUMBER(3) NOT NULL, |
| | | * TAG_SOCCID NUMBER(5) NOT NULL, |
| | | * STATUS NUMBER(3) NOT NULL, |
| | | * IGDSELM BLOB, |
| | | * UPDATETIME DATE DEFAULT sysdate NOT NULL, |
| | | * TASKID NUMBER(10) NOT NULL, |
| | | * ISEXCHANGE NUMBER DEFAULT 0 NOT NULL |
| | | * ) |
| | | * |
| | | * STATUS column: 0 = insert, 2 = edit, 3 = delete equipment, 4 = delete component
| | | * ISEXCHANGE column: 0 = not yet synchronized, 1 = synchronized (synchronized rows may also simply be deleted)
| | | * |
| | | * |
| | | * @param jobContext |
| | | * @param targetSchemaName |
| | | * @throws SQLException |
| | | */ |
| | | private void executeIncrementConvert(OracleIncrementPostGISJobContext jobContext, String targetSchemaName) throws SQLException {
| | | |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | if (connection == null) { |
| | | logger.warn("Cannot Get Oracle Connection for DMMS."); |
| | | return; |
| | | } |
| | | |
| | | // SELECT COUNT(*) FROM CMMS_POSTDB.GEO_EXCHANGE WHERE ISEXCHANGE = 0 |
| | | int exchangeCount = fetchExchangeCount(connection); |
| | | logger.info("exchangeCount=" + exchangeCount); |
| | | |
| | | processIncrementElement(jobContext, exchangeCount);
| | | // jobContext.setCurrentSchema(querySchema);
| | | |
| | | } |
| | | |
| | | private int fetchExchangeCount(Connection connection) throws SQLException { |
| | | // SELECT COUNT(*) FROM CMMS_POSTDB.GEO_EXCHANGE WHERE ISEXCHANGE = 0
| | | Statement stmt = null;
| | | ResultSet rs = null;
| | | StringBuilder sbSQL = new StringBuilder();
| | | sbSQL.append("SELECT COUNT(*) FROM \"CMMS_POSTDB\".\"GEO_EXCHANGE\" WHERE ISEXCHANGE = 0");
| | | 
| | | int size = -1;
| | | try {
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | if (rs.next()) { |
| | | size = (int) rs.getLong(1); |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | |
| | | return size; |
| | | } |
| | | |
| | | static class IncrementRecord {
| | | Element element;
| | | }
| | | |
| | | private void processIncrementElement(OracleIncrementPostGISJobContext jobContext, int exchangeCount) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | if (exchangeCount == 0) { |
| | | logger.info("GEO_EXCHANGE ELEMENT COUNT IS ZERO."); |
| | | return; |
| | | } |
| | | |
| | | // SELECT ID, TAG_LUFID, TAG_SFSC, TAG_BCOMPID, TAG_SOCCID, STATUS, TASKID, IGDSELM
| | | // FROM CMMS_POSTDB.GEO_EXCHANGE WHERE ISEXCHANGE = 0 ORDER BY UPDATETIME
| | | String fetchSrcStmtFmt = "SELECT ID, TAG_LUFID, TAG_SFSC, TAG_BCOMPID, TAG_SOCCID, STATUS, TASKID, IGDSELM " + |
| | | "FROM \"%s\".\"%s\" WHERE ISEXCHANGE = 0 ORDER BY UPDATETIME"; |
| | | // String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" |
| | | // WHERE TAG_SFSC = 423 AND TAG_LUFID = 21612065 ORDER BY ROWID"; |
| | | PrintfFormat spf = new PrintfFormat(fetchSrcStmtFmt); |
| | | String fetchSrcStmt = spf.sprintf(new Object[]{"CMMS_POSTDB", "GEO_EXCHANGE"}); |
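| | | // Yields: SELECT ... FROM "CMMS_POSTDB"."GEO_EXCHANGE" WHERE ISEXCHANGE = 0 ORDER BY UPDATETIME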
| | | Statement stmtSrc = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | |
| | | stmtSrc.setFetchSize(FETCHSIZE); |
| | | ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt); |
| | | // IGDSELM (the raw element bytes) is the 8th column of the SELECT list
| | | int igdsMetaType = rsSrc.getMetaData().getColumnType(8);
| | | ArrayList<Integer> transIds = new ArrayList<Integer>(); |
| | | |
| | | int step = exchangeCount / 100; |
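| | | // step = rows per one percent of progress (0 when there are fewer than 100 rows)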
| | | int order = 0; |
| | | int current = 0; |
| | | jobContext.getExecutionContext().put("IncrementDgn2PostGISJobProgress", 0); |
| | | |
| | | while (rsSrc.next()) { |
| | | if (isProfileMode()) { |
| | | markQueryTime(); |
| | | } |
| | | ElementTransactionContext xContext = new ElementTransactionContext(); |
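| | | // Result columns: 1=ID, 2=TAG_LUFID, 3=TAG_SFSC, 4=TAG_BCOMPID, 5=TAG_SOCCID, 6=STATUS, 7=TASKID, 8=IGDSELM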
| | | xContext.transcationId = rsSrc.getInt(1); |
| | | xContext.oid = rsSrc.getInt(2); |
| | | xContext.cid = (short) rsSrc.getInt(3); |
| | | xContext.compid = (short) rsSrc.getInt(4); |
| | | xContext.occid = (short) rsSrc.getInt(5); |
| | | xContext.transcationType = rsSrc.getInt(6); |
| | | xContext.taskid = rsSrc.getInt(7); |
| | | |
| | | try { |
| | | if (xContext.transcationType <= 2) { |
| | | byte[] raw = null; |
| | | if (igdsMetaType == Types.BLOB) { |
| | | BLOB blob = (BLOB) rsSrc.getBlob(8); |
| | | |
| | | try { |
| | | raw = getBytesFromBLOB(blob); |
| | | } catch (BufferOverflowException e) { |
| | | logger.warn("Wrong Element Structure-", e); |
| | | } finally { |
| | | // blob.close(); |
| | | } |
| | | } else { |
| | | raw = rsSrc.getBytes(8); |
| | | } |
| | | if (raw != null) { |
| | | Element element = fetchBinaryElement(raw); |
| | | if (isProfileMode()) { |
| | | accumulateQueryTime(); |
| | | } |
| | | xContext.element = element; |
| | | } else { |
| | | if (isProfileMode()) { |
| | | accumulateQueryTime(); |
| | | } |
| | | } |
| | | } else { |
| | | xContext.element = null; |
| | | } |
| | | |
| | | if (xContext.transcationType > 1) { |
| | | // remove first |
| | | } |
| | | |
| | | jobContext.processFeatureContext(xContext); |
| | | transIds.add(xContext.transcationId); |
| | | |
| | | } catch (Dgn7fileException e) { |
| | | logger.warn("Dgn7Exception", e); |
| | | } |
| | | |
| | | order++;
| | | if ((order % COMMITSIZE) == 0) {
| | | jobContext.commitTransaction();
| | | System.gc();
| | | System.runFinalization();
| | | }
| | | 
| | | if (step != 0) {
| | | // one percent of progress per 'step' rows
| | | int now = order / step;
| | | if (now != current) {
| | | current = now;
| | | jobContext.getExecutionContext().put("IncrementDgn2PostGISJobProgress", current);
| | | }
| | | } else {
| | | jobContext.getExecutionContext().put("IncrementDgn2PostGISJobProgress", current);
| | | current++;
| | | }
| | | } |
| | | |
| | | jobContext.getExecutionContext().put("IncrementDgn2PostGISJobProgress", 100); |
| | | |
| | | jobContext.commitTransaction(); |
| | | jobContext.resetFeatureContext(); |
| | | |
| | | JDBCUtils.close(rsSrc); |
| | | JDBCUtils.close(stmtSrc); |
| | | |
| | | if (!transIds.isEmpty()) { |
| | | completeTransactionAction(connection, transIds); |
| | | } |
| | | } |
| | | |
| | | private void completeTransactionAction(Connection connection, ArrayList<Integer> transIds) { |
| | | if (transIds.isEmpty()) return; |
| | | |
| | | boolean autoCommit = true; |
| | | PreparedStatement statement = null; |
| | | try { |
| | | autoCommit = connection.getAutoCommit(); |
| | | connection.setAutoCommit(false); |
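| | | // Mark each processed exchange row as synchronized (ISEXCHANGE = 1)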
| | | String sql = "UPDATE \"CMMS_POSTDB\".\"GEO_EXCHANGE\" SET ISEXCHANGE=? WHERE ID=?"; |
| | | |
| | | statement = connection.prepareStatement(sql); |
| | | for (int id : transIds) {
| | | statement.setInt(1, 1);
| | | statement.setInt(2, id);
| | | statement.executeUpdate();
| | | }
| | | connection.commit(); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | try { |
| | | connection.rollback(); |
| | | } catch (SQLException e1) { |
| | | logger.warn(e1.getMessage(), e1);
| | | } |
| | | } finally { |
| | | JDBCUtils.close(statement); |
| | | try { |
| | | connection.setAutoCommit(autoCommit); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | } |
| | | |
| | | // Binary to Element |
| | | private Element fetchBinaryElement(byte[] raws) throws Dgn7fileException { |
| | | ByteBuffer buffer = ByteBuffer.wrap(raws); |
| | | buffer.order(ByteOrder.LITTLE_ENDIAN); |
| | | short signature = buffer.getShort(); |
| | | |
| | | // byte type = (byte) (buffer.get() & 0x7f); |
| | | byte type = (byte) ((signature >>> 8) & 0x007f); |
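| | | // In the ISFF element header assumed here, bits 8-14 of the first word
| | | // carry the element type and bit 15 flags a complex element.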
| | | |
| | | // Bentley stores contentLength in 2-byte words,
| | | // while ByteBuffer works in raw bytes;
| | | // track the record location accordingly.
| | | int elementLength = (buffer.getShort() * 2) + 4; |
| | | ElementType recordType = ElementType.forID(type); |
| | | IElementHandler handler; |
| | | |
| | | handler = recordType.getElementHandler(); |
| | | |
| | | Element dgnElement = (Element) handler.read(buffer, signature, elementLength); |
| | | if (recordType.isComplexElement() && (elementLength < raws.length)) { |
| | | int offset = elementLength; |
| | | while (offset < (raws.length - 4)) { |
| | | buffer.position(offset); |
| | | signature = buffer.getShort(); |
| | | type = (byte) ((signature >>> 8) & 0x007f); |
| | | elementLength = (buffer.getShort() * 2) + 4; |
| | | if (raws.length < (offset + elementLength)) { |
| | | logger.debug("Length not match:" + offset + ":" + buffer.position() + ":" + buffer.limit()); |
| | | break; |
| | | } |
| | | recordType = ElementType.forID(type); |
| | | handler = recordType.getElementHandler(); |
| | | if (handler != null) { |
| | | Element subElement = (Element) handler.read(buffer, signature, elementLength); |
| | | ((ComplexElement) dgnElement).add(subElement); |
| | | offset += elementLength; |
| | | } else { |
| | | byte[] remain = new byte[buffer.remaining()]; |
| | | System.arraycopy(raws, offset, remain, 0, buffer.remaining()); |
| | | for (int i = 0; i < remain.length; i++) { |
| | | if (remain[i] != 0) { |
| | | logger.info("fetch element has some error. index=" + (offset + i) + ":value=" + remain[i]); |
| | | } |
| | | } |
| | | break; |
| | | } |
| | | } |
| | | } |
| | | |
| | | return dgnElement; |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import java.io.FileWriter; |
| | | import java.io.IOException; |
| | | import java.sql.Connection; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.ArrayList; |
| | | import java.util.Date; |
| | | import java.util.List; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import au.com.bytecode.opencsv.CSVWriter; |
| | | import au.com.bytecode.opencsv.ResultSetHelper; |
| | | import au.com.bytecode.opencsv.ResultSetHelperService; |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.ConnectivityDirectionEnum; |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | public class OracleTransformColorOwner2CSVJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleTransformColorOwner2CSVJob.class); |
| | | |
| | | public static String FETCH_TPDATA = "SELECT TPID, TPNAME FROM BASEDB.TPDATA"; |
| | | public static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1, DIR FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | public static String FETCH_FDRCOLOR = "SELECT FRREDERID, COLOR FROM BASEDB.FEEDER"; |
| | | public static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR ORDER BY TAG_SFSC"; |
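| | | // FETCH_CONNFDR feeds featureowner.csv below; FETCH_COLORTAB feeds featurecolor.csv.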
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final boolean useTpclidText = false; |
| | | |
| | | private static final int FETCHSIZE = 100; |
| | | private static final int COMMITSIZE = 100; |
| | | |
| | | protected static class Pair { |
| | | Object first; |
| | | Object second; |
| | | |
| | | public Pair(Object first, Object second) { |
| | | this.first = first; |
| | | this.second = second; |
| | | } |
| | | } |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("PGHOST=" + _myHost); |
| | | logger.info("PGDATBASE=" + _myDatabase); |
| | | logger.info("PGPORT=" + _myPort); |
| | | logger.info("PGSCHEMA=" + _mySchema); |
| | | logger.info("PGUSER=" + _myUsername); |
| | | logger.info("PGPASS=" + _myPassword); |
| | | logger.info("USEWKB=" + _myUseWKB); |
| | | */ |
| | | |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
| | | @Override |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) {
| | | logger.warn("Cannot connect to the source Oracle database.");
| | | throw new JobExecutionException("Cannot connect to the source Oracle database.");
| | | }
| | | 
| | | if (getTargetDataStore() == null) {
| | | logger.warn("Cannot connect to the target PostgreSQL database.");
| | | throw new JobExecutionException("Cannot connect to the target PostgreSQL database.");
| | | }
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName; |
| | | try { |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | clearOutputDatabase(); |
| | | |
| | | logger.info("-- step:transformOracleDMMSDB --"); |
| | | targetSchemaName = determineTargetSchemaName(); |
| | | |
| | | OracleConvertPostGISJobContext jobContext = |
| | | (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | mergeConnectivityOwner(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge Connectivity Owner", tStep, tStepEnd); |
| | | } |
| | | |
| | | tStep = System.currentTimeMillis(); |
| | | mergeDynamicColor(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge ColorTable", tStep, tStepEnd); |
| | | } |
| | | |
| | | jobContext.closeOracleConnection(); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Connectivity (Connectivity) |
| | | * |
| | | * @param jobContext job context |
| | | * @throws java.sql.SQLException sql exception |
| | | */ |
| | | protected void mergeConnectivityOwner(AbstractOracleJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | String targetSchemaName = determineTargetSchemaName(); |
| | | logger.info("target schema:" + targetSchemaName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(FETCHSIZE); |
| | | |
| | | ResultSetHelper resultService = new ResultSetHelperService(); |
| | | final String[] header = new String[] { "tid", "oid", "owner", "flow" }; |
| | | CSVWriter writer = new CSVWriter(new FileWriter("featureowner.csv"), ','); |
| | | writer.writeNext(header); |
| | | while (rs.next()) { |
| | | short dirId = (short) rs.getInt(4); |
| | | String[] values = resultService.getColumnValues(rs); |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
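| | | // Map the flow direction to an arrow symbol; the shape:// names below are
| | | // assumed to be GeoTools SLD well-known marks consumed by downstream styling.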
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | values[3] = "shape://ccarrow"; |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | values[3] = "shape://rccarrow"; |
| | | } else { |
| | | values[3] = "shape://backslash"; |
| | | } |
| | | writer.writeNext(values); |
| | | } |
| | | writer.flush(); |
| | | writer.close(); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void mergeDynamicColor(OracleConvertPostGISJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | String targetSchemaName = determineTargetSchemaName(); |
| | | logger.info("target schema:" + targetSchemaName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(FETCHSIZE); |
| | | |
| | | ResultSetHelper resultService = new ResultSetHelperService(); |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | |
| | | final String[] header = new String[] { "tid", "oid", "dyncolor" }; |
| | | CSVWriter writer = new CSVWriter(new FileWriter("featurecolor.csv"), ','); |
| | | // writer.writeAll(rs, true); |
| | | writer.writeNext(header); |
| | | while (rs.next()) { |
| | | int colorId = rs.getInt(3); |
| | | String[] values = resultService.getColumnValues(rs); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | values[2] = colorText; |
| | | writer.writeNext(values); |
| | | } |
| | | writer.flush(); |
| | | writer.close(); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void batchExecuteSQL(ArrayList<String> sqlStmts) throws IOException { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | // ResultSet rs = null; |
| | | int[] results = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | connection.setAutoCommit(false); |
| | | stmt = connection.createStatement(); |
| | | for (String sqlStmt : sqlStmts) { |
| | | stmt.addBatch(sqlStmt); |
| | | } |
| | | results = stmt.executeBatch(); |
| | | connection.commit(); |
| | | } catch (SQLException e) {
| | | logger.warn(e.getMessage(), e);
| | | } finally { |
| | | // JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | |
| | | private List<String> fetchTargetTableList(String targetSchemaName, int cid) throws IOException { |
| | | ArrayList<String> result = new ArrayList<String>(); |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | String[] types = {"TABLE"}; |
| | | rs = connection.getMetaData().getTables(null, targetSchemaName, "fsc-" + cid +"%", types); |
| | | while (rs.next()) { |
| | | String tableName = rs.getString("TABLE_NAME"); |
| | | logger.info("table:" + tableName); |
| | | result.add(tableName); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | |
| | | return result;
| | | } |
| | | |
| | | |
| | | @Override |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
| | | private String determineTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) { |
| | | throw new IOException("cannot found " + DataReposVersionManager.XGVERSIONTABLE_NAME); |
| | | } |
| | | rs.close(); |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current != -1) { |
| | | Object[] values = tmpSchemas.get(current); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void clearOutputDatabase() { |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import java.io.IOException; |
| | | import java.sql.Connection; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.ArrayList; |
| | | import java.util.Date; |
| | | import java.util.List; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | /** |
| | | * |
| | | */ |
| | | @Deprecated |
| | | public class OracleTransformColorOwnerJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleTransformColorOwnerJob.class); |
| | | |
| | | public static String FETCH_TPDATA = "SELECT TPID, TPNAME FROM BASEDB.TPDATA"; |
| | | public static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1 FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | public static String FETCH_FDRCOLOR = "SELECT FRREDERID, COLOR FROM BASEDB.FEEDER"; |
| | | public static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR ORDER BY TAG_SFSC"; |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final boolean useTpclidText = false; |
| | | |
| | | private static final int FETCHSIZE = 30; |
| | | private static final int COMMITSIZE = 100; |
| | | |
| | | protected static class Pair { |
| | | Object first; |
| | | Object second; |
| | | |
| | | public Pair(Object first, Object second) { |
| | | this.first = first; |
| | | this.second = second; |
| | | } |
| | | } |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("PGHOST=" + _myHost); |
| | | logger.info("PGDATBASE=" + _myDatabase); |
| | | logger.info("PGPORT=" + _myPort); |
| | | logger.info("PGSCHEMA=" + _mySchema); |
| | | logger.info("PGUSER=" + _myUsername); |
| | | logger.info("PGPASS=" + _myPassword); |
| | | logger.info("USEWKB=" + _myUseWKB); |
| | | */ |
| | | |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
| | | @Override |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) {
| | | logger.warn("Cannot connect to the source Oracle database.");
| | | throw new JobExecutionException("Cannot connect to the source Oracle database.");
| | | }
| | | 
| | | if (getTargetDataStore() == null) {
| | | logger.warn("Cannot connect to the target PostgreSQL database.");
| | | throw new JobExecutionException("Cannot connect to the target PostgreSQL database.");
| | | }
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName; |
| | | try { |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | clearOutputDatabase(); |
| | | |
| | | logger.info("-- step:transformOracleDMMSDB --"); |
| | | targetSchemaName = determineTargetSchemaName(); |
| | | |
| | | OracleConvertPostGISJobContext jobContext = |
| | | (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | mergeConnectivityOwner(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge Connectivity Owner", tStep, tStepEnd); |
| | | } |
| | | |
| | | tStep = System.currentTimeMillis(); |
| | | mergeDynamicColor(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge ColorTable", tStep, tStepEnd); |
| | | } |
| | | |
| | | jobContext.closeOracleConnection(); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Connectivity (Connectivity) |
| | | * |
| | | * @param jobContext job context |
| | | * @throws java.sql.SQLException sql exception |
| | | */ |
| | | protected void mergeConnectivityOwner(AbstractOracleJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | String targetSchemaName = determineTargetSchemaName(); |
| | | logger.info("target schema:" + targetSchemaName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(50); |
| | | int lastClass = -1; |
| | | boolean changeType = false; |
| | | List<String> tables = null; |
| | | ArrayList<String> sqlBatchStmts = new ArrayList<String>(); |
| | | final int MAX_BATCHSIZE = 50; |
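| | | // flush batched UPDATE statements once more than MAX_BATCHSIZE accumulate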
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | if (lastClass != cid) { |
| | | logger.info("change type to :" + cid); |
| | | } |
| | | changeType = (lastClass != cid); |
| | | if (changeType) { |
| | | tables = fetchTargetTableList(targetSchemaName, cid); |
| | | if (tables == null)
| | | logger.info("no tables found for cid=" + cid);
| | | } |
| | | if (tables != null) { |
| | | for (String t : tables) { |
| | | String sqlStmt = generateTargetOwnerSql(targetSchemaName, t, cid, oid, ownerId);
| | | sqlBatchStmts.add(sqlStmt); |
| | | } |
| | | } |
| | | |
| | | if (MAX_BATCHSIZE < sqlBatchStmts.size()) { |
| | | batchExecuteSQL(sqlBatchStmts); |
| | | count += sqlBatchStmts.size(); |
| | | sqlBatchStmts.clear(); |
| | | } |
| | | lastClass = cid; |
| | | } |
| | | |
| | | if (!sqlBatchStmts.isEmpty()) { |
| | | batchExecuteSQL(sqlBatchStmts); |
| | | count += sqlBatchStmts.size(); |
| | | } |
| | | logger.info("Execute Update Count=" + count); |
| | | // } catch (SQLException e) |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
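| | | // Builds e.g.: UPDATE <schema>."<table>" SET fowner = <ownerId> WHERE tid=<cid> AND oid=<oid>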
| | | private String generateTargetOwnerSql(String schemaName, String t, int cid, long oid, int ownerId) {
| | | StringBuilder sb = new StringBuilder("UPDATE "); |
| | | sb.append(schemaName).append(".\"").append(t).append("\""); |
| | | sb.append(" SET fowner = ").append(ownerId); |
| | | sb.append(" WHERE tid=").append(cid); |
| | | sb.append(" AND oid=").append(oid); |
| | | return sb.toString(); |
| | | } |
| | | |
| | | private void updateTargetOwner(Connection connection, |
| | | String schemaName, String t, int cid, long oid, int ownerId) |
| | | throws SQLException, IOException { |
| | | if (connection == null) return; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate("UPDATE " + schemaName + "." + t + " SET fowner = " + ownerId + " WHERE oid=" + oid); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void mergeDynamicColor(OracleConvertPostGISJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | String targetSchemaName = determineTargetSchemaName(); |
| | | logger.info("target schema:" + targetSchemaName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(50); |
| | | int lastClass = -1; |
| | | boolean changeType = false; |
| | | List<String> tables = null; |
| | | ArrayList<String> sqlBatchStmts = new ArrayList<String>(); |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | changeType = (lastClass != cid); |
| | | if (changeType) { |
| | | logger.info("change type to: " + cid); |
| | | tables = fetchTargetTableList(targetSchemaName, cid); |
| | | if (tables == null) |
| | | logger.info("no target tables for cid=" + cid); |
| | | } |
| | | if (tables != null) { |
| | | for (String t : tables) { |
| | | String sqlStmt = generatrTargetDynamicColorSql(targetSchemaName, t, cid, oid, colorId); |
| | | sqlBatchStmts.add(sqlStmt); |
| | | } |
| | | } |
| | | if (MAX_BATCHSIZE < sqlBatchStmts.size()) { |
| | | batchExecuteSQL(sqlBatchStmts); |
| | | count += sqlBatchStmts.size(); |
| | | sqlBatchStmts.clear(); |
| | | } |
| | | lastClass = cid; |
| | | } |
| | | if (!sqlBatchStmts.isEmpty()) { |
| | | batchExecuteSQL(sqlBatchStmts); |
| | | count += sqlBatchStmts.size(); |
| | | } |
| | | logger.info("Execute Update Count=" + count); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private String generatrTargetDynamicColorSql(String schemaName, String t, int cid, long oid, int colorId) { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String colorText = colorTable.getColorCode(colorId); |
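| | | // colorText is embedded below as a quoted SQL literal; this assumes color |
| | | // codes never contain a single quote |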
| | | StringBuilder sb = new StringBuilder("UPDATE "); |
| | | sb.append(schemaName).append(".\"").append(t).append("\""); |
| | | sb.append(" SET dyncolor = '").append(colorText).append("'"); |
| | | sb.append(" WHERE tid=").append(cid); |
| | | sb.append(" AND oid=").append(oid); |
| | | return sb.toString(); |
| | | } |
| | | |
| | | // Single-row fallback for the dynamic-color update (the batch path above is |
| | | // the one in use); like updateTargetOwner it filters on oid only. |
| | | private void updateTargetDynamicColor(Connection connection, String schemaName, |
| | | String t, int cid, long oid, int colorId) { |
| | | if (connection == null) return; |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | stmt.executeUpdate("UPDATE " + schemaName + "." + t + " SET dyncolor = '" + colorText + "' WHERE oid=" + oid); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void batchExecuteSQL(ArrayList<String> sqlStmts) throws IOException { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | connection.setAutoCommit(false); |
| | | stmt = connection.createStatement(); |
| | | for (String sqlStmt : sqlStmts) { |
| | | stmt.addBatch(sqlStmt); |
| | | } |
| | | stmt.executeBatch(); |
| | | connection.commit(); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
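| | | /* |
| | |  * batchExecuteSQL logs and swallows SQLExceptions without rolling back, so a |
| | |  * partially-applied batch can remain pending on the pooled connection. A more |
| | |  * defensive variant (a sketch, not the original behaviour) would roll back in |
| | |  * the catch block: |
| | |  * |
| | |  *     } catch (SQLException e) { |
| | |  *         try { connection.rollback(); } catch (SQLException ignored) { } |
| | |  *         logger.warn(e.getMessage(), e); |
| | |  *     } |
| | |  */ |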
| | | |
| | | |
| | | private List<String> fetchTargetTableList(String targetSchemaName, int cid) throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | ArrayList<String> result = new ArrayList<String>(); |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | String[] types = {"TABLE"}; |
| | | rs = connection.getMetaData().getTables(null, targetSchemaName, "fsc-" + cid + "%", types); |
| | | while (rs.next()) { |
| | | String tableName = rs.getString("TABLE_NAME"); |
| | | logger.info("table:" + tableName); |
| | | result.add(tableName); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | |
| | | return result; |
| | | } |
| | | |
| | | |
| | | @Override |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
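| | | // split the elapsed milliseconds into whole minutes and leftover seconds |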
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
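| | | /* |
| | |  * pgProperties is expected to carry the standard GeoTools |
| | |  * PostgisNGDataStoreFactory connection keys. For reference, a typical setup |
| | |  * (illustrative values, not taken from the original configuration) is: |
| | |  * |
| | |  *     pgProperties.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | |  *     pgProperties.put(PostgisNGDataStoreFactory.HOST.key, "localhost"); |
| | |  *     pgProperties.put(PostgisNGDataStoreFactory.PORT.key, "5432"); |
| | |  *     pgProperties.put(PostgisNGDataStoreFactory.DATABASE.key, "xgeos"); |
| | |  *     pgProperties.put(PostgisNGDataStoreFactory.USER.key, "postgres"); |
| | |  *     pgProperties.put(PostgisNGDataStoreFactory.PASSWD.key, "secret"); |
| | |  */ |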
| | | |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
| | | private String determineTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) { |
| | | throw new IOException("cannot find " + DataReposVersionManager.XGVERSIONTABLE_NAME); |
| | | } |
| | | rs.close(); |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
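| | | // VSSTATUS_USING is a bit flag marking the schema version currently in service |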
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current != -1) { |
| | | Object[] values = tmpSchemas.get(current); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void clearOutputDatabase() { |
| | | } |
| | | } |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | String targetSchemaName = null; |
| | | |
| | | // Log the time the job started |
| | | |
| | | } |
| | | |
| | | AbstractOracleJobContext jobContext = prepareJobContext(targetSchemaName, _filterPath, isProfileMode(), |
| | | isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | |
| | | try { |
| | | |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform) { |
| | | return new OracleUpgradeJobContext(profileMode, useTransform); |
| | | } |
| | | |
| | | private void exetcuteConvert(AbstractOracleJobContext jobContext, |
| | | |
| | | import java.util.Map; |
| | | |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.digester3.binder.DigesterLoader; |
| | | import org.apache.commons.digester3.xmlrules.FromXmlRulesModule; |
| | | import org.apache.commons.logging.Log; |
| | | import org.quartz.JobExecutionContext; |
| | | |
| | | import com.ximple.io.dgn7.Dgn7fileReader; |
| | | |
| | | public abstract class AbstractDgnFileJobContext { |
| | | |
| | | // ------------------------------ FIELDS ------------------------------ |
| | | |
| | | /** ... */ |
| | | |
| | | private boolean _elementLogging; |
| | | private boolean _profileMode = false; |
| | | private boolean _useTransform = true; |
| | | private boolean _useEPSG3826 = true; |
| | | |
| | | private long _processTime; |
| | | private long _updateTime; |
| | | private long _processTimeStart; |
| | | private long _updateTimeStart; |
| | | |
| | | private short distId; |
| | | |
| | | // --------------------------- CONSTRUCTORS --------------------------- |
| | | |
| | | public AbstractDgnFileJobContext(String dataPath, boolean profileMode, |
| | | boolean useTransform) { |
| | | _dataPath = dataPath; |
| | | _profileMode = profileMode; |
| | | _useTransform = useTransform; |
| | | } |
| | | |
| | | // --------------------- GETTER / SETTER METHODS --------------------- |
| | | |
| | | public boolean isTransformed() { |
| | | return _useTransform; |
| | | } |
| | | |
| | | public boolean isEPSG3826() { |
| | | return _useEPSG3826; |
| | | } |
| | | |
| | | public short getDistId() { |
| | | return distId; |
| | | } |
| | | |
| | | public void setDistId(short distId) { |
| | | this.distId = distId; |
| | | } |
| | | |
| | | // -------------------------- OTHER METHODS -------------------------- |
| | | |
| | | public final void accumulateProcessTime() { |
| | | _processTime += System.currentTimeMillis() - _processTimeStart; |
| | | } |
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/AbstractOracleJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/OracleUpgradeJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/AbstractDgnToEdbGeoJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/AbstractOracleToEdbGeoJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/DummyFeatureConvertEdbGeoJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/FeatureDgnConvertEdbGeoJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/GeneralDgnConvertEdbGeoJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/IndexDgnConvertEdbGeoJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/OracleConvertEdbGeoJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/AbstractDgnToMySQLJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/AbstractOracleToMySQLJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/DummyFeatureConvertMySQlJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/FeatureDgnConvertMySQLJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/GeneralDgnConvertMySQLJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/IndexDgnConvertMySQLJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/OracleConvertMySQLJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/AbstractDgnToOraSDOJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/AbstractOracleToOraSDOJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/DummyFeatureConvertOraSDOJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/FeatureDgnConvertOraSDOJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/GeneralDgnConvertOraSDOJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/IndexDgnConvertOraSDOJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/OracleConvertOraSDOJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/AbstractDgnToPostGISJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/AbstractOracleToPostGISJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/DummyFeatureConvertPostGISJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/FeatureDgnConvertPostGISJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/GeneralDgnConvertPostGISJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/IndexDgnConvertPostGISJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/OracleConvertGeoServerContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/OracleConvertPostGISJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/OracleIncrementPostGISJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/shapefile/AbstractDgnToShapefileJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/shapefile/DummyFeatureConvertShpJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/shapefile/FeatureDgnConvertShpJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/shapefile/GeneralDgnConvertShpJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/shapefile/IndexDgnConvertShpJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/shapefile/OracleConvertShapefilesJobContext.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/BinConverter.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/Bits.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/ByteArrayCompressor.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/ConnectivityDirectionEnum.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/ElementDigesterUtils.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/FeatureTypeBuilderUtil.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/GeomUtil.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/LangUtil.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/TPCLIDConverter.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/TWDDatumConverter.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/XGeosConfigDigesterUtils.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/postjts/JTSShape.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/postjts/JtsGisWrapper.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/postjts/JtsWrapper.java
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/filter/ElementDispatcherRules.xml
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/filter/FeatureClassificationRules.xml
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/filter/digester-rules-3.0.dtd
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/geoserver/config/DefaultXGeosDataConfigRules.xml
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/geoserver/config/digester-rules-3.0.dtd
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/geoserver/config/xgeosdataconfig-1.xml
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/geoserver/config/xgeosdataconfig.xml
xdgnjobs/ximple-spatialjob/src/main/resources/conf/DefaultConvertShpFilter.xml
xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/filter/ElementDispatcherTest.java
xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/util/FeatureTypeBuilderUtilTest.java |