| | |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Dgn7fileException.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Dgn7fileHeader.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Dgn7fileReader.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Dgn7fileWriter.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/Element.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/ElementFactory.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/ElementType.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/EllipseElement.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/main/java/com/ximple/io/dgn7/FrammeAttributeData.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/Dgn7OracleReaderTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/Dgn7TextElementReaderTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/Dgn7fileReaderTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/Dgn7fileWriterTest.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/ElementFactoryTest.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/java/com/ximple/io/dgn7/OracleTarget.java svneol=native#text/plain |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/Demo.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/HV88491-1.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/HV88491_0888888.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/HV88494_0.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/dgnseed2d.dgn -text |
| | | xdgnjobs/ximple-dgnio/src/test/resources/com/ximple/io/dgn7/test-data/testHV.dgn -text |
| | | xdgnjobs/ximple-elmparser/pom.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-elmparser/src/main/java/com/ximple/eofms/XElementFetcher.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-elmparser/src/main/resources/com/ximple/eofms/XElementParser.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-elmparser/src/main/resources/com/ximple/eofms/XElementParser_zh_TW.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-elmparser/src/main/resources/log4j.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/log4j.properties -text |
| | | xdgnjobs/ximple-jobcarrier/pom.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-jobcarrier/quartz.properties -text |
| | | xdgnjobs/ximple-jobcarrier/quartz_jobs.xml -text |
| | | xdgnjobs/ximple-jobcarrier/src/main/java/com/ximple/eofms/XQuartzJobCarrier.java svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/src/main/java/com/ximple/eofms/XQuartzJobWizard.java svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/com/ximple/eofms/XQuartzJobWizard.properties svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/log4j.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/quartz.properties svneol=native#text/plain |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/quartz_jobs.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/quartz_jobs_edb.xml -text svneol=unset#text/xml |
| | | xdgnjobs/ximple-jobcarrier/src/main/resources/quartz_jobs_shapefiles.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-jobcarrier/src/test/java/com/ximple/eofms/XQuartzJobCarrierTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/pom.xml svneol=native#text/xml |
| | |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/AbstractOracleDatabaseJob.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/DataReposVersionManager.java -text |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/DummyFeatureConvertJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2EdbGeoJob.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2MySQLJob.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2OraSDOJob.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/AbstractDgnFileJobContext.java -text |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/AbstractOracleJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/OracleUpgradeJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/AbstractDgnToEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/AbstractOracleToEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/DummyFeatureConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/FeatureDgnConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/GeneralDgnConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/IndexDgnConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/edbgeo/OracleConvertEdbGeoJobContext.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/AbstractDgnToMySQLJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/AbstractOracleToMySQLJobContext.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/DummyFeatureConvertMySQlJobContext.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/ByteArrayCompressor.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/ColorTableMapping.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/DefaultColorTable.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/DigesterUtils.java -text |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/EPSG3825GeometryConverterDecorator.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/EPSG3826GeometryConverterDecorator.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/FeatureTypeBuilderUtil.java svneol=native#text/plain |
| | |
| | | xdgnjobs/ximple-spatialjob/src/main/resources/conf/DefaultConvertShpFilter.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-spatialjob/src/main/resources/conf/DefaultMapGroups.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/filter/ElementDispatcherTest.java svneol=native#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/util/FeatureTypeBuilderUtilTest.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/util/FileUtilsTest.java -text svneol=unset#text/plain |
| | | xdgnjobs/ximple-spatialjob/src/test/resources/com/ximple/eofms/filter/test-data/testElementFilter.xml svneol=native#text/xml |
| | | xdgnjobs/ximple-spatialjob/src/test/resources/com/ximple/eofms/filter/test-data/testRules.xml svneol=native#text/xml |
| | |
| | | <oracle.jdbc>true</oracle.jdbc> |
| | | <test.maxHeapSize>512M</test.maxHeapSize> |
| | | <src.output>${basedir}/target</src.output> |
| | | <java5>1.5</java5> |
| | | <xdgnio.version>1.2.0</xdgnio.version> |
| | | <gt2.version>2.6.2</gt2.version> |
| | | <java5>1.6</java5> |
| | | <xdgnio.version>2.1.1</xdgnio.version> |
| | | <gt.version>10.3.x</gt.version> |
| | | <failIfNoTests>false</failIfNoTests> |
| | | <stress.skip.pattern></stress.skip.pattern> |
| | | <online.skip.pattern></online.skip.pattern> |
| | | <java.awt.headless>false</java.awt.headless> |
| | | <allow.test.failure.ignore>false</allow.test.failure.ignore> |
| | | </properties> |
| | | |
| | | <profiles> |
| | |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-javadoc-plugin</artifactId> |
| | | <version>2.6.1</version> |
| | | <configuration> |
| | | <source>1.5</source> |
| | | </configuration> |
| | |
| | | </site> |
| | | </distributionManagement> |
| | | </profile> |
| | | |
| | | <!-- =========================================================== --> |
| | | <!-- Build Configuration --> |
| | | <!-- Copies all JARs into a single directory. --> |
| | | <!-- =========================================================== --> |
| | | <profile> |
| | | <id>collect</id> |
| | | <build> |
| | | <plugins> |
| | | <plugin> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-jar-collector</artifactId> |
| | | <version>${project.version}</version> |
| | | <executions> |
| | | <execution> |
| | | <goals> |
| | | <goal>collect</goal> |
| | | </goals> |
| | | </execution> |
| | | </executions> |
| | | </plugin> |
| | | </plugins> |
| | | </build> |
| | | </profile> |
| | | </profiles> |
| | | |
| | | <scm> |
| | |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <packaging>pom</packaging> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | <name>ximple-dgnjobs</name> |
| | | <url>http://www.ximple.com.tw</url> |
| | | |
| | |
| | | <url>http://www.ximple.com.tw</url> |
| | | </organization> |
| | | |
| | | <inceptionYear>2010</inceptionYear> |
| | | <inceptionYear>2012</inceptionYear> |
| | | |
| | | <!-- =========================================================== --> |
| | | <!-- Issue management and mailing lists. --> |
| | |
| | | <!-- =========================================================== --> |
| | | <dependencyManagement> |
| | | <dependencies> |
| | | <!-- GeoAPI and its dependencies --> |
| | | <dependency> |
| | | <groupId>org.opengis</groupId> |
| | | <artifactId>geoapi</artifactId> |
| | | <version>2.3-M1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.opengis</groupId> |
| | | <artifactId>geoapi-dummy-pending</artifactId> |
| | | <version>2.3-M1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.opengis</groupId> |
| | | <artifactId>geoapi-pending</artifactId> |
| | | <version>2.3-M1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>net.java.dev.jsr-275</groupId> |
| | | <artifactId>jsr-275</artifactId> |
| | | <version>1.0-beta-2</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>com.vividsolutions</groupId> |
| | | <artifactId>jts</artifactId> |
| | | <version>1.10</version> |
| | | <version>1.13</version> |
| | | </dependency> |
| | | |
| | | <!-- Apache --> |
| | |
| | | <version>3.2.1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-digester</groupId> |
| | | <artifactId>commons-digester</artifactId> |
| | | <version>2.0</version> |
| | | <groupId>org.apache.commons</groupId> |
| | | <artifactId>commons-digester3</artifactId> |
| | | <version>3.2</version> |
| | | <!--classifier>with-deps</classifier--> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-pool</groupId> |
| | | <artifactId>commons-pool</artifactId> |
| | | <version>1.5.4</version> |
| | | <version>1.6</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-logging</groupId> |
| | |
| | | <version>1.2</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-io</groupId> |
| | | <artifactId>commons-io</artifactId> |
| | | <version>2.4</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>log4j</groupId> |
| | | <artifactId>log4j</artifactId> |
| | | <version>1.2.15</version> |
| | | <version>1.2.17</version> |
| | | <!-- Same as the dependency in commons-logging --> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.apache.poi</groupId> |
| | | <artifactId>poi</artifactId> |
| | | <version>3.6</version> |
| | | <version>3.9</version> |
| | | </dependency> |
| | | |
| | | <!-- geotools --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-api</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-main</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-shapefile</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-sample-data</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-data</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-opengis</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-metadata</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-jdbc</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-oracle-spatial</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-oracle</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-postgis</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-postgis</artifactId> |
| | | <version>${gt.version}</version> |
| | | <!--exclusions> |
| | | <exclusion> |
| | | <groupId>org.postgis</groupId> |
| | | <artifactId>postgis-driver</artifactId> |
| | | </exclusion> |
| | | </exclusions--> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-mysql</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-mysql</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | |
| | | <!-- Because main and sample-data depend on referencing, we need a tie breaker. --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-referencing</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-hsql</artifactId> |
| | | <version>${gt.version}</version> |
| | | <!--exclusions> |
| | | <exclusion> |
| | | <groupId>com.h2database</groupId> |
| | | <artifactId>h2</artifactId> |
| | | </exclusion> |
| | | </exclusions--> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-wkt</artifactId> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>org.jdom</groupId> |
| | | <artifactId>jdom</artifactId> |
| | | <version>1.1</version> |
| | | <version>1.1.3</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>org.apache.velocity</groupId> |
| | | <artifactId>velocity</artifactId> |
| | | <version>1.6.3</version> |
| | | </dependency> |
| | | |
| | | <!-- We need this to make the referencing module useful --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-hsql</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <scope>test</scope> |
| | | <version>1.7</version> |
| | | </dependency> |
| | | |
| | | <!-- ORACLE --> |
| | |
| | | <version>11.1.0</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>postgresql</groupId> |
| | | <artifactId>postgresql</artifactId> |
| | | <version>8.4-701.jdbc3</version> |
| | | </dependency> |
| | | <version>8.4-702.jdbc3</version> |
| | | </dependency--> |
| | | <dependency> |
| | | <groupId>org.postgis</groupId> |
| | | <artifactId>postgis-driver</artifactId> |
| | | <version>1.5.0</version> |
| | | <version>2.0.2</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>mysql</groupId> |
| | | <artifactId>mysql-connector-java</artifactId> |
| | | <version>5.1.12</version> |
| | | </dependency> |
| | | <version>5.1.18</version> |
| | | </dependency--> |
| | | |
| | | <!--dependency> |
| | | <groupId>com.h2database</groupId> |
| | | <artifactId>h2</artifactId> |
| | | <version>1.3.163</version> |
| | | </dependency--> |
| | | |
| | | <!-- EnterpriseDB --> |
| | | <dependency> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>xedb-gt-geospatial</artifactId> |
| | | <version>0.1.1</version> |
| | | </dependency> |
| | | |
| | | <!-- quartz-scheduler--> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz</artifactId> |
| | | <version>1.7.3</version> |
| | | <version>2.2.1</version> |
| | | <exclusions> |
| | | <exclusion> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | </exclusion> |
| | | </exclusions> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz-jobs</artifactId> |
| | | <version>2.2.1</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | <version>1.7.5</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-log4j12</artifactId> |
| | | <version>1.7.5</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>jcl-over-slf4j</artifactId> |
| | | <version>1.7.5</version> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.awl</groupId> |
| | |
| | | <version>1.1.0-rc</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>it.geosolutions</groupId> |
| | | <artifactId>geoserver-manager</artifactId> |
| | | <!--version>1.5.2</version--> |
| | | <version>1.6-SNAPSHOT</version> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>xerces</groupId> |
| | | <artifactId>xercesImpl</artifactId> |
| | | <version>2.11.0</version> |
| | | </dependency> |
| | | <!-- Tests or legacy --> |
| | | <dependency> |
| | | <groupId>org.testng</groupId> |
| | | <artifactId>testng</artifactId> |
| | | <version>5.11</version> |
| | | <classifier>jdk15</classifier> |
| | | <version>6.8.7</version> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | </dependencies> |
| | |
| | | <!-- Dependencies to be inherited by all modules. --> |
| | | <!-- =========================================================== --> |
| | | <dependencies> |
| | | <dependency> |
| | | <groupId>org.opengis</groupId> |
| | | <artifactId>geoapi</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.opengis</groupId> |
| | | <artifactId>geoapi-dummy-pending</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.opengis</groupId> |
| | | <artifactId>geoapi-pending</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>net.java.dev.jsr-275</groupId> |
| | | <artifactId>jsr-275</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>com.vividsolutions</groupId> |
| | | <artifactId>jts</artifactId> |
| | |
| | | <groupId>commons-collections</groupId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-digester</groupId> |
| | | <artifactId>commons-digester</artifactId> |
| | | <groupId>org.apache.commons</groupId> |
| | | <artifactId>commons-digester3</artifactId> |
| | | <!--classifier>with-deps</classifier--> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-pool</groupId> |
| | |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-main</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-hsql</artifactId> |
| | | </dependency> |
| | | |
| | | <!--dependency> |
| | | <groupId>com.h2database</groupId> |
| | | <artifactId>h2</artifactId> |
| | | </dependency--> |
| | | |
| | | <dependency> |
| | | <artifactId>testng</artifactId> |
| | | <groupId>xerces</groupId> |
| | | <artifactId>xercesImpl</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>it.geosolutions</groupId> |
| | | <artifactId>geoserver-manager</artifactId> |
| | | <exclusions> |
| | | <exclusion> |
| | | <groupId>commons-io</groupId> |
| | | <artifactId>commons-io</artifactId> |
| | | </exclusion> |
| | | </exclusions> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>org.testng</groupId> |
| | | <classifier>jdk15</classifier> |
| | | <artifactId>testng</artifactId> |
| | | <scope>test</scope> |
| | | </dependency> |
| | | </dependencies> |
| | |
| | | <artifactId>maven-eclipse-plugin</artifactId> |
| | | <version>2.5</version> |
| | | </plugin> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-javadoc-plugin</artifactId> |
| | | <version>2.6.1</version> |
| | | </plugin> |
| | | </plugins> |
| | | </pluginManagement> |
| | | |
| | |
| | | <convention>gt2/jalopygeotools.xml</convention> |
| | | <failOnError>false</failOnError> |
| | | </configuration> |
| | | <!-- |
| | | <dependencies> |
| | | <dependency> |
| | | <groupId>org.geotools.maven</groupId> |
| | | <artifactId>gt2-build-configs</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | </dependencies> |
| | | --> |
| | | </plugin> |
| | | |
| | | |
| | |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-compiler-plugin</artifactId> |
| | | <configuration> |
| | | <source>1.5</source> |
| | | <source>1.6</source> |
| | | <!-- The -source argument for the Java compiler. --> |
| | | <target>1.5</target> |
| | | <target>1.6</target> |
| | | <!-- The -target argument for the Java compiler. --> |
| | | <debug>true</debug> |
| | | <!-- Whether to include debugging information. --> |
| | | <encoding>ISO-8859-1</encoding> |
| | | <encoding>UTF-8</encoding> |
| | | <!-- The -encoding argument for the Java compiler. --> |
| | | </configuration> |
| | | </plugin> |
| | |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-surefire-plugin</artifactId> |
| | | <version>2.14.1</version> |
| | | <configuration> |
| | | <includes> |
| | | <include>**/*Test.java</include> |
| | |
| | | <!-- ======================================================= --> |
| | | <!-- Code coverage --> |
| | | <!-- ======================================================= --> |
| | | <!-- |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-clover-plugin</artifactId> |
| | | <version>2.4</version> |
| | | <configuration> |
| | | <jdk>1.5</jdk> |
| | | <licenseLocation> |
| | | http://svn.geotools.org/geotools/branches/2.4.x/build/maven/build-configs/src/main/resources/gt2/clover.license |
| | | http://svn.geotools.org/geotools/branches/2.6.x/build/maven/build-configs/src/main/resources/gt2/clover.license |
| | | </licenseLocation> |
| | | <flushPolicy>directed</flushPolicy> |
| | | </configuration> |
| | |
| | | <phase>pre-site</phase> |
| | | <goals> |
| | | <goal>instrument</goal> |
| | | <!-- aggregation is disabled due to the bug: --> |
| | | <!-- http://jira.codehaus.org/browse/MCLOVER-34 --> |
| | | < - aggregation is disabled due to the bug: - !> |
| | | < - http://jira.codehaus.org/browse/MCLOVER-34 - !> |
| | | </goals> |
| | | </execution> |
| | | </executions> |
| | |
| | | <dependency> |
| | | <groupId>org.geotools.maven</groupId> |
| | | <artifactId>gt2-build-configs</artifactId> |
| | | <version>${gt2.version}</version> |
| | | <version>${gt.version}</version> |
| | | </dependency> |
| | | </dependencies> |
| | | </plugin> |
| | | |
| | | --> |
| | | |
| | | <!-- ======================================================= --> |
| | | <!-- JAR packaging. --> |
| | |
| | | </plugin> |
| | | |
| | | <!-- ======================================================= --> |
| | | <!-- JavaDoc packaging. --> |
| | | <!-- ======================================================= --> |
| | | <plugin> |
| | | <groupId>org.apache.maven.plugins</groupId> |
| | | <artifactId>maven-javadoc-plugin</artifactId> |
| | | </plugin> |
| | | |
| | | <!-- ======================================================= --> |
| | | <!-- Source packaging. --> |
| | | <!-- ======================================================= --> |
| | | <plugin> |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-maven</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | |
| | |
| | | <!-- =========================================================== --> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-jar-collector</artifactId> |
| | | <version>2.1.1</version> |
| | | <packaging>maven-plugin</packaging> |
| | | <name>JAR files collector</name> |
| | | |
| | |
| | | private String jarName; |
| | | |
| | | /** |
| | | * Project dependencies. |
| | | * |
| | | * @parameter expression="${project.artifacts}" |
| | | * @required |
| | | */ |
| | | private Set /*<Artifact>*/ dependencies; |
| | | |
| | | /** |
| | | * The Maven project running this plugin. |
| | | * |
| | | * @parameter expression="${project}" |
| | |
| | | * Gets the parent "target" directory. |
| | | */ |
| | | MavenProject parent = project; |
| | | |
| | | while (parent.hasParent()) { |
| | | parent = parent.getParent(); |
| | | } |
| | | |
| | | collectDirectory = parent.getBuild().getDirectory(); |
| | | |
| | | /* |
| | | * Now collects the JARs. |
| | | */ |
| | |
| | | * such file. Some modules use pom packaging, which does not produce any JAR file. |
| | | */ |
| | | final File jarFile = new File(outputDirectory, jarName + ".jar"); |
| | | |
| | | if (!jarFile.isFile()) { |
| | | return; |
| | | } |
| | | |
| | | /* |
| | | * Get the "target" directory of the parent pom.xml and make sure it exists. |
| | | */ |
| | | File collect = new File(collectDirectory); |
| | | |
| | | if (!collect.exists()) { |
| | | if (!collect.mkdir()) { |
| | | throw new MojoExecutionException("Failed to create target directory."); |
| | | throw new MojoExecutionException("Failed to create target directory: " + collect.getAbsolutePath()); |
| | | } |
| | | } |
| | | |
| | | if (collect.getCanonicalFile().equals(jarFile.getParentFile().getCanonicalFile())) { |
| | | /* |
| | | * The parent's directory is the same as this module's directory. |
| | |
| | | */ |
| | | return; |
| | | } |
| | | |
| | | /* |
| | | * Creates a "binaries" subdirectory inside the "target" directory. |
| | | */ |
| | | collect = new File(collect, SUB_DIRECTORY); |
| | | |
| | | if (!collect.exists()) { |
| | | if (!collect.mkdir()) { |
| | | throw new MojoExecutionException("Failed to create binaries directory."); |
| | | } |
| | | } |
| | | |
| | | int count = 1; |
| | | FileUtils.copyFileToDirectory(jarFile, collect); |
| | | |
| | | Set<Artifact> dependencies = project.getDependencyArtifacts(); |
| | | if (dependencies != null) { |
| | | for (final Iterator it = dependencies.iterator(); it.hasNext();) { |
| | | final Artifact artifact = (Artifact) it.next(); |
| | | for (final Artifact artifact : dependencies) { |
| | | System.out.println("+++++++++++++++++++++++ DEP: " + artifact.getDependencyTrail()); |
| | | final String scope = artifact.getScope(); |
| | | |
| | | if ((scope != null) // Maven 2.0.6 bug? |
| | | && (scope.equalsIgnoreCase(Artifact.SCOPE_COMPILE) |
| | | || scope.equalsIgnoreCase(Artifact.SCOPE_RUNTIME))) { |
| | | if (scope != null && // Maven 2.0.6 bug? |
| | | (scope.equalsIgnoreCase(Artifact.SCOPE_COMPILE) || |
| | | scope.equalsIgnoreCase(Artifact.SCOPE_RUNTIME))) |
| | | { |
| | | final File file = artifact.getFile(); |
| | | final File copy = new File(collect, file.getName()); |
| | | |
| | | if (!copy.exists()) { |
| | | /* |
| | | * Copies the dependency only if it was not already copied. Note that |
| | | * the module's JAR was copied unconditionally above (because it may |
| | | * be the result of a new compilation). If a GeoTools JAR from the |
| | | * dependencies list changed, it will be copied unconditionally when |
| | | * the module for that JAR is processed by Maven. |
| | | */ |
| | | FileUtils.copyFileToDirectory(file, collect); |
| | | count++; |
| | | if (!artifact.getGroupId().startsWith("com.ximple.eofms")) { |
| | | final File copy = new File(collect, file.getName()); |
| | | if (copy.exists()) { |
| | | /* |
| | | * Copies the dependency only if it was not already copied. Note that |
| | | * the module's JAR was copied unconditionally above (because it may |
| | | * be the result of a new compilation). If a GeoTools JAR from the |
| | | * dependencies list changed, it will be copied unconditionally when |
| | | * the module for that JAR is processed by Maven. |
| | | */ |
| | | continue; |
| | | } |
| | | } |
| | | FileUtils.copyFileToDirectory(file, collect); |
| | | } |
| | | } |
| | | } |
| | | |
| | | getLog().info("Copied " + count + " JAR to parent directory."); |
| | | } |
| | | } |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-build</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | |
| | |
| | | <dependency> |
| | | <groupId>org.codehaus.plexus</groupId> |
| | | <artifactId>plexus-utils</artifactId> |
| | | <version>2.0.1</version> |
| | | <version>2.1</version> |
| | | </dependency> |
| | | </dependencies> |
| | | |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | |
| | |
| | | <!-- =========================================================== --> |
| | | <groupId>com.ximple.eofms.maven</groupId> |
| | | <artifactId>ximple-build</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | <packaging>pom</packaging> |
| | | <name>Build tools for Ximple DgnJobs</name> |
| | | |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | <!-- =========================================================== --> |
| | |
| | | <!-- =========================================================== --> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnio</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | <packaging>jar</packaging> |
| | | <name>ximple-dgnio</name> |
| | | <url>http://www.ximple.com.tw</url> |
| | |
| | | <groupId>org.apache.poi</groupId> |
| | | <artifactId>poi</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>commons-io</groupId> |
| | | <artifactId>commons-io</artifactId> |
| | | </dependency> |
| | | </dependencies> |
| | | |
| | | <!-- =========================================================== --> |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/26 06:41:45 PM |
| | | */ |
| | | public class ArcElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(ArcElement.class); |
| | | |
| | | public ArcElement(byte[] raw) { |
| | | ArcElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | package com.ximple.io.dgn7; |
| | | |
| | | import java.util.ArrayList; |
| | | import java.util.Arrays; |
| | | import java.util.Collection; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 03:44:56 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class ComplexChainElement extends Element implements ComplexElement, GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(ComplexChainElement.class); |
| | | |
| | | protected ArrayList<Element> list = new ArrayList<Element>(); |
| | | |
| | | public ComplexChainElement(byte[] raw) { |
| | | ComplexChainElement(byte[] raw) { |
| | | super(raw); |
| | | attrOffset = 4; |
| | | } |
| | |
| | | return factory.createMultiLineString(lines); |
| | | } |
| | | |
| | | public double getElementSize() { |
| | | public short getTotalLength() { |
| | | return raw[18]; |
| | | } |
| | | |
| | | protected void setTotalLength(short value) { |
| | | raw[18] = value; |
| | | } |
| | | |
| | | public short getNumOfElement() { |
| | | return raw[19]; |
| | | } |
| | | |
| | | protected void setNumOfElement(short value) { |
| | | raw[19] = value; |
| | | } |
| | | |
| | | |
| | | public short[] getAttributes() { |
| | | return Arrays.copyOfRange(raw, 20, 24); |
| | | } |
| | | |
| | | protected void setAttributes(short[] values) { |
| | | if (values.length < 4) return; |
| | | System.arraycopy(values, 0, raw, 20, 24 - 20); |
| | | } |
| | | |
| | | public boolean isClosed() { |
| | | if (isEmpty()) { |
| | | return false; |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 04:17:37 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public interface ComplexElement extends List<Element> { |
| | | } |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 03:45:15 PM |
| | | */ |
| | | public class ComplexShapeElement extends Element implements ComplexElement, GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(ComplexShapeElement.class); |
| | | |
| | | ArrayList<Element> list = new ArrayList<Element>(); |
| | | |
| | | public ComplexShapeElement(byte[] raw) { |
| | | ComplexShapeElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * Dgn7OracleReader |
| | | * User: Ulysses |
| | | * Date: 2007/10/24 |
| | | * Time: 01:01:08 PM |
| | | * Time: |
| | | */ |
| | | public class Dgn7OracleReader implements Iterator<Element> { |
| | | private final static Logger logger = Logger.getLogger(Dgn7OracleReader.class); |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/17 01:21:00 PM |
| | | * @since 2006/5/17 |
| | | */ |
| | | public class Dgn7fileHeader { |
| | | private short elmtype; |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/17 01:24:10 PM |
| | | * @since 2006/5/17 |
| | | */ |
| | | public class Dgn7fileReader { |
| | | private static final Logger logger = LogManager.getLogger(Dgn7fileReader.class); |
| | |
| | | ByteBuffer buffer; |
| | | private ElementType fileElementType = ElementType.UNDEFINED; |
| | | private ByteBuffer headerTransfer; |
| | | private final Record record = new Record(); |
| | | private final Element.FileRecord record = new Element.FileRecord(); |
| | | private final boolean randomAccessEnabled; |
| | | private Lock lock; |
| | | private boolean useMemoryMappedBuffer; |
| | | private long currentOffset = 0L; |
| | | private StreamLogging streamLogger = new StreamLogging("Shapefile Reader"); |
| | | private StreamLogging streamLogger = new StreamLogging("Dgn7 Reader"); |
| | | private int maxElementId = 0; |
| | | |
| | | public Dgn7fileReader(ReadableByteChannel channel, boolean strict, boolean useMemoryMapped, Lock lock) |
| | | public Dgn7fileReader(FileChannel channel, boolean strict, boolean useMemoryMapped, Lock lock) |
| | | throws IOException, Dgn7fileException { |
| | | this.channel = channel; |
| | | this.useMemoryMappedBuffer = useMemoryMapped; |
| | |
| | | init(strict); |
| | | } |
| | | |
| | | public Dgn7fileReader(ReadableByteChannel channel, Lock lock) throws IOException, Dgn7fileException { |
| | | public Dgn7fileReader(FileChannel channel, Lock lock) throws IOException, Dgn7fileException { |
| | | this(channel, true, true, lock); |
| | | } |
| | | |
| | |
| | | return randomAccessEnabled; |
| | | } |
| | | |
| | | public Record nextElement() throws IOException, Dgn7fileException { |
| | | public Element.FileRecord nextElement() throws IOException, Dgn7fileException { |
| | | // need to update position |
| | | buffer.position(this.toBufferOffset(record.end)); |
| | | |
| | |
| | | record.length = elementLength; |
| | | record.signature = signature; |
| | | record.number = recordNumber; |
| | | record.buffer = buffer; |
| | | |
| | | // remember, we read one int already... |
| | | record.end = this.toFileOffset(buffer.position()) + elementLength - 4; |
| | |
| | | } |
| | | } |
| | | |
| | | public Record elementAt(int offset) throws IOException, UnsupportedOperationException, Dgn7fileException { |
| | | public Element.FileRecord elementAt(int offset) throws IOException, UnsupportedOperationException, Dgn7fileException { |
| | | if (randomAccessEnabled) { |
| | | this.goTo(offset); |
| | | |
| | |
| | | while (reader.hasNext()) { |
| | | size++; |
| | | |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | |
| | | } catch (IOException e) { |
| | | logger.warn("Stop read dgn file", e); |
| | | } catch (Dgn7fileException e) { |
| | | e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | reader.close(); |
| | | } |
| | | |
| | | System.out.println("count=" + count + " size=" + size); |
| | | logger.debug("count=" + count + " size=" + size); |
| | | // reader.close(); |
| | | } catch (IOException ioe) { |
| | | System.out.println(ioe); |
| | | ioe.printStackTrace(); |
| | | logger.warn(ioe.getMessage(), ioe); |
| | | } catch (Dgn7fileException e) { |
| | | e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | System.exit(0); |
| | | } |
| | | |
| | | public final class Record { |
| | | int length; |
| | | int number = 0; |
| | | int offset; // Relative to the whole file |
| | | int start = 0; // Relative to the current loaded buffer |
| | | short signature = 0; |
| | | |
| | | /** |
| | | * The minimum X value. |
| | | */ |
| | | public double minX; |
| | | |
| | | /** |
| | | * The minimum Y value. |
| | | */ |
| | | public double minY; |
| | | |
| | | /** |
| | | * The minimum Z value. |
| | | */ |
| | | public double minZ; |
| | | |
| | | /** |
| | | * The maximum X value. |
| | | */ |
| | | public double maxX; |
| | | |
| | | /** |
| | | * The maximum Y value. |
| | | */ |
| | | public double maxY; |
| | | |
| | | /** |
| | | * The maximum Z value. |
| | | */ |
| | | public double maxZ; |
| | | |
| | | // ElementType type; |
| | | int end = 0; // Relative to the whole file |
| | | Object element = null; |
| | | IElementHandler handler; |
| | | |
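| | | // decoded lazily: the handler parses the raw buffer only on the first call to element() |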
| | | public Object element() { |
| | | if (element == null) { |
| | | buffer.position(start); |
| | | buffer.order(ByteOrder.LITTLE_ENDIAN); |
| | | |
| | | if (handler == null) { |
| | | return null; |
| | | } |
| | | |
| | | element = handler.read(buffer, signature, length); |
| | | } |
| | | |
| | | return element; |
| | | } |
| | | |
| | | public int offset() { |
| | | return offset; |
| | | } |
| | | |
| | | /** |
| | | * A summary of the record. |
| | | */ |
| | | public String toString() { |
| | | return "Record " + number + " length " + length + " bounds " + minX + "," + minY + " " + maxX + "," + maxY; |
| | | } |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.io.dgn7;
|
| | |
|
| | | import com.ximple.util.DgnUtility;
|
| | | import org.apache.log4j.LogManager;
|
| | | import org.apache.log4j.Logger;
|
| | |
|
| | | import java.io.EOFException;
|
| | | import java.io.IOException;
|
| | | import java.nio.ByteBuffer;
|
| | | import java.nio.ByteOrder;
|
| | | import java.nio.MappedByteBuffer;
|
| | | import java.nio.ShortBuffer;
|
| | | import java.nio.channels.FileChannel;
|
| | | import java.nio.channels.ReadableByteChannel;
|
| | | import java.nio.channels.WritableByteChannel;
|
| | |
|
| | | public class Dgn7fileWriter {
|
| | | private static final Logger logger = LogManager.getLogger(Dgn7fileWriter.class);
|
| | |
|
| | | private Dgn7fileHeader header;
|
| | | private FileChannel channel;
|
| | | ByteBuffer buffer;
|
| | | private ElementType fileElementType = ElementType.UNDEFINED;
|
| | | private ByteBuffer headerTransfer;
|
| | | private final Element.FileRecord record = new Element.FileRecord();
|
| | | private final boolean randomAccessEnabled;
|
| | | private Lock lock;
|
| | | private boolean useMemoryMappedBuffer;
|
| | | private long currentOffset = 0L;
|
| | | private StreamLogging streamLogger = new StreamLogging("Dgn7 Writer");
|
| | | private int maxElementId = 0;
|
| | |
|
| | | public Dgn7fileWriter(FileChannel channel, boolean strict, boolean useMemoryMapped, Lock lock)
|
| | | throws IOException, Dgn7fileException {
|
| | | this.channel = channel;
|
| | | this.useMemoryMappedBuffer = useMemoryMapped;
|
| | | streamLogger.open();
|
| | | randomAccessEnabled = channel instanceof FileChannel;
|
| | | this.lock = lock;
|
| | | lock.lockRead();
|
| | | lock.lockWrite();
|
| | | // init(strict);
|
| | | }
|
| | |
|
| | | public Dgn7fileWriter(FileChannel channel, Lock lock) throws IOException, Dgn7fileException {
|
| | | this(channel, true, true, lock);
|
| | | }
|
| | |
|
| | | protected boolean hasNext() throws IOException {
|
| | | // mark current position
|
| | | int position = buffer.position();
|
| | |
|
| | | // ensure the proper position, regardless of read or handler behavior
|
| | | try {
|
| | | buffer.position(this.toBufferOffset(record.end));
|
| | | } catch (IllegalArgumentException e) {
|
| | | logger.warn("position=" + this.toBufferOffset(record.end), e);
|
| | |
|
| | | return false;
|
| | | }
|
| | |
|
| | | // no more data left
|
| | | if (buffer.remaining() < 4) {
|
| | | return false;
|
| | | }
|
| | |
|
| | | // looks good
|
| | | boolean hasNext = true;
|
| | | short type = buffer.getShort();
|
| | |
|
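| | | // a type word of -1 (0xffff) is treated as the end-of-file marker; writeEOF() below writes this value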
| | | if (type == -1) {
|
| | | hasNext = false;
|
| | | }
|
| | |
|
| | | // reset things to as they were
|
| | | buffer.position(position);
|
| | |
|
| | | return hasNext;
|
| | | }
|
| | |
|
| | | protected Element.FileRecord nextElement() throws IOException, Dgn7fileException {
|
| | | // need to update position
|
| | | buffer.position(this.toBufferOffset(record.end));
|
| | |
|
| | | // record header is little endian
|
| | | buffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | |
|
| | | // read element record header
|
| | | int recordNumber = ++maxElementId;
|
| | | short signature = buffer.getShort();
|
| | |
|
| | | // byte type = (byte) (buffer.get() & 0x7f);
|
| | | byte type = (byte) ((signature >>> 8) & 0x007f);
|
| | |
|
| | | // silly Bentley says contentLength is in 2-byte words
|
| | | // and ByteBuffer uses bytes.
|
| | | // track the record location
|
| | | int elementLength = (buffer.getShort() * 2) + 4;
|
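| | | // e.g. a contentLength of 48 words gives 48 * 2 = 96 bytes; adding the
| | | // 4 header bytes already read yields elementLength = 100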
| | |
|
| | | if (!buffer.isReadOnly() && !useMemoryMappedBuffer) {
|
| | | // capacity is less than required for the record
|
| | | // copy the old into the newly allocated
|
| | | if (buffer.capacity() < elementLength) {
|
| | | this.currentOffset += buffer.position();
|
| | |
|
| | | ByteBuffer old = buffer;
|
| | |
|
| | | // ensure enough capacity for one more record header
|
| | | buffer = Dgn7fileReader.ensureCapacity(buffer, elementLength, useMemoryMappedBuffer);
|
| | | buffer.put(old);
|
| | | fill(buffer, channel);
|
| | | buffer.position(0);
|
| | | } else
|
| | |
|
| | | // remaining is less than record length
|
| | | // compact the remaining data and read again,
|
| | | // allowing enough room for one more record header
|
| | | if (buffer.remaining() < elementLength) {
|
| | | this.currentOffset += buffer.position();
|
| | | buffer.compact();
|
| | | fill(buffer, channel);
|
| | | buffer.position(0);
|
| | | }
|
| | | }
|
| | |
|
| | | // element record is all little endian
|
| | | // buffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | | // read the type, handlers don't need it
|
| | | ElementType recordType = ElementType.forID(type);
|
| | |
|
| | | logger.debug("nextElement at " + this.toBufferOffset(record.end) + ":type=" + type);
|
| | |
|
| | | // this usually happens if the handler logic is bunk,
|
| | | // but bad files could exist as well...
|
| | |
|
| | | /*
|
| | | * if (recordType != ElementType.NULL && recordType != fileElementType)
|
| | | * {
|
| | | * throw new IllegalStateException("ShapeType changed illegally from " + fileElementType + " to " + recordType);
|
| | | * }
|
| | | */
|
| | |
|
| | | // peek at bounds, then reset for handler
|
| | | // many handlers may ignore bounds reading, but we don't want to
|
| | | // second guess them...
|
| | | buffer.mark();
|
| | |
|
| | | if (recordType.isMultiPoint()) {
|
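| | | // the range block holds six 32-bit values (low X, Y, Z, then high X, Y, Z) in DGN units,
| | | // each converted below to working coordinates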
| | | int lowCoorX = buffer.getInt();
|
| | |
|
| | | lowCoorX = DgnUtility.convertFromDGN(lowCoorX);
|
| | | record.minX = DgnUtility.converUnitToCoord(lowCoorX);
|
| | |
|
| | | int lowCoorY = buffer.getInt();
|
| | |
|
| | | lowCoorY = DgnUtility.convertFromDGN(lowCoorY);
|
| | | record.minY = DgnUtility.converUnitToCoord(lowCoorY);
|
| | |
|
| | | int lowCoorZ = buffer.getInt();
|
| | |
|
| | | lowCoorZ = DgnUtility.convertFromDGN(lowCoorZ);
|
| | | record.minZ = DgnUtility.converUnitToCoord(lowCoorZ);
|
| | |
|
| | | int highCoorX = buffer.getInt();
|
| | |
|
| | | highCoorX = DgnUtility.convertFromDGN(highCoorX);
|
| | | record.maxX = DgnUtility.converUnitToCoord(highCoorX);
|
| | |
|
| | | int highCoorY = buffer.getInt();
|
| | |
|
| | | highCoorY = DgnUtility.convertFromDGN(highCoorY);
|
| | | record.maxY = DgnUtility.converUnitToCoord(highCoorY);
|
| | |
|
| | | int highCoorZ = buffer.getInt();
|
| | |
|
| | | highCoorZ = DgnUtility.convertFromDGN(highCoorZ);
|
| | | record.maxZ = DgnUtility.converUnitToCoord(highCoorZ);
|
| | | }
|
| | |
|
| | | buffer.reset();
|
| | | record.offset = record.end;
|
| | |
|
| | | // update all the record info.
|
| | | record.length = elementLength;
|
| | | record.signature = signature;
|
| | | record.number = recordNumber;
|
| | | record.buffer = buffer;
|
| | |
|
| | | // remember, we read one int already...
|
| | | record.end = this.toFileOffset(buffer.position()) + elementLength - 4;
|
| | | // record.end = this.toFileOffset(buffer.position()) + elementLength;
|
| | |
|
| | | // mark this position for the reader
|
| | | record.start = buffer.position();
|
| | |
|
| | | // clear any cached record
|
| | | record.handler = recordType.getElementHandler();
|
| | | record.element = null;
|
| | |
|
| | | return record;
|
| | | }
|
| | |
|
| | | private void init(boolean strict) throws IOException, Dgn7fileException {
|
| | | header = readHeader(channel, strict);
|
| | |
|
| | | if (useMemoryMappedBuffer) {
|
| | | FileChannel fc = channel;
|
| | |
|
| | | buffer = fc.map(FileChannel.MapMode.READ_WRITE, 0, fc.size());
|
| | |
|
| | | // buffer.position(100);
|
| | | buffer.position(header.size());
|
| | | this.currentOffset = 0;
|
| | | } else {
|
| | | // force useMemoryMappedBuffer to false
|
| | | this.useMemoryMappedBuffer = false;
|
| | |
|
| | | // start with 8K buffer
|
| | | buffer = ByteBuffer.allocateDirect(8 * 1024);
|
| | | fill(buffer, channel);
|
| | | buffer.flip();
|
| | | this.currentOffset = header.size();
|
| | | }
|
| | |
|
| | | headerTransfer = ByteBuffer.allocate(4);
|
| | | headerTransfer.order(ByteOrder.LITTLE_ENDIAN);
|
| | |
|
| | | // make sure the record end is set now...
|
| | | record.end = toFileOffset(buffer.position());
|
| | | }
|
| | |
|
| | | public static Dgn7fileHeader readHeader(FileChannel channel, boolean strict) throws IOException {
|
| | | ByteBuffer buffer = ByteBuffer.allocateDirect(4);
|
| | |
|
| | | if (fill(buffer, channel) == -1) {
|
| | | throw new EOFException("Premature end of header");
|
| | | }
|
| | |
|
| | | buffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | |
|
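| | | // the short at offset 2 is a length in 2-byte words; doubling converts it to bytes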
| | | int length = buffer.getShort(2) * 2;
|
| | | ByteBuffer old = buffer;
|
| | |
|
| | | old.position(0);
|
| | |
|
| | | // ensure enough capacity for one more record header
|
| | | buffer = ByteBuffer.allocateDirect(length + 4);
|
| | | buffer.put(old);
|
| | |
|
| | | if (fill(buffer, channel) == -1) {
|
| | | throw new EOFException("Premature end of header");
|
| | | }
|
| | |
|
| | | buffer.position(0);
|
| | |
|
| | | Dgn7fileHeader header = new Dgn7fileHeader();
|
| | |
|
| | | header.read(buffer, strict);
|
| | |
|
| | | return header;
|
| | | }
|
| | |
|
| | | protected static int fill(ByteBuffer buffer, FileChannel channel) throws IOException {
|
| | | int r = buffer.remaining();
|
| | |
|
| | | // channel reads return -1 when EOF or other error
|
| | | // because these are non-blocking reads, 0 is a valid return value
|
| | | while ((buffer.remaining() > 0) && (r != -1)) {
|
| | | r = channel.read(buffer);
|
| | | }
|
| | |
|
| | | if (r == -1) {
|
| | | buffer.limit(buffer.position());
|
| | | }
|
| | |
|
| | | return r;
|
| | | }
|
| | |
|
| | | private void allocateBuffers() {
|
| | | buffer = ByteBuffer.allocateDirect(16 * 1024);
|
| | | }
|
| | |
|
| | | private void checkShapeBuffer(int size) {
|
| | | if (buffer.capacity() < size) {
|
| | | if (buffer != null)
|
| | | NIOUtilities.clean(buffer);
|
| | | buffer = ByteBuffer.allocateDirect(size);
|
| | | }
|
| | | }
|
| | |
|
| | | private void drain() throws IOException {
|
| | | buffer.flip();
|
| | | while (buffer.remaining() > 0)
|
| | | channel.write(buffer);
|
| | | buffer.flip().limit(buffer.capacity());
|
| | | }
|
| | |
|
| | | private int toBufferOffset(int offset) {
|
| | | return (int) (offset - currentOffset);
|
| | | }
|
| | |
|
| | | private int toFileOffset(int offset) {
|
| | | return (int) (currentOffset + offset);
|
| | | }
|
| | |
|
| | | public void writeElement(Element element) throws IOException {
|
| | | if (element == null) return;
|
| | | if (element.getElementType().isComplexElement()) {
|
| | | writeTo(element);
|
| | | ComplexElement complexElement = (ComplexElement) element;
|
| | | for (Element component : complexElement) {
|
| | | writeTo(component);
|
| | | }
|
| | | } else {
|
| | | writeTo(element);
|
| | | }
|
| | | }
|
| | |
|
| | | private void writeTo(Element element) throws IOException {
|
| | | ByteBuffer writeBuffer = ByteBuffer.allocateDirect(element.raw.length * 2);
|
| | | writeBuffer.order(ByteOrder.LITTLE_ENDIAN);
|
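| | | // each raw word is written little-endian, e.g. 0x1234 becomes the byte pair 0x34 0x12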
| | | for (short word : element.raw) {
|
| | | writeBuffer.putShort(word);
|
| | | }
|
| | | writeBuffer.rewind();
|
| | |
|
| | | channel.write(writeBuffer);
|
| | | }
|
| | |
|
| | |
|
| | |
|
| | | public void toEnd() throws IOException, Dgn7fileException {
|
| | | while (hasNext()) {
|
| | | nextElement();
|
| | | }
|
| | | }
|
| | |
|
| | | public void close() throws IOException {
|
| | | lock.unlockWrite();
|
| | | lock.unlockRead();
|
| | |
|
| | | if (channel.isOpen()) {
|
| | | channel.close();
|
| | | streamLogger.close();
|
| | | }
|
| | |
|
| | | if (buffer instanceof MappedByteBuffer) {
|
| | | NIOUtilities.clean(buffer);
|
| | | }
|
| | |
|
| | | channel = null;
|
| | | header = null;
|
| | | }
|
| | |
|
| | | public void writeEOF() throws IOException {
|
| | | ByteBuffer writeBuffer = ByteBuffer.allocateDirect(2);
|
| | | writeBuffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | | writeBuffer.putShort((short) -1);
|
| | | channel.write(writeBuffer);
|
| | |
|
| | | }
|
| | | }
|
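For orientation, a minimal sketch of how the writer listed above could be driven, using only the members visible in this change (the two-argument constructor, writeElement, writeEOF and close). The Lock class is assumed to live in the same package and to offer a no-argument constructor; the class name Dgn7WriteSketch and the output path are illustrative only.

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.List;

import com.ximple.io.dgn7.Dgn7fileException;
import com.ximple.io.dgn7.Dgn7fileWriter;
import com.ximple.io.dgn7.Element;
import com.ximple.io.dgn7.Lock;

public class Dgn7WriteSketch {
    // Writes the given elements to a DGN 7 design file and terminates it with the EOF marker.
    public static void write(String path, List<Element> elements)
            throws IOException, Dgn7fileException {
        FileChannel channel = new FileOutputStream(path).getChannel();
        Dgn7fileWriter writer = new Dgn7fileWriter(channel, new Lock()); // assumed no-arg Lock constructor
        try {
            for (Element element : elements) {
                writer.writeElement(element); // complex elements also write their components
            }
            writer.writeEOF(); // appends the -1 type word
        } finally {
            writer.close(); // releases the locks and closes the channel
        }
    }
}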
| | |
| | | import com.ximple.util.DgnUtility; |
| | | |
| | | /** |
| | | * Record |
| | | * FileRecord |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 11:14:50 AM |
| | | */ |
| | | public class Element { |
| | | public static final int CONSTRUCTION_CLASS = 0; |
| | |
| | | protected short[] raw; |
| | | protected byte attrOffset = 0; |
| | | protected ByteBuffer rawBuffer; |
| | | protected boolean newElement = false; |
| | | |
| | | public Element(byte[] raw) { |
| | | Element(byte[] raw) { |
| | | // this.raw = raw; |
| | | this.raw = new short[raw.length / 2]; |
| | | rawBuffer = ByteBuffer.wrap(raw); |
| | |
| | | |
| | | public int getLineStyle() { |
| | | return (raw[17] & 0x0007); |
| | | } |
| | | |
| | | protected void setLineStyle(int value) { |
| | | if (value > -1 && value < 8) |
| | | raw[17] = (short) ((raw[17] & 0xfff8) | (value & 0x0007)); |
| | | else |
| | | throw new IllegalArgumentException("Out of Range!"); |
| | | } |
| | | |
| | | public Envelope getRange() { |
| | |
| | | raw[5] = (short) (temp >> 16 & 0x0000ffff); |
| | | raw[4] = (short) (temp & 0x0000ffff); |
| | | |
| | | // lowZ |
| | | raw[7] = 0; |
| | | raw[8] = 0; |
| | | |
| | | int highCoorX = DgnUtility.converCoordToUnit(bbox.getMaxX()); |
| | | temp = DgnUtility.converToDGN(highCoorX); |
| | | raw[9] = (short) (temp >> 16 & 0x0000ffff); |
| | |
| | | temp = DgnUtility.converToDGN(highCoorY); |
| | | raw[11] = (short) (temp >> 16 & 0x0000ffff); |
| | | raw[10] = (short) (temp & 0x0000ffff); |
| | | |
| | | // highZ |
| | | raw[13] = (short) 0xffff; |
| | | raw[12] = (short) 0xffff; |
| | | |
| | | } |
| | | |
| | | public boolean isComponentElement() { |
| | | return (short) ((raw[0] >>> 7) & 0x0001) == 1; |
| | | } |
| | | |
| | | protected void setComponentElement(boolean value) { |
| | | raw[0] = (short) ((raw[0] & 0xff7f) | (value ? 0x0080 : 0x0)); |
| | | } |
| | | |
| | | public boolean removeUserAttributeData(int iLinkageId) { |
| | |
| | | return (short) ((raw[0] >>> 15) & 0x0001) == 1; |
| | | } |
| | | |
| | | protected void setDeleted(boolean value) { |
| | | raw[0] = (short) ((raw[0] & 0x7fff) | ((((value) ? 1 : 0) << 15) & 0x8000)); |
| | | } |
| | | |
| | | public int getColorIndex() { |
| | | return ((raw[17] >>> 8) & 0x00ff); |
| | | } |
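| | | // e.g. raw[17] = 0x0305 encodes color index 3 (high byte) |
| | | // and line style 5 (low three bits; see getLineStyle() above) |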
| | | |
| | | protected void setColorIndex(int value) { |
| | | if (value > -1 && value < 256) |
| | | { |
| | | raw[17] = (short) ((raw[17] & 0x00ff) | (value << 8 & 0xff00)); |
| | | } else throw new IllegalArgumentException("Out of Range!"); |
| | | } |
| | | |
| | | public int getType() { |
| | | return ((raw[0] >>> 8) & 0x007f); |
| | | } |
| | | |
| | | protected void setType(int value) { |
| | | raw[0] = (short) ((raw[0] & 0x80ff) | (value << 8) & 0x3f00); |
| | | } |
| | | |
| | | public ElementType getElementType() { |
| | |
| | | } |
| | | } |
| | | |
| | | public short getFollowLength() { |
| | | return raw[1]; |
| | | } |
| | | |
| | | protected void setFollowLength(short value) { |
| | | assert (raw.length >= value + 2); |
| | | raw[1] = value; |
| | | } |
| | | |
| | | public void addUserAttributeData(byte[] pDataBlock, Class dataClass, int iLinkageId) throws Element.Exception { |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | return true; |
| | | } |
| | | |
| | | public int getUserAttributeDataOffset() { |
| | | return (raw[15] + 16); |
| | | } |
| | | |
| | | public List<UserAttributeData> getUserAttributeData() { |
| | |
| | | public Exception() { |
| | | } |
| | | |
| | | // Constructs a Record.Exception with no detail message. |
| | | // Constructs an ElementRecord.Exception with no detail message. |
| | | public Exception(String oStrMessage) { |
| | | super(oStrMessage); |
| | | } |
| | |
| | | return elementType; |
| | | } |
| | | |
| | | public Object read(ByteBuffer buffer, short signature, int length) { |
| | | public Element read(ByteBuffer buffer, short signature, int length) { |
| | | byte[] dst = new byte[length]; |
| | | try { |
| | | buffer.get(dst, 4, dst.length - 4); |
| | |
| | | return new Element(raw); |
| | | } |
| | | } |
| | | |
| | | public static final class FileRecord { |
| | | int length; |
| | | int number = 0; |
| | | int offset; // Relative to the whole file |
| | | int start = 0; // Relative to the current loaded buffer |
| | | short signature = 0; |
| | | |
| | | /** |
| | | * The minimum X value. |
| | | */ |
| | | public double minX; |
| | | |
| | | /** |
| | | * The minimum Y value. |
| | | */ |
| | | public double minY; |
| | | |
| | | /** |
| | | * The minimum Z value. |
| | | */ |
| | | public double minZ; |
| | | |
| | | /** |
| | | * The maximum X value. |
| | | */ |
| | | public double maxX; |
| | | |
| | | /** |
| | | * The maximum Y value. |
| | | */ |
| | | public double maxY; |
| | | |
| | | /** |
| | | * The maximum Z value. |
| | | */ |
| | | public double maxZ; |
| | | |
| | | // ElementType type; |
| | | int end = 0; // Relative to the whole file |
| | | Object element = null; |
| | | IElementHandler handler; |
| | | ByteBuffer buffer; |
| | | |
| | | public Object element() { |
| | | if (element == null) { |
| | | buffer.position(start); |
| | | buffer.order(ByteOrder.LITTLE_ENDIAN); |
| | | |
| | | if (handler == null) { |
| | | return null; |
| | | } |
| | | |
| | | element = handler.read(buffer, signature, length); |
| | | } |
| | | |
| | | return element; |
| | | } |
| | | |
| | | public int offset() { |
| | | return offset; |
| | | } |
| | | |
| | | /** |
| | | * A summary of the record. |
| | | */ |
| | | public String toString() { |
| | | return "FileRecord " + number + " length " + length + " bounds " + minX + "," + minY + " " + maxX + "," + maxY; |
| | | } |
| | | } |
| | | } |
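The accessors above pack and unpack fields from the 16-bit words in raw. A small illustrative sketch that decodes the first header word the same way getType, isComponentElement and the deleted flag do; the word value itself is made up for the example.

    public class HeaderWordSketch {
        public static void main(String[] args) {
            short word0 = (short) 0x0344;                       // hypothetical raw[0] value

            int type          = (word0 >>> 8) & 0x007f;         // bits 8-14: element type id (3 here)
            boolean component = ((word0 >>> 7) & 0x0001) == 1;  // bit 7: component of a complex element
            boolean deleted   = ((word0 >>> 15) & 0x0001) == 1; // bit 15: deleted flag

            System.out.println("type=" + type + " component=" + component + " deleted=" + deleted);
        }
    }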
New file |
| | |
| | | package com.ximple.io.dgn7;
|
| | |
|
| | | import com.vividsolutions.jts.geom.Coordinate;
|
| | | import com.vividsolutions.jts.geom.GeometryFactory;
|
| | | import com.vividsolutions.jts.geom.LineString;
|
| | | import com.vividsolutions.jts.geom.LinearRing;
|
| | |
|
| | | import java.util.Arrays;
|
| | | import java.util.LinkedList;
|
| | |
|
| | | public class ElementFactory {
|
| | | static final int DEFAULT_ELMHEAD_LENGTH = 28;
|
| | | static final int DEFAULT_DISPHEAD_LENGTH = 8;
|
| | | static final int MINIMAL_ELEMLENGTH = 18 * 2;
|
| | |
|
| | | static final int MAXINUM_LINESTRING_PTLEN = 100;
|
| | |
|
| | | private static ElementFactory elementFactory = new ElementFactory();
|
| | | private static GeometryFactory factory = new GeometryFactory();
|
| | |
|
| | | public static Element createLineString(LineString linestring) {
|
| | | Coordinate[] pts = linestring.getCoordinates();
|
| | | LineStringElement element = elementFactory.createLineStringElement(pts);
|
| | | element.setRange(linestring.getEnvelopeInternal());
|
| | | return element;
|
| | | }
|
| | |
|
| | | public static Element createShape(LinearRing ring) {
|
| | | Coordinate[] pts = ring.getCoordinates();
|
| | | int elmsize = MINIMAL_ELEMLENGTH + 2 + pts.length * 8;
|
| | | ShapeElement element = new ShapeElement(new byte[elmsize]);
|
| | | for (int i = 0; i < pts.length; i++) {
|
| | | element.setX(i, pts[i].x);
|
| | | element.setY(i, pts[i].y);
|
| | | }
|
| | | element.setVerticeSize(pts.length);
|
| | | element.setType(ElementType.SHAPE.id);
|
| | | element.setFollowLength((short) ((elmsize / 2) - 2));
|
| | | element.setRange(ring.getEnvelopeInternal());
|
| | | element.setLevelIndex(0);
|
| | | element.setColorIndex(0);
|
| | | element.setWeight(0);
|
| | | element.setLineStyle(0);
|
| | | return element;
|
| | | }
|
| | |
|
| | | private LineStringElement createLineStringElement(Coordinate[] pts) {
|
| | | int elmsize = MINIMAL_ELEMLENGTH + 2 + pts.length * 8;
|
| | | LineStringElement element = new LineStringElement(new byte[elmsize]);
|
| | | for (int i = 0; i < pts.length; i++) {
|
| | | element.setX(i, pts[i].x);
|
| | | element.setY(i, pts[i].y);
|
| | | }
|
| | | element.setVerticeSize(pts.length);
|
| | | element.setType(ElementType.LINESTRING.id);
|
| | | element.setFollowLength((short) ((elmsize / 2) - 2));
|
| | | element.setLevelIndex(0);
|
| | | element.setColorIndex(0);
|
| | | element.setWeight(0);
|
| | | element.setLineStyle(0);
|
| | | return element;
|
| | | }
|
| | |
|
| | | private ShapeElement createShapeElement(Coordinate[] pts) {
|
| | | int elmsize = MINIMAL_ELEMLENGTH + 2 + pts.length * 8;
|
| | | ShapeElement element = new ShapeElement(new byte[elmsize]);
|
| | | for (int i = 0; i < pts.length; i++) {
|
| | | element.setX(i, pts[i].x);
|
| | | element.setY(i, pts[i].y);
|
| | | }
|
| | | element.setVerticeSize(pts.length);
|
| | | element.setType(ElementType.SHAPE.id);
|
| | | element.setFollowLength((short) ((elmsize / 2) - 2));
|
| | | element.setLevelIndex(0);
|
| | | element.setColorIndex(0);
|
| | | element.setWeight(0);
|
| | | element.setLineStyle(0);
|
| | | return element;
|
| | | }
|
| | |
|
| | | public static Element createComplexChain(LineString linestring) {
|
| | | LinkedList<LineStringElement> elms = new LinkedList<LineStringElement>();
|
| | | Coordinate[] allpts = linestring.getCoordinates();
|
| | | int segsize = allpts.length / MAXINUM_LINESTRING_PTLEN;
|
| | | int currentpos = 0;
|
| | | int totalLength = 0;
|
| | | for (int seg = 0; seg < segsize; seg++) {
|
| | | Coordinate[] pts = Arrays.copyOfRange(allpts,
|
| | | currentpos, currentpos + MAXINUM_LINESTRING_PTLEN + 1, Coordinate[].class);
|
| | | LineStringElement element = elementFactory.createLineStringElement(pts);
|
| | | currentpos += MAXINUM_LINESTRING_PTLEN;
|
| | | element.setRange(element.toGeometry(factory).getEnvelopeInternal());
|
| | | element.setComponentElement(true);
|
| | | element.setLevelIndex(0);
|
| | | totalLength += element.raw.length;
|
| | | elms.add(element);
|
| | | }
|
| | | int remain = allpts.length % MAXINUM_LINESTRING_PTLEN;
|
| | | Coordinate[] pts = Arrays.copyOfRange(allpts,
|
| | | currentpos, currentpos + remain, Coordinate[].class);
|
| | | LineStringElement element = elementFactory.createLineStringElement(pts);
|
| | | element.setRange(element.toGeometry(factory).getEnvelopeInternal());
|
| | | element.setComponentElement(true);
|
| | | element.setLevelIndex(0);
|
| | | elms.add(element);
|
| | | totalLength += element.raw.length;
|
| | |
|
| | | ComplexChainElement result = new ComplexChainElement(new byte[MINIMAL_ELEMLENGTH + 12]);
|
| | | result.addAll(elms);
|
| | | result.setRange(linestring.getEnvelopeInternal());
|
| | | result.setType(ElementType.COMPLEXCHAIN.id);
|
| | | result.setFollowLength((short) (((MINIMAL_ELEMLENGTH + 12) / 2) - 2));
|
| | | result.setNumOfElement((short) elms.size());
|
| | | totalLength += result.raw.length;
|
| | | totalLength -= 19;
|
| | | result.setTotalLength((short) totalLength);
|
| | | result.setLevelIndex(0);
|
| | | result.setColorIndex(0);
|
| | | result.setWeight(0);
|
| | | result.setLineStyle(0);
|
| | |
|
| | | return result;
|
| | | }
|
| | | }
|
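A brief usage sketch for the factory above, assuming the JTS and dgn7 classes in this changeset are on the classpath; the coordinate values are arbitrary.

    import com.vividsolutions.jts.geom.Coordinate;
    import com.vividsolutions.jts.geom.GeometryFactory;
    import com.vividsolutions.jts.geom.LineString;

    import com.ximple.io.dgn7.Element;
    import com.ximple.io.dgn7.ElementFactory;

    public class ElementFactorySketch {
        public static void main(String[] args) {
            GeometryFactory factory = new GeometryFactory();
            LineString line = factory.createLineString(new Coordinate[] {
                    new Coordinate(0.0, 0.0),
                    new Coordinate(10.0, 5.0),
                    new Coordinate(20.0, 5.0)
            });

            // Short geometries become a single line-string element; geometries with more
            // than 100 points would go through createComplexChain, which splits them into segments.
            Element element = ElementFactory.createLineString(line);
            System.out.println("type=" + element.getType()
                    + ", followLength=" + element.getFollowLength());
        }
    }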
| | |
| | | | 23 Circular Truncated Cone | |
| | | | 24 B-Spline Surface (complex) | |
| | | | 25 B-Spline Surface boundary | |
| | | | 26 B-Spline Knot Record | |
| | | | 26 B-Spline Knot ElementRecord | |
| | | | 27 B-Spline Curve (complex) | |
| | | | 28 B-Spline Weight Factor | |
| | | | 33 Dimension Record | |
| | | | 34 Shared Cell Definition Record | |
| | | | 35 Shared Cell Record | |
| | | | 36 Multiline Record | |
| | | | 37 Attribute Record | |
| | | | 33 Dimension ElementRecord | |
| | | | 34 Shared Cell Definition ElementRecord | |
| | | | 35 Shared Cell ElementRecord | |
| | | | 36 Multiline ElementRecord | |
| | | | 37 Attribute ElementRecord | |
| | | | 38 DgnStore Component | |
| | | | 39 DgnStore Header | |
| | | | 66 MicroStation Application | |
| | |
| | | | 88 Raster Component | |
| | | | 90 Raster Reference Attachment | |
| | | | 91 Raster Reference Component | |
| | | | 92 Raster Hierarchy Record | |
| | | | 92 Raster Hierarchy ElementRecord | |
| | | | 93 Raster Hierarchy Component | |
| | | | 94 Raster Frame Record | |
| | | | 95 Table Entry Record | |
| | | | 96 Table Header Record | |
| | | | 97 View Group Record | |
| | | | 98 View Record | |
| | | | 99 Level Mask Record | |
| | | | 100 Reference Attach Record | |
| | | | 94 Raster Frame ElementRecord | |
| | | | 95 Table Entry ElementRecord | |
| | | | 96 Table Header ElementRecord | |
| | | | 97 View Group ElementRecord | |
| | | | 98 View ElementRecord | |
| | | | 99 Level Mask ElementRecord | |
| | | | 100 Reference Attach ElementRecord | |
| | | | 101 Matrix Header | |
| | | | 102 Matrix Int Data | |
| | | | 103 Matrix Double Data | |
| | | | 105 Mesh Header | |
| | | | 106 Extended Record (graphic) (complex) | |
| | | | 107 Extended Record (non-graphic) (complex) | |
| | | | 108 Reference Override Record | |
| | | | 106 Extended ElementRecord (graphic) (complex) | |
| | | | 107 Extended ElementRecord (non-graphic) (complex) | |
| | | | 108 Reference Override ElementRecord | |
| | | | 110 Named Group Header | |
| | | | 111 Named Group Component | |
| | | | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/17 01:26:49 PM |
| | | */ |
| | | public final class ElementType { |
| | | /** |
| | |
| | | public class EllipseElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(EllipseElement.class); |
| | | |
| | | public EllipseElement(byte[] raw) { |
| | | EllipseElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 06:36:55 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class FrammeAttributeData extends UserAttributeData { |
| | | public FrammeAttributeData(short id) { |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 11:38:57 AM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public interface GeometryConverter { |
| | | public Geometry toGeometry(GeometryFactory factory); |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/17 01:50:26 PM |
| | | */ |
| | | public interface IElementHandler { |
| | | public ElementType getElementType(); |
| | | |
| | | public Object read(ByteBuffer buffer, short signature, int length); |
| | | public Element read(ByteBuffer buffer, short signature, int length); |
| | | |
| | | public void write(ByteBuffer buffer, Object element); |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 11:34:59 AM |
| | | */ |
| | | public class LineElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(LineElement.class); |
| | | |
| | | public LineElement(byte[] raw) { |
| | | LineElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 02:48:58 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class LineStringElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(LineStringElement.class); |
| | | |
| | | public LineStringElement(byte[] raw) { |
| | | LineStringElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | return raw[18] & 0x0000ffff; |
| | | } |
| | | |
| | | public void setVerticeSize(int size) { |
| | | raw[18] = (short) (size & 0x0000ffff); |
| | | } |
| | | |
| | | public double getLength() { |
| | | double result = 0.0; |
| | | Coordinate[] vset = getVertices(); |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 10:27:24 AM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class Lock { |
| | | Logger logger = LogManager.getLogger("com.ximple.io.dgn7"); |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 03:08:43 PM |
| | | */ |
| | | public class ShapeElement extends LineStringElement implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(ShapeElement.class); |
| | | |
| | | public ShapeElement(byte[] raw) { |
| | | ShapeElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 10:31:08 AM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class StreamLogging { |
| | | private static final Logger LOGGER = LogManager.getLogger("com.ximple.io.dgn7"); |
| | |
| | | public class TagElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(TagElement.class); |
| | | |
| | | public TagElement(byte[] raw) { |
| | | TagElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 05:03:46 PM |
| | | */ |
| | | public class TcbElement extends Element { |
| | | private static final Logger logger = Logger.getLogger(TcbElement.class); |
| | | |
| | | public TcbElement(byte[] raw) { |
| | | TcbElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | package com.ximple.io.dgn7; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.CoordinateList; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.ximple.util.DgnUtility; |
| | | import org.apache.log4j.Logger; |
| | | |
| | | import java.awt.geom.AffineTransform; |
| | | import java.nio.ByteBuffer; |
| | | import java.nio.CharBuffer; |
| | | import java.nio.charset.CharacterCodingException; |
| | | import java.nio.charset.Charset; |
| | | import java.nio.charset.CharsetDecoder; |
| | | import java.util.ArrayList; |
| | | |
| | | import org.apache.log4j.Logger; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.CoordinateList; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.util.DgnUtility; |
| | | |
| | | /** |
| | | * TextElement |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 11:45:29 AM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class TextElement extends Element implements GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(TextElement.class); |
| | |
| | | public static final int TXTJUST_RD = 24; /* Right Descender */ |
| | | public static final int TXTJUST_NONE = 127; /* no justification */ |
| | | |
| | | public TextElement(byte[] raw) { |
| | | TextElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | public boolean isChinese() { |
| | | if (raw.length < 31) return false; |
| | | int isChinese = raw[30] & 0x0000ffff; |
| | | |
| | | return (isChinese == 0xfdff); |
| | |
| | | return ""; |
| | | } |
| | | |
| | | if(30+num/2 > raw.length) |
| | | { |
| | | logger.warn("getTextLength() too long." ); |
| | | if (30 + num / 2 > raw.length) { |
| | | logger.warn("getTextLength() too long."); |
| | | return ""; |
| | | } |
| | | |
| | |
| | | |
| | | val.append(temp[i]); |
| | | } |
| | | } |
| | | else |
| | | { |
| | | } else { |
| | | byte[] strRaw = new byte[num * 2]; |
| | | ArrayList byteResult = new ArrayList(); |
| | | for (int i = 0; i < num; i++) |
| | | { |
| | | for (int i = 0; i < num; i++) { |
| | | short charValue = raw[i + 31]; |
| | | byte hi = (byte) (charValue >>> 8); |
| | | byte lo = (byte) charValue; |
| | | strRaw[i * 2] = hi; |
| | | strRaw[i * 2 + 1] = lo; |
| | | |
| | | } |
| | | |
| | | try { |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 04:02:58 PM |
| | | */ |
| | | public class TextNodeElement extends Element implements ComplexElement, GeometryConverter { |
| | | private static final Logger logger = Logger.getLogger(TextElement.class); |
| | | |
| | | private ArrayList<Element> list = new ArrayList<Element>(); |
| | | |
| | | public TextNodeElement(byte[] raw) { |
| | | TextNodeElement(byte[] raw) { |
| | | super(raw); |
| | | } |
| | | |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 02:29:29 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public class UserAttributeData { |
| | | protected short[] _src; |
| | |
| | | * |
| | | * @author Ulysses |
| | | * @version 0.1 |
| | | * @since 2006/5/18 01:33:00 PM |
| | | * @since 2006/5/18 |
| | | */ |
| | | public final class DgnUtility { |
| | | private static final Logger logger = Logger.getLogger(DgnUtility.class); |
| | |
| | | * Dgn7OracleReaderTest |
| | | * User: Ulysses |
| | | * Date: 2007/10/24 |
| | | * Time: 10:49:54 AM |
| | | */ |
| | | public class Dgn7OracleReaderTest { |
| | | @BeforeTest |
| | |
| | | |
| | | } |
| | | |
| | | @Test |
| | | // @Test |
| | | public void testOracleReader() throws SQLException, IOException { |
| | | OracleConnection connection = OracleTarget.getInstance().getOracleConnection(); |
| | | // String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" ORDER BY ROWID"; |
| | |
| | | * Dgn7TextElementReaderTest |
| | | * User: Ulysses |
| | | * Date: 2008/1/10 |
| | | * Time: 12:19:14 AM |
| | | */ |
| | | public class Dgn7TextElementReaderTest { |
| | | private final static Logger logger = Logger.getLogger(Dgn7fileReaderTest.class); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | count++; |
| | | } |
| | | |
| | | logger.info("ElementRecord Count=" + count); |
| | | logger.info("FileRecord Count=" + count); |
| | | } |
| | | } |
| | |
| | | * Dgn7fileReaderTest |
| | | * User: Ulysses |
| | | * Date: 2007/10/24 |
| | | * Time: 01:43:41 AM |
| | | * To change this template use File | Settings | File Templates. |
| | | */ |
| | | public class Dgn7fileReaderTest { |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | count++; |
| | | } |
| | | |
| | | logger.info("ElementRecord Count=" + count); |
| | | logger.info("FileRecord Count=" + count); |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.io.dgn7;
|
| | |
|
| | | import org.apache.commons.io.FileUtils;
|
| | | import org.apache.log4j.Logger;
|
| | | import org.geotools.TestData;
|
| | | import org.testng.Assert;
|
| | | import org.testng.annotations.BeforeTest;
|
| | | import org.testng.annotations.Test;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.FileInputStream;
|
| | | import java.io.FileOutputStream;
|
| | | import java.io.IOException;
|
| | | import java.io.RandomAccessFile;
|
| | | import java.nio.channels.FileChannel;
|
| | |
|
| | | public class Dgn7fileWriterTest {
|
| | | private final static Logger logger = Logger.getLogger(Dgn7fileReaderTest.class);
|
| | |
|
| | | // private final static String testFilePath = "test-data\\testHV.dgn";
|
| | | private final static String testFilePath = "testHV.dgn";
|
| | | private FileInputStream _fs;
|
| | |
|
| | | @BeforeTest
|
| | | public void setUp() throws IOException {
|
| | | File dataFile = TestData.file(this, testFilePath);
|
| | | if (!dataFile.exists()) {
|
| | | return;
|
| | | }
|
| | |
|
| | | _fs = new FileInputStream(dataFile);
|
| | | }
|
| | | |
| | | @Test
|
| | | public void testWrite() {
|
| | | }
|
| | |
|
| | | @Test
|
| | | public void testCopy() throws Dgn7fileException, IOException {
|
| | | File target = TestData.temp(this, "testdgn2d.dgn");
|
| | | FileUtils.copyFile(TestData.file(this, "dgnseed2d.dgn"), target);
|
| | | RandomAccessFile targetStream = new RandomAccessFile(target, "rw");
|
| | | FileChannel fctarget = targetStream.getChannel();
|
| | | Lock lock = new Lock();
|
| | |
|
| | | Dgn7fileReader targetReader = new Dgn7fileReader(fctarget, new Lock());
|
| | | while (targetReader.hasNext()) {
|
| | | targetReader.nextElement();
|
| | | }
|
| | |
|
| | | Dgn7fileWriter writer = new Dgn7fileWriter(fctarget, lock);
|
| | |
|
| | | FileChannel fc = _fs.getChannel();
|
| | | Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock());
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | | while (reader.hasNext()) {
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | | boolean completed = false;
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | // @todo add process in here
|
| | | processCompleteElement(lastComplex, writer);
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | // @todo add process in here
|
| | | processCompleteElement(element, writer);
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | } else {
|
| | | logger.warn("wong." + element.toString());
|
| | | Assert.fail("Component Element cannot found parent.");
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | // @todo add process in here
|
| | | processCompleteElement(lastComplex, writer);
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | // writer.writeElement(element);
|
| | | }
|
| | | count++;
|
| | | }
|
| | | writer.writeEOF();
|
| | | writer.close();
|
| | |
|
| | | // FileUtils.copyFile(target, new File("G://target.dgn"));
|
| | | }
|
| | |
|
| | | private boolean processCompleteElement(Element element, Dgn7fileWriter writer) throws IOException {
|
| | | writer.writeElement(element);
|
| | | return true;
|
| | | }
|
| | | }
|
New file |
| | |
| | | package com.ximple.io.dgn7;
|
| | |
|
| | | import com.vividsolutions.jts.geom.CoordinateList;
|
| | | import com.vividsolutions.jts.geom.GeometryFactory;
|
| | | import com.vividsolutions.jts.geom.LineString;
|
| | | import com.vividsolutions.jts.geom.MultiLineString;
|
| | | import org.apache.commons.io.FileUtils;
|
| | | import org.apache.log4j.Logger;
|
| | | import org.geotools.TestData;
|
| | | import org.testng.Assert;
|
| | | import org.testng.annotations.BeforeTest;
|
| | | import org.testng.annotations.Test;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.FileInputStream;
|
| | | import java.io.IOException;
|
| | | import java.io.RandomAccessFile;
|
| | | import java.nio.channels.FileChannel;
|
| | |
|
| | | /**
|
| | | * Created by IntelliJ IDEA.
|
| | | * User: Ulysses
|
| | | * Date: 2010/4/21
|
| | | */
|
| | | public class ElementFactoryTest {
|
| | | private final static Logger logger = Logger.getLogger(Dgn7fileReaderTest.class);
|
| | |
|
| | | // private final static String testFilePath = "test-data\\testHV.dgn";
|
| | | private final static String testFilePath = "testHV.dgn";
|
| | |
|
| | | private static GeometryFactory factory = new GeometryFactory();
|
| | |
|
| | | private FileInputStream _fs;
|
| | | private LineStringElement _testLineString = null;
|
| | | private ComplexChainElement _testComplexChain = null;
|
| | |
|
| | | @BeforeTest
|
| | | public void setUp() throws IOException, Dgn7fileException {
|
| | | File dataFile = TestData.file(this, testFilePath);
|
| | | if (!dataFile.exists()) {
|
| | | return;
|
| | | }
|
| | |
|
| | | _fs = new FileInputStream(dataFile);
|
| | |
|
| | | fetchTestElement(_fs);
|
| | | _fs.close();
|
| | | }
|
| | |
|
| | | private void fetchTestElement(FileInputStream fs) throws Dgn7fileException, IOException {
|
| | | FileChannel fc = _fs.getChannel();
|
| | | Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock());
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | | while (reader.hasNext()) {
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | | boolean completed = false;
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | // @todo add process in here
|
| | | if (!processCompleteElement(lastComplex)) break;
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | // @todo add process in here
|
| | | if (!processCompleteElement(element)) break;
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | } else {
|
| | | logger.warn("wong." + element.toString());
|
| | | Assert.fail("Component Element cannot found parent.");
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | // @todo add process in here
|
| | | if (!processCompleteElement(lastComplex)) break;
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | }
|
| | | count++;
|
| | | }
|
| | | }
|
| | |
|
| | | private boolean processCompleteElement(Element element) throws IOException {
|
| | | if (element instanceof LineStringElement) {
|
| | | _testLineString = (LineStringElement) element;
|
| | | }
|
| | |
|
| | | if (element instanceof ComplexChainElement) {
|
| | | _testComplexChain = (ComplexChainElement) element;
|
| | | }
|
| | |
|
| | | if ((_testLineString != null) && (_testComplexChain != null)) return false;
|
| | | return true;
|
| | | }
|
| | |
|
| | | @Test
|
| | | public void testCreateLineString() throws IOException, Dgn7fileException {
|
| | | Assert.assertNotNull(_testLineString);
|
| | | LineStringElement originElement = _testLineString;
|
| | |
|
| | | LineString geometry = (LineString) originElement.toGeometry(factory);
|
| | | Element cloneElement = ElementFactory.createLineString(geometry);
|
| | | Assert.assertTrue(cloneElement instanceof LineStringElement);
|
| | | cloneElement.setLevelIndex(originElement.getLevelIndex());
|
| | | cloneElement.setColorIndex(originElement.getColorIndex());
|
| | | cloneElement.setWeight(originElement.getWeight());
|
| | | cloneElement.setLineStyle(originElement.getLineStyle());
|
| | | cloneElement.setRange(geometry.getEnvelopeInternal());
|
| | |
|
| | | int len = originElement.raw.length;
|
| | | int lenClone = cloneElement.raw.length;
|
| | | // Assert.assertEquals(lenClone, len - originElement.getUserAttributeDataOffset() + 1);
|
| | | if (originElement.hasUserAttributeData()) {
|
| | | Assert.assertEquals(lenClone, originElement.getUserAttributeDataOffset());
|
| | | }
|
| | | System.out.println("origin size=(" + len + ")-:- Clone size=(" + lenClone + ")");
|
| | | int headerSize = ElementFactory.MINIMAL_ELEMLENGTH / 2;
|
| | | for (int i = 0; i <= headerSize; i++) {
|
| | | if (originElement.raw[i] != cloneElement.raw[i]) {
|
| | | System.out.print("different index=" + i + ":");
|
| | | System.out.println("origin[" + Integer.toHexString(originElement.raw[i])
|
| | | + "]-clone:[" + Integer.toHexString(cloneElement.raw[i]) + "]");
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | @Test
|
| | | public void testCreateComplexChain() throws IOException, Dgn7fileException {
|
| | | Assert.assertNotNull(_testComplexChain);
|
| | | ComplexChainElement originElement = _testComplexChain;
|
| | |
|
| | | System.out.print("len=" + originElement.raw.length);
|
| | | for (Element elm : originElement) {
|
| | | System.out.print(":" + elm.raw.length);
|
| | | }
|
| | | System.out.println();
|
| | |
|
| | | MultiLineString geometries = (MultiLineString) originElement.toGeometry(factory);
|
| | | LineString geometry = factory.createLineString(geometries.getCoordinates());
|
| | |
|
| | | ComplexChainElement cloneElement = (ComplexChainElement) ElementFactory.createComplexChain(geometry);
|
| | | Assert.assertTrue(cloneElement instanceof ComplexChainElement);
|
| | | cloneElement.setLevelIndex(originElement.getLevelIndex());
|
| | | cloneElement.setColorIndex(originElement.getColorIndex());
|
| | | cloneElement.setWeight(originElement.getWeight());
|
| | | cloneElement.setLineStyle(originElement.getLineStyle());
|
| | | cloneElement.setRange(geometry.getEnvelopeInternal());
|
| | |
|
| | | int len = originElement.raw.length;
|
| | | int lenClone = cloneElement.raw.length;
|
| | | // Assert.assertEquals(lenClone, len - originElement.getUserAttributeDataOffset() + 1);
|
| | | System.out.print("clonelen=" + cloneElement.raw.length);
|
| | | for (Element elm : cloneElement) {
|
| | | System.out.print(":" + elm.raw.length);
|
| | | }
|
| | | System.out.println();
|
| | |
|
| | | if (originElement.hasUserAttributeData()) {
|
| | | Assert.assertEquals(lenClone, originElement.getUserAttributeDataOffset() + 4);
|
| | | }
|
| | | System.out.println("origin size=(" + len + ")-:- Clone size=(" + lenClone + ")");
|
| | | int headerSize = ElementFactory.MINIMAL_ELEMLENGTH / 2 + 6;
|
| | | for (int i = 0; i < headerSize; i++) {
|
| | | if (originElement.raw[i] != cloneElement.raw[i]) {
|
| | | System.out.print("different index=" + i + ":");
|
| | | System.out.println("origin[" + Integer.toHexString(originElement.raw[i])
|
| | | + "]-clone:[" + Integer.toHexString(cloneElement.raw[i]) + "]");
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
| | | private static final String ORACLE_URL = "jdbc:oracle:thin:@"; |
| | | private static final String _propUsrKey = "user"; |
| | | private static final String _propPassKey = "password"; |
| | | private static String _oracleHost = "192.168.11.200"; |
| | | private static String _oracleInstance = "NNTPC"; |
| | | private static String _oracleHost = "10.10.1.7"; |
| | | private static String _oracleInstance = "ORCL"; |
| | | private static String _oraclePort = "1521"; |
| | | |
| | | static { |
| | |
| | | |
| | | private OracleTarget() { |
| | | properties = new Properties(); |
| | | properties.put(_propUsrKey, "SPATIALDB"); |
| | | properties.put(_propPassKey, "SPATIALDB000"); |
| | | properties.put(_propUsrKey, "system"); |
| | | properties.put(_propPassKey, "simple000"); |
| | | } |
| | | |
| | | public static String getOracleHost() { |
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-elmparser</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | <packaging>jar</packaging> |
| | | <name>ximple-elmparser</name> |
| | | <url>http://maven.apache.org</url> |
| | | |
| | | <properties> |
| | | <xdgnio.version>1.2.0</xdgnio.version> |
| | | <xdgnio.version>1.2.2</xdgnio.version> |
| | | </properties> |
| | | |
| | | <scm> |
| | |
| | | |
| | | <!-- Ximple Library --> |
| | | <dependency> |
| | | <artifactId>ximple-dgnio</artifactId> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <version>${xdgnio.version}</version> |
| | | <artifactId>ximple-dgnio</artifactId> |
| | | <version>${project.version}</version> |
| | | </dependency> |
| | | <dependency> |
| | | <artifactId>ximple-spatialjob</artifactId> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <version>${xdgnio.version}</version> |
| | | <artifactId>ximple-spatialjob</artifactId> |
| | | <version>${project.version}</version> |
| | | </dependency> |
| | | </dependencies> |
| | | |
| | |
| | | Use maven from the command line: |
| | | mvn exec:java -Dexec.mainClass="com.ximple.eofms.XElementParser" |
| | | --> |
| | | <artifactId>exec-maven-plugin</artifactId> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>exec-maven-plugin</artifactId> |
| | | <version>1.2.1</version> |
| | | <!-- |
| | | <executions> |
| | | <execution> |
| | |
| | | import org.geotools.feature.Feature; |
| | | import org.geotools.feature.FeatureType; |
| | | import org.geotools.feature.SimpleFeature; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.FeatureCollection; |
| | | import org.geotools.feature.FeatureCollections; |
| | | import org.geotools.data.FeatureWriter; |
| | |
| | | return; |
| | | } |
| | | |
| | | // Check whether the element matches the conditions |
| | | Feature feature = elementDispatcher.execute(element, false); |
| | | if (feature == null) |
| | | { |
New file |
| | |
| | | # Create stdout appender
|
| | | log4j.rootLogger=info, logfile, stdout
|
| | |
|
| | | # Configure the stdout appender to go to the Console
|
| | | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
|
| | |
|
| | | # Configure stdout appender to use the PatternLayout
|
| | | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
| | |
|
| | | # Pattern output the caller's filename and line #
|
| | | log4j.appender.stdout.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n
|
| | | #log4j.appender.stdout.encoding=UTF-8
|
| | |
|
| | | log4j.appender.logfile=org.apache.log4j.FileAppender
|
| | | log4j.appender.logfile.file=xjobcarrier.log
|
| | | log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
|
| | | log4j.appender.logfile.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n
|
| | | #log4j.appender.logfile.encoding=UTF-8
|
| | |
|
| | | log4j.appender.remoteout=com.holub.log4j.RemoteAppender
|
| | | log4j.appender.remoteout.Port=8011
|
| | | log4j.appender.remoteout.layout=org.apache.log4j.PatternLayout
|
| | | log4j.appender.remoteout.layout.ConversionPattern=%5p [%t] (%F:%L) - %m%n
|
| | | #log4j.appender.remoteout.encoding=UTF-8
|
| | |
|
| | | # Print messages of level INFO or above for examples
|
| | | log4j.logger.org.cavaness.quartzbook=INFO
|
| | | log4j.logger.org.quartz=DEBUG
|
| | | log4j.logger.com.ximple.eofms=DEBUG |
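A minimal sketch of loading this configuration with log4j 1.x at startup; the logger category matches the com.ximple.eofms entry above, and the messages are illustrative.

    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    public class LoggingSketch {
        private static final Logger logger = Logger.getLogger("com.ximple.eofms");

        public static void main(String[] args) {
            // Load the stdout/logfile appender definitions from the file above.
            PropertyConfigurator.configure("log4j.properties");

            logger.info("written to the console and to xjobcarrier.log");
            logger.debug("also logged, because com.ximple.eofms is set to DEBUG");
        }
    }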
| | |
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-jobcarrier</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | <packaging>jar</packaging> |
| | | <name>ximple-jobcarrier</name> |
| | | <url>http://maven.apache.org</url> |
| | | |
| | | <properties> |
| | | <xdgnio.version>1.2.0</xdgnio.version> |
| | | <xdgnio.version>2.1.1</xdgnio.version> |
| | | </properties> |
| | | |
| | | <scm> |
| | |
| | | <url>http://www.ximple.com.tw</url> |
| | | </organization> |
| | | |
| | | <inceptionYear>2010</inceptionYear> |
| | | <inceptionYear>2012</inceptionYear> |
| | | |
| | | <developers> |
| | | <developer> |
| | |
| | | <contributors> |
| | | </contributors> |
| | | |
| | | <profiles> |
| | | <profile> |
| | | <id>qa</id> |
| | | <build> |
| | | <plugins> |
| | | <plugin> |
| | | <artifactId>maven-dependency-plugin</artifactId> |
| | | <executions> |
| | | <execution> |
| | | <phase>install</phase> |
| | | <goals> |
| | | <goal>copy-dependencies</goal> |
| | | </goals> |
| | | <configuration> |
| | | <outputDirectory>${project.build.directory}/lib</outputDirectory> |
| | | </configuration> |
| | | </execution> |
| | | </executions> |
| | | </plugin> |
| | | </plugins> |
| | | </build> |
| | | </profile> |
| | | </profiles> |
| | | |
| | | <!-- =========================================================== --> |
| | | <!-- Dependencies to be inherited by all modules. --> |
| | | <!-- =========================================================== --> |
| | |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz</artifactId> |
| | | <exclusions> |
| | | <exclusion> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | </exclusion> |
| | | </exclusions> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz-jobs</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-log4j12</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>jcl-over-slf4j</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-opengis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-jdbc</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-postgis</artifactId> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-postgis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-oracle</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-mysql</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-oracle-spatial</artifactId> |
| | | <artifactId>gt-opengis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-mysql</artifactId> |
| | | <artifactId>gt-metadata</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- because main and sample-data depend on referencing we need a tie breaker --> |
| | |
| | | <!-- We need this to make the referencing module useful --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-hsql</artifactId> |
| | | <scope>test</scope> |
| | | <artifactId>gt-epsg-wkt</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | |
| | | <artifactId>sdoutl</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>postgresql</groupId> |
| | | <artifactId>postgresql</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.postgis</groupId> |
| | | <artifactId>postgis-driver</artifactId> |
| | | </dependency> |
| | | </dependency--> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>mysql</groupId> |
| | | <artifactId>mysql-connector-java</artifactId> |
| | | </dependency> |
| | | </dependency--> |
| | | |
| | | <!-- AWL --> |
| | | <dependency> |
| | | <groupId>org.awl</groupId> |
| | | <artifactId>awl</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>xml-apis</groupId> |
| | | <artifactId>xml-apis</artifactId> |
| | | <version>1.4.01</version> |
| | | </dependency> |
| | | |
| | | <!-- Ximple Library --> |
| | |
| | | <dependency> |
| | | <artifactId>ximple-spatialjob</artifactId> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <version>${xdgnio.version}</version> |
| | | <version>${project.version}</version> |
| | | </dependency> |
| | | </dependencies> |
| | | |
| | |
| | | Use maven from the command line: |
| | | mvn exec:java -Dexec.mainClass="com.ximple.eofms.XQuartzJobCarrier" |
| | | --> |
| | | <artifactId>exec-maven-plugin</artifactId> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <!-- |
| | | <artifactId>exec-maven-plugin</artifactId> |
| | | <version>1.2.1</version> |
| | | <executions> |
| | | <execution> |
| | | <phase>package</phase> |
| | | <!--<phase>package</phase> --> |
| | | <goals> |
| | | <goal>run</goal> |
| | | <goal>java</goal> |
| | | </goals> |
| | | </execution> |
| | | </executions> |
| | | --> |
| | | <configuration> |
| | | <mainClass>com.ximple.eofms.XQuartzJobCarrier</mainClass> |
| | | <!-- |
| | | <commandlineArgs>-wizard</commandlineArgs> |
| | | <commandlineArgs>-Dlog4j.configuration=log4j.properties</commandlineArgs> |
| | | --> |
| | | <systemProperties> |
| | | <systemProperty> |
| | | <key>log4j.configuration</key> |
| | | <value>log4j.properties</value> |
| | | </systemProperty> |
| | | </systemProperties> |
| | | </configuration> |
| | | <!-- |
| | | <dependencies> |
| | |
| | | <plugin> |
| | | <groupId>org.codehaus.mojo</groupId> |
| | | <artifactId>native2ascii-maven-plugin</artifactId> |
| | | <version>1.0-alpha-1</version> |
| | | <version>1.0-beta-1</version> |
| | | <configuration> |
| | | <dest>target/classes/com/ximple/eofms</dest> |
| | | <src>src/main/resources/com/ximple/eofms</src> |
| | |
| | | </goals> |
| | | <configuration> |
| | | <encoding>UTF8</encoding> |
| | | <includes>XQuartzJobWizard_zh*.properties</includes> |
| | | <includes> |
| | | <include>XQuartzJobWizard_zh*.properties</include> |
| | | </includes> |
| | | </configuration> |
| | | </execution> |
| | | </executions> |
| | |
| | | --> |
| | | <resource> |
| | | <directory>src/main/resources</directory> |
| | | <!-- |
| | | <includes> |
| | | <include>log4j.properties</include> |
| | | <include>quartz.properties</include> |
| | | <include>quartz_jobs.xml</include> |
| | | <include>quartz_jobs_edb.xml</include> |
| | | <include>quartz_jobs_sharpefiles.xml</include> |
| | | </includes> |
| | | --> |
| | | <!-- |
| | | <excludes> |
| | | <exclude>log4j.properties</exclude> |
| | | <exclude>quartz_jobs.xml</exclude> |
| | | </excludes> |
| | | --> |
| | | <!-- |
| | | <excludes> |
| | | <exclude>XQuartzJobWizard*.properties</exclude> |
New file |
| | |
| | | #===============================================================
|
| | | #Configure Main Scheduler Properties
|
| | | #===============================================================
|
| | | org.quartz.scheduler.instanceName = QuartzScheduler
|
| | | org.quartz.scheduler.instanceId = AUTO
|
| | |
|
| | | #===============================================================
|
| | | #Configure ThreadPool
|
| | | #===============================================================
|
| | | org.quartz.threadPool.threadCount = 5
|
| | | org.quartz.threadPool.threadPriority = 5
|
| | | org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool
|
| | |
|
| | | #===============================================================
|
| | | #Configure JobStore
|
| | | #===============================================================
|
| | | org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore
|
| | | org.quartz.jobStore.misfireThreshold = 60000
|
| | |
|
| | | #===============================================================
|
| | | #Configure Plugins
|
| | | #===============================================================
|
| | | org.quartz.plugin.triggHistory.class = org.quartz.plugins.history.LoggingJobHistoryPlugin
|
| | |
|
| | | org.quartz.plugin.jobInitializer.class: org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin
|
| | | org.quartz.plugin.jobInitializer.fileNames = quartz_jobs.xml
|
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_edb.xml
|
| | | org.quartz.plugin.jobInitializer.failOnFileNotFound = true
|
| | | org.quartz.plugin.jobInitializer.scanInterval = 10
|
| | | org.quartz.plugin.jobInitializer.wrapInUserTransaction = false
|
| | |
|
| | | org.quartz.plugin.shutdownhook.class = org.quartz.plugins.management.ShutdownHookPlugin
|
| | | org.quartz.plugin.shutdownhook.cleanShutdown = true |
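A minimal bootstrap sketch for this configuration, assuming Quartz 2.x as used elsewhere in this changeset; the XMLSchedulingDataProcessorPlugin configured above then loads quartz_jobs.xml when the scheduler starts.

    import org.quartz.Scheduler;
    import org.quartz.impl.StdSchedulerFactory;

    public class SchedulerBootstrapSketch {
        public static void main(String[] args) throws Exception {
            // Picks up the instance name, thread pool, RAMJobStore and plugin settings above.
            Scheduler scheduler = new StdSchedulerFactory("quartz.properties").getScheduler();
            scheduler.start();

            Thread.sleep(60000);      // give the triggers time to fire
            scheduler.shutdown(true); // wait for running jobs to complete
        }
    }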
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts DGN to PostGIS</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <!--value>/Users/Shared/Public/Projects/XGeoDMMS/xjobrun/nstpcjobs/jobdata</value--> |
| | | <value>/mnt/hdisk/home.data/private/projects/xdcad/xjobrun/nntpcjobs/jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>192.168.11.99</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDATBASE</key> |
| | | <value>pgNNTPC</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | | <value>5432</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.10.1.7</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://192.168.11.99:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- repeat-count 0 fires the trigger a single time; repeat-interval is in milliseconds --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
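The job-data-map above is how the scheduled job receives its connection settings. A hedged sketch of reading a few of those keys inside a Quartz job; the class is illustrative, not the actual GeoserverIntegrateConfigJob implementation.

    import org.quartz.Job;
    import org.quartz.JobDataMap;
    import org.quartz.JobExecutionContext;
    import org.quartz.JobExecutionException;

    public class JobDataMapSketch implements Job {
        public void execute(JobExecutionContext context) throws JobExecutionException {
            // Keys correspond to the <entry> elements in the job-data-map above.
            JobDataMap data = context.getMergedJobDataMap();
            String jobDataDir = data.getString("JOBDATA_DIR");
            String pgHost     = data.getString("PGHOST");
            boolean convertDb = Boolean.parseBoolean(data.getString("CONVERTDB"));
            System.out.println("jobdata=" + jobDataDir + " pghost=" + pgHost + " convertdb=" + convertDb);
        }
    }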
| | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.awl.Wizard; |
| | | import org.quartz.DateBuilder; |
| | | import org.quartz.JobBuilder; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.Scheduler; |
| | | import org.quartz.SchedulerException; |
| | | import org.quartz.Trigger; |
| | | import org.quartz.TriggerBuilder; |
| | | import org.quartz.TriggerUtils; |
| | | import org.quartz.impl.JobDetailImpl; |
| | | import org.quartz.impl.StdSchedulerFactory; |
| | | |
| | | import com.ximple.eofms.jobs.OracleConvertDgn2ShpJob; |
| | |
| | | static Log logger = LogFactory.getLog(XQuartzJobCarrier.class); |
| | | static Options options = new Options(); |
| | | |
| | | private static final String VERSION = "0.9.0"; |
| | | private static final String VERSION = "1.3.1"; |
| | | |
| | | public static void main(String[] args) { |
| | | XQuartzJobCarrier instance = new XQuartzJobCarrier(); |
| | |
| | | |
| | | } catch (SchedulerException ex) { |
| | | // deal with any exceptions |
| | | logger.error(ex); |
| | | logger.error(ex, ex); |
| | | shutdown = true; |
| | | } catch (Throwable throwable) { |
| | | logger.error(throwable.getMessage(), throwable); |
| | |
| | | private void scheduleJob(Scheduler scheduler) throws SchedulerException { |
| | | |
| | | // Create a JobDetail for the Job |
| | | JobDetail jobDetail = new JobDetail("ScanDirectory", Scheduler.DEFAULT_GROUP, |
| | | /* |
| | | JobDetailImpl jobDetail = new JobDetailImpl("ScanDirectory", Scheduler.DEFAULT_GROUP, |
| | | OracleConvertDgn2ShpJob.class); |
| | | |
| | | */ |
| | | JobDetail jobDetail = JobBuilder.newJob(OracleConvertDgn2ShpJob.class) |
| | | .withIdentity("ScanDirectory", Scheduler.DEFAULT_GROUP) |
| | | .usingJobData("SCAN_DIR", "c:\\quartz-book\\input") |
| | | .build(); |
| | | // Configure the directory to scan |
| | | jobDetail.getJobDataMap().put("SCAN_DIR", "c:\\quartz-book\\input"); |
| | | // jobDetail.getJobDataMap().put("SCAN_DIR", "c:\\quartz-book\\input"); |
| | | |
| | | // Create a trigger that fires every 10 seconds, forever |
| | | Trigger trigger = TriggerUtils.makeSecondlyTrigger(10); |
| | | trigger.setName("scanTrigger"); |
| | | // Trigger trigger = TriggerUtils.makeSecondlyTrigger(10); |
| | | // trigger.setName("scanTrigger"); |
| | | Trigger trigger = TriggerBuilder.newTrigger().withIdentity("scanTrigger") |
| | | .startAt(DateBuilder.futureDate(10, DateBuilder.IntervalUnit.SECOND)) |
| | | .build(); |
| | | // Start the trigger firing from now |
| | | trigger.setStartTime(new Date()); |
| | | // trigger.setStartTime(new Date()); |
| | | |
| | | // Associate the trigger with the job in the scheduler |
| | | scheduler.scheduleJob(jobDetail, trigger); |
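One caveat on the rebuilt trigger above: with no schedule attached it fires a single time at its start date, whereas the old TriggerUtils.makeSecondlyTrigger(10) repeated every 10 seconds. A sketch of a repeating equivalent under the Quartz 2.x builder API, offered as an assumption about the intended behaviour.

    import org.quartz.DateBuilder;
    import org.quartz.Scheduler;
    import org.quartz.SimpleScheduleBuilder;
    import org.quartz.Trigger;
    import org.quartz.TriggerBuilder;

    public class RepeatingTriggerSketch {
        public static Trigger build() {
            // Start 10 seconds from now, then fire every 10 seconds until shutdown.
            return TriggerBuilder.newTrigger()
                    .withIdentity("scanTrigger", Scheduler.DEFAULT_GROUP)
                    .startAt(DateBuilder.futureDate(10, DateBuilder.IntervalUnit.SECOND))
                    .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                            .withIntervalInSeconds(10)
                            .repeatForever())
                    .build();
        }
    }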
| | |
| | | title=Ximple Quartz Job Wizard |
| | | |
| | | first.title=空間資料轉換 |
| | | first.description=空間資料轉換將會讀取 OMS/CMMS 空間資料庫內容轉換至 PostGIS 空間資料庫 |
| | | first.label.text=<html>This wizard will not install anything on your computer.<br/>It only demonstrates <b>Awl</b> functionalities.</html> |
| | | |
| | | second.title=版權聲明 |
| | | second.description=Accept the license of this software |
| | | second.licenseAccepted=I accept the terms of this license agreement |
| | | second.licenseRefused=I do not accept the terms of this license agreement |
| | | second.messageContent=Accept the terms of the license |
| | | |
| | | third.title=組態設定位置 |
| | | third.description=Select the location where to install this software |
| | | third.label.text=Install location |
| | | third.fileChooser.selectLabel=Select |
| | | third.fileChooser.description=Select |
| | | third.messageContent=You must choose a valid location |
| | | |
| | | fourth.title=開始轉檔... |
| | | fourth.description=Installation progress |
| | | fourth.inProgress.text=Installation in progress... |
| | | fourth.finished.text=Installation finished |
| | |
| | | |
| | | # Print messages of level INFO or above for examples |
| | | log4j.logger.org.cavaness.quartzbook=INFO |
| | | log4j.logger.com.ximple.eofms=INFO |
| | | |
| | | log4j.logger.it.geosolutions.geoserver=INFO |
| | |
| | | #Configure JobStore |
| | | #=============================================================== |
| | | org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore |
| | | org.quartz.jobStore.misfireThreshold = 60000 |
| | | |
| | | #=============================================================== |
| | | #Configure Plugins |
| | | #=============================================================== |
| | | org.quartz.plugin.jobInitializer.class = org.quartz.plugins.xml.JobInitializationPlugin |
| | | org.quartz.plugin.triggHistory.class = org.quartz.plugins.history.LoggingJobHistoryPlugin |
| | | |
| | | org.quartz.plugin.jobInitializer.fileName = quartz_jobs.xml |
| | | org.quartz.plugin.jobInitializer.class = org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin |
| | | org.quartz.plugin.jobInitializer.fileNames = quartz_jobs.xml |
| | | #org.quartz.plugin.jobInitializer.fileNames = quartz_jobs_edb.xml |
| | | |
| | | org.quartz.plugin.jobInitializer.overWriteExistingJobs = true |
| | | org.quartz.plugin.jobInitializer.failOnFileNotFound = true |
| | | org.quartz.plugin.jobInitializer.validating=false |
| | | org.quartz.plugin.jobInitializer.scanInterval = 10 |
| | | org.quartz.plugin.jobInitializer.wrapInUserTransaction = false |
| | | |
| | | org.quartz.plugin.shutdownhook.class = org.quartz.plugins.management.ShutdownHookPlugin |
| | | org.quartz.plugin.shutdownhook.cleanShutdown = true |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <quartz xmlns="http://www.opensymphony.com/quartz/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.opensymphony.com/quartz/JobSchedulingData |
| | | http://www.opensymphony.com/quartz/xml/job_scheduling_data_1_5.xsd" |
| | | version="1.5"> |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <job> |
| | | <job-detail> |
| | | <name>ConvertDgn2PostGisIntoPostgre</name> |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDMMS2PostGisWithGeoserver</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to shapefiles</description> |
| | | <job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class> |
| | | <volatility>false</volatility> |
| | | <description>A job that converts dgn to postgis</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2PostGISJob</job-class--> |
| | | <job-class>com.ximple.eofms.jobs.GeoserverIntegrateConfigJob</job-class> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwnerJob</job-class--> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleTransformColorOwner2CSVJob</job-class--> |
| | | <!--volatility>false</volatility--> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <job-data-map allows-transient-data="true"> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>C:\DBS\XDGNDATA</value> |
| | | <value>/Users/Shared/Public/Projects/XGeoDMMS/xjobrun/tctpcjobs/jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGHOST</key> |
| | | <value>127.0.0.1</value> |
| | | <value>10.10.1.17</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGDDATBASE</key> |
| | | <value>tpc</value> |
| | | <key>PGDATBASE</key> |
| | | <value>pgDMMS</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPORT</key> |
| | |
| | | </entry> |
| | | <entry> |
| | | <key>PGUSER</key> |
| | | <value>spatialdb</value> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PGPASS</key> |
| | | <value>spatialdb000</value> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>10.206.120.190</value> |
| | | <value>10.10.1.17</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>nntpc</value> |
| | | <value>orcl</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>spatialdb</value> |
| | | <value>system</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>spatialdb000</value> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <!--value>SPATIALDB</value--> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>true</value> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTPWTHEMES</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEEPSG3826</key> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_URL</key> |
| | | <value>http://10.10.1.17:8080/geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_USER</key> |
| | | <value>admin</value> |
| | | </entry> |
| | | <entry> |
| | | <key>GEOSERVER_PASS</key> |
| | | <value>geoserver</value> |
| | | </entry> |
| | | <entry> |
| | | <key>IGNORE_DBETL</key> |
| | | <value>false</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job-detail> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDgn2PostGisIntoPostgre</job-name> |
| | | <job-name>ConvertDMMS2PostGisWithGeoserver</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2008-03-01T18:10:00</start-time> |
| | | <start-time>2013-03-01T18:00:00</start-time> |
| | | <!-- fire once: repeat-count 0, repeat-interval 500 ms --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </job> |
| | | </quartz> |
| | | </schedule> |
| | | |
| | | </job-scheduling-data> |
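Editor's note: a hedged sketch of how the job-data-map entries above reach a job at run time; the class below is hypothetical, while the real job classes named in this file presumably read the same keys through the Quartz JobDataMap API.

    import org.quartz.Job;
    import org.quartz.JobDataMap;
    import org.quartz.JobExecutionContext;
    import org.quartz.JobExecutionException;

    public class ConvertJobSketch implements Job {
        public void execute(JobExecutionContext context) throws JobExecutionException {
            JobDataMap data = context.getMergedJobDataMap();
            String jobDataDir = data.getString("JOBDATA_DIR"); // working directory for the conversion
            String pgHost = data.getString("PGHOST");
            boolean convertDb = Boolean.parseBoolean(data.getString("CONVERTDB"));
            // ... open the Oracle source and the PostGIS target and run the conversion here
        }
    }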
New file |
| | |
| | | <?xml version='1.0' encoding='utf-8'?> |
| | | |
| | | <job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" |
| | | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| | | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd" |
| | | version="1.8"> |
| | | |
| | | <pre-processing-commands> |
| | | <delete-jobs-in-group>*</delete-jobs-in-group> |
| | | <!-- clear all jobs in scheduler --> |
| | | <delete-triggers-in-group>*</delete-triggers-in-group> |
| | | <!-- clear all triggers in scheduler --> |
| | | </pre-processing-commands> |
| | | |
| | | <processing-directives> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), overwrite them --> |
| | | <overwrite-existing-data>true</overwrite-existing-data> |
| | | <!-- if there are any jobs/trigger in scheduler of same name (as in this file), and over-write is false, ignore them rather than generating an error --> |
| | | <ignore-duplicates>false</ignore-duplicates> |
| | | </processing-directives> |
| | | |
| | | <schedule> |
| | | <job> |
| | | <name>ConvertDgn2GeoSpatialIntoEdb</name> |
| | | <group>DEFAULT</group> |
| | | <description>A job that converts dgn to gdb spatial</description> |
| | | <!--job-class>com.ximple.eofms.jobs.OracleConvertDgn2EdbGeoJob</job-class--> |
| | | <job-class>com.ximple.eofms.jobs.OracleConvertDgn2EdbGeoJob</job-class> |
| | | <volatility>false</volatility> |
| | | <durability>false</durability> |
| | | <recover>false</recover> |
| | | <!--job-data-map allows-transient-data="true"--> |
| | | <job-data-map> |
| | | <entry> |
| | | <key>JOBDATA_DIR</key> |
| | | <value>C:\Usr\Projects\XDCAD\nstpcjobs\jobdata</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBHOST</key> |
| | | <value>192.168.11.99</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBDATBASE</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBPORT</key> |
| | | <value>5444</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBSCHEMA</key> |
| | | <value>public</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBUSER</key> |
| | | <value>tpcdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>EDBPASS</key> |
| | | <value>simple000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAHOST</key> |
| | | <value>192.168.11.200</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAINST</key> |
| | | <value>nntpc</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPORT</key> |
| | | <value>1521</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAUSER</key> |
| | | <value>spatialdb</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORAPASS</key> |
| | | <value>spatialdb000</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ORGSCHEMA</key> |
| | | <value>SPATIALDB, CMMS_SPATIALDB</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTDB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTFILE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CONVERTELEMIN</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>CREATEDUMMY</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>ELEMLOG</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEWKB</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>TESTCOUNT</key> |
| | | <value>2</value> |
| | | </entry> |
| | | <entry> |
| | | <key>COPYCONNECTIVITYMODE</key> |
| | | <value>false</value> |
| | | </entry> |
| | | <entry> |
| | | <key>PROFILEMODE</key> |
| | | <value>true</value> |
| | | </entry> |
| | | <entry> |
| | | <key>USEZONE121</key> |
| | | <value>true</value> |
| | | </entry> |
| | | </job-data-map> |
| | | </job> |
| | | |
| | | <trigger> |
| | | <simple> |
| | | <name>convertTrigger</name> |
| | | <group>DEFAULT</group> |
| | | <job-name>ConvertDgn2GeoSpatialIntoEdb</job-name> |
| | | <job-group>DEFAULT</job-group> |
| | | <start-time>2008-03-01T18:10:00</start-time> |
| | | <!-- fire once: repeat-count 0, repeat-interval 500 ms --> |
| | | <repeat-count>0</repeat-count> |
| | | <repeat-interval>500</repeat-interval> |
| | | <!-- <repeat-interval>72000000</repeat-interval> --> |
| | | </simple> |
| | | </trigger> |
| | | |
| | | </schedule> |
| | | </job-scheduling-data> |
| | |
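Editor's note: the job/trigger pair above could also be registered programmatically; a rough sketch using the Quartz 2.x builder API (an assumption — the XML targets the 1.8 schema, and the sketch presumes OracleConvertDgn2EdbGeoJob implements org.quartz.Job).

    import org.quartz.JobBuilder;
    import org.quartz.JobDetail;
    import org.quartz.Scheduler;
    import org.quartz.SchedulerException;
    import org.quartz.SimpleScheduleBuilder;
    import org.quartz.Trigger;
    import org.quartz.TriggerBuilder;
    import com.ximple.eofms.jobs.OracleConvertDgn2EdbGeoJob;

    public class EdbScheduleSketch {
        static void schedule(Scheduler scheduler) throws SchedulerException {
            JobDetail job = JobBuilder.newJob(OracleConvertDgn2EdbGeoJob.class)
                    .withIdentity("ConvertDgn2GeoSpatialIntoEdb", "DEFAULT")
                    .usingJobData("EDBHOST", "192.168.11.99")
                    .usingJobData("EDBPORT", "5444")
                    .build();
            Trigger trigger = TriggerBuilder.newTrigger()
                    .withIdentity("convertTrigger", "DEFAULT")
                    .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                            .withIntervalInMilliseconds(500)
                            .withRepeatCount(0)) // fire once, matching repeat-count 0 above
                    .startNow()
                    .build();
            scheduler.scheduleJob(job, trigger);
        }
    }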
| | | <parent> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-dgnjobs</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | </parent> |
| | | |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>ximple-spatialjob</artifactId> |
| | | <version>1.2.0</version> |
| | | <version>2.1.1</version> |
| | | <packaging>jar</packaging> |
| | | <name>ximple-spatialjob</name> |
| | | <url>http://www.ximple.com.tw</url> |
| | | |
| | | <properties> |
| | | <xdgnio.version>1.2.0</xdgnio.version> |
| | | <xdgnio.version>2.1.1</xdgnio.version> |
| | | </properties> |
| | | |
| | | <description> |
| | |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.quartz-scheduler</groupId> |
| | | <artifactId>quartz-jobs</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-api</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>slf4j-log4j12</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.slf4j</groupId> |
| | | <artifactId>jcl-over-slf4j</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | |
| | | <artifactId>gt-jdbc</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-postgis</artifactId> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-postgis</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-oracle-spatial</artifactId> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-oracle</artifactId> |
| | | </dependency> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-mysql</artifactId> |
| | | <groupId>org.geotools.jdbc</groupId> |
| | | <artifactId>gt-jdbc-mysql</artifactId> |
| | | </dependency> |
| | | |
| | | <!-- because main and sample-data depend on referencing we need a tie breaker --> |
| | |
| | | <!-- We need this to make the referencing module useful --> |
| | | <dependency> |
| | | <groupId>org.geotools</groupId> |
| | | <artifactId>gt-epsg-hsql</artifactId> |
| | | <scope>test</scope> |
| | | <artifactId>gt-epsg-wkt</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | |
| | | <artifactId>sdoutl</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <!--dependency> |
| | | <groupId>postgresql</groupId> |
| | | <artifactId>postgresql</artifactId> |
| | | </dependency> |
| | | </dependency--> |
| | | <dependency> |
| | | <groupId>org.postgis</groupId> |
| | | <artifactId>postgis-driver</artifactId> |
| | | </dependency> |
| | | |
| | | <dependency> |
| | | <groupId>com.ximple.eofms</groupId> |
| | | <artifactId>xedb-gt-geospatial</artifactId> |
| | | </dependency> |
| | | |
| | | <!--dependency> |
| | | <groupId>mysql</groupId> |
| | | <artifactId>mysql-connector-java</artifactId> |
| | | </dependency--> |
| | | |
| | | <dependency> |
| | | <groupId>net.sf.opencsv</groupId> |
| | | <artifactId>opencsv</artifactId> |
| | | <version>2.3</version> |
| | | </dependency> |
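Editor's note: with gt-jdbc-postgis on the classpath (replacing gt-postgis above), the conversion jobs can obtain a PostGIS DataStore through the standard GeoTools lookup; a minimal sketch, where the parameter keys are the stock GeoTools ones and the port/schema values are assumptions (PGPORT is left blank in quartz_jobs.xml).

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;
    import org.geotools.data.DataStore;
    import org.geotools.data.DataStoreFinder;

    public class PostGisConnectSketch {
        public static DataStore connect() throws IOException {
            Map<String, Object> params = new HashMap<String, Object>();
            params.put("dbtype", "postgis");      // selects the gt-jdbc-postgis factory
            params.put("host", "10.10.1.17");     // PGHOST from quartz_jobs.xml
            params.put("port", 5432);             // assumption: PGPORT is not set in the job data
            params.put("database", "pgDMMS");     // PGDATBASE
            params.put("schema", "public");       // assumption
            params.put("user", "tpcdb");          // PGUSER
            params.put("passwd", "simple000");    // PGPASS
            return DataStoreFinder.getDataStore(params);
        }
    }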
| | | |
| | | <!-- Ximple Library --> |
| | |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | |
| | | public class CreateArcLineStringStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateArcLineStringStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | ArcElement lineStringElement = (ArcElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineStringElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineStringElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineStringElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineStringElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | |
| | | public class CreateComplexChainStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateComplexChainStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | if (element instanceof LineStringElement) { |
| | | LineStringElement lineStringElement = (LineStringElement) element; |
| | | Geometry gobj; |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineStringElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineStringElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | convertDecorator.setConverter(lineStringElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineStringElement.toGeometry(geometryFactory); |
| | | } |
| | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | ComplexChainElement complexChain = (ComplexChainElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(complexChain); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(complexChain); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(complexChain); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else |
| | | gobj = complexChain.toGeometry(geometryFactory); |
| | | if (gobj instanceof LineString) { |
| | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | LineElement lineElement = (LineElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineElement.toGeometry(geometryFactory); |
| | | } |
| | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | ArcElement arcElement = (ArcElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(arcElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(arcElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(arcElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = arcElement.toGeometry(geometryFactory); |
| | | } |
| | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | |
| | | public class CreateEllipseShapeStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateEllipseShapeStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | EllipseElement ellipseElement = (EllipseElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(ellipseElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(ellipseElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(ellipseElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = ellipseElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import com.ximple.io.dgn7.Element; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException; |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException; |
| | | short distId, boolean useTransform) throws IllegalAttributeException; |
| | | |
| | | public void addCreateFeatureTypeEventListener(CreateFeatureTypeEventListener listener); |
| | | |
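Editor's note: a hypothetical caller of the revised CreateFeatureTypeStrategy contract above, showing the new distId parameter that replaces the old useEPSG3826 flag (the feature-type name below is illustrative).

    import com.ximple.io.dgn7.Element;
    import org.geotools.feature.SchemaException;
    import org.opengis.feature.IllegalAttributeException;
    import org.opengis.feature.simple.SimpleFeature;
    import org.opengis.feature.simple.SimpleFeatureType;

    class StrategyCallerSketch {
        SimpleFeature convertOne(CreateFeatureTypeStrategy strategy, Element element, short distId)
                throws SchemaException, IllegalAttributeException {
            SimpleFeatureType featureType = strategy.createFeatureElement("fsc106LineString"); // illustrative name
            return strategy.createFeature(featureType, element, distId, true); // true = reproject via the default converter
        }
    }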
| | |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | |
| | | public class CreateLineStringStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateLineStringStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | LineStringElement lineStringElement = (LineStringElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineStringElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineStringElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineStringElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineStringElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | ComplexChainElement complexChain = (ComplexChainElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(complexChain); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(complexChain); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(complexChain); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = complexChain.toGeometry(geometryFactory); |
| | | } |
| | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | LineElement lineElement = (LineElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | ArcElement arcElement = (ArcElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(arcElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(arcElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(arcElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = arcElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | |
| | | public class CreateLineTextStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateLineTextStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | LineStringElement lineStringElement = (LineStringElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineStringElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineStringElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineStringElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineStringElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | ptEnd.y = ptOrigin.y + txtElement.getTextHeight(); |
| | | Coordinate[] vect = new Coordinate[2]; |
| | | if (useTransform) { |
| | | vect[0] = useEPSG3826 ? |
| | | vect[0] = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) ? |
| | | TWDDatumConverter.fromTM2ToEPSG3826(ptOrigin) : |
| | | TWDDatumConverter.fromTM2ToEPSG3825(ptOrigin); |
| | | vect[1] = useEPSG3826 ? |
| | | vect[1] = FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | TWDDatumConverter.fromTM2ToEPSG3826(ptEnd) : |
| | | TWDDatumConverter.fromTM2ToEPSG3825(ptEnd); |
| | | } else { |
| | |
| | | |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | line, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | ComplexChainElement complexChain = (ComplexChainElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(complexChain); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(complexChain); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(complexChain); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = complexChain.toGeometry(geometryFactory); |
| | | } |
| | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | LineElement lineElement = (LineElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(lineElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(lineElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(lineElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = lineElement.toGeometry(geometryFactory); |
| | | } |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | |
| | | public class CreateMultiSymbolStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateMultiSymbolStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826AnchorGeometryConverterDecorator(), |
| | | new EPSG3825AnchorGeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | String content = txtElement.getText().trim(); |
| | | if (content.length() == 0) { |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + |
| | | "to Feature - getText() is empty."); |
| | | "to Feature - getText() is empty."); |
| | | return null; |
| | | } |
| | | StringBuilder sb = new StringBuilder(); |
| | |
| | | |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(txtElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(txtElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = txtElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | } else if (element instanceof TextNodeElement) { |
| | | TextNodeElement nodeElement = (TextNodeElement) element; |
| | | |
| | | |
| | | double angle = nodeElement.getRotationAngle(); |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | if (nodeElement.size() == 0) { |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + |
| | | "to Feature - getText() is empty."); |
| | | "to Feature - getText() is empty."); |
| | | return null; |
| | | } |
| | | |
| | | Iterator<Element> txtElement = nodeElement.iterator(); |
| | | |
| | | while(txtElement.hasNext()) |
| | | { |
| | | TextElement txtChildElement = (TextElement) element; |
| | | |
| | | char[] charArray = txtChildElement.getText().toCharArray(); |
| | | while (txtElement.hasNext()) { |
| | | Element childElement = txtElement.next(); |
| | | if (childElement instanceof TextElement) { |
| | | TextElement txtChildElement = (TextElement) childElement; |
| | | char[] charArray = txtChildElement.getText().toCharArray(); |
| | | |
| | | |
| | | if (charArray.length == 0) { |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + |
| | | "to Feature - getText() is empty."); |
| | | return null; |
| | | } |
| | | if (charArray.length == 0) { |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + |
| | | "to Feature - getText() is empty."); |
| | | return null; |
| | | } |
| | | |
| | | |
| | | for(int i = 0 ; i < charArray.length ; i++) |
| | | { |
| | | for (int i = 0; i < charArray.length; i++) { |
| | | |
| | | StringBuilder sb = new StringBuilder(); |
| | | sb.append("OCT"); |
| | | char id = charArray[i]; |
| | | sb.append(Integer.toOctalString((int) id)); |
| | | sb.append("-"); |
| | | sb.append(txtChildElement.getFontIndex()); |
| | | StringBuilder sb = new StringBuilder(); |
| | | sb.append("OCT"); |
| | | char id = charArray[i]; |
| | | sb.append(Integer.toOctalString((int) id)); |
| | | sb.append("-"); |
| | | sb.append(txtChildElement.getFontIndex()); |
| | | |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(txtChildElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtChildElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = txtChildElement.toGeometry(geometryFactory); |
| | | } |
| | | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | | fLinkage.getOccID(), |
| | | (short) txtChildElement.getLevelIndex(), |
| | | colorTable.getColorCode(nodeElement.getColorIndex()), |
| | | (short) txtChildElement.getWeight(), |
| | | (short) txtChildElement.getLineStyle(), |
| | | (short) txtChildElement.getJustification(), |
| | | (float) txtChildElement.getTextHeight(), |
| | | (float) txtChildElement.getTextWidth(), |
| | | (float) angle, |
| | | sb.toString() |
| | | }, null); |
| | | } |
| | | } else { |
| | | convertDecorator[1].setConverter(txtChildElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + "to Feature"); |
| | | return null; |
| | | } |
| | | } else { |
| | | gobj = txtChildElement.toGeometry(geometryFactory); |
| | | } |
| | | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | | fLinkage.getOccID(), |
| | | (short) txtChildElement.getLevelIndex(), |
| | | colorTable.getColorCode(nodeElement.getColorIndex()), |
| | | (short) txtChildElement.getWeight(), |
| | | (short) txtChildElement.getLineStyle(), |
| | | (short) txtChildElement.getJustification(), |
| | | (float) txtChildElement.getTextHeight(), |
| | | (float) txtChildElement.getTextWidth(), |
| | | (float) angle, |
| | | sb.toString() |
| | | }, null); |
| | | } |
| | | } |
| | | } else { |
| | | logger.info("CreateMultiSymbolStrategy cannot conver " + element.toString() + "to Feature"); |
| | | return null; |
| | | } |
| | | return feature; |
| | | } |
| | |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | |
| | | public class CreateShapeStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateShapeStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | ShapeElement shapeElement = (ShapeElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(shapeElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(shapeElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(shapeElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = shapeElement.toGeometry(geometryFactory); |
| | | } |
| | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | ComplexShapeElement complexShape = (ComplexShapeElement) element; |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(complexShape); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(complexShape); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(complexShape); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = complexShape.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | package com.ximple.eofms.filter; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.Envelope; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.vividsolutions.jts.geom.Point; |
| | | import com.ximple.eofms.util.*; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | import javax.swing.event.EventListenerList; |
| | | import java.awt.geom.AffineTransform; |
| | | import java.math.BigDecimal; |
| | | import java.math.RoundingMode; |
| | | import java.util.List; |
| | |
| | | |
| | | public class CreateSymbolStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateSymbolStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | sb.append("-"); |
| | | sb.append(txtElement.getFontIndex()); |
| | | |
| | | Geometry gobj; |
| | | Geometry gobj, geomOrigin = null; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(txtElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(txtElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = txtElement.toGeometry(geometryFactory); |
| | | } |
| | | |
| | | if (gobj instanceof Point) { |
| | | geomOrigin = gobj; |
| | | Coordinate originPt = gobj.getCoordinate(); |
| | | gobj = buildBoundaryPolygon(originPt, // txtElement.getOrigin() |
| | | txtElement.getTextWidth(), txtElement.getTextHeight(), |
| | | angle, txtElement.getJustification(), gobj.getSRID()); |
| | | } else { |
| | | gobj = null; |
| | | } |
| | | |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | (float) txtElement.getTextHeight(), |
| | | (float) txtElement.getTextWidth(), |
| | | (float) angle, |
| | | sb.toString() |
| | | sb.toString(), |
| | | geomOrigin |
| | | }, null); |
| | | } else { |
| | | logger.info("CreateSymbolStrategy cannot conver " + element.toString() + "to Feature"); |
| | |
| | | } |
| | | } |
| | | } |
| | | |
| | | private Geometry buildBoundaryPolygon(Coordinate p, double symbolWidth, double symbolHeight, double rotationAngle, |
| | | int justification, int srid) { |
| | | double angle = Math.toRadians(rotationAngle); |
| | | |
| | | AffineTransform at = new AffineTransform(); |
| | | // at.translate(width, height); |
| | | at.setToRotation(angle, p.x, p.y); |
| | | at.scale(1, 1); |
| | | |
| | | double width = symbolWidth; |
| | | switch (justification) { |
| | | case TextElement.TXTJUST_LT: |
| | | case TextElement.TXTJUST_LC: |
| | | case TextElement.TXTJUST_LB: |
| | | width = width / 2; |
| | | break; |
| | | |
| | | case TextElement.TXTJUST_CT: |
| | | case TextElement.TXTJUST_CC: |
| | | case TextElement.TXTJUST_CB: |
| | | width = 0; |
| | | break; |
| | | |
| | | case TextElement.TXTJUST_RT: |
| | | case TextElement.TXTJUST_RC: |
| | | case TextElement.TXTJUST_RB: |
| | | width = -(width / 2); |
| | | break; |
| | | } |
| | | |
| | | double height = symbolHeight; |
| | | switch (justification) { |
| | | case TextElement.TXTJUST_LB: |
| | | case TextElement.TXTJUST_CB: |
| | | case TextElement.TXTJUST_RB: // bottom |
| | | height = height / 2; |
| | | break; |
| | | |
| | | case TextElement.TXTJUST_LC: |
| | | case TextElement.TXTJUST_CC: |
| | | case TextElement.TXTJUST_RC: // center |
| | | height = 0; |
| | | break; |
| | | |
| | | case TextElement.TXTJUST_LT: |
| | | case TextElement.TXTJUST_CT: |
| | | case TextElement.TXTJUST_RT: // height |
| | | height = -(height / 2); |
| | | break; |
| | | } |
| | | |
| | | |
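| | | // center an axis-aligned box of symbolWidth x symbolHeight on the justification-adjusted anchor, then rotate its corners about p via 'at' |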
| | | Envelope envelope = new Envelope(new Coordinate(p.x + width, p.y + height)); |
| | | envelope.expandBy(symbolWidth / 2, symbolHeight / 2); |
| | | |
| | | double[] srcPt = new double[8]; |
| | | double[] dstPt = new double[8]; |
| | | srcPt[0] = envelope.getMinX(); |
| | | srcPt[1] = envelope.getMinY(); |
| | | srcPt[2] = envelope.getMinX(); |
| | | srcPt[3] = envelope.getMaxY(); |
| | | srcPt[4] = envelope.getMaxX(); |
| | | srcPt[5] = envelope.getMaxY(); |
| | | srcPt[6] = envelope.getMaxX(); |
| | | srcPt[7] = envelope.getMinY(); |
| | | |
| | | at.transform(srcPt, 0, dstPt, 0, 4); |
| | | |
| | | Coordinate[] coords = new Coordinate[5]; |
| | | for (int i = 0; i < 4; i++) { |
| | | coords[i] = new Coordinate(dstPt[i*2], dstPt[i*2+1]); |
| | | } |
| | | coords[4] = new Coordinate(dstPt[0], dstPt[1]); |
| | | |
| | | Geometry geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(coords), null); |
| | | if (geom.getSRID() != srid) { |
| | | geom.setSRID(srid); |
| | | } |
| | | return geom; |
| | | } |
| | | } |
| | | |
| | |
| | | import com.ximple.io.dgn7.*; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | |
| | | public class CreateTextStrategy implements CreateFeatureTypeStrategy { |
| | | static final Log logger = LogFactory.getLog(CreateTextStrategy.class); |
| | | GeometryFactory geometryFactory = new GeometryFactory(); |
| | | GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | TreeMap<String, SimpleFeatureType> typeBuilders = new TreeMap<String, SimpleFeatureType>(); |
| | | static final GeometryConverterDecorator convertDecorator[] = new GeometryConverterDecorator[]{ |
| | | new EPSG3826GeometryConverterDecorator(), |
| | | new EPSG3825GeometryConverterDecorator() |
| | | }; |
| | | |
| | | // Create the listener list |
| | | protected EventListenerList listenerList = new EventListenerList(); |
| | |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element, |
| | | boolean useTransform, boolean useEPSG3826) throws IllegalAttributeException { |
| | | short distId, boolean useTransform) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | FrammeAttributeData fLinkage = getFeatureLinkage(element); |
| | | SimpleFeature feature = null; |
| | |
| | | content = content.replace('\u0000', ' '); |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(txtElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(txtElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = txtElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | Geometry gobj; |
| | | if (useTransform) { |
| | | if (useEPSG3826) { |
| | | convertDecorator[0].setConverter(nodeElement); |
| | | gobj = convertDecorator[0].toGeometry(geometryFactory); |
| | | } else { |
| | | convertDecorator[1].setConverter(nodeElement); |
| | | gobj = convertDecorator[1].toGeometry(geometryFactory); |
| | | } |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(nodeElement); |
| | | gobj = convertDecorator.toGeometry(geometryFactory); |
| | | } else { |
| | | gobj = nodeElement.toGeometry(geometryFactory); |
| | | } |
| | | if (gobj != null) |
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | gobj, |
| | | distId, |
| | | fLinkage.getFsc(), |
| | | (long) fLinkage.getUfid(), |
| | | (short) fLinkage.getComponentID(), |
| | |
| | | public interface ElementDispatchableFilter { |
| | | public boolean isDispatchable(Element element); |
| | | |
| | | public SimpleFeature execute(Element element, boolean useTransform, boolean useEPSG3826); |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform); |
| | | |
| | | void setUseLongName(boolean useLongName); |
| | | |
| | |
| | | } |
| | | } |
| | | |
| | | public SimpleFeature execute(Element element, boolean useTransform, boolean useEPSG3826) { |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform) { |
| | | for (ElementDispatchableFilter rule : rules) { |
| | | if (rule.isDispatchable(element)) { |
| | | return rule.execute(element, useTransform, useEPSG3826); |
| | | return rule.execute(element, distId, useTransform); |
| | | } |
| | | } |
| | | return null; |
| | |
| | | import com.ximple.eofms.util.StringUtils; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | (compareType(element) == 0); |
| | | } |
| | | |
| | | public SimpleFeature execute(Element element, boolean useTransform, boolean useEPSG3826) { |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform) { |
| | | try { |
| | | String ftName = getFeatureTypeName(element); |
| | | SimpleFeatureType ftype = createStrategy.createFeatureElement(ftName); |
| | | return createStrategy.createFeature(ftype, element, useTransform, useEPSG3826); |
| | | return createStrategy.createFeature(ftype, element, distId, useTransform); |
| | | } catch (SchemaException e) { |
| | | logger.error(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | |
| | | import com.ximple.eofms.util.StringUtils; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | (lid == element.getLevelIndex()) && (compareLevel(element) == 0); |
| | | } |
| | | |
| | | public SimpleFeature execute(Element element, boolean useTransform, boolean useEPSG3826) { |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform) { |
| | | try { |
| | | String ftName = getFeatureTypeName(element); |
| | | SimpleFeatureType ftype = createStrategy.createFeatureElement(ftName); |
| | | return createStrategy.createFeature(ftype, element, useTransform, useEPSG3826); |
| | | return createStrategy.createFeature(ftype, element, distId, useTransform); |
| | | } catch (SchemaException e) { |
| | | logger.error(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
| | |
| | | import com.ximple.eofms.util.StringUtils; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | |
| | | (compareType(element) == 0); |
| | | } |
| | | |
| | | public SimpleFeature execute(Element element, boolean useTransform, boolean useEPSG3826) { |
| | | public SimpleFeature execute(Element element, short distId, boolean useTransform) { |
| | | try { |
| | | String ftName = getFeatureTypeName(element); |
| | | SimpleFeatureType ftype = createStrategy.createFeatureElement(ftName); |
| | | return createStrategy.createFeature(ftype, element, useTransform, useEPSG3826); |
| | | return createStrategy.createFeature(ftype, element, distId, useTransform); |
| | | } catch (SchemaException e) { |
| | | logger.error(e.getMessage(), e); |
| | | } catch (IllegalAttributeException e) { |
New file |
| | |
| | | package com.ximple.eofms.geoserver.config; |
| | | |
| | | public class XGeosDataConfig { |
| | | private String PG; |
| | | private short FSC; |
| | | private short COMP; |
| | | private short LEV; |
| | | private short WEIGHT; |
| | | private String FTYPE; |
| | | |
| | | public XGeosDataConfig() { |
| | | } |
| | | |
| | | public String getPG() { |
| | | return PG; |
| | | } |
| | | |
| | | public void setPG(String PG) { |
| | | this.PG = PG; |
| | | } |
| | | |
| | | public short getFSC() { |
| | | return FSC; |
| | | } |
| | | |
| | | public void setFSC(short FSC) { |
| | | this.FSC = FSC; |
| | | } |
| | | |
| | | public short getCOMP() { |
| | | return COMP; |
| | | } |
| | | |
| | | public void setCOMP(short COMP) { |
| | | this.COMP = COMP; |
| | | } |
| | | |
| | | public short getLEV() { |
| | | return LEV; |
| | | } |
| | | |
| | | public void setLEV(short LEV) { |
| | | this.LEV = LEV; |
| | | } |
| | | |
| | | public short getWEIGHT() { |
| | | return WEIGHT; |
| | | } |
| | | |
| | | public void setWEIGHT(short WEIGHT) { |
| | | this.WEIGHT = WEIGHT; |
| | | } |
| | | |
| | | public String getFTYPE() { |
| | | return FTYPE; |
| | | } |
| | | |
| | | public void setFTYPE(String FTYPE) { |
| | | this.FTYPE = FTYPE; |
| | | } |
| | | |
| | | public String toString() { |
| | | return "XGeosDataConfig{" + |
| | | "PG='" + (PG != null ? PG : "null") + '\'' + |
| | | ", FSC=" + FSC + |
| | | ", COMP=" + COMP + |
| | | ", LEV=" + LEV + |
| | | ", WEIGHT=" + WEIGHT + |
| | | ", FTYPE='" + (FTYPE != null ? FTYPE : "null") + '\'' + |
| | | '}'; |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.geoserver.config; |
| | | |
| | | import org.apache.commons.collections.MultiMap; |
| | | import org.apache.commons.collections.map.MultiValueMap; |
| | | |
| | | public class XGeosDataConfigMapping { |
| | | private MultiValueMap mappings; |
| | | |
| | | public XGeosDataConfigMapping() { |
| | | mappings = new MultiValueMap(); |
| | | } |
| | | |
| | | public void addConfig(XGeosDataConfig config) { |
| | | mappings.put(config.getPG(), config); |
| | | } |
| | | |
| | | public MultiMap getMapping() { |
| | | return mappings; |
| | | } |
| | | } |
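| | | // Usage sketch (illustrative values, hypothetical caller): configs sharing a PG key |
| | | // accumulate in the MultiValueMap, so getMapping().get(pg) returns a java.util.List. |
| | | // |
| | | //   XGeosDataConfigMapping mapping = new XGeosDataConfigMapping(); |
| | | //   XGeosDataConfig cfg = new XGeosDataConfig(); |
| | | //   cfg.setPG("pgOMS"); |
| | | //   cfg.setFSC((short) 105); |
| | | //   cfg.setCOMP((short) 3); |
| | | //   cfg.setLEV((short) 1); |
| | | //   cfg.setWEIGHT((short) 0); |
| | | //   mapping.addConfig(cfg); |
| | | //   List layerConfigs = (List) mapping.getMapping().get("pgOMS"); |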
| | |
| | | import java.io.InputStream; |
| | | import java.nio.BufferOverflowException; |
| | | import java.nio.ByteBuffer; |
| | | import java.sql.Connection; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.ArrayList; |
| | | import java.util.Map; |
| | | import java.util.StringTokenizer; |
| | | import java.util.TreeMap; |
| | | |
| | | import com.vividsolutions.jts.util.Assert; |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import oracle.sql.BLOB; |
| | | import org.apache.commons.logging.Log; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.oracle.OracleDataStore; |
| | | import org.geotools.data.oracle.OracleDataStoreFactory; |
| | | import org.geotools.data.oracle.OracleNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.Job; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import com.vividsolutions.jts.util.Assert; |
| | | |
| | | import oracle.sql.BLOB; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | |
| | | public abstract class AbstractOracleDatabaseJob implements Job { |
| | | /** |
| | |
| | | private static final String CONVERTDB = "CONVERTDB"; |
| | | private static final String CONVERTFILE = "CONVERTFILE"; |
| | | private static final String CONVERTELEMIN = "CONVERTELEMIN"; |
| | | private static final String CONVERTPWTHEMES = "CONVERTPWTHEMES"; |
| | | private static final String CREATEDUMMY = "CREATEDUMMY"; |
| | | private static final String ELEMLOG = "ELEMLOG"; |
| | | private static final String ORAHOST = "ORAHOST"; |
| | |
| | | private static final String TESTCOUNT = "TESTCOUNT"; |
| | | private static final String COPYCONNECTIVITYMODE = "COPYCONNECTIVITYMODE"; |
| | | private static final String PROFILEMODE = "PROFILEMODE"; |
| | | private static final String USEEPSG3826 = "USEEPSG3826"; |
| | | private static final String USEZONE121 = "USEZONE121"; |
| | | private static final String IGNORE_DBETL = "IGNORE_DBETL"; |
| | | |
| | | protected static OracleDataStoreFactory dataStoreFactory = new OracleDataStoreFactory(); |
| | | protected static OracleNGDataStoreFactory dataStoreFactory = new OracleNGDataStoreFactory(); |
| | | |
| | | protected String _dataPath; |
| | | protected String _filterPath; |
| | |
| | | protected String _convertDB; |
| | | protected String _convertFile; |
| | | protected String _convertElementIn; |
| | | protected String _convertPWThemes; |
| | | protected String _elementLogging; |
| | | protected String _createDummy; |
| | | protected ArrayList<String> _orgSchema = new ArrayList<String>(); |
| | | protected boolean _testMode = false; |
| | | protected boolean _copyConnectivityMode = false; |
| | | protected boolean _profileMode = false; |
| | | protected boolean _useEPSG3826 = true; |
| | | protected boolean _useZone121 = true; |
| | | protected boolean _useTransform = true; |
| | | protected boolean _ignoreDBETL = false; |
| | | |
| | | protected int _testCount = -1; |
| | | protected OracleDataStore sourceDataStore; |
| | | protected JDBCDataStore sourceDataStore; |
| | | private boolean driverFound = true; |
| | | |
| | | private long _processTime; |
| | |
| | | _convertDB = dataMap.getString(CONVERTDB); |
| | | _convertFile = dataMap.getString(CONVERTFILE); |
| | | _convertElementIn = dataMap.getString(CONVERTELEMIN); |
| | | _convertPWThemes = dataMap.getString(CONVERTPWTHEMES); |
| | | _elementLogging = dataMap.getString(ELEMLOG); |
| | | _createDummy = dataMap.getString(CREATEDUMMY); |
| | | |
| | |
| | | _testCount = dataMap.getIntFromString(TESTCOUNT); |
| | | _copyConnectivityMode = dataMap.getBooleanFromString(COPYCONNECTIVITYMODE); |
| | | _profileMode = dataMap.getBooleanFromString(PROFILEMODE); |
| | | _useEPSG3826 = dataMap.getBooleanFromString(USEEPSG3826); |
| | | _useZone121 = dataMap.getBooleanFromString(USEZONE121); |
| | | _ignoreDBETL = dataMap.getBooleanFromString(IGNORE_DBETL); |
| | | |
| | | if (_useZone121) { |
| | | FeatureTypeBuilderUtil.setDefaultFeatureSRID(3826); |
| | | } else { |
| | | FeatureTypeBuilderUtil.setDefaultFeatureSRID(3825); |
| | | } |
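| | | // USEZONE121 selects the default feature SRID: EPSG:3826 (TWD97 TM2 zone 121) when |
| | | // true, otherwise EPSG:3825 (TWD97 TM2 zone 119). |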
| | | |
| | | // Validate the required input |
| | | if (_dataPath == null) { |
| | |
| | | } |
| | | |
| | | protected abstract AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826); |
| | | boolean profileMode, boolean useTransform); |
| | | |
| | | protected byte[] getBytesFromBLOB(BLOB blob) throws SQLException, BufferOverflowException { |
| | | byte[] raw = null; |
| | |
| | | !_convertElementIn.equalsIgnoreCase("no") && !_convertElementIn.equalsIgnoreCase("0"); |
| | | } |
| | | |
| | | public boolean checkConvertPWThemes() { |
| | | return _convertPWThemes != null && !_convertPWThemes.equalsIgnoreCase("false") && |
| | | !_convertPWThemes.equalsIgnoreCase("no") && !_convertPWThemes.equalsIgnoreCase("0"); |
| | | } |
| | | |
| | | public String getElementLogging() { |
| | | return _elementLogging; |
| | | } |
| | |
| | | return _useTransform; |
| | | } |
| | | |
| | | public boolean isEPSG3826() { |
| | | return _useEPSG3826; |
| | | public boolean isZone121() { |
| | | return _useZone121; |
| | | } |
| | | |
| | | public boolean isIgnoreDBETL() { |
| | | return _ignoreDBETL; |
| | | } |
| | | |
| | | public void set_ignoreDBETL(boolean _ignoreDBETL) { |
| | | this._ignoreDBETL = _ignoreDBETL; |
| | | } |
| | | |
| | | public DataStore getSourceDataStore() { |
| | |
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); |
| | | } |
| | | Map<String, String> map = new TreeMap<String, String>(); |
| | | map.put("host", _oracleHost); |
| | | map.put("port", _oraclePort); |
| | | map.put("instance", _oracleInstance); |
| | | map.put("user", _username); |
| | | map.put("passwd", _password); |
| | | map.put("dbtype", "oracle"); |
| | | map.put("alias", _oracleInstance); |
| | | map.put("namespace", null); |
| | | if (!map.containsKey(OracleDataStoreFactory.MAXCONN.key)) { |
| | | map.put(OracleDataStoreFactory.MAXCONN.key, "5"); |
| | | map.put(OracleNGDataStoreFactory.HOST.key, _oracleHost); |
| | | map.put(OracleNGDataStoreFactory.PORT.key, _oraclePort); |
| | | map.put(OracleNGDataStoreFactory.DATABASE.key, _oracleInstance); |
| | | map.put(OracleNGDataStoreFactory.USER.key, _username); |
| | | map.put(OracleNGDataStoreFactory.PASSWD.key, _password); |
| | | map.put(OracleNGDataStoreFactory.DBTYPE.key, "oracle"); |
| | | map.put(OracleNGDataStoreFactory.NAMESPACE.key, null); |
| | | |
| | | if (!map.containsKey(OracleNGDataStoreFactory.MAXCONN.key)) { |
| | | map.put(OracleNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | if (!map.containsKey(OracleDataStoreFactory.MINCONN.key)) { |
| | | map.put(OracleDataStoreFactory.MINCONN.key, "1"); |
| | | if (!map.containsKey(OracleNGDataStoreFactory.MINCONN.key)) { |
| | | map.put(OracleNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!dataStoreFactory.canProcess(map)) { |
| | |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | sourceDataStore = (OracleDataStore) dataStoreFactory.createDataStore(map); |
| | | sourceDataStore = dataStoreFactory.createDataStore(map); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | |
| | | public final void resetUpdateTime() { |
| | | _updateTime = 0; |
| | | } |
| | | |
| | | /** |
| | | * Copy the Connectivity table contents into the CONNECTIVITY_WEBCHECK working table. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws java.sql.SQLException sql exception |
| | | */ |
| | | protected void copyConnectivity(AbstractOracleJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | ResultSet rsMeta = connection.getMetaData().getTables(null, "BASEDB", |
| | | AbstractOracleJobContext.CONNECTIVITY_WEBCHECK_NAME + "%", |
| | | new String[]{"TABLE"}); |
| | | |
| | | boolean found = false; |
| | | try { |
| | | while (rsMeta.next()) { |
| | | String tablename = rsMeta.getString(3); |
| | | if (AbstractOracleJobContext.CONNECTIVITY_WEBCHECK_NAME.equalsIgnoreCase(tablename)) { |
| | | found = true; |
| | | break; |
| | | } |
| | | } |
| | | // } catch (SQLException e) |
| | | } finally { |
| | | if (rsMeta != null) { |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | } |
| | | } |
| | | Statement stmt = connection.createStatement(); |
| | | if (found) { |
| | | stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | } else { |
| | | getLogger().info("Create CONNECTIVITY_WEBCHECK table."); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_1); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_2); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_3); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_4); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_5); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_6); |
| | | stmt.execute(AbstractOracleJobContext.ALTER_CONNECTIVITY_WEBCHECK_1); |
| | | stmt.execute(AbstractOracleJobContext.ALTER_CONNECTIVITY_WEBCHECK_2); |
| | | } |
| | | |
| | | stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | stmt.close(); |
| | | } |
| | | |
| | | protected void fetchTPData(AbstractOracleJobContext jobContext) { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | try { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = stmt.executeQuery(AbstractOracleJobContext.FETCH_TPDATA); |
| | | if (rs.next()) { |
| | | short disId = rs.getShort(1); |
| | | jobContext.setDistId(disId); |
| | | |
| | | String distName = rs.getString(2); |
| | | jobContext.setDistName(distName); |
| | | } |
| | | rs.close(); |
| | | stmt.close(); |
| | | } catch (SQLException e) { |
| | | getLogger().warn("Fetch TPDATA Error.", e); |
| | | } |
| | | } |
| | | |
| | | protected void createHibernateSequence(AbstractOracleJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | try { |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_HIBERNATE_SEQUENCE); |
| | | stmt.close(); |
| | | } catch (SQLException e) { |
| | | getLogger().warn("HIBERNATE_SEQUENCE is already exist."); |
| | | } |
| | | } |
| | | } |
| | |
| | | "gisrepo1", "gisrepo2" |
| | | }; |
| | | |
| | | public static final String XPTVERSIONTABLE_NAME = "xpwthemes_vsversion"; |
| | | public static final String[] DEFAULTXPTVERSIONTABLE_NAMES = new String[]{ |
| | | "xpwtheme1", "xpwtheme2" |
| | | }; |
| | | |
| | | public static final short VSSTATUS_AVAILABLE = 0x0000; |
| | | public static final short VSSTATUS_USING = 0x0100; |
| | | public static final short VSSTATUS_CONFIG = 0x0020; |
| | |
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.data.shapefile.indexed.IndexType; |
| | | import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import java.util.TimeZone; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.eofms.jobs.context.AbstractDgnFileJobContext; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | public class DummyFeatureConvertJobContext extends AbstractDgnFileJobContext { |
| | | static final Log logger = LogFactory.getLog(DummyFeatureConvertJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final String SHPOUTPATH = "shpout"; |
| | | |
| | | private String dataOut = null; |
| | |
| | | private String _filterConfig; |
| | | |
| | | public DummyFeatureConvertJobContext(String dataPath, String filterConfig, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | return; |
| | | } |
| | | |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | ShapefileDataStore shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL(), |
| | | true, Charset.forName("UTF-8")); |
| | | */ |
| | | ShapefileDataStore shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL()); |
| | | /* |
| | | if(namespace != null) { |
| | | shapefileDataStore.setNamespaceURI(namespace.toString()); |
| | | } |
| | | */ |
| | | shapefileDataStore.setMemoryMapped(true); |
| | | // shapefileDataStore.setBufferCachingEnabled(cacheMemoryMaps); |
| | | shapefileDataStore.setCharset(Charset.forName("UTF-8")); |
| | | shapefileDataStore.setTimeZone(TimeZone.getDefault()); |
| | | shapefileDataStore.setIndexed(true); |
| | | shapefileDataStore.setIndexCreationEnabled(true); |
| | | |
| | | if (!sfile.exists()) { |
| | | ShapefileDataStore shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(), |
| | | null, true, true, IndexType.QIX, Charset.forName("UTF-8")); |
| | | shapefileDataStore.createSchema(featureType); |
| | | writer = shapefileDataStore.getFeatureWriter(featureType.getTypeName(), |
| | | Transaction.AUTO_COMMIT); |
| | | } else { |
| | | ShapefileDataStore shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(), |
| | | null, true, true, IndexType.QIX, Charset.forName("UTF-8")); |
| | | writer = shapefileDataStore.getFeatureWriterAppend(featureType.getTypeName(), |
| | | Transaction.AUTO_COMMIT); |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import com.vividsolutions.jts.geom.*; |
| | | import com.ximple.eofms.geoserver.config.XGeosDataConfig; |
| | | import com.ximple.eofms.geoserver.config.XGeosDataConfigMapping; |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.util.PrintfFormat; |
| | | import com.ximple.eofms.util.XGeosConfigDigesterUtils; |
| | | import it.geosolutions.geoserver.rest.GeoServerRESTManager; |
| | | import it.geosolutions.geoserver.rest.GeoServerRESTPublisher; |
| | | import it.geosolutions.geoserver.rest.GeoServerRESTReader; |
| | | import it.geosolutions.geoserver.rest.decoder.*; |
| | | import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder; |
| | | import it.geosolutions.geoserver.rest.encoder.GSLayerGroupEncoder; |
| | | import it.geosolutions.geoserver.rest.encoder.GSLayerGroupEncoder23; |
| | | import it.geosolutions.geoserver.rest.encoder.GSResourceEncoder; |
| | | import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder; |
| | | import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder; |
| | | import it.geosolutions.geoserver.rest.manager.GeoServerRESTStoreManager; |
| | | import org.apache.commons.collections.MultiMap; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.geometry.GeneralEnvelope; |
| | | import org.jdom.Element; |
| | | import org.opengis.feature.type.FeatureType; |
| | | import org.opengis.feature.type.GeometryDescriptor; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import java.io.IOException; |
| | | import java.net.URI; |
| | | import java.net.URISyntaxException; |
| | | import java.net.URL; |
| | | import java.sql.*; |
| | | import java.util.*; |
| | | import java.util.Date; |
| | | |
| | | public class GeoserverIntegrateConfigJob extends OracleConvertDgn2PostGISJob { |
| | | final static Log logger = LogFactory.getLog(GeoserverIntegrateConfigJob.class); |
| | | |
| | | private static final String SKIPCONFIGJOB = "SKIPCONFIGJOB"; |
| | | private static final String MASTERMODE = "MASTERMODE"; |
| | | private static final String EPSG = "EPSG:"; |
| | | private static final String DEFAULT_NAMESPACE = "xtpc"; |
| | | private static final String XGEOSDATACONFIG_PATH = "xgeosdataconfig.xml"; |
| | | private static final String GEOSERVER_BASEURL = "GEOSERVER_URL"; |
| | | private static final String GEOSERVER_USER = "GEOSERVER_USER"; |
| | | private static final String GEOSERVER_PASS = "GEOSERVER_PASS"; |
| | | |
| | | // private static final int MAGIC_BLOCKSIZE = (64 * 1024 * 1024) - (32 * 1024); |
| | | |
| | | private static final String QUERY_VIEWDEFSQL = "SELECT table_name, view_definition FROM information_schema.views " + |
| | | "WHERE table_schema = ? AND table_name LIKE "; |
| | | |
| | | private static final String CREATE_VIEWSQL = "CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\".\"%s\""; |
| | | private static final String EXTRAWHERE_VIEWSQL = " WHERE \"%s\".level = %s AND \"%s\".symweight = %s"; |
| | | |
| | | private static final String ALTER_VIEWSQL = "ALTER TABLE \"%s\" OWNER TO "; |
| | | // private static final String GRANT_VIEWSQL = "GRANT SELECT ON TABLE \"%s\" TO public"; |
| | | private static final int SRSID_TWD97_ZONE119 = 3825; |
| | | private static final int SRSID_TWD97_ZONE121 = 3826; |
| | | public static final String DEFAULT_STORENAME = "pgDMMS"; |
| | | public static final String DEFAULT_GEODMMS_NAMESPACE = "http://tpc.ximple.com.tw/geodmms"; |
| | | |
| | | private static XGeosDataConfigMapping xgeosDataConfigMapping = null; |
| | | |
| | | protected String _geoServerURL; |
| | | protected String _geoServerUser; |
| | | protected String _geoServerPass; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform) { |
| | | return super.prepareJobContext(targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _geoServerURL = dataMap.getString(GEOSERVER_BASEURL); |
| | | _geoServerUser = dataMap.getString(GEOSERVER_USER); |
| | | _geoServerPass = dataMap.getString(GEOSERVER_PASS); |
| | | |
| | | if (_geoServerURL == null) { |
| | | logger.warn("GEOSERVER_URL is null"); |
| | | throw new JobExecutionException("Unknown GEOSERVER_URL."); |
| | | } |
| | | if (_geoServerUser == null) { |
| | | logger.warn("GEOSERVER_USER is null"); |
| | | throw new JobExecutionException("Unknown GEOSERVER_USER."); |
| | | } |
| | | if (_geoServerPass == null) { |
| | | logger.warn("GEOSERVER_PASS is null"); |
| | | throw new JobExecutionException("Unknown GEOSERVER_PASS."); |
| | | } |
| | | } |
| | | |
| | | protected XGeosDataConfigMapping getConfigMapping() { |
| | | if (xgeosDataConfigMapping == null) { |
| | | Digester digester = XGeosConfigDigesterUtils.getXGeosConfigDigester(); |
| | | final URL configDataURL = XGeosDataConfigMapping.class.getResource(XGEOSDATACONFIG_PATH); |
| | | try { |
| | | xgeosDataConfigMapping = (XGeosDataConfigMapping) digester.parse(configDataURL); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SAXException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | |
| | | } |
| | | return xgeosDataConfigMapping; |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException { |
| | | |
| | | super.execute(jobExecutionContext); |
| | | |
| | | createTargetDataStore(); |
| | | |
| | | if (getTargetDataStore() == null) { |
| | | logger.warn("Cannot connect source postgreSQL database."); |
| | | throw new JobExecutionException("Cannot connect source postgreSQL database."); |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName; |
| | | |
| | | try { |
| | | logger.info("-- step:resetPostgisViewMapping --"); |
| | | long tStep = System.currentTimeMillis(); |
| | | resetPostgisViewMapping(jobExecutionContext); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-resetPostgisViewMapping", tStep, tStepEnd); |
| | | } |
| | | logger.info("-- step:resetGeoServerConfig --"); |
| | | tStep = System.currentTimeMillis(); |
| | | // resetGeoServerConfig(jobExecutionContext); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-resetGeoServerConfig", tStep, tStepEnd); |
| | | } |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | } |
| | | |
| | | /** |
| | | * Rebuilds all data views in the PostGIS target store. |
| | | * |
| | | * @param executionContext the batch job execution context |
| | | */ |
| | | private void resetPostgisViewMapping(JobExecutionContext executionContext) { |
| | | assert executionContext != null; |
| | | Connection connection = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | String ownerName = _pgUsername; |
| | | String currentTargetSchema = retrieveCurrentSchemaName(connection, |
| | | DataReposVersionManager.VSSTATUS_READY); |
| | | if (currentTargetSchema == null) { |
| | | logger.info("Cannot found schema that status is VSSTATUS_READY[" + |
| | | DataReposVersionManager.VSSTATUS_READY + "]"); |
| | | return; |
| | | } |
| | | |
| | | ArrayList<String> realTableNames = new ArrayList<String>(); |
| | | retrieveAllRealTableName(connection, currentTargetSchema, realTableNames); |
| | | |
| | | HashMap<String, String> viewDefs = retrieveViewDef(connection, "public", "fsc%"); |
| | | HashMap<String, String> tempViewDefs = retrieveViewDef(connection, "public", "indexshape%"); |
| | | viewDefs.putAll(tempViewDefs); |
| | | tempViewDefs = retrieveViewDef(connection, "public", "lndtpc%"); |
| | | viewDefs.putAll(tempViewDefs); |
| | | |
| | | for (String tableName : realTableNames) { |
| | | resetPostgisDataView(connection, viewDefs, ownerName, currentTargetSchema, tableName); |
| | | } |
| | | |
| | | resetExtraPostgisDataView(connection, ownerName, currentTargetSchema, realTableNames); |
| | | |
| | | updateCurrentRepositoryStatus(connection, currentTargetSchema, |
| | | DataReposVersionManager.VSSTATUS_LINKVIEW); |
| | | |
| | | String currentTargetThemesName = retrieveCurrentThemeName(connection, |
| | | DataReposVersionManager.VSSTATUS_READY); |
| | | if (currentTargetThemesName == null) { |
| | | logger.info("Cannot found themes that status is VSSTATUS_READY[" + |
| | | DataReposVersionManager.VSSTATUS_READY + "]"); |
| | | return; |
| | | } |
| | | |
| | | resetThemesBaseView(connection, ownerName, currentTargetThemesName); |
| | | |
| | | XGeosDataConfigMapping configMapping = getConfigMapping(); |
| | | String[] allView = retrieveTargetStoreAllViewNames(connection); |
| | | TreeSet<String> allViewNames = new TreeSet<String>(); |
| | | if (allView != null) { |
| | | allViewNames.addAll(Arrays.asList(allView)); |
| | | } |
| | | List values = (List) configMapping.getMapping().get("pgOMS"); |
| | | for (Object value : values) { |
| | | XGeosDataConfig xgeosConfig = (XGeosDataConfig) value; |
| | | short tid = xgeosConfig.getFSC(); |
| | | short cid = xgeosConfig.getCOMP(); |
| | | StringBuilder sbTable = new StringBuilder("fsc-"); |
| | | sbTable.append(tid).append("-c-"); |
| | | sbTable.append(cid); |
| | | |
| | | int index = realTableNames.indexOf(sbTable.toString()); |
| | | if (index == -1) { |
| | | logger.debug("pgOMS LayerView Cannot found-" + xgeosConfig.toString()); |
| | | continue; |
| | | } |
| | | |
| | | StringBuilder sbView = new StringBuilder("fsc-"); |
| | | sbView.append(tid).append("-c"); |
| | | sbView.append(cid).append("-l"); |
| | | sbView.append(xgeosConfig.getLEV()).append("-w"); |
| | | sbView.append(xgeosConfig.getWEIGHT()); |
| | | String viewName = sbView.toString(); |
| | | if (allViewNames.contains(viewName)) { |
| | | resetThemesPostgisDataView(connection, ownerName, null, viewName); |
| | | } |
| | | } |
| | | |
| | | updateCurrentThemeStatus(connection, currentTargetThemesName, |
| | | DataReposVersionManager.VSSTATUS_LINKVIEW); |
| | | |
| | | // String[] featureNames = dataStore.getTypeNames(); |
| | | // logger.info("featureNames[] size = " + featureNames.length); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | if (connection != null) |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | // if (dataStore != null) dataStore.dispose(); |
| | | } |
| | | } |
| | | |
| | | private void retrieveAllRealTableName(Connection connection, String targetSchema, |
| | | ArrayList<String> realTableNames) throws SQLException { |
| | | ResultSet rsMeta = null; |
| | | try { |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "fsc%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "index%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | |
| | | rsMeta = connection.getMetaData().getTables("", targetSchema, "lndtpc%", new String[]{"TABLE"}); |
| | | while (rsMeta.next()) { |
| | | String tableName = rsMeta.getString(3); |
| | | realTableNames.add(tableName); |
| | | } |
| | | } finally { |
| | | if (rsMeta != null) rsMeta.close(); |
| | | } |
| | | } |
| | | |
| | | private void resetPostgisDataView(Connection connection, HashMap<String, String> viewDefs, |
| | | String ownerName, String schemaName, String tableName) throws SQLException { |
| | | String[] splits = tableName.split("-"); |
| | | if (splits.length > 3) { |
| | | // feature table |
| | | |
| | | StringBuilder viewBuilder = new StringBuilder(); |
| | | viewBuilder.append(splits[0]); |
| | | viewBuilder.append('-'); |
| | | viewBuilder.append(splits[1]); |
| | | viewBuilder.append('-'); |
| | | viewBuilder.append(splits[2]); |
| | | viewBuilder.append(splits[3]); |
| | | String viewName = viewBuilder.toString(); |
| | | if (viewDefs.containsKey(viewName)) { |
| | | String viewDef = viewDefs.get(viewName); |
| | | int pos = viewDef.indexOf("FROM"); |
| | | String subView = viewDef.substring(pos + 4); |
| | | // String[] viewSources = subView.split("\\."); |
| | | String[] viewSources = subView.split("(\\.\"|\")"); |
| | | if (!viewSources[0].equalsIgnoreCase(schemaName)) { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } else { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | |
| | | } else { |
| | | |
| | | splits = tableName.split("_"); |
| | | if (splits.length > 0) { |
| | | StringBuilder viewBuilder = new StringBuilder(); |
| | | viewBuilder.append(splits[0]); |
| | | if (splits.length > 1) viewBuilder.append(splits[1]); |
| | | if (splits.length > 2) viewBuilder.append(splits[2]); |
| | | String viewName = viewBuilder.toString(); |
| | | if (viewDefs.containsKey(viewName)) { |
| | | String viewDef = viewDefs.get(viewName); |
| | | int pos = viewDef.indexOf("FROM"); |
| | | String subView = viewDef.substring(pos + 4); |
| | | String[] viewSources = subView.split("(\\.\"|\")"); |
| | | if (!viewSources[0].equalsIgnoreCase(schemaName)) { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } else { |
| | | createOrReplaceView(connection, schemaName, tableName, viewName, ownerName); |
| | | } |
| | | } |
| | | } |
| | | } |
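| | | // Illustrative mapping performed above: a feature table named "fsc-105-c-3" maps to |
| | | // the view "fsc-105-c3", while a table such as "indexshape_1" collapses to |
| | | // "indexshape1"; an existing view is replaced only when its current definition points |
| | | // at a schema other than the current target schema. |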
| | | |
| | | private void resetExtraPostgisDataView(Connection connection, String ownerName, String currentSchema, |
| | | ArrayList<String> realTableNames) { |
| | | try { |
| | | // ArrayList<String> extraViewNames = new ArrayList<String>(); |
| | | XGeosDataConfigMapping configMapping = getConfigMapping(); |
| | | MultiMap configMultiMap = configMapping.getMapping(); |
| | | for (Object key : configMultiMap.keySet()) { |
| | | List values = (List) configMultiMap.get(key); |
| | | for (Object value : values) { |
| | | XGeosDataConfig xgeosConfig = (XGeosDataConfig) value; |
| | | short tid = xgeosConfig.getFSC(); |
| | | short cid = xgeosConfig.getCOMP(); |
| | | StringBuilder sbTable = new StringBuilder("fsc-"); |
| | | sbTable.append(tid).append("-c-"); |
| | | sbTable.append(cid); |
| | | int index = realTableNames.indexOf(sbTable.toString()); |
| | | if (index == -1) { |
| | | logger.debug("Cannot found-" + xgeosConfig.toString()); |
| | | continue; |
| | | } |
| | | StringBuilder sbView = new StringBuilder("fsc-"); |
| | | sbView.append(tid).append("-c"); |
| | | sbView.append(cid).append("-l"); |
| | | sbView.append(xgeosConfig.getLEV()).append("-w"); |
| | | sbView.append(xgeosConfig.getWEIGHT()); |
| | | // extraViewNames.add(sbView.toString()); |
| | | |
| | | createOrReplaceExtraView(connection, currentSchema, sbTable.toString(), sbView.toString(), |
| | | ownerName, xgeosConfig); |
| | | } |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | private void resetThemesBaseView(Connection connection, String ownerName, String currentThemesName) |
| | | throws SQLException { |
| | | String viewName = "xpwtheme" + FDYNCOLOR_SUFFIX; |
| | | String tableName = currentThemesName + FDYNCOLOR_SUFFIX; |
| | | PrintfFormat pf = new PrintfFormat("CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\""); |
| | | String sql = pf.sprintf(new Object[]{viewName, tableName}); |
| | | Statement stmt = connection.createStatement(); |
| | | try { |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | |
| | | viewName = "xpwtheme" + FOWNER_SUFFIX; |
| | | tableName = currentThemesName + FOWNER_SUFFIX; |
| | | pf = new PrintfFormat("CREATE OR REPLACE VIEW \"%s\" AS SELECT * FROM \"%s\""); |
| | | sql = pf.sprintf(new Object[]{viewName, tableName}); |
| | | |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | } catch (SQLException e) { |
| | | // logger.warn(e.getMessage(), e); |
| | | logger.info(sql == null ? "SQL=NULL" : "SQL=" + sql); |
| | | throw e; |
| | | } finally { |
| | | stmt.close(); |
| | | } |
| | | } |
| | | |
| | | |
| | | private void resetThemesPostgisDataView(Connection connection, String ownerName, |
| | | String currentSchema, String viewName) throws SQLException { |
| | | String themeViewName = viewName + "-oms"; |
| | | // PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | // String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | ResultSet rs = null; |
| | | Statement stmt = connection.createStatement(); |
| | | |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("CREATE OR REPLACE VIEW \""); |
| | | sbSQL.append(themeViewName).append("\" AS SELECT "); |
| | | |
| | | rs = connection.getMetaData().getColumns(null, currentSchema, viewName, "%"); |
| | | while (rs.next()) { |
| | | String fieldName = rs.getString("COLUMN_NAME"); |
| | | sbSQL.append("t." + fieldName).append(", "); |
| | | } |
| | | sbSQL.append("fc.dyncolor, fo.fowner FROM "); |
| | | if (currentSchema != null) |
| | | sbSQL.append("\"").append(currentSchema).append("\".\"").append(viewName).append("\" AS t,"); |
| | | else |
| | | sbSQL.append("\"").append(viewName).append("\" AS t,"); |
| | | sbSQL.append("xpwtheme").append(FDYNCOLOR_SUFFIX).append(" AS fc,"); |
| | | sbSQL.append("xpwtheme").append(FOWNER_SUFFIX).append(" AS fo WHERE "); |
| | | sbSQL.append("t.tid = fc.tid AND t.oid = fc.oid AND "); |
| | | sbSQL.append("t.tid = fo.tid AND t.oid = fo.oid"); |
| | | |
| | | // sbSQL.delete(sbSQL.length() - 2, sbSQL.length()); |
| | | String sql = sbSQL.toString(); |
| | | stmt.execute(sql); |
| | | sbSQL.delete(0, sbSQL.length()); |
| | | |
| | | PrintfFormat pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(themeViewName); |
| | | stmt.execute(sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private HashMap<String, String> retrieveViewDef(Connection connection, String schemaName, String tablePattern) throws SQLException { |
| | | PreparedStatement stmt = connection.prepareStatement(QUERY_VIEWDEFSQL + "'" + tablePattern + "'"); |
| | | stmt.setString(1, schemaName); |
| | | // stmt.setString(2, tablePattern); |
| | | HashMap<String, String> result = new HashMap<String, String>(); |
| | | ResultSet rs = stmt.executeQuery(); |
| | | while (rs.next()) { |
| | | String tableName = rs.getString(1); |
| | | String viewDef = rs.getString(2); |
| | | result.put(tableName, viewDef); |
| | | } |
| | | rs.close(); |
| | | stmt.close(); |
| | | return result; |
| | | } |
| | | |
| | | private void createOrReplaceView(Connection connection, String schemaName, String tableName, String viewName, |
| | | String ownerName) throws SQLException { |
| | | PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | Statement stmt = connection.createStatement(); |
| | | try { |
| | | stmt.execute(sql); |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | } catch (SQLException e) { |
| | | // logger.warn(e.getMessage(), e); |
| | | logger.info(sql == null ? "SQL=NULL" : "SQL=" + sql); |
| | | throw e; |
| | | } finally { |
| | | stmt.close(); |
| | | } |
| | | // connection.commit(); |
| | | } |
| | | |
| | | private void createOrReplaceExtraView(Connection connection, String schemaName, String tableName, String viewName, |
| | | String ownerName, XGeosDataConfig xgeosConfig) throws SQLException { |
| | | PrintfFormat pf = new PrintfFormat(CREATE_VIEWSQL); |
| | | String sql = pf.sprintf(new Object[]{viewName, schemaName, tableName}); |
| | | |
| | | PrintfFormat pfWhere = new PrintfFormat(EXTRAWHERE_VIEWSQL); |
| | | sql += pfWhere.sprintf(new String[]{tableName, Short.toString(xgeosConfig.getLEV()), |
| | | tableName, Short.toString(xgeosConfig.getWEIGHT())}); |
| | | |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(sql); |
| | | |
| | | pf = new PrintfFormat(ALTER_VIEWSQL + ownerName); |
| | | sql = pf.sprintf(viewName); |
| | | stmt.execute(sql); |
| | | stmt.close(); |
| | | // connection.commit(); |
| | | } |
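| | | // Example of the SQL generated above (all names illustrative): with schema "s1", table |
| | | // "fsc-105-c-3", view "fsc-105-c3-l1-w0", LEV=1 and WEIGHT=0, the statement becomes |
| | | //   CREATE OR REPLACE VIEW "fsc-105-c3-l1-w0" AS SELECT * FROM "s1"."fsc-105-c-3" |
| | | //   WHERE "fsc-105-c-3".level = 1 AND "fsc-105-c-3".symweight = 0 |
| | | // followed by ALTER TABLE "fsc-105-c3-l1-w0" OWNER TO the configured owner. |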
| | | |
| | | private Timestamp retrieveCurrentSchemaTimestamp(Connection connection, short status) throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("SELECT vstimestamp, vsschema, vsstatus FROM "); |
| | | sbSQL.append(DataReposVersionManager.XGVERSIONTABLE_NAME); |
| | | sbSQL.append(" WHERE vsstatus = "); |
| | | sbSQL.append(status); |
| | | sbSQL.append(" ORDER BY vsid"); |
| | | |
| | | Timestamp result = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | // get first result |
| | | if (rs.next()) { |
| | | result = rs.getTimestamp(1); |
| | | } |
| | | return result; |
| | | } finally { |
| | | if (rs != null) rs.close(); |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void updateCurrentRepositoryStatus(Connection connection, String schemaName, short newStatus) |
| | | throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(DataReposVersionManager.XGVERSIONTABLE_NAME).append(' '); |
| | | sbSQL.append(" SET vsstatus = "); |
| | | sbSQL.append(newStatus); |
| | | sbSQL.append(", vstimestamp = CURRENT_TIMESTAMP WHERE vsschema = '"); |
| | | sbSQL.append(schemaName).append("'"); |
| | | |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private boolean checkCurrentRepositoryStatus(Connection connection, short status) { |
| | | try { |
| | | return (retrieveCurrentSchemaName(connection, status) != null); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return false; |
| | | } |
| | | } |
| | | |
| | | private String retrieveCurrentSchemaName(Connection connection, short status) throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("SELECT vsschema, vstimestamp, vsstatus FROM "); |
| | | sbSQL.append(DataReposVersionManager.XGVERSIONTABLE_NAME); |
| | | sbSQL.append(" WHERE vsstatus = "); |
| | | sbSQL.append(status); |
| | | sbSQL.append(" ORDER BY vsid"); |
| | | |
| | | String result = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | // get first result |
| | | if (rs.next()) { |
| | | result = rs.getString(1); |
| | | } |
| | | return result; |
| | | } finally { |
| | | if (rs != null) rs.close(); |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void updateCurrentThemeStatus(Connection connection, String schemaName, short newStatus) |
| | | throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(DataReposVersionManager.XPTVERSIONTABLE_NAME).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(newStatus); |
| | | sbSQL.append(", vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = '"); |
| | | sbSQL.append(schemaName).append("'"); |
| | | |
| | | Statement stmt = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | |
| | | private boolean checkCurrentThemeStatus(Connection connection, short status) { |
| | | try { |
| | | return (retrieveCurrentThemeName(connection, status) != null); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return false; |
| | | } |
| | | } |
| | | |
| | | |
| | | private String retrieveCurrentThemeName(Connection connection, short status) throws SQLException { |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vptname, vptstatus, vpttimestamp FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vptid"); |
| | | |
| | | String result = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | // get first result |
| | | if (rs.next()) { |
| | | result = rs.getString(1); |
| | | } |
| | | return result; |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | protected void transferXGeosVersionStatus(Connection connection, |
| | | short vsstatusBefore, short vsstatusAfter, boolean exclusive) throws JobExecutionException { |
| | | |
| | | try { |
| | | String currentTargetSchema = retrieveCurrentSchemaName(connection, vsstatusBefore); |
| | | if (currentTargetSchema == null) { |
| | | logger.info("Cannot found target schema in dataStore. status=" + vsstatusBefore); |
| | | return; |
| | | } |
| | | String existTargetSchema = null; |
| | | if (exclusive) |
| | | existTargetSchema = retrieveCurrentSchemaName(connection, vsstatusAfter); |
| | | |
| | | |
| | | updateCurrentRepositoryStatus(connection, currentTargetSchema, vsstatusAfter); |
| | | if ((exclusive) && (existTargetSchema != null)) { |
| | | updateCurrentRepositoryStatus(connection, existTargetSchema, |
| | | DataReposVersionManager.VSSTATUS_AVAILABLE); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Update " + DataReposVersionManager.XGVERSIONTABLE_NAME + |
| | | " has error-", e); |
| | | } |
| | | } |
| | | |
| | | private HashMap<String, String> buildDefaultStylesMapping(XGeosDataConfigMapping configMapping) { |
| | | HashMap<String, String> result = new HashMap<String, String>(); |
| | | |
| | | for (Object key : configMapping.getMapping().keySet()) { |
| | | List xgeosConfigs = (List) configMapping.getMapping().get(key); |
| | | for (Object value : xgeosConfigs) { |
| | | XGeosDataConfig xgeosConfig = (XGeosDataConfig) value; |
| | | |
| | | StringBuilder sbView = new StringBuilder("fsc-"); |
| | | sbView.append(xgeosConfig.getFSC()).append("-c"); |
| | | sbView.append(xgeosConfig.getCOMP()).append("-l"); |
| | | sbView.append(xgeosConfig.getLEV()).append("-w"); |
| | | sbView.append(xgeosConfig.getWEIGHT()); |
| | | |
| | | String viewName = sbView.toString(); |
| | | if (!result.containsKey(viewName)) { |
| | | result.put(viewName, xgeosConfig.getFTYPE()); |
| | | } else { |
| | | if (xgeosConfig.getFTYPE() != null) { |
| | | if (!result.get(viewName).equals(xgeosConfig.getFTYPE())) |
| | | logger.info("Style Define Diff:" + result.get(viewName) + " - " + xgeosConfig.getFTYPE()); |
| | | } else { |
| | | logger.warn("xgeosConfig getFTYPE() is null - " + xgeosConfig.toString()); |
| | | } |
| | | } |
| | | } |
| | | } |
| | | return result; |
| | | } |
| | | |
| | | private String buildDefaultWMSLayerNames(String namespace, List xgeosConfigs, GSLayerGroupEncoder lgEncoder) { |
| | | StringBuilder sbLayers = new StringBuilder(); |
| | | boolean first = true; |
| | | |
| | | for (Object value : xgeosConfigs) { |
| | | if (!first) { |
| | | sbLayers.append(','); |
| | | } else { |
| | | first = false; |
| | | } |
| | | XGeosDataConfig xgeosConfig = (XGeosDataConfig) value; |
| | | |
| | | StringBuilder sbLayerName = new StringBuilder(namespace); |
| | | sbLayerName.append(':'); |
| | | sbLayerName.append("fsc-"); |
| | | sbLayerName.append(xgeosConfig.getFSC()).append("-c"); |
| | | sbLayerName.append(xgeosConfig.getCOMP()).append("-l"); |
| | | sbLayerName.append(xgeosConfig.getLEV()).append("-w"); |
| | | sbLayerName.append(xgeosConfig.getWEIGHT()); |
| | | |
| | | String layerName = sbLayerName.toString(); |
| | | sbLayers.append(layerName); |
| | | if (lgEncoder != null) { |
| | | lgEncoder.addLayer(layerName); |
| | | } |
| | | } |
| | | |
| | | return sbLayers.toString(); |
| | | } |
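| | | // Example result (illustrative values): with namespace "xtpc" and two configs this |
| | | // returns "xtpc:fsc-105-c3-l1-w0,xtpc:fsc-106-c3-l1-w0"; the same layer names are |
| | | // added to the GSLayerGroupEncoder when one is supplied. |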
| | | |
| | | protected String[] retrieveTargetStoreAllViewNames(Connection connection) { |
| | | try { |
| | | final int TABLE_NAME_COL = 3; |
| | | List<String> list = new ArrayList<String>(); |
| | | |
| | | DatabaseMetaData meta = connection.getMetaData(); |
| | | // String[] tableType = { "TABLE", "VIEW" }; |
| | | String[] tableType = { "VIEW" }; |
| | | ResultSet tables = meta.getTables(null, _pgSchema, "%", tableType); |
| | | |
| | | while (tables.next()) { |
| | | String tableName = tables.getString(TABLE_NAME_COL); |
| | | list.add(tableName); |
| | | /* |
| | | if (allowTable(tableName)) { |
| | | list.add(tableName); |
| | | } |
| | | */ |
| | | } |
| | | tables.close(); |
| | | return (String[]) list.toArray(new String[list.size()]); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | return null; |
| | | } |
| | | |
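| | | // Re-publishes every target-store view as a GeoServer feature type, forcing the declared TWD97 SRS and assigning a default style per feature type.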
| | | private void resetFeatureTypesMapping(JobExecutionContext executionContext, GeoServerRESTManager manager) { |
| | | try { |
| | | GeoServerRESTReader reader = manager.getReader(); |
| | | GeoServerRESTPublisher publisher = manager.getPublisher(); |
| | | |
| | | Connection connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | if (!checkCurrentRepositoryStatus(connection, DataReposVersionManager.VSSTATUS_CONFIG)) { |
| | | return; |
| | | } |
| | | |
| | | List<String> styleList = reader.getStyles().getNames(); |
| | | Set<String> styles = new TreeSet<String>(styleList); |
| | | |
| | | XGeosDataConfigMapping mapping = getConfigMapping(); |
| | | HashMap<String, String> defaultStyles = buildDefaultStylesMapping(mapping); |
| | | |
| | | try { |
| | | String[] dsFTypeNames = retrieveTargetStoreAllViewNames(connection); |
| | | |
| | | for (String featureTypeName : dsFTypeNames) { |
| | | FeatureType featureType = null; |
| | | try { |
| | | featureType = targetDataStore.getFeatureSource(featureTypeName, Transaction.AUTO_COMMIT).getSchema(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | if (featureType == null) continue; |
| | | |
| | | RESTLayer layer = reader.getLayer(DEFAULT_NAMESPACE, featureTypeName); |
| | | if (layer != null) { |
| | | // publisher.removeLayer(DEFAULT_NAMESPACE, featureTypeName); |
| | | if (!publisher.unpublishFeatureType(DEFAULT_NAMESPACE, DEFAULT_STORENAME, featureTypeName)) { |
| | | logger.info("Cannot remove featureType:" + featureTypeName); |
| | | } |
| | | } |
| | | |
| | | final GSFeatureTypeEncoder fte = new GSFeatureTypeEncoder(); |
| | | |
| | | // fte.setProjectionPolicy(GSResourceEncoder.ProjectionPolicy.REPROJECT_TO_DECLARED); |
| | | fte.setProjectionPolicy(GSResourceEncoder.ProjectionPolicy.FORCE_DECLARED); |
| | | fte.addKeyword("KEYWORD"); |
| | | fte.setTitle(featureTypeName); |
| | | fte.setName(featureTypeName); |
| | | String srs = "EPSG:" + SRSID_TWD97_ZONE121; |
| | | if (!this._useZone121) { |
| | | srs = "EPSG:" + SRSID_TWD97_ZONE119; |
| | | } |
| | | fte.setNativeCRS(srs); |
| | | fte.setSRS(srs); // srs=null?"EPSG:4326":srs); |
| | | |
| | | String defaultStyle = getDefaultFeatureTypeStyleId(styles, defaultStyles, featureType); |
| | | final GSLayerEncoder le = new GSLayerEncoder(); |
| | | le.setDefaultStyle(defaultStyle); |
| | | |
| | | if (!publisher.publishDBLayer(DEFAULT_NAMESPACE, DEFAULT_STORENAME, fte, le)) { |
| | | logger.info("Create Feature Failed. [" + featureTypeName + "]"); |
| | | } |
| | | } |
| | | } finally { |
| | | // if (dataStore != null) dataStore.dispose(); |
| | | if (connection != null) |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | |
| | | } |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
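| | | // Drives the GeoServer (re)configuration: ensures the workspace and PostGIS store exist, republishes feature types and WMS layer groups, and advances the repository version status from LINKVIEW through CONFIG to USING.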
| | | private void resetGeoServerConfig(JobExecutionContext jobExecutionContext) throws JobExecutionException { |
| | | Connection connection = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | transferXGeosVersionStatus(connection, DataReposVersionManager.VSSTATUS_LINKVIEW, |
| | | DataReposVersionManager.VSSTATUS_CONFIG, false); |
| | | URL geoServerURL = new URL(_geoServerURL); |
| | | GeoServerRESTManager manager = new GeoServerRESTManager(geoServerURL, _geoServerUser, _geoServerPass); |
| | | GeoServerRESTReader reader = manager.getReader(); |
| | | List<String> workSpaces = reader.getWorkspaceNames(); |
| | | boolean found = false; |
| | | for (String name : workSpaces) { |
| | | if (name.equalsIgnoreCase(DEFAULT_NAMESPACE)) { |
| | | found = true; |
| | | break; |
| | | } |
| | | } |
| | | |
| | | GeoServerRESTPublisher publisher = manager.getPublisher(); |
| | | if (!found) { |
| | | publisher.createWorkspace(DEFAULT_NAMESPACE, new URI(DEFAULT_GEODMMS_NAMESPACE)); |
| | | } |
| | | |
| | | RESTDataStore dataStore = reader.getDatastore(DEFAULT_NAMESPACE, DEFAULT_STORENAME); |
| | | if (dataStore == null) { |
| | | GeoServerRESTStoreManager storeManager = manager.getStoreManager(); |
| | | GSPostGISDatastoreEncoder store = new GSPostGISDatastoreEncoder(DEFAULT_STORENAME); |
| | | store.setHost(_pgHost); |
| | | store.setPort(Integer.parseInt(_pgPort)); |
| | | store.setDatabase(_pgDatabase); |
| | | store.setSchema(_pgSchema); |
| | | store.setUser(_pgUsername); |
| | | store.setPassword(_pgPassword); |
| | | storeManager.create(DEFAULT_NAMESPACE, store); |
| | | } |
| | | |
| | | |
| | | resetFeatureTypesMapping(jobExecutionContext, manager); |
| | | |
| | | resetGeoserverWMSConfig(jobExecutionContext, connection, manager, true); |
| | | resetWMSVirtualLayerMapping(jobExecutionContext, connection, manager, true); |
| | | |
| | | transferXGeosVersionStatus(connection, DataReposVersionManager.VSSTATUS_CONFIG, |
| | | DataReposVersionManager.VSSTATUS_USING, true); |
| | | Date lastUpdate = Calendar.getInstance().getTime(); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("resetGeoServerConfig has error-" + e.getMessage(), e); |
| | | } catch (URISyntaxException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("resetGeoServerConfig has error-" + e.getMessage(), e); |
| | | } finally { |
| | | if (connection != null) |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | |
| | | } |
| | | } |
| | | |
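| | | // Chooses a default style id for a feature type: first from the configured mapping, then by geometry type and naming hints (fsc/indexshape/lnd/symbol).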
| | | protected String getDefaultFeatureTypeStyleId(Set<String> styles, HashMap<String, String> defaultStyles, FeatureType featureType) { |
| | | String ftName = featureType.getName().getLocalPart(); |
| | | boolean isNormalFeature = false; |
| | | boolean isLandBased = false; |
| | | boolean isIndex = false; |
| | | boolean isSmallIndex = false; |
| | | boolean isSymbol = false; |
| | | GeometryDescriptor geomAttrType = featureType.getGeometryDescriptor(); |
| | | Class geomType = geomAttrType.getType().getBinding(); |
| | | if (defaultStyles.containsKey(ftName)) { |
| | | String defaultStyleName = defaultStyles.get(ftName); |
| | | String styleName = retrieveDefaultStyle(styles, defaultStyleName, "unknown"); |
| | | if (!styleName.equals("unknown")) { |
| | | return styleName; |
| | | } |
| | | } |
| | | |
| | | if (ftName.indexOf("fsc") != -1) { |
| | | isNormalFeature = true; |
| | | } |
| | | if (ftName.indexOf("indexshape") != -1) { |
| | | isIndex = true; |
| | | } |
| | | if (ftName.indexOf("indexshapes") != -1) { |
| | | isSmallIndex = true; |
| | | } |
| | | if (ftName.indexOf("lnd") != -1) { |
| | | isLandBased = true; |
| | | } |
| | | if (featureType.getDescriptor("symbol") != null) { |
| | | isSymbol = true; |
| | | } |
| | | |
| | | if (Point.class.equals(geomType)) { |
| | | if (isSymbol) { |
| | | return retrieveDefaultStyle(styles, "xtpc-symbol", "point"); |
| | | } else if (isIndex) { |
| | | return retrieveDefaultStyle(styles, "xtpc-text2", "point"); |
| | | } else { |
| | | return retrieveDefaultStyle(styles, "xtpc-text", "point"); |
| | | } |
| | | } else if (LineString.class.equals(geomType)) { |
| | | if ((!isIndex) && (!isLandBased)) { |
| | | return retrieveDefaultStyle(styles, "xtpc-conductor", "line"); |
| | | } else if (isIndex) { |
| | | if (isSmallIndex) |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshapes", "line"); |
| | | |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshape", "line"); |
| | | } else if (isLandBased) { |
| | | return retrieveDefaultStyle(styles, "xtpc-lndcityLine", "line"); |
| | | } |
| | | } else if (MultiPoint.class.equals(geomType)) { |
| | | if (isSymbol) { |
| | | return retrieveDefaultStyle(styles, "xtpc-symbol", "point"); |
| | | } else { |
| | | return retrieveDefaultStyle(styles, "xtpc-text", "point"); |
| | | } |
| | | } else if (Polygon.class.equals(geomType)) { |
| | | if (isSymbol) { |
| | | return retrieveDefaultStyle(styles, "xtpc-symbol", "polygon"); |
| | | } else if ((!isIndex) && (!isLandBased)) { |
| | | return retrieveDefaultStyle(styles, "polygon", "polygon"); |
| | | } else if (isIndex) { |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshape", "polygon"); |
| | | } else if (isLandBased) { |
| | | return retrieveDefaultStyle(styles, "xtpc-lndcityPolygon", "polygon"); |
| | | } |
| | | } else if (LinearRing.class.equals(geomType)) { |
| | | if (!isIndex) { |
| | | return retrieveDefaultStyle(styles, "polygon", "polygon"); |
| | | } else { |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshape", "polygon"); |
| | | } |
| | | } else if (MultiLineString.class.equals(geomType)) { |
| | | if ((!isIndex) && (!isLandBased)) { |
| | | return retrieveDefaultStyle(styles, "xtpc-conductor", "line"); |
| | | } else if (isLandBased) { |
| | | return retrieveDefaultStyle(styles, "xtpc-lndcityLine", "line"); |
| | | } else { |
| | | return retrieveDefaultStyle(styles, "xtpc-indexshape", "line"); |
| | | } |
| | | } else if (MultiPolygon.class.equals(geomType)) { |
| | | return "polygon"; |
| | | } |
| | | |
| | | return "xtpc-symbol"; |
| | | } |
| | | |
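| | | // Returns styleName if it is among the published styles, otherwise the supplied fallback name.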
| | | private static String retrieveDefaultStyle(Set styles, String styleName, String defaultStyleName) { |
| | | if (styles.contains(styleName)) { |
| | | return styleName; |
| | | } else |
| | | return defaultStyleName; |
| | | } |
| | | |
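| | | // Rebuilds the WMS layer groups defined in XGeosDataConfigMapping (those not already published as plain layers), creating or reconfiguring each group with a fixed EPSG:3826 bounding box.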
| | | protected void resetGeoserverWMSConfig(JobExecutionContext executionContext, Connection connection, |
| | | GeoServerRESTManager manager, boolean masterMode) |
| | | throws JobExecutionException, IOException { |
| | | |
| | | if ((masterMode) && (!checkCurrentRepositoryStatus(connection, DataReposVersionManager.VSSTATUS_CONFIG))) { |
| | | return; |
| | | } |
| | | |
| | | GeoServerRESTReader reader = manager.getReader(); |
| | | GeoServerRESTPublisher publisher = manager.getPublisher(); |
| | | |
| | | List<String> baseMapNames = reader.getLayers().getNames(); |
| | | XGeosDataConfigMapping configMapping = getConfigMapping(); |
| | | if (configMapping.getMapping().isEmpty()) { |
| | | logger.warn("XGeosDataConfigMapping is empty! Please check the XGeosDataConfig file.");
| | | return; |
| | | } |
| | | |
| | | LinkedList defaultMapNames = new LinkedList(configMapping.getMapping().keySet()); |
| | | if (defaultMapNames.isEmpty()) { |
| | | logger.warn("DefaultMapNames is empty in XGeosDataConfigMapping! Please check the XGeosDataConfig file.");
| | | } |
| | | |
| | | for (Object key : baseMapNames) { |
| | | String baseMapTitle = (String) key; |
| | | if (configMapping.getMapping().containsKey(baseMapTitle)) { |
| | | int index = defaultMapNames.indexOf(baseMapTitle); |
| | | if (index != -1) |
| | | defaultMapNames.remove(index); |
| | | |
| | | List configs = (List) configMapping.getMapping().get(baseMapTitle); |
| | | String defaultLayerNames = buildDefaultWMSLayerNames(DEFAULT_NAMESPACE, configs, null); |
| | | // wmsConfig.getBaseMapLayers().put(baseMapTitle, defaultLayerNames); |
| | | logger.info(baseMapTitle + ":" + defaultLayerNames); |
| | | } else { |
| | | logger.warn(key.toString() + "-lv='" + baseMapTitle + "' cannot find config information in XGeosDataConfigMapping.");
| | | } |
| | | } |
| | | |
| | | for (Object key : defaultMapNames) { |
| | | List configs = (List) configMapping.getMapping().get(key); |
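| | | // Override addToRoot so cloned content is appended instead of the elements themselves.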
| | | GSLayerGroupEncoder lge = new GSLayerGroupEncoder23() { |
| | | @Override |
| | | protected void addToRoot(Element... elements) { |
| | | for (Element e : elements) { |
| | | if (e != null) { |
| | | getRoot().addContent(e.cloneContent()); |
| | | } |
| | | } |
| | | } |
| | | }; |
| | | lge.setBounds("EPSG:3826", 293838.061931726, 2758423.49415501, 311845.457747425, 2768966.72993585);
| | | String defaultLayerNames = buildDefaultWMSLayerNames(DEFAULT_NAMESPACE, configs, lge); |
| | | // logger.info(key + ":" + defaultLayerNames); |
| | | // logger.info(lge.toString()); |
| | | // wmsConfig.getBaseMapLayers().put(key, defaultLayerNames); |
| | | String layerGroupName = key.toString(); |
| | | RESTLayerGroup layerGroup = reader.getLayerGroup(DEFAULT_NAMESPACE, layerGroupName); |
| | | if (layerGroup == null) { |
| | | if (!publisher.createLayerGroup(DEFAULT_NAMESPACE, layerGroupName, lge)) { |
| | | logger.warn("Cannot create layergroups:" + layerGroupName + "-" + lge.toString()); |
| | | } |
| | | } else { |
| | | publisher.configureLayerGroup(DEFAULT_NAMESPACE, layerGroupName, lge); |
| | | |
| | | } |
| | | } |
| | | } |
| | | |
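| | | // Normalizes the layer lists of layer groups whose names start with "pg"; the envelope recalculation logic is retained below but commented out.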
| | | private void resetWMSVirtualLayerMapping(JobExecutionContext jobExecutionContext, Connection connection, |
| | | GeoServerRESTManager manager, boolean masterMode) |
| | | throws JobExecutionException, IOException { |
| | | if ((masterMode) && (!checkCurrentRepositoryStatus(connection, DataReposVersionManager.VSSTATUS_CONFIG))) { |
| | | return; |
| | | } |
| | | |
| | | GeoServerRESTReader reader = manager.getReader(); |
| | | List<String> layernames = reader.getLayers().getNames(); |
| | | ArrayList<String> baseMapNames = new ArrayList<String>(layernames); |
| | | // Map baseMapLayers = wmsConfig.getBaseMapLayers(); |
| | | // Map baseMapEnvelopes = wmsConfig.getBaseMapEnvelopes(); |
| | | |
| | | for (Object key : baseMapNames) { |
| | | String baseMapTitle = (String) key; |
| | | if (baseMapTitle.startsWith("pg")) { |
| | | // GeneralEnvelope envelope = (GeneralEnvelope) baseMapEnvelopes.get(baseMapTitle); |
| | | |
| | | GeneralEnvelope selectedEnvelope = null; |
| | | // String baseLayersValue = (String) wmsConfig.getBaseMapLayers().get(baseMapTitle); |
| | | RESTLayerGroup layerGroup = reader.getLayerGroup(DEFAULT_NAMESPACE, baseMapTitle); |
| | | String baseLayersValue = layerGroup.getName(); |
| | | String[] layerNames = null; |
| | | if (baseLayersValue != null) { |
| | | layerNames = baseLayersValue.split(","); |
| | | } else { |
| | | logger.info("vl='" + baseMapTitle + "' has an empty value.");
| | | continue; |
| | | } |
| | | |
| | | // RESTLayer layer = reader.getLayer(DEFAULT_NAMESPACE, baseMapTitle); |
| | | // layer.Type. |
| | | |
| | | ArrayList<String> newLayerNames = new ArrayList<String>(); |
| | | for (int i = 0; i < layerNames.length; i++) { |
| | | String layerName = layerNames[i].trim(); |
| | | newLayerNames.add(layerName); |
| | | |
| | | /* |
| | | Integer layerType = catalog.getLayerType(layerName); |
| | | if (layerType != null) { |
| | | newLayerNames.add(layerName); |
| | | |
| | | if (layerType.intValue() == MapLayerInfo.TYPE_VECTOR) { |
| | | FeatureTypeInfo ftype = catalog.getFeatureTypeInfo(layerName); |
| | | ftype = ((ftype != null) ? ftype |
| | | : catalog.getFeatureTypeInfo(layerName |
| | | .substring(layerName.indexOf(":") + 1, layerName.length()))); |
| | | |
| | | if (selectedEnvelope == null) { |
| | | ReferencedEnvelope ftEnvelope = null; |
| | | |
| | | try { |
| | | if (ftype.getBoundingBox() instanceof ReferencedEnvelope |
| | | && !ftype.getBoundingBox().isNull()) { |
| | | ftEnvelope = ftype.getBoundingBox(); |
| | | } else { |
| | | // TODO Add Action Errors |
| | | return; |
| | | } |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return; |
| | | } |
| | | |
| | | selectedEnvelope = new GeneralEnvelope(new double[]{ |
| | | ftEnvelope.getMinX(), ftEnvelope.getMinY() |
| | | }, |
| | | new double[]{ftEnvelope.getMaxX(), ftEnvelope.getMaxY()}); |
| | | selectedEnvelope.setCoordinateReferenceSystem(ftEnvelope |
| | | .getCoordinateReferenceSystem()); |
| | | } else { |
| | | final CoordinateReferenceSystem dstCRS = selectedEnvelope |
| | | .getCoordinateReferenceSystem(); |
| | | |
| | | ReferencedEnvelope ftEnvelope = null; |
| | | |
| | | try { |
| | | if (ftype.getBoundingBox() instanceof ReferencedEnvelope) { |
| | | ftEnvelope = (ReferencedEnvelope) ftype.getBoundingBox(); |
| | | ftEnvelope.transform(dstCRS, true); |
| | | } else { |
| | | // TODO Add Action Errors |
| | | return; |
| | | } |
| | | } catch (TransformException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return; |
| | | } catch (FactoryException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return; |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | return; |
| | | } |
| | | |
| | | ReferencedEnvelope newEnvelope = new ReferencedEnvelope(dstCRS); |
| | | newEnvelope.init(selectedEnvelope.getLowerCorner().getOrdinate(0), |
| | | selectedEnvelope.getUpperCorner().getOrdinate(0), |
| | | selectedEnvelope.getLowerCorner().getOrdinate(1), |
| | | selectedEnvelope.getUpperCorner().getOrdinate(1)); |
| | | |
| | | newEnvelope.expandToInclude(ftEnvelope); |
| | | |
| | | selectedEnvelope = new GeneralEnvelope(new double[]{ |
| | | newEnvelope.getMinX(), newEnvelope.getMinY() |
| | | }, |
| | | new double[]{newEnvelope.getMaxX(), newEnvelope.getMaxY()}); |
| | | selectedEnvelope.setCoordinateReferenceSystem(dstCRS); |
| | | } |
| | | } |
| | | } else { |
| | | logger.warn("Cannot find layer " + layerName + " in " + baseMapTitle);
| | | } |
| | | */ |
| | | } |
| | | |
| | | if (layerNames.length != newLayerNames.size()) { |
| | | StringBuilder layerBuilder = new StringBuilder(); |
| | | boolean bFirst = true; |
| | | for (String newlayerName : newLayerNames) { |
| | | if (!bFirst) { |
| | | layerBuilder.append(','); |
| | | } else bFirst = false; |
| | | layerBuilder.append(newlayerName); |
| | | } |
| | | // baseMapLayers.put(baseMapTitle, layerBuilder.toString()); |
| | | logger.info(baseMapTitle + ":" + layerBuilder.toString());
| | | } |
| | | |
| | | /* |
| | | if (selectedEnvelope != null) { |
| | | if (envelope != null) { |
| | | envelope.setCoordinateReferenceSystem(selectedEnvelope |
| | | .getCoordinateReferenceSystem()); |
| | | envelope.setEnvelope(selectedEnvelope); |
| | | baseMapEnvelopes.put(baseMapTitle, envelope); |
| | | } else { |
| | | baseMapEnvelopes.put(baseMapTitle, selectedEnvelope); |
| | | } |
| | | } |
| | | */ |
| | | } |
| | | } |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.FileFilter;
|
| | | import java.io.FileInputStream;
|
| | | import java.io.FileNotFoundException;
|
| | | import java.io.FilenameFilter;
|
| | | import java.io.IOException;
|
| | | import java.math.BigDecimal;
|
| | | import java.nio.BufferOverflowException;
|
| | | import java.nio.ByteBuffer;
|
| | | import java.nio.ByteOrder;
|
| | | import java.nio.channels.FileChannel;
|
| | | import java.sql.Connection;
|
| | | import java.sql.ResultSet;
|
| | | import java.sql.SQLException;
|
| | | import java.sql.Statement;
|
| | | import java.sql.Types;
|
| | | import java.util.ArrayList;
|
| | | import java.util.Date;
|
| | | import java.util.List;
|
| | | import java.util.Map;
|
| | | import java.util.TreeMap;
|
| | |
|
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext;
|
| | | import com.ximple.eofms.jobs.context.edbgeo.FeatureDgnConvertEdbGeoJobContext;
|
| | | import com.ximple.eofms.jobs.context.edbgeo.GeneralDgnConvertEdbGeoJobContext;
|
| | | import com.ximple.eofms.jobs.context.edbgeo.IndexDgnConvertEdbGeoJobContext;
|
| | | import com.ximple.eofms.jobs.context.edbgeo.OracleConvertEdbGeoJobContext;
|
| | | import com.ximple.eofms.util.BinConverter;
|
| | | import com.ximple.eofms.util.ByteArrayCompressor;
|
| | | import com.ximple.eofms.util.FileUtils;
|
| | | import com.ximple.eofms.util.StringUtils;
|
| | | import com.ximple.io.dgn7.ComplexElement;
|
| | | import com.ximple.io.dgn7.Dgn7fileException;
|
| | | import com.ximple.io.dgn7.Dgn7fileReader;
|
| | | import com.ximple.io.dgn7.Element;
|
| | | import com.ximple.io.dgn7.ElementType;
|
| | | import com.ximple.io.dgn7.IElementHandler;
|
| | | import com.ximple.io.dgn7.Lock;
|
| | | import com.ximple.util.PrintfFormat;
|
| | | import oracle.jdbc.OracleConnection;
|
| | | import oracle.jdbc.OracleResultSet;
|
| | | import oracle.sql.ARRAY;
|
| | | import oracle.sql.BLOB;
|
| | | import org.apache.commons.collections.OrderedMap;
|
| | | import org.apache.commons.collections.OrderedMapIterator;
|
| | | import org.apache.commons.collections.map.LinkedMap;
|
| | | import org.apache.commons.logging.Log;
|
| | | import org.apache.commons.logging.LogFactory;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.Transaction;
|
| | | import org.geotools.data.edbgeo.PostgisDataStoreFactory;
|
| | | import org.geotools.data.jdbc.JDBCUtils;
|
| | | import org.geotools.feature.SchemaException;
|
| | | import org.geotools.jdbc.JDBCDataStore;
|
| | | import org.opengis.feature.IllegalAttributeException;
|
| | | import org.quartz.JobDataMap;
|
| | | import org.quartz.JobDetail;
|
| | | import org.quartz.JobExecutionContext;
|
| | | import org.quartz.JobExecutionException;
|
| | |
|
| | | public class OracleConvertDgn2EdbGeoJob extends AbstractOracleDatabaseJob {
|
| | | final static Log logger = LogFactory.getLog(OracleConvertDgn2EdbGeoJob.class);
|
| | |
|
| | | private static final String EDBHOST = "EDBHOST";
|
| | | private static final String EDBDATBASE = "EDBDATBASE";
|
| | | private static final String EDBPORT = "EDBPORT";
|
| | | private static final String EDBSCHEMA = "EDBSCHEMA";
|
| | | private static final String EDBUSER = "EDBUSER";
|
| | | private static final String EDBPASS = "EDBPASS";
|
| | | private static final String USEWKB = "USEWKB";
|
| | |
|
| | | private static final boolean useTpclidText = false;
|
| | |
|
| | | private static final int FETCHSIZE = 30;
|
| | | private static final int COMMITSIZE = 100;
|
| | | private static final String INDEXPATHNAME = "index";
|
| | | private static final String OTHERPATHNAME = "other";
|
| | |
|
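| | | // Simple two-field holder; in this job, first holds the blob-storage (space) table name and second the raw-format table name.
|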
| | | protected static class Pair {
|
| | | Object first;
|
| | | Object second;
|
| | |
|
| | | public Pair(Object first, Object second) {
|
| | | this.first = first;
|
| | | this.second = second;
|
| | | }
|
| | | }
|
| | |
|
| | | protected static PostgisDataStoreFactory dataStoreFactory = new PostgisDataStoreFactory();
|
| | |
|
| | | protected String _edbHost;
|
| | | protected String _edbDatabase;
|
| | | protected String _edbPort;
|
| | | protected String _edbSchema;
|
| | | protected String _edbUsername;
|
| | | protected String _edbPassword;
|
| | | protected String _edbUseWKB;
|
| | |
|
| | | protected Map<String, String> edbProperties;
|
| | | protected JDBCDataStore targetDataStore;
|
| | | // protected OracleConvertEdbGeoJobContext oracleJobContext;
|
| | |
|
| | | private long queryTime = 0;
|
| | | private long queryTimeStart = 0;
|
| | |
|
| | | public Log getLogger() {
|
| | | return logger;
|
| | | }
|
| | |
|
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath,
|
| | | boolean profileMode,
|
| | | boolean useTransform) {
|
| | | return new OracleConvertEdbGeoJobContext(getDataPath(),
|
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform);
|
| | | }
|
| | |
|
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException {
|
| | | super.extractJobConfiguration(jobDetail);
|
| | | JobDataMap dataMap = jobDetail.getJobDataMap();
|
| | | _edbHost = dataMap.getString(EDBHOST);
|
| | | _edbDatabase = dataMap.getString(EDBDATBASE);
|
| | | _edbPort = dataMap.getString(EDBPORT);
|
| | | _edbSchema = dataMap.getString(EDBSCHEMA);
|
| | | _edbUsername = dataMap.getString(EDBUSER);
|
| | | _edbPassword = dataMap.getString(EDBPASS);
|
| | | _edbUseWKB = dataMap.getString(USEWKB);
|
| | |
|
| | | Log logger = getLogger();
|
| | | /*
|
| | | logger.info("EDBHOST=" + _myHost);
|
| | | logger.info("EDBDATBASE=" + _myDatabase);
|
| | | logger.info("EDBPORT=" + _myPort);
|
| | | logger.info("EDBSCHEMA=" + _mySchema);
|
| | | logger.info("EDBUSER=" + _myUsername);
|
| | | logger.info("EDBPASS=" + _myPassword);
|
| | | logger.info("USEWKB=" + _myUseWKB);
|
| | | */
|
| | |
|
| | | if (_edbHost == null) {
|
| | | logger.warn("EDBHOST is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial host.");
|
| | | }
|
| | | if (_edbDatabase == null) {
|
| | | logger.warn("EDBDATBASE is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial database.");
|
| | | }
|
| | | if (_edbPort == null) {
|
| | | logger.warn("EDBPORT is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial port.");
|
| | | }
|
| | | if (_edbSchema == null) {
|
| | | logger.warn("EDBSCHEMA is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial schema.");
|
| | | }
|
| | | if (_edbUsername == null) {
|
| | | logger.warn("EDBUSER is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial username.");
|
| | | }
|
| | | if (_edbPassword == null) {
|
| | | logger.warn("EDBPASS is null");
|
| | | throw new JobExecutionException("Unknown EdbGeoSpatial password.");
|
| | | }
|
| | |
|
| | | Map<String, String> remote = new TreeMap<String, String>();
|
| | | remote.put("dbtype", "edbgeo");
|
| | | remote.put("charset", "UTF-8");
|
| | | remote.put("host", _edbHost);
|
| | | remote.put("port", _edbPort);
|
| | | remote.put("database", _edbDatabase);
|
| | | remote.put("user", _edbUsername);
|
| | | remote.put("passwd", _edbPassword);
|
| | | remote.put("namespace", null);
|
| | | edbProperties = remote;
|
| | | }
|
| | |
|
| | | public void execute(JobExecutionContext context) throws JobExecutionException {
|
| | | // Every job has its own job detail
|
| | | JobDetail jobDetail = context.getJobDetail();
|
| | |
|
| | | // The name is defined in the job definition
|
| | | String jobName = jobDetail.getKey().getName();
|
| | |
|
| | | // Log the time the job started
|
| | | logger.info(jobName + " fired at " + new Date());
|
| | | extractJobConfiguration(jobDetail);
|
| | | createSourceDataStore();
|
| | | createTargetDataStore();
|
| | | if (getSourceDataStore() == null) {
|
| | | logger.warn("Cannot connect source oracle database.");
|
| | | throw new JobExecutionException("Cannot connect source oracle database.");
|
| | | }
|
| | |
|
| | | if (getTargetDataStore() == null) {
|
| | | logger.warn("Cannot connect target EdbGeoSpatial database.");
|
| | | throw new JobExecutionException("Cannot connect target EdbGeoSpatial database.");
|
| | | }
|
| | |
|
| | | if (isProfileMode()) {
|
| | | queryTime = 0;
|
| | | }
|
| | |
|
| | | long t1 = System.currentTimeMillis();
|
| | | String targetSchemaName;
|
| | | try {
|
| | | logger.info("-- step:clearOutputDatabase --");
|
| | | clearOutputDatabase();
|
| | | targetSchemaName = determineTargetSchemaName();
|
| | |
|
| | | if (checkConvertFile()) {
|
| | | logger.info("-- step:convertIndexDesignFile --");
|
| | | long tStep = System.currentTimeMillis();
|
| | | convertIndexDesignFile(context, targetSchemaName);
|
| | | if (isProfileMode()) {
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logTimeDiff("Profile-convertIndexDesignFile", tStep, tStepEnd);
|
| | | }
|
| | |
|
| | | logger.info("-- step:convertOtherDesignFile --");
|
| | | tStep = System.currentTimeMillis();
|
| | | convertOtherDesignFile(context, targetSchemaName);
|
| | | if (isProfileMode()) {
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logTimeDiff("Profile-convertOtherDesignFile", tStep, tStepEnd);
|
| | | }
|
| | |
|
| | | }
|
| | |
|
| | | if (checkConvertDB()) {
|
| | | logger.info("-- step:convertOracleDB --");
|
| | |
|
| | | OracleConvertEdbGeoJobContext jobContext =
|
| | | (OracleConvertEdbGeoJobContext) prepareJobContext(targetSchemaName, _filterPath,
|
| | | isProfileMode(), isTransformed());
|
| | | jobContext.setSourceDataStore(getSourceDataStore());
|
| | | // jobContext.setConvertElementIn(_convertElementIn);
|
| | | jobContext.setElementLogging(checkElementLogging());
|
| | | jobContext.setExecutionContext(context);
|
| | |
|
| | | createHibernateSequence(jobContext);
|
| | | fetchTPData(jobContext);
|
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" +
|
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName()));
|
| | |
|
| | | long tStep = System.currentTimeMillis();
|
| | |
|
| | | if (isCopyConnectivityMode()) {
|
| | | copyConnectivity(jobContext);
|
| | | }
|
| | |
|
| | | if (isProfileMode()) {
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logTimeDiff("Profile-Copy Connectivity", tStep, tStepEnd);
|
| | | }
|
| | |
|
| | | for (String orgSchema : _orgSchema) {
|
| | | logger.info("----- start schema:" + orgSchema + " -----");
|
| | | if (isProfileMode()) {
|
| | | jobContext.resetProcessTime();
|
| | | jobContext.resetUpdateTime();
|
| | | }
|
| | | tStep = System.currentTimeMillis();
|
| | | exetcuteConvert(jobContext, orgSchema, _dataPath);
|
| | |
|
| | | // close all open feature writer instances
|
| | | jobContext.closeFeatureWriter();
|
| | |
|
| | | if (isProfileMode()) {
|
| | | logger.warn("Profile-Current Query Oracle Cost-" +
|
| | | ((int) ((getQueryTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((getQueryTime()) % 60000.0)) / 1000) + " sec");
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logger.warn("Profile-Current Process Cost-" +
|
| | | ((int) ((getProcessTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((getProcessTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current Update Cost-" +
|
| | | ((int) ((getUpdateTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((getUpdateTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current JobContext Process Cost-" +
|
| | | ((int) ((jobContext.getProcessTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((jobContext.getProcessTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current JobContext Update Cost-" +
|
| | | ((int) ((jobContext.getUpdateTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((jobContext.getUpdateTime()) % 60000.0)) / 1000) + " sec");
|
| | | logTimeDiff("Profile-Convert[" + orgSchema + "]", tStep, tStepEnd);
|
| | |
|
| | | resetQueryTime();
|
| | | resetProcessTime();
|
| | | resetUpdateTime();
|
| | | }
|
| | | }
|
| | |
|
| | | jobContext.closeOracleConnection();
|
| | | }
|
| | |
|
| | | if (checkConvertElementIn()) {
|
| | | logger.info("-- step:convertFeatureDesignFile --");
|
| | | long tStep = System.currentTimeMillis();
|
| | | convertFeatureDesignFile(context, targetSchemaName);
|
| | | if (isProfileMode()) {
|
| | | long tStepEnd = System.currentTimeMillis();
|
| | | logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd);
|
| | | }
|
| | | }
|
| | |
|
| | | if (checkCreateDummy()) {
|
| | | logger.info("-- step:createDummyFeatureFile --");
|
| | | createDummyFeatureFile(context);
|
| | | }
|
| | |
|
| | | long t2 = System.currentTimeMillis();
|
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss";
|
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW);
|
| | | logTimeDiff("Total ", t1, t2);
|
| | |
|
| | | updateRepoStatusToReady(targetSchemaName);
|
| | |
|
| | | } catch (SQLException e) {
|
| | | disconnect();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e);
|
| | | } catch (IOException ex) {
|
| | | disconnect();
|
| | | logger.warn(ex.getMessage(), ex);
|
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex);
|
| | | } finally {
|
| | | disconnect();
|
| | | }
|
| | | logger.warn(jobName + " end at " + new Date());
|
| | | }
|
| | |
|
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) {
|
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " +
|
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec");
|
| | | }
|
| | |
|
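| | | // Iterates the SD$SPACENODES space tables, converting each one, committing every COMMITSIZE tables, and reporting progress into the execution context.
|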
| | | private void exetcuteConvert(OracleConvertEdbGeoJobContext jobContext,
|
| | | String querySchema, String targetSchemaName) throws SQLException {
|
| | | int order = 0;
|
| | | OrderedMap map = getBlobStorageList(jobContext.getOracleConnection(),
|
| | | querySchema, "SD$SPACENODES", null);
|
| | |
|
| | | logger.info("begin convert job:[" + map.size() + "]:testmode=" + _testMode);
|
| | |
|
| | | int total = map.size(); //spacenodes count
|
| | | int step = total / 100;
|
| | | int current = 0;
|
| | |
|
| | | if (total == 0) {
|
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is zero.");
|
| | | return;
|
| | | }
|
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size());
|
| | |
|
| | | //jobContext.startTransaction();
|
| | | jobContext.setCurrentSchema(querySchema);
|
| | | jobContext.getExecutionContext().put("ConvertDgn2EdbGeoJobProgress", 0);
|
| | | for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext(); ) {
|
| | | it.next();
|
| | |
|
| | | Pair pair = (Pair) it.getValue();
|
| | | String tableSrc = (String) pair.first;
|
| | |
|
| | | logger.info("begin convert:[" + order + "]-" + tableSrc);
|
| | | queryIgsetElement(jobContext, querySchema, tableSrc);
|
| | |
|
| | |
|
| | | order++;
|
| | |
|
| | | if (_testMode) {
|
| | | if ((_testCount < 0) || (order >= _testCount))
|
| | | break;
|
| | | }
|
| | |
|
| | | if ((order % COMMITSIZE) == 0) {
|
| | | // OracleConnection connection = jobContext.getOracleConnection();
|
| | | // connection.commitTransaction();
|
| | | jobContext.commitTransaction();
|
| | | //jobContext.startTransaction();
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | }
|
| | |
|
| | | if (step != 0) {
|
| | | int now = order % step;
|
| | | if (now != current) {
|
| | | current = now;
|
| | | jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", current);
|
| | |
|
| | | }
|
| | | } else {
|
| | | jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", current);
|
| | | current++;
|
| | | }
|
| | | }
|
| | | jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", 100);
|
| | |
|
| | | jobContext.commitTransaction();
|
| | | jobContext.resetFeatureContext();
|
| | |
|
| | | if (isProfileMode()) {
|
| | |
|
| | | }
|
| | |
|
| | | logger.info("end convert job:[" + order + "]");
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | }
|
| | |
|
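| | | // Reads SNID/SPACETABLE rows and fills the ordered map with Pair entries whose first field is the space-table name.
|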
| | | protected OrderedMap getBlobStorageList(Connection connection, String schemaSrc, String tableSrc,
|
| | | OrderedMap orderedMap) throws SQLException {
|
| | | if (orderedMap == null)
|
| | | orderedMap = new LinkedMap(99);
|
| | | String fetchStmtFmt = "SELECT SNID, SPACETABLE FROM \"%s\".\"%s\"";
|
| | | PrintfFormat spf = new PrintfFormat(fetchStmtFmt);
|
| | | String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc});
|
| | | Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
|
| | | ResultSet rs = null;
|
| | |
|
| | | stmt.setFetchSize(FETCHSIZE);
|
| | | try {
|
| | | rs = stmt.executeQuery(fetchStmt);
|
| | | int size = rs.getMetaData().getColumnCount();
|
| | |
|
| | | while (rs.next()) {
|
| | | Object[] values = new Object[size];
|
| | |
|
| | | for (int i = 0; i < size; i++) {
|
| | | values[i] = rs.getObject(i + 1);
|
| | | }
|
| | |
|
| | | Integer key = ((BigDecimal) values[0]).intValue();
|
| | | String name = (String) values[1];
|
| | |
|
| | | Pair pair = (Pair) orderedMap.get(key);
|
| | | if (pair == null)
|
| | | orderedMap.put(key, new Pair(name, null));
|
| | | else
|
| | | pair.first = name;
|
| | | }
|
| | | } catch (SQLException e) {
|
| | | logger.error(e.toString(), e);
|
| | | logger.error("stmt=" + fetchStmt);
|
| | | throw e;
|
| | | } finally {
|
| | | JDBCUtils.close(rs);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | |
|
| | | return orderedMap;
|
| | | }
|
| | |
|
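| | | // Reads RNID/SPACETABLE rows and fills the ordered map with Pair entries whose second field is the raw-format table name.
|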
| | | protected OrderedMap getRawFormatStorageList(OracleConnection connection, String schemaSrc, String tableSrc,
|
| | | OrderedMap orderedMap) throws SQLException {
|
| | | if (orderedMap == null)
|
| | | orderedMap = new LinkedMap(99);
|
| | | String fetchStmtFmt = "SELECT RNID, SPACETABLE FROM \"%s\".\"%s\"";
|
| | | PrintfFormat spf = new PrintfFormat(fetchStmtFmt);
|
| | | String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc});
|
| | | Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
|
| | |
|
| | | stmt.setFetchSize(FETCHSIZE);
|
| | | ResultSet rs = stmt.executeQuery(fetchStmt);
|
| | | try {
|
| | | int size = rs.getMetaData().getColumnCount();
|
| | | while (rs.next()) {
|
| | | Object[] values = new Object[size];
|
| | |
|
| | | for (int i = 0; i < size; i++) {
|
| | | values[i] = rs.getObject(i + 1);
|
| | | }
|
| | |
|
| | | Integer key = ((BigDecimal) values[0]).intValue();
|
| | | String name = (String) values[1];
|
| | |
|
| | | Pair pair = (Pair) orderedMap.get(key);
|
| | | if (pair == null)
|
| | | orderedMap.put(key, new Pair(null, name));
|
| | | else
|
| | | pair.second = name;
|
| | | }
|
| | | } finally {
|
| | | JDBCUtils.close(rs);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | return orderedMap;
|
| | | }
|
| | |
|
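| | | // Streams IGDSELM values (BLOB or raw bytes) from the source table, decodes each into a DGN element, and hands it to the job context.
|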
| | | protected void queryIgsetElement(OracleConvertEdbGeoJobContext jobContext,
|
| | | String srcschema, String srctable) throws SQLException {
|
| | | Connection connection = jobContext.getOracleConnection();
|
| | | String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" ORDER BY ROWID";
|
| | | //String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" WHERE TAG_SFSC = 423 AND TAG_LUFID = 21612065 ORDER BY ROWID";
|
| | | PrintfFormat spf = new PrintfFormat(fetchSrcStmtFmt);
|
| | | String fetchSrcStmt = spf.sprintf(new Object[]{srcschema, srctable});
|
| | | Statement stmtSrc = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
|
| | |
|
| | | stmtSrc.setFetchSize(FETCHSIZE);
|
| | | ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt);
|
| | | int igdsMetaType = rsSrc.getMetaData().getColumnType(1);
|
| | | while (rsSrc.next()) {
|
| | | if (isProfileMode()) {
|
| | | markQueryTime();
|
| | | }
|
| | |
|
| | | byte[] raw = null;
|
| | | if (igdsMetaType == Types.BLOB) {
|
| | | BLOB blob = (BLOB) rsSrc.getBlob(1);
|
| | |
|
| | | try {
|
| | | raw = getBytesFromBLOB(blob);
|
| | | } catch (BufferOverflowException e) {
|
| | | logger.warn("Wrong Element Structure-", e);
|
| | | } finally {
|
| | | // blob.close();
|
| | | }
|
| | | } else {
|
| | | raw = rsSrc.getBytes(1);
|
| | | }
|
| | |
|
| | | try {
|
| | | if (raw != null) {
|
| | | Element element = fetchBinaryElement(raw);
|
| | | if (isProfileMode()) {
|
| | | accumulateQueryTime();
|
| | | }
|
| | | jobContext.putFeatureCollection(element);
|
| | | } else {
|
| | | if (isProfileMode()) {
|
| | | accumulateQueryTime();
|
| | | }
|
| | | }
|
| | | } catch (Dgn7fileException e) {
|
| | | logger.warn("Dgn7Exception", e);
|
| | | }
|
| | | }
|
| | |
|
| | | JDBCUtils.close(rsSrc);
|
| | | JDBCUtils.close(stmtSrc);
|
| | | }
|
| | |
|
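| | | // Streams compressed ELEMENT arrays from the source table, unmarshals and decompresses them, and feeds the decoded DGN elements to the job context.
|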
| | | protected void queryRawElement(OracleConvertEdbGeoJobContext jobContext,
|
| | | String srcschema, String srctable) throws SQLException {
|
| | | Connection connection = jobContext.getOracleConnection();
|
| | | String fetchDestStmtFmt = "SELECT ELEMENT FROM \"%s\".\"%s\" ORDER BY ROWID";
|
| | | PrintfFormat spf = new PrintfFormat(fetchDestStmtFmt);
|
| | | String fetchDestStmt = spf.sprintf(new Object[]{srcschema, srctable});
|
| | | Statement stmtDest = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
|
| | |
|
| | | stmtDest.setFetchSize(FETCHSIZE);
|
| | | ResultSet rsDest = stmtDest.executeQuery(fetchDestStmt);
|
| | |
|
| | | try {
|
| | | while (rsDest.next()) {
|
| | | ARRAY rawsValue = ((OracleResultSet) rsDest).getARRAY(1);
|
| | | long[] rawData = rawsValue.getLongArray();
|
| | | byte[] compressedValue;
|
| | |
|
| | | /*
|
| | | if (dataMode == TransferTask.DataMode.Normal)
|
| | | {
|
| | | compressedValue = BinConverter.unmarshalByteArray(rawData, true);
|
| | | } else
|
| | | {
|
| | | compressedValue = BinConverter.unmarshalCompactByteArray(rawData);
|
| | | }
|
| | | */
|
| | | compressedValue = BinConverter.unmarshalByteArray(rawData, true);
|
| | |
|
| | | byte[] rawDest = ByteArrayCompressor.decompressByteArray(compressedValue);
|
| | |
|
| | | try {
|
| | | Element element = fetchBinaryElement(rawDest);
|
| | | jobContext.putFeatureCollection(element);
|
| | | } catch (Dgn7fileException e) {
|
| | | logger.warn("Dgn7Exception:" + e.getMessage(), e);
|
| | | }
|
| | | }
|
| | | } finally {
|
| | | JDBCUtils.close(rsDest);
|
| | | JDBCUtils.close(stmtDest);
|
| | | }
|
| | | }
|
| | |
|
| | | // Binary to Element
|
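| | | // The 16-bit signature holds the element type in bits 8-14; lengths are given in 2-byte words,
|
| | | // so each record occupies (words * 2) + 4 bytes. Component elements following a complex header are appended to it.
|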
| | | private Element fetchBinaryElement(byte[] raws) throws Dgn7fileException {
|
| | | ByteBuffer buffer = ByteBuffer.wrap(raws);
|
| | | buffer.order(ByteOrder.LITTLE_ENDIAN);
|
| | | short signature = buffer.getShort();
|
| | |
|
| | | // byte type = (byte) (buffer.get() & 0x7f);
|
| | | byte type = (byte) ((signature >>> 8) & 0x007f);
|
| | |
|
| | | // Bentley stores contentLength in 2-byte words,
|
| | | // while ByteBuffer indexes raw bytes.
|
| | | // track the record location
|
| | | int elementLength = (buffer.getShort() * 2) + 4;
|
| | | ElementType recordType = ElementType.forID(type);
|
| | | IElementHandler handler;
|
| | |
|
| | | handler = recordType.getElementHandler();
|
| | |
|
| | | Element dgnElement = (Element) handler.read(buffer, signature, elementLength);
|
| | | if (recordType.isComplexElement() && (elementLength < raws.length)) {
|
| | | int offset = elementLength;
|
| | | while (offset < (raws.length - 4)) {
|
| | | buffer.position(offset);
|
| | | signature = buffer.getShort();
|
| | | type = (byte) ((signature >>> 8) & 0x007f);
|
| | | elementLength = (buffer.getShort() * 2) + 4;
|
| | | if (raws.length < (offset + elementLength)) {
|
| | | logger.debug("Length does not match:" + offset + ":" + buffer.position() + ":" + buffer.limit());
|
| | | break;
|
| | | }
|
| | | recordType = ElementType.forID(type);
|
| | | handler = recordType.getElementHandler();
|
| | | if (handler != null) {
|
| | | Element subElement = (Element) handler.read(buffer, signature, elementLength);
|
| | | ((ComplexElement) dgnElement).add(subElement);
|
| | | offset += elementLength;
|
| | | } else {
|
| | | byte[] remain = new byte[buffer.remaining()];
|
| | | System.arraycopy(raws, offset, remain, 0, buffer.remaining());
|
| | | for (int i = 0; i < remain.length; i++) {
|
| | | if (remain[i] != 0) {
|
| | | logger.info("fetched element has unexpected trailing data. index=" + (offset + i) + ":value=" + remain[i]);
|
| | | }
|
| | | }
|
| | | break;
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | return dgnElement;
|
| | | }
|
| | |
|
| | | /**
|
| | | * Converts the index design (DGN) files under the data path into the target schema.
|
| | | *
|
| | | * @param context the job execution context
|
| | | * @throws org.quartz.JobExecutionException
|
| | | * exception
|
| | | */
|
| | | private void convertIndexDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException {
|
| | | File indexDir = new File(getDataPath(), INDEXPATHNAME);
|
| | | if (!indexDir.exists()) {
|
| | | logger.info("index dir=" + indexDir + " does not exist.");
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!indexDir.isDirectory()) {
|
| | | logger.info("index dir=" + indexDir + " is not a directory.");
|
| | | }
|
| | |
|
| | | List<File> dgnFiles = FileUtils.recurseDir(indexDir, new FileFilter() {
|
| | | public boolean accept(File pathname) {
|
| | | return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn");
|
| | | }
|
| | | });
|
| | |
|
| | | for (File dgnFile : dgnFiles) {
|
| | | if (dgnFile.isDirectory()) continue;
|
| | | IndexDgnConvertEdbGeoJobContext convertContext =
|
| | | new IndexDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName,
|
| | | isProfileMode(), isTransformed());
|
| | | logger.info("--- start index dgnfile-" + dgnFile.toString() + " ---");
|
| | | FileInputStream fs = null;
|
| | | FileChannel fc = null;
|
| | | Dgn7fileReader reader = null;
|
| | | try {
|
| | | convertContext.clearOutputDatabase();
|
| | | convertContext.setExecutionContext(context);
|
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator);
|
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]);
|
| | | convertContext.startTransaction();
|
| | |
|
| | | fs = new FileInputStream(dgnFile);
|
| | | fc = fs.getChannel();
|
| | | reader = new Dgn7fileReader(fc, new Lock());
|
| | | convertContext.setReader(reader);
|
| | |
|
| | | scanIndexDgnElement(convertContext);
|
| | |
|
| | | convertContext.commitTransaction();
|
| | | convertContext.closeFeatureWriter();
|
| | |
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | } catch (FileNotFoundException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (Dgn7fileException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IllegalAttributeException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (SchemaException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } finally {
|
| | | convertContext.closeFeatureWriter();
|
| | |
|
| | | if (reader != null) {
|
| | | try {
|
| | | reader.close();
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | if (fs != null) {
|
| | | try {
|
| | | fs.close();
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | if (isProfileMode()) {
|
| | | logger.warn("Profile-Current convertContext Process Cost-" +
|
| | | ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current convertContext Update Cost-" +
|
| | | ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec");
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
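| | | // Walks the DGN records, buffering a complex element until all of its component elements are collected before handing it to processIndexElement.
|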
| | | protected void scanIndexDgnElement(IndexDgnConvertEdbGeoJobContext convertContext)
|
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException {
|
| | | Dgn7fileReader reader = convertContext.getReader();
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | |
|
| | | while (reader.hasNext()) {
|
| | | if (isProfileMode()) markProcessTime();
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | |
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | processIndexElement(lastComplex, convertContext);
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | processIndexElement(element, convertContext);
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | processIndexElement(lastComplex, convertContext);
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | }
|
| | | count++;
|
| | | }
|
| | |
|
| | | if (lastComplex != null) {
|
| | | processIndexElement(lastComplex, convertContext);
|
| | | }
|
| | | logger.debug("ElementRecord Count=" + count);
|
| | | }
|
| | |
|
| | | private void processIndexElement(Element element, IndexDgnConvertEdbGeoJobContext convertContext)
|
| | | throws IllegalAttributeException, SchemaException {
|
| | | //if (useTpclidText) {
|
| | | // if (element instanceof TextElement) {
|
| | | // convertContext.putFeatureCollection(element);
|
| | | // }
|
| | | //} else {
|
| | | // if (element instanceof ShapeElement) {
|
| | | convertContext.putFeatureCollection(element);
|
| | | // }
|
| | | //}
|
| | | }
|
| | |
|
| | |
|
| | | /**
|
| | | * Converts the other (general) design (DGN) files under the data path into the target schema.
|
| | | *
|
| | | * @param context the job execution context
|
| | | * @throws org.quartz.JobExecutionException
|
| | | * exception
|
| | | */
|
| | | private void convertOtherDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException {
|
| | | File otherDir = new File(getDataPath(), OTHERPATHNAME);
|
| | | if (!otherDir.exists()) {
|
| | | logger.info("other dir=" + otherDir + " does not exist.");
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!otherDir.isDirectory()) {
|
| | | logger.info("other dir=" + otherDir + " is not a directory.");
|
| | | }
|
| | |
|
| | | List<File> dgnFiles = FileUtils.recurseDir(otherDir, new FileFilter() {
|
| | | public boolean accept(File pathname) {
|
| | | return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn");
|
| | | }
|
| | | });
|
| | |
|
| | | for (File dgnFile : dgnFiles) {
|
| | | if (dgnFile.isDirectory()) continue;
|
| | |
|
| | | GeneralDgnConvertEdbGeoJobContext convertContext =
|
| | | new GeneralDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName,
|
| | | isProfileMode(), isTransformed());
|
| | | logger.info("--- start other dgnfile-" + dgnFile.toString() + " ---");
|
| | | FileInputStream fs = null;
|
| | | FileChannel fc;
|
| | | Dgn7fileReader reader = null;
|
| | | try {
|
| | | convertContext.setExecutionContext(context);
|
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator);
|
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]);
|
| | | convertContext.startTransaction();
|
| | |
|
| | | fs = new FileInputStream(dgnFile);
|
| | | fc = fs.getChannel();
|
| | | reader = new Dgn7fileReader(fc, new Lock());
|
| | | convertContext.setReader(reader);
|
| | |
|
| | | scanOtherDgnElement(convertContext);
|
| | |
|
| | | convertContext.commitTransaction();
|
| | | convertContext.closeFeatureWriter();
|
| | |
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | } catch (FileNotFoundException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (Dgn7fileException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IllegalAttributeException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (SchemaException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } finally {
|
| | | convertContext.closeFeatureWriter();
|
| | |
|
| | | if (reader != null) {
|
| | | try {
|
| | | reader.close();
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | if (fs != null) {
|
| | | try {
|
| | | fs.close();
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | if (isProfileMode()) {
|
| | | logger.warn("Profile-Current convertContext Process Cost-" +
|
| | | ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec");
|
| | | logger.warn("Profile-Current convertContext Update Cost-" +
|
| | | ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " +
|
| | | (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec");
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
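| | | // Same complex-element grouping as scanIndexDgnElement, but routed through processOtherElement.
|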
| | | public void scanOtherDgnElement(GeneralDgnConvertEdbGeoJobContext convertContext)
|
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException {
|
| | | Dgn7fileReader reader = convertContext.getReader();
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | | while (reader.hasNext()) {
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | |
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | processOtherElement(lastComplex, convertContext);
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | processOtherElement(element, convertContext);
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | processOtherElement(lastComplex, convertContext);
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | }
|
| | | count++;
|
| | | }
|
| | |
|
| | | if (lastComplex != null) {
|
| | | processOtherElement(lastComplex, convertContext);
|
| | | }
|
| | | logger.debug("ElementRecord Count=" + count);
|
| | | }
|
| | |
|
| | | private void processOtherElement(Element element, GeneralDgnConvertEdbGeoJobContext convertContext)
|
| | | throws IllegalAttributeException, SchemaException {
|
| | | convertContext.putFeatureCollection(element);
|
| | | }
|
| | |
|
| | | private void clearOutputDatabase() {
|
| | | /*
|
| | | File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH);
|
| | | if (outDataPath.exists() && outDataPath.isDirectory())
|
| | | {
|
| | | deleteFilesInPath(outDataPath);
|
| | | }
|
| | | outDataPath = new File(getDataPath(), IndexDgnConvertShpJobContext.SHPOUTPATH);
|
| | | if (outDataPath.exists() && outDataPath.isDirectory())
|
| | | {
|
| | | deleteFilesInPath(outDataPath);
|
| | | }
|
| | | outDataPath = new File(getDataPath(), GeneralDgnConvertShpJobContext.SHPOUTPATH);
|
| | | if (outDataPath.exists() && outDataPath.isDirectory())
|
| | | {
|
| | | deleteFilesInPath(outDataPath);
|
| | | }
|
| | | */
|
| | | }
|
| | |
|
| | | private void deleteFilesInPath(File outDataPath) {
|
| | | deleteFilesInPath(outDataPath, true);
|
| | | }
|
| | |
|
| | | private void deleteFilesInPath(File outDataPath, boolean removeSubDir) {
|
| | | if (!outDataPath.isDirectory()) {
|
| | | return;
|
| | | }
|
| | | File[] files = outDataPath.listFiles();
|
| | | for (File file : files) {
|
| | | if (file.isFile()) {
|
| | | if (!file.delete()) {
|
| | | logger.info("Cannot delete file-" + file.toString());
|
| | | }
|
| | | } else if (file.isDirectory()) {
|
| | | deleteFilesInPath(file, removeSubDir);
|
| | | if (removeSubDir) {
|
| | | if (!file.delete()) {
|
| | | logger.info("Cannot delete dir-" + file.toString());
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
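| | | // Converts the feature design files found directly under the "elmin" directory, one transaction per DGN file.
|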
| | | private void convertFeatureDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException {
|
| | | File elminDir = new File(getDataPath(), "elmin");
|
| | | if (!elminDir.exists()) {
|
| | | logger.info("elmin dir=" + elminDir + " does not exist.");
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!elminDir.isDirectory()) {
|
| | | logger.info("elmin dir=" + elminDir + " is not a directory.");
|
| | | }
|
| | |
|
| | | File[] dgnFiles = elminDir.listFiles(new FilenameFilter() {
|
| | | public boolean accept(File dir, String name) {
|
| | | return name.toLowerCase().endsWith(".dgn");
|
| | | }
|
| | | });
|
| | |
|
| | | for (File dgnFile : dgnFiles) {
|
| | | FeatureDgnConvertEdbGeoJobContext convertContext =
|
| | | new FeatureDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, _filterPath,
|
| | | isProfileMode(), isTransformed());
|
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
|
| | | try {
|
| | | convertContext.setExecutionContext(context);
|
| | | String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator);
|
| | | convertContext.setFilename(dgnPaths[dgnPaths.length - 1]);
|
| | | convertContext.startTransaction();
|
| | |
|
| | | FileInputStream fs = new FileInputStream(dgnFile);
|
| | | FileChannel fc = fs.getChannel();
|
| | | Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock());
|
| | | convertContext.setReader(reader);
|
| | |
|
| | | scanFeatureDgnElement(convertContext);
|
| | |
|
| | | convertContext.commitTransaction();
|
| | | convertContext.closeFeatureWriter();
|
| | | System.gc();
|
| | | System.runFinalization();
|
| | | } catch (FileNotFoundException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (Dgn7fileException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (IllegalAttributeException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } catch (SchemaException e) {
|
| | | convertContext.rollbackTransaction();
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | } finally {
|
| | | convertContext.closeFeatureWriter();
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | public void scanFeatureDgnElement(FeatureDgnConvertEdbGeoJobContext convertContext)
|
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException {
|
| | | Dgn7fileReader reader = convertContext.getReader();
|
| | | int count = 0;
|
| | | Element lastComplex = null;
|
| | | while (reader.hasNext()) {
|
| | | Element.FileRecord record = reader.nextElement();
|
| | | if (record.element() != null) {
|
| | | Element element = (Element) record.element();
|
| | | ElementType type = element.getElementType();
|
| | |
|
| | | if ((!type.isComplexElement()) && (!element.isComponentElement())) {
|
| | | if (lastComplex != null) {
|
| | | processFeatureElement(lastComplex, convertContext);
|
| | | lastComplex = null;
|
| | | }
|
| | |
|
| | | processFeatureElement(element, convertContext);
|
| | | } else if (element.isComponentElement()) {
|
| | | if (lastComplex != null) {
|
| | | ((ComplexElement) lastComplex).add(element);
|
| | | }
|
| | | } else if (type.isComplexElement()) {
|
| | | if (lastComplex != null) {
|
| | | processFeatureElement(lastComplex, convertContext);
|
| | | }
|
| | | lastComplex = element;
|
| | | }
|
| | | }
|
| | | count++;
|
| | | }
|
| | |
|
| | | if (lastComplex != null) {
|
| | | processFeatureElement(lastComplex, convertContext);
|
| | | }
|
| | | logger.debug("ElementRecord Count=" + count);
|
| | | }
|
| | |
|
| | | private void processFeatureElement(Element element, FeatureDgnConvertEdbGeoJobContext convertContext)
|
| | | throws IllegalAttributeException, SchemaException {
|
| | | convertContext.putFeatureCollection(element);
|
| | | }
|
| | |
|
| | | private void createDummyFeatureFile(JobExecutionContext context) throws JobExecutionException {
|
| | | /*
|
| | | DummyFeatureConvertShpJobContext convertContext = new DummyFeatureConvertShpJobContext(getDataPath(), _filterPath);
|
| | | try {
|
| | | convertContext.startTransaction();
|
| | | convertContext.commitTransaction();
|
| | | convertContext.closeFeatureWriter();
|
| | | } catch (IOException e)
|
| | | {
|
| | | logger.warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | }
|
| | | */
|
| | | }
|
| | |
|
| | | public DataStore getTargetDataStore() {
|
| | | return targetDataStore;
|
| | | }
|
| | |
|
| | | protected void createTargetDataStore() throws JobExecutionException {
|
| | | if (targetDataStore != null) {
|
| | | targetDataStore.dispose();
|
| | | targetDataStore = null;
|
| | | }
|
| | |
|
| | | /*
|
| | | if (!isDriverFound())
|
| | | {
|
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER);
|
| | | }
|
| | | */
|
| | |
|
| | | if (!edbProperties.containsKey(PostgisDataStoreFactory.MAXCONN.key)) {
|
| | | edbProperties.put(PostgisDataStoreFactory.MAXCONN.key, "5");
|
| | | }
|
| | |
|
| | | if (!edbProperties.containsKey(PostgisDataStoreFactory.MINCONN.key)) {
|
| | | edbProperties.put(PostgisDataStoreFactory.MINCONN.key, "1");
|
| | | }
|
| | |
|
| | | if (!edbProperties.containsKey(PostgisDataStoreFactory.WKBENABLED.key)) {
|
| | | edbProperties.put(PostgisDataStoreFactory.WKBENABLED.key, "true");
|
| | | }
|
| | |
|
| | | if (!dataStoreFactory.canProcess(edbProperties)) {
|
| | | getLogger().warn("cannot process properties-");
|
| | | throw new JobExecutionException("cannot process properties-");
|
| | | }
|
| | | try {
|
| | | targetDataStore = (JDBCDataStore) dataStoreFactory.createDataStore(edbProperties);
|
| | | } catch (IOException e) {
|
| | | getLogger().warn(e.getMessage(), e);
|
| | | throw new JobExecutionException(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | protected void disconnect() {
|
| | | super.disconnect();
|
| | | if (targetDataStore != null) {
|
| | | targetDataStore.dispose();
|
| | | targetDataStore = null;
|
| | | }
|
| | | }
|
| | |
|
| | | private String determineTargetSchemaName() throws IOException {
|
| | | if (targetDataStore == null) return null;
|
| | | Connection connection = null;
|
| | | Statement stmt = null;
|
| | | ResultSet rs = null;
|
| | | String targetSchema = null;
|
| | | boolean needCreate = false;
|
| | | try {
|
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT);
|
| | | rs = connection.getMetaData().getTables(null, _edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"});
|
| | | if (!rs.next()) needCreate = true;
|
| | | rs.close();
|
| | | rs = null;
|
| | |
|
| | | stmt = connection.createStatement();
|
| | | stmt.execute("SET edb_redwood_date TO OFF");
|
| | | stmt.execute("SET edb_redwood_strings TO OFF");
|
| | | // stmt.execute("SET edb_stmt_level_tx TO OFF");
|
| | | stmt.close();
|
| | |
|
| | | if (needCreate)
|
| | | createXGeosVersionTable(connection, _edbSchema);
|
| | |
|
| | | StringBuilder sbSQL = new StringBuilder("SELECT ");
|
| | | sbSQL.append("vsschema, vsstatus FROM ");
|
| | | sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' ');
|
| | | sbSQL.append("ORDER BY vsid");
|
| | | stmt = connection.createStatement();
|
| | | rs = stmt.executeQuery(sbSQL.toString());
|
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>();
|
| | | int i = 0;
|
| | | int current = -1;
|
| | | while (rs.next()) {
|
| | | Object[] values = new Object[2];
|
| | | values[0] = rs.getString("vsschema");
|
| | | values[1] = rs.getShort("vsstatus");
|
| | | tmpSchemas.add(values);
|
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) {
|
| | | current = i;
|
| | | }
|
| | | i++;
|
| | | }
|
| | |
|
| | | if (current == -1) {
|
| | | Object[] values = tmpSchemas.get(0);
|
| | | targetSchema = (String) values[0];
|
| | | } else if (current < (tmpSchemas.size() - 1)) {
|
| | | Object[] values = tmpSchemas.get(current + 1);
|
| | | targetSchema = (String) values[0];
|
| | | } else {
|
| | | Object[] values = tmpSchemas.get(0);
|
| | | targetSchema = (String) values[0];
|
| | | }
|
| | |
|
| | | sbSQL = new StringBuilder("UPDATE ");
|
| | | sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' ');
|
| | | sbSQL.append(" SET vsstatus = ");
|
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT);
|
| | | sbSQL.append(" WHERE vsschema = '");
|
| | | sbSQL.append(targetSchema).append("'");
|
| | | int count = stmt.executeUpdate(sbSQL.toString());
|
| | | if (count != 1) {
|
| | | logger.info("update status for " + targetSchema + " update result count="
|
| | | + count);
|
| | | }
|
| | | } catch (SQLException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | } finally {
|
| | | JDBCUtils.close(rs);
|
| | | JDBCUtils.close(stmt);
|
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null);
|
| | | }
|
| | | return targetSchema;
|
| | | }
|
| | |
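| | | // Worked example (schema names are hypothetical): with version rows ordered by vsid as
|
| | | // [gisrepo1=USING, gisrepo2=READY, gisrepo3=AVAILABLE], the method above selects
|
| | | // gisrepo2 as the next conversion target and marks it VSSTATUS_COVERT; if the USING row
|
| | | // is the last one, or no row is marked USING, it wraps around to the first schema.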
|
| | | public String encodeSchemaTableName(String schemaName, String tableName) {
|
| | | return "\"" + schemaName + "\".\"" + tableName + "\"";
|
| | | }
|
| | |
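| | | // e.g. encodeSchemaTableName("gisrepo1", "xgversion") returns "gisrepo1"."xgversion"
|
| | | // (both identifiers double-quoted; the argument values here are only illustrative).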
|
| | | private void createXGeosVersionTable(Connection connection, String pgSchema) throws SQLException {
|
| | | Statement stmt = null;
|
| | | StringBuilder sql = new StringBuilder("CREATE TABLE ");
|
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME));
|
| | | sql.append(" ( vsid serial PRIMARY KEY, ");
|
| | | sql.append(" vsschema character varying(64) NOT NULL, ");
|
| | | sql.append(" vsstatus smallint NOT NULL, ");
|
| | | sql.append(" vstimestamp timestamp with time zone ) ");
|
| | | try {
|
| | | stmt = connection.createStatement();
|
| | | stmt.executeUpdate(sql.toString());
|
| | |
|
| | | sql = new StringBuilder("ALTER TABLE ");
|
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME));
|
| | | sql.append(" OWNER TO ").append(_edbUsername);
|
| | | stmt.executeUpdate(sql.toString());
|
| | |
|
| | | sql = new StringBuilder("GRANT ALL ON TABLE ");
|
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME));
|
| | | sql.append(" TO public");
|
| | | stmt.executeUpdate(sql.toString());
|
| | |
|
| | | for (String schemaName : DataReposVersionManager.DEFAULTXGVERSIONSCHEMA_NAMES) {
|
| | | sql = new StringBuilder("INSERT INTO ");
|
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME));
|
| | | sql.append(" (vsschema, vsstatus) VALUES ('");
|
| | | sql.append(schemaName).append("', ");
|
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )");
|
| | | stmt.executeUpdate(sql.toString());
|
| | |
|
| | | createIfNotExistNewSchema(connection, schemaName);
|
| | | }
|
| | |
|
| | | } finally {
|
| | | if (stmt != null) stmt.close();
|
| | | }
|
| | | }
|
| | |
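| | | // The DDL assembled above amounts to (schema and table names are placeholders):
|
| | | //   CREATE TABLE "<schema>"."<XGVERSIONTABLE_NAME>" ( vsid serial PRIMARY KEY,
|
| | | //     vsschema character varying(64) NOT NULL, vsstatus smallint NOT NULL,
|
| | | //     vstimestamp timestamp with time zone )
|
| | | // followed by ALTER TABLE ... OWNER TO <user>, GRANT ALL ... TO public, and one INSERT
|
| | | // per default schema name with status VSSTATUS_AVAILABLE.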
|
| | | private void updateRepoStatusToReady(String targetSchema) {
|
| | | if (targetDataStore == null) return;
|
| | | Connection connection = null;
|
| | | Statement stmt = null;
|
| | | ResultSet rs = null;
|
| | | boolean needCreate = false;
|
| | | try {
|
| | | StringBuilder sbSQL = new StringBuilder("UPDATE ");
|
| | | sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' ');
|
| | | sbSQL.append(" SET vsstatus = ");
|
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY);
|
| | | sbSQL.append(" , vstimestamp = CURRENT_TIMESTAMP WHERE vsschema = '");
|
| | | sbSQL.append(targetSchema).append("'");
|
| | |
|
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT);
|
| | | stmt = connection.createStatement();
|
| | | int count = stmt.executeUpdate(sbSQL.toString());
|
| | | if (count != 1) {
|
| | | logger.info("update status for " + targetSchema + " update result count="
|
| | | + count);
|
| | | }
|
| | | } catch (SQLException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | } finally {
|
| | | JDBCUtils.close(rs);
|
| | | JDBCUtils.close(stmt);
|
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null);
|
| | | }
|
| | | }
|
| | |
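| | | // The update issued by updateRepoStatusToReady is effectively (placeholders in angle
|
| | | // brackets):
|
| | | //   UPDATE "<schema>"."<XGVERSIONTABLE_NAME>" SET vsstatus = <VSSTATUS_READY>,
|
| | | //     vstimestamp = CURRENT_TIMESTAMP WHERE vsschema = '<targetSchema>'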
|
| | | private void createIfNotExistNewSchema(Connection connection, String s) throws SQLException {
|
| | | Statement stmt = null;
|
| | | ResultSet rs = null;
|
| | | try {
|
| | | /*
|
| | | rs = connection.getMetaData().getSchemas(null, s);
|
| | | if (rs.next()) return;
|
| | | rs.close();
|
| | | rs = null;
|
| | | */
|
| | |
|
| | | StringBuilder sbSQL = new StringBuilder("CREATE SCHEMA ");
|
| | | sbSQL.append(s).append(' ');
|
| | | sbSQL.append("AUTHORIZATION ").append(_edbUsername);
|
| | | stmt = connection.createStatement();
|
| | | stmt.executeUpdate(sbSQL.toString());
|
| | |
|
| | | sbSQL = new StringBuilder("GRANT ALL ON SCHEMA ");
|
| | | sbSQL.append(s).append(' ');
|
| | | sbSQL.append("TO public");
|
| | | stmt.executeUpdate(sbSQL.toString());
|
| | | } catch (SQLException e) {
|
| | | logger.info("create schema:" + s + " has exception.");
|
| | | logger.info(e.getMessage(), e);
|
| | | } finally {
|
| | | if (rs != null) rs.close();
|
| | | if (stmt != null) stmt.close();
|
| | | }
|
| | | }
|
| | |
|
| | | public final void accumulateQueryTime() {
|
| | | queryTime += System.currentTimeMillis() - queryTimeStart;
|
| | | }
|
| | |
|
| | | public long getQueryTime() {
|
| | | return queryTime;
|
| | | }
|
| | |
|
| | | public final void markQueryTime() {
|
| | | queryTimeStart = System.currentTimeMillis();
|
| | | }
|
| | |
|
| | | public final void resetQueryTime() {
|
| | | queryTime = 0;
|
| | | }
|
| | | }
|
| | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.mysql.MySQLDataStore; |
| | | import org.geotools.data.mysql.MySQLDataStoreFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import oracle.jdbc.OracleConnection; |
| | | import oracle.jdbc.OracleResultSet; |
| | |
| | | |
| | | protected static MySQLDataStoreFactory dataStoreFactory = new MySQLDataStoreFactory(); |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | protected String _myHost; |
| | | protected String _myDatabase; |
| | | protected String _myPort; |
| | |
| | | protected String _myUseWKB; |
| | | |
| | | protected Map<String, String> myProperties; |
| | | protected MySQLDataStore targetDataStore; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | boolean useTransform) { |
| | | return new OracleConvertMySQLJobContext(getDataPath(), getTargetDataStore(), filterPath, profileMode, |
| | | useTransform, useEPSG3826); |
| | | useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getName(); |
| | | String jobName = jobDetail.getKey().getName(); |
| | | String targetSchemaName = null; |
| | | |
| | | // Log the time the job started |
| | |
| | | for (String orgSchema : _orgSchema) { |
| | | OracleConvertMySQLJobContext jobContext = |
| | | (OracleConvertMySQLJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed(), isEPSG3826()); |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | // jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (bFirst) |
| | | copyConnectivity(jobContext); |
| | |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Makes a copy of Connectivity; when querying current flow direction it is used to compare against the electrical connectivity (Connectivity) in the OMS database. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void copyConnectivity(OracleConvertMySQLJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | } |
| | | |
| | | private void exetcuteConvert(OracleConvertMySQLJobContext jobContext, |
| | |
| | | } |
| | | |
| | | /** |
| | | * Performs the job of converting index design files. |
| | | * |
| | | * @param context job execution context |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | IndexDgnConvertMySQLJobContext convertContext = |
| | | new IndexDgnConvertMySQLJobContext(getDataPath(), getTargetDataStore(), isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | |
| | | /** |
| | | * Performs the job of converting other design files. |
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | GeneralDgnConvertMySQLJobContext convertContext = |
| | | new GeneralDgnConvertMySQLJobContext(getDataPath(), getTargetDataStore(), isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | private void clearOutputDatabase() { |
| | | /* |
| | | File outDataPath = new File(getDataPath(), OracleConvertPostGISJobContext.SHPOUTPATH); |
| | | File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertMySQLJobContext convertContext = |
| | | new FeatureDgnConvertMySQLJobContext(getDataPath(), getTargetDataStore(), _filterPath, isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | } |
| | | */ |
| | | |
| | | if (!myProperties.containsKey("max connections" /*MySQLDataStoreFactory.MAXCONN.key */)) { |
| | | myProperties.put("max connections", "2"); |
| | | if (!myProperties.containsKey(MySQLDataStoreFactory.MAXCONN.key)) { |
| | | myProperties.put(MySQLDataStoreFactory.MAXCONN.key, "2"); |
| | | } |
| | | |
| | | if (!myProperties.containsKey("min connections" /* MySQLDataStoreFactory.MINCONN.key */)) { |
| | | myProperties.put("min connections", "1"); |
| | | if (!myProperties.containsKey(MySQLDataStoreFactory.MINCONN.key)) { |
| | | myProperties.put(MySQLDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | | if (!myProperties.containsKey(MySQLDataStoreFactory.WKBENABLED.key)) { |
| | | myProperties.put(MySQLDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(myProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = (MySQLDataStore) dataStoreFactory.createDataStore(myProperties); |
| | | targetDataStore = dataStoreFactory.createDataStore(myProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.oracle.OracleDataStore; |
| | | import org.geotools.data.oracle.OracleDataStoreFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.data.oracle.OracleNGDataStoreFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | |
| | | } |
| | | } |
| | | |
| | | protected static OracleDataStoreFactory dataStoreFactory = new OracleDataStoreFactory(); |
| | | protected static OracleNGDataStoreFactory dataStoreFactory = new OracleNGDataStoreFactory(); |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | protected String _sdoHost; |
| | | protected String _sdoDatabase; |
| | | protected String _sdoPort; |
| | |
| | | protected String _sdoUseWKB; |
| | | |
| | | protected Map<String, String> sdoProperties; |
| | | protected OracleDataStore targetDataStore; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), filterPath, profileMode, |
| | | useTransform, useEPSG3826); |
| | | useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put("dbtype", "OraSDO"); |
| | | remote.put("charset", "UTF-8"); |
| | | remote.put("host", _sdoHost); |
| | | remote.put("port", _sdoPort); |
| | | remote.put("database", _sdoDatabase); |
| | | remote.put("user", _sdoUsername); |
| | | remote.put("passwd", _sdoPassword); |
| | | remote.put("namespace", null); |
| | | remote.put(OracleNGDataStoreFactory.DBTYPE.key, "oracle"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(OracleNGDataStoreFactory.HOST.key, _sdoHost); |
| | | remote.put(OracleNGDataStoreFactory.PORT.key, _sdoPort); |
| | | remote.put(OracleNGDataStoreFactory.DATABASE.key, _sdoDatabase); |
| | | remote.put(OracleNGDataStoreFactory.USER.key, _sdoUsername); |
| | | remote.put(OracleNGDataStoreFactory.PASSWD.key, _sdoPassword); |
| | | // remote.put("namespace", null); |
| | | sdoProperties = remote; |
| | | } |
| | | |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getName(); |
| | | String jobName = jobDetail.getKey().getName(); |
| | | String targetSchemaName = null; |
| | | |
| | | // Log the time the job started |
| | |
| | | for (String orgSchema : _orgSchema) { |
| | | OracleConvertOraSDOJobContext jobContext = |
| | | (OracleConvertOraSDOJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | // jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (bFirst) |
| | | copyConnectivity(jobContext); |
| | |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Makes a copy of Connectivity; when querying current flow direction it is used to compare against the electrical connectivity (Connectivity) in the OMS database. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void copyConnectivity(OracleConvertOraSDOJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | |
| | | private void exetcuteConvert(OracleConvertOraSDOJobContext jobContext, |
| | |
| | | } |
| | | |
| | | /** |
| | | * Performs the job of converting index design files. |
| | | * |
| | | * @param context job execution context |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | IndexDgnConvertOraSDOJobContext convertContext = |
| | | new IndexDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | |
| | | /** |
| | | * Performs the job of converting other design files. |
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | GeneralDgnConvertOraSDOJobContext convertContext = |
| | | new GeneralDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertOraSDOJobContext convertContext = |
| | | new FeatureDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), _filterPath, isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | } |
| | | */ |
| | | |
| | | if (!sdoProperties.containsKey(OracleDataStoreFactory.MAXCONN.key)) { |
| | | sdoProperties.put(OracleDataStoreFactory.MAXCONN.key, "2"); |
| | | if (!sdoProperties.containsKey(OracleNGDataStoreFactory.MAXCONN.key)) { |
| | | sdoProperties.put(OracleNGDataStoreFactory.MAXCONN.key, "2"); |
| | | } |
| | | |
| | | if (!sdoProperties.containsKey(OracleDataStoreFactory.MINCONN.key)) { |
| | | sdoProperties.put(OracleDataStoreFactory.MINCONN.key, "1"); |
| | | if (!sdoProperties.containsKey(OracleNGDataStoreFactory.MINCONN.key)) { |
| | | sdoProperties.put(OracleNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = (OracleDataStore) dataStoreFactory.createDataStore(sdoProperties); |
| | | targetDataStore = dataStoreFactory.createDataStore(sdoProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | |
| | | import java.io.FileNotFoundException; |
| | | import java.io.FilenameFilter; |
| | | import java.io.IOException; |
| | | import java.io.PushbackReader; |
| | | import java.io.StringReader; |
| | | import java.math.BigDecimal; |
| | | import java.nio.BufferOverflowException; |
| | | import java.nio.ByteBuffer; |
| | | import java.nio.ByteOrder; |
| | | import java.nio.channels.FileChannel; |
| | | import java.sql.Connection; |
| | | import java.sql.PreparedStatement; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import com.ximple.eofms.util.*; |
| | | import org.apache.commons.collections.OrderedMap; |
| | | import org.apache.commons.collections.OrderedMapIterator; |
| | | import org.apache.commons.collections.map.LinkedMap; |
| | | import org.apache.commons.dbcp.DelegatingConnection; |
| | | import org.apache.commons.dbcp.PoolingConnection; |
| | | import org.apache.commons.dbcp.PoolingDataSource; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisDataStore; |
| | | import org.geotools.data.postgis.PostgisDataStoreFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.postgresql.PGConnection; |
| | | import org.postgresql.copy.CopyManager; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import oracle.jdbc.OracleConnection; |
| | | import oracle.jdbc.OracleResultSet; |
| | |
| | | import com.ximple.eofms.jobs.context.postgis.GeneralDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.IndexDgnConvertPostGISJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.BinConverter; |
| | | import com.ximple.eofms.util.ByteArrayCompressor; |
| | | import com.ximple.eofms.util.FileUtils; |
| | | import com.ximple.eofms.util.StringUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Dgn7fileException; |
| | | import com.ximple.io.dgn7.Dgn7fileReader; |
| | |
| | | import com.ximple.io.dgn7.ElementType; |
| | | import com.ximple.io.dgn7.IElementHandler; |
| | | import com.ximple.io.dgn7.Lock; |
| | | import com.ximple.io.dgn7.ShapeElement; |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.util.PrintfFormat; |
| | | |
| | | public class OracleConvertDgn2PostGISJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleConvertDgn2PostGISJob.class); |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDDATBASE = "PGDDATBASE"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | |
| | | private static final int COMMITSIZE = 100; |
| | | private static final String INDEXPATHNAME = "index"; |
| | | private static final String OTHERPATHNAME = "other"; |
| | | public static final String FORWARDFLOW_MARK = "shape://ccarrow"; |
| | | public static final String BACKFLOW_MARK = "shape://rccarrow"; |
| | | public static final String UNFLOW_MARK = "shape://backslash"; |
| | | public static final String NONFLOW_MARK = "shape://slash"; |
| | | |
| | | private static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1, DIR FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | private static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR WHERE TAG_BCOMPID = 0 ORDER BY TAG_SFSC"; |
| | | |
| | | private static String CREATE_OWNERTABLE = "CREATE TABLE s (tid smallint not null, oid int not null, owner smallint not null)"; |
| | | private static String CREATE_COLORTABLE = "CREATE TABLE s (tid smallint not null, oid int not null, dyncolor varchar(10) not null)"; |
| | | |
| | | public static final String FDYNCOLOR_SUFFIX = "_fdyncolor"; |
| | | public static final String FOWNER_SUFFIX = "_fowner"; |
| | | |
| | | protected static class Pair { |
| | | Object first; |
| | |
| | | } |
| | | } |
| | | |
| | | protected static PostgisDataStoreFactory dataStoreFactory = new PostgisDataStoreFactory(); |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected PostgisDataStore targetDataStore; |
| | | // protected OracleConvertPostGISJobContext oracleJobContext; |
| | | protected JDBCDataStore targetDataStore; |
| | | // protected OracleConvertEdbGeoJobContext oracleJobContext; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform, useEPSG3826); |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDDATBASE); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("PGHOST=" + _myHost); |
| | | logger.info("PGDDATBASE=" + _myDatabase); |
| | | logger.info("PGDATBASE=" + _myDatabase); |
| | | logger.info("PGPORT=" + _myPort); |
| | | logger.info("PGSCHEMA=" + _mySchema); |
| | | logger.info("PGUSER=" + _myUsername); |
| | |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put("dbtype", "postgis"); |
| | | remote.put("charset", "UTF-8"); |
| | | remote.put("host", _pgHost); |
| | | remote.put("port", _pgPort); |
| | | remote.put("database", _pgDatabase); |
| | | remote.put("user", _pgUsername); |
| | | remote.put("passwd", _pgPassword); |
| | | remote.put("namespace", null); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
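| | | // Minimal sketch of consuming a parameter map like the one above (the host, database, |
| | | // user and password values below are placeholders, not this job's configuration): |
| | | //   Map<String, String> params = new TreeMap<String, String>(); |
| | | //   params.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | //   params.put(PostgisNGDataStoreFactory.HOST.key, "localhost"); |
| | | //   params.put(PostgisNGDataStoreFactory.PORT.key, "5432"); |
| | | //   params.put(PostgisNGDataStoreFactory.DATABASE.key, "gisdb"); |
| | | //   params.put(PostgisNGDataStoreFactory.USER.key, "gisuser"); |
| | | //   params.put(PostgisNGDataStoreFactory.PASSWD.key, "secret"); |
| | | //   PostgisNGDataStoreFactory factory = new PostgisNGDataStoreFactory(); |
| | | //   if (factory.canProcess(params)) { |
| | | //       DataStore store = factory.createDataStore(params); |
| | | //       // ... use the store, then release its connection pool |
| | | //       store.dispose(); |
| | | //   } |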
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getName(); |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | if (isIgnoreDBETL()) { |
| | | return; |
| | | } |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) { |
| | |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName; |
| | | String targetSchemaName, targetThemeTable; |
| | | try { |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | clearOutputDatabase(); |
| | | targetSchemaName = determineTargetSchemaName(); |
| | | targetThemeTable = determineTargetThemeTableName(); |
| | | |
| | | if (checkConvertFile()) { |
| | | logger.info("-- step:convertIndexDesignFile --"); |
| | |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-convertOtherDesignFile", tStep, tStepEnd); |
| | | } |
| | | |
| | | } |
| | | |
| | | OracleConvertPostGISJobContext jobContext = null; |
| | | if (checkConvertDB()) { |
| | | logger.info("-- step:convertOracleDB --"); |
| | | |
| | | OracleConvertPostGISJobContext jobContext = |
| | | (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed(), isEPSG3826()); |
| | | jobContext = (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | // jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | CreateHibernateSequence(jobContext); |
| | | |
| | | createHibernateSequence(jobContext); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | |
| | | if (isCopyConnectivityMode()) |
| | | { |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (isCopyConnectivityMode()) { |
| | | copyConnectivity(jobContext); |
| | | } |
| | | |
| | |
| | | createDummyFeatureFile(context); |
| | | } |
| | | |
| | | updateRepoStatusToReady(targetSchemaName); |
| | | |
| | | if (checkConvertPWThemes()) { |
| | | jobContext = (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | if (!convertPowerOwnerThemeWithCopyAPI(jobContext, targetThemeTable)) { |
| | | convertPowerOwnerTheme(jobContext, targetThemeTable); |
| | | } |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd); |
| | | } |
| | | tStep = System.currentTimeMillis(); |
| | | if (!convertDynamicColorThemeWithCopyAPI(jobContext, targetThemeTable)) |
| | | convertDynamicColorTheme(jobContext, targetThemeTable); |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd); |
| | | } |
| | | jobContext.closeOracleConnection(); |
| | | } |
| | | |
| | | updatePWThemeStatusToReady(targetThemeTable); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; |
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | updateRepoStatusToReady(targetSchemaName); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | /** |
| | | * Makes a copy of Connectivity; when querying current flow direction it is used to compare against the electrical connectivity (Connectivity) in the OMS database. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void copyConnectivity(OracleConvertPostGISJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | ResultSet rsMeta = connection.getMetaData().getTables(null, "BASEDB", |
| | | AbstractOracleJobContext.CONNECTIVITY_WEBCHECK_NAME + "%", |
| | | new String[]{"TABLE"}); |
| | | |
| | | boolean found = false; |
| | | try { |
| | | while (rsMeta.next()) { |
| | | String tablename = rsMeta.getString(3); |
| | | if (AbstractOracleJobContext.CONNECTIVITY_WEBCHECK_NAME.equalsIgnoreCase(tablename)) { |
| | | found = true; |
| | | break; |
| | | } |
| | | } |
| | | // } catch (SQLException e) |
| | | } finally { |
| | | if (rsMeta != null) { |
| | | rsMeta.close(); |
| | | rsMeta = null; |
| | | } |
| | | } |
| | | Statement stmt = connection.createStatement(); |
| | | if (found) { |
| | | stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | } else { |
| | | logger.info("Create CONNECTIVITY_WEBCHECK table."); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_1); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_2); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_3); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_4); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_5); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_6); |
| | | stmt.execute(AbstractOracleJobContext.ALTER_CONNECTIVITY_WEBCHECK_1); |
| | | stmt.execute(AbstractOracleJobContext.ALTER_CONNECTIVITY_WEBCHECK_2); |
| | | } |
| | | |
| | | stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | stmt.close(); |
| | | } |
| | | |
| | | |
| | | private void CreateHibernateSequence(OracleConvertPostGISJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | try { |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.CREATE_HIBERNATE_SEQUENCE); |
| | | stmt.close(); |
| | | } catch (SQLException e) { |
| | | logger.warn("HIBERNATE_SEQUENCE already exists."); |
| | | } |
| | | } |
| | | |
| | | private void exetcuteConvert(OracleConvertPostGISJobContext jobContext, |
| | | String querySchema, String targetSchemaName) throws SQLException { |
| | | int order = 0; |
| | | OrderedMap map = getBlobStorageList(jobContext.getOracleConnection(), |
| | | querySchema, "SD$SPACENODES", null); |
| | | querySchema, "SD$SPACENODES", null); |
| | | |
| | | logger.info("begin convert job:[" + map.size() + "]:testmode=" + _testMode); |
| | | |
| | |
| | | int step = total / 100; |
| | | int current = 0; |
| | | |
| | | if (total == 0) { |
| | | if (total == 0) { |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is zero."); |
| | | return; |
| | | } |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size() ); |
| | | logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size()); |
| | | |
| | | //jobContext.startTransaction(); |
| | | jobContext.setCurrentSchema(querySchema); |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", 0); |
| | | for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext();) { |
| | | for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext(); ) { |
| | | it.next(); |
| | | |
| | | Pair pair = (Pair) it.getValue(); |
| | |
| | | |
| | | logger.info("begin convert:[" + order + "]-" + tableSrc); |
| | | queryIgsetElement(jobContext, querySchema, tableSrc); |
| | | |
| | | |
| | | order++; |
| | | |
| | |
| | | System.runFinalization(); |
| | | } |
| | | |
| | | if( step != 0) |
| | | { |
| | | if (step != 0) { |
| | | int now = order % step; |
| | | if (now != current) { |
| | | current = now; |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", current); |
| | | |
| | | } |
| | | }else |
| | | { |
| | | } else { |
| | | jobContext.getExecutionContext().put("ConvertDgn2PostGISJobProgress", current); |
| | | current++; |
| | | } |
| | |
| | | } |
| | | |
| | | /** |
| | | * Performs the job of converting index design files. |
| | | * |
| | | * @param context job execution context |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | |
| | | if (dgnFile.isDirectory()) continue; |
| | | IndexDgnConvertPostGISJobContext convertContext = |
| | | new IndexDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, |
| | | isProfileMode(), isTransformed(), isEPSG3826()); |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start index dgnfile-" + dgnFile.toString() + " ---"); |
| | | FileInputStream fs = null; |
| | | FileChannel fc = null; |
| | |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | if (reader != null) |
| | | { |
| | | if (reader != null) { |
| | | try { |
| | | reader.close(); |
| | | } catch (IOException e) { |
| | |
| | | |
| | | while (reader.hasNext()) { |
| | | if (isProfileMode()) markProcessTime(); |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | // } |
| | | //} else { |
| | | // if (element instanceof ShapeElement) { |
| | | convertContext.putFeatureCollection(element); |
| | | convertContext.putFeatureCollection(element); |
| | | // } |
| | | //} |
| | | } |
| | | |
| | | |
| | | /** |
| | | * Performs the job of converting other design files. |
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | |
| | | |
| | | GeneralDgnConvertPostGISJobContext convertContext = |
| | | new GeneralDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, |
| | | isProfileMode(), isTransformed(), isEPSG3826()); |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start other dgnfile-" + dgnFile.toString() + " ---"); |
| | | FileInputStream fs = null; |
| | | FileChannel fc; |
| | |
| | | } finally { |
| | | convertContext.closeFeatureWriter(); |
| | | |
| | | if (reader != null) |
| | | { |
| | | if (reader != null) { |
| | | try { |
| | | reader.close(); |
| | | } catch (IOException e) { |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | private void clearOutputDatabase() { |
| | | /* |
| | | File outDataPath = new File(getDataPath(), OracleConvertPostGISJobContext.SHPOUTPATH); |
| | | File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertPostGISJobContext convertContext = |
| | | new FeatureDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed(), isEPSG3826()); |
| | | isProfileMode(), isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Dgn7fileReader.Record record = reader.nextElement(); |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | } |
| | | */ |
| | | |
| | | if (!pgProperties.containsKey(PostgisDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisDataStoreFactory.MAXCONN.key, "5"); |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisDataStoreFactory.MINCONN.key, "1"); |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisDataStoreFactory.WKBENABLED.key)) { |
| | | pgProperties.put(PostgisDataStoreFactory.WKBENABLED.key, "true"); |
| | | /* |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.WKBENABLED.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = (PostgisDataStore) dataStoreFactory.createDataStore(pgProperties); |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XGVERSIONTABLE_NAME |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) |
| | | createXGeosVersionTable(connection, _pgSchema); |
| | | rs.close(); |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | |
| | | return targetSchema; |
| | | } |
| | | |
| | | private String determineTargetThemeTableName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetTable = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XPTVERSIONTABLE_NAME |
| | | needCreate = false; |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) |
| | | createXPWThemeVersionTable(connection, _pgSchema); |
| | | rs.close(); |
| | | |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vptname, vptstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vptid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpTablenames = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vptname"); |
| | | values[1] = rs.getShort("vptstatus"); |
| | | tmpTablenames.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current == -1) { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } else if (current < (tmpTablenames.size() - 1)) { |
| | | Object[] values = tmpTablenames.get(current + 1); |
| | | targetTable = (String) values[0]; |
| | | } else { |
| | | Object[] values = tmpTablenames.get(0); |
| | | targetTable = (String) values[0]; |
| | | } |
| | | |
| | | sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT); |
| | | sbSQL.append(" WHERE vptname = '"); |
| | | sbSQL.append(targetTable).append("'"); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetTable + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetTable; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | if (schemaName == null) |
| | | return "\"" + tableName + "\""; |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO spatialdb"); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | createIfNotExistNewSchema(connection, schemaName); |
| | | } |
| | | |
| | | } finally { |
| | | if (stmt != null) stmt.close(); |
| | | } |
| | | } |
| | | |
| | | private void createXPWThemeVersionTable(Connection connection, String pgSchema) throws SQLException { |
| | | Statement stmt = null; |
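| | | // One row per theme table: a serial id (vptid), the table name, a status flag, and the timestamp when it was last made ready. |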
| | | StringBuilder sql = new StringBuilder("CREATE TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" ( vptid serial PRIMARY KEY, "); |
| | | sql.append(" vptname character varying(64) NOT NULL, "); |
| | | sql.append(" vptstatus smallint NOT NULL, "); |
| | | sql.append(" vpttimestamp timestamp with time zone ) "); |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("ALTER TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" OWNER TO ").append(_pgUsername); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | sql = new StringBuilder("GRANT ALL ON TABLE "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" TO public"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | |
| | | for (String schemaName : DataReposVersionManager.DEFAULTXPTVERSIONTABLE_NAMES) { |
| | | sql = new StringBuilder("INSERT INTO "); |
| | | sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)); |
| | | sql.append(" (vptname, vptstatus) VALUES ('"); |
| | | sql.append(schemaName).append("', "); |
| | | sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )"); |
| | | stmt.executeUpdate(sql.toString()); |
| | | } |
| | | |
| | | } finally { |
| | |
| | | } |
| | | } |
| | | |
| | | private void updatePWThemeStatusToReady(String targetSchema) { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | StringBuilder sbSQL = new StringBuilder("UPDATE "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XPTVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append(" SET vptstatus = "); |
| | | sbSQL.append(DataReposVersionManager.VSSTATUS_READY); |
| | | sbSQL.append(" , vpttimestamp = CURRENT_TIMESTAMP WHERE vptname = '"); |
| | | sbSQL.append(targetSchema).append("'"); |
| | | |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | stmt = connection.createStatement(); |
| | | int count = stmt.executeUpdate(sbSQL.toString()); |
| | | if (count != 1) { |
| | | logger.info("update status for " + targetSchema + " update result count=" |
| | | + count); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createIfNotExistNewSchema(Connection connection, String s) throws SQLException { |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("CREATE SCHEMA "); |
| | | sbSQL.append(s).append(' '); |
| | | sbSQL.append("AUTHORIZATION spatialdb"); |
| | | sbSQL.append("AUTHORIZATION ").append(_pgUsername); |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate(sbSQL.toString()); |
| | | |
| | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void convertDynamicColorTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, dyncolor) VALUES (?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
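| | | // Rows are accumulated with addBatch() and flushed every MAX_BATCHSIZE records to limit round trips; |
| | | // the executeBatch() after the loop writes any remainder. |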
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setString(3, colorText); |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void convertPowerOwnerTheme(AbstractOracleJobContext context, String targetTableBaseName) throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerTheme"); |
| | | return; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | PreparedStatement pstmt = null; |
| | | try { |
| | | connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(50); |
| | | |
| | | createOrClearTargetTable(connectionPG, targetTableName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | pstmt = connectionPG.prepareStatement("INSERT INTO " + |
| | | encodeSchemaTableName(_pgSchema, targetTableName) + |
| | | " (tid, oid, fowner, flow) VALUES (?, ?, ?, ?)" ); |
| | | |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | pstmt.setShort(1, (short) cid); |
| | | pstmt.setInt(2, (int) oid); |
| | | pstmt.setShort(3, (short) ownerId); |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
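| | | // Map the flow direction onto a shape:// marker string: ccarrow for forward flow, rccarrow for backward flow, backslash otherwise. |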
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://ccarrow"); |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | pstmt.setString(4, "shape://rccarrow"); |
| | | } else { |
| | | pstmt.setString(4, "shape://backslash"); |
| | | } |
| | | pstmt.addBatch(); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | pstmt.executeBatch(); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | pstmt.executeBatch(); |
| | | createTargetTableIndex(connectionPG, targetTableName); |
| | | |
| | | logger.info("Execute Update Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(pstmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | private void createOrClearTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(_pgSchema, tableName) + "CASCADE"); |
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TABLE " + encodeSchemaTableName(_pgSchema, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndex(Connection connection, String tableName) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private boolean convertDynamicColorThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertDynamicColorThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String targetTableName = targetTableBaseName + FDYNCOLOR_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, dyncolor varchar(10) not null)"); |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
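| | | // Rows are formatted as CSV text in a StringBuilder, pushed back into the reader, and streamed |
| | | // to the server through the PostgreSQL COPY API every MAX_BATCHSIZE records. |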
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | if (cid > Short.MAX_VALUE) { |
| | | logger.info("Wrong Color Table:" + cid + "-" + oid); |
| | | continue; |
| | | } |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(colorText).append("\n"); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | private boolean convertPowerOwnerThemeWithCopyAPI(AbstractOracleJobContext context, String targetTableBaseName) |
| | | throws IOException { |
| | | if (context == null) { |
| | | getLogger().info("jobContext is null in convertPowerOwnerThemeWithCopyAPI"); |
| | | return false; |
| | | } |
| | | Connection connection = context.getOracleConnection(); |
| | | Connection connectionPG = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | while (connectionPG instanceof DelegatingConnection) { |
| | | connectionPG = ((DelegatingConnection) connectionPG).getDelegate(); |
| | | } |
| | | |
| | | if (!(connectionPG instanceof PGConnection)) { |
| | | return false; |
| | | } |
| | | |
| | | final int MAX_BATCHSIZE = 250; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | // connectionPG.setAutoCommit(false); |
| | | String targetTableName = targetTableBaseName + FOWNER_SUFFIX; |
| | | String targetTempName = "tmp_" + targetTableName; |
| | | logger.info("target table:" + targetTableName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(MAX_BATCHSIZE); |
| | | |
| | | createOrClearTempTargetTable(connectionPG, targetTempName, |
| | | "(tid smallint not null, oid int not null, fowner smallint not null, flow varchar(20) not null)"); |
| | | |
| | | StringBuilder sb = new StringBuilder(); |
| | | |
| | | CopyManager cpMgr = ((PGConnection) connectionPG).getCopyAPI(); |
| | | PushbackReader reader = new PushbackReader(new StringReader(""), 10240); |
| | | |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | short dirId = (short) rs.getInt(4); |
| | | String flowMark = null; |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | flowMark = FORWARDFLOW_MARK; |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | flowMark = BACKFLOW_MARK; |
| | | } else if (ConnectivityDirectionEnum.Nondeterminate == dir) { |
| | | flowMark = NONFLOW_MARK; |
| | | } else { |
| | | flowMark = UNFLOW_MARK; |
| | | } |
| | | |
| | | if (cid > Short.MAX_VALUE) { |
| | | logger.info("Wrong Connectivity Table:" + cid + "-" + oid); |
| | | continue; |
| | | } |
| | | |
| | | sb.append(cid).append(','); |
| | | sb.append(oid).append(','); |
| | | sb.append(ownerId).append(','); |
| | | sb.append(flowMark).append('\n'); |
| | | |
| | | if (count % MAX_BATCHSIZE == 0) { |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | sb.delete(0, sb.length()); |
| | | } |
| | | ++count; |
| | | } |
| | | |
| | | reader.unread(sb.toString().toCharArray()); |
| | | cpMgr.copyIn("COPY " + targetTempName + " FROM STDIN WITH CSV", reader); |
| | | createTargetTableIndexAndDropTemp(connectionPG, targetTableName, targetTempName); |
| | | |
| | | logger.info("Execute Copy Count=" + count); |
| | | } catch (SQLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new IOException(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connectionPG, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | private void createOrClearTempTargetTable(Connection connection, String tableName, String sql) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | rs = connection.getMetaData().getTables(null, null, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("DROP TABLE " + encodeSchemaTableName(null, tableName) + "CASCADE"); |
| | | } |
| | | |
| | | stmt.executeUpdate("CREATE TEMP TABLE " + encodeSchemaTableName(null, tableName) + " " + sql); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void createTargetTableIndexAndDropTemp(Connection connection, String tableName, String tempTable) throws SQLException { |
| | | Statement stmt = connection.createStatement(); |
| | | ResultSet rs = null; |
| | | try { |
| | | stmt.execute("CREATE TABLE " + tableName +" AS SELECT * FROM " + tempTable); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, tableName, new String[]{"TABLE"}); |
| | | if (rs.next()) { |
| | | stmt.execute("ALTER TABLE " + encodeSchemaTableName(_pgSchema, tableName) + |
| | | " ADD PRIMARY KEY (tid, oid)"); |
| | | } |
| | | stmt.execute("DROP TABLE " + tempTable); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | } |
| | |
| | | import org.apache.commons.collections.map.LinkedMap; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | |
| | | } |
| | | } |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertShapefilesJobContext(filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | String targetSchemaName = null; |
| | | |
| | |
| | | |
| | | for (String orgSchema : _orgSchema) { |
| | | OracleConvertShapefilesJobContext jobContext = (OracleConvertShapefilesJobContext) |
| | | prepareJobContext(targetSchemaName, _filterPath, isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setDataPath(_dataPath); |
| | | jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (bFirst) { |
| | | copyConnectivity(jobContext); |
| | |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Makes a copy of the Connectivity data so that, when querying current-flow direction, it can be compared against the electrical connectivity (Connectivity) in the OMS database. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void copyConnectivity(OracleConvertShapefilesJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(OracleConvertShapefilesJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(OracleConvertShapefilesJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | } |
| | | |
| | | private void exetcuteConvert(OracleConvertShapefilesJobContext jobContext, |
| | |
| | | } |
| | | |
| | | /** |
| | | * Executes the job that converts the index map DGN files. |
| | | * |
| | | * @param context the job execution context |
| | | * @throws org.quartz.JobExecutionException |
| | | * exception |
| | | */ |
| | |
| | | |
| | | for (File dgnFile : dgnFiles) { |
| | | IndexDgnConvertShpJobContext convertContext = new IndexDgnConvertShpJobContext(getDataPath(), isProfileMode(), |
| | | isTransformed()); |
| | | logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | |
| | | |
| | | /** |
| | | * Executes the job that converts the other design DGN files. |
| | | * |
| | | * @param context jobContext |
| | | * @throws org.quartz.JobExecutionException |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | GeneralDgnConvertShpJobContext convertContext = new GeneralDgnConvertShpJobContext(getDataPath(), |
| | | isProfileMode(), |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
| | |
| | | for (File dgnFile : dgnFiles) { |
| | | FeatureDgnConvertShpJobContext convertContext = new FeatureDgnConvertShpJobContext(getDataPath(), _filterPath, |
| | | isProfileMode(), |
| | | isTransformed()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try { |
| | | convertContext.setExecutionContext(context); |
| | |
| | | int count = 0; |
| | | Element lastComplex = null; |
| | | while (reader.hasNext()) { |
| | | Element.FileRecord record = reader.nextElement(); |
| | | if (record.element() != null) { |
| | | Element element = (Element) record.element(); |
| | | ElementType type = element.getElementType(); |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import java.io.IOException; |
| | | import java.nio.BufferOverflowException; |
| | | import java.nio.ByteBuffer; |
| | | import java.nio.ByteOrder; |
| | | import java.sql.Connection; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.sql.Types; |
| | | import java.util.ArrayList; |
| | | import java.util.Date; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | import java.util.logging.Logger; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleIncrementPostGISJobContext; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Dgn7fileException; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.ElementType; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.IElementHandler; |
| | | import com.ximple.util.PrintfFormat; |
| | | import oracle.sql.BLOB; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | import static com.ximple.eofms.jobs.context.postgis.OracleIncrementPostGISJobContext.*; |
| | | |
| | | public class OracleIncrementDgn2PostGISJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleIncrementDgn2PostGISJob.class); |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final int FETCHSIZE = 30; |
| | | private static final int COMMITSIZE = 100; |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | @Override |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | @Override |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
| | | @Override |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, boolean useTransform) { |
| | | return new OracleIncrementPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | /* |
| | | if (!isDriverFound()) |
| | | { |
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); |
| | | } |
| | | */ |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.WKBENABLED.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | @Override |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) { |
| | | logger.warn("Cannot connect source oracle database."); |
| | | throw new JobExecutionException("Cannot connect source oracle database."); |
| | | } |
| | | |
| | | if (getTargetDataStore() == null) { |
| | | logger.warn("Cannot connect source postgreSQL database."); |
| | | throw new JobExecutionException("Cannot connect source postgreSQL database."); |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName, targetThemeTable; |
| | | |
| | | try { |
| | | logger.info("-- step:incrementConvertOracleDB --"); |
| | | targetSchemaName = determineCurrentTargetSchemaName(); |
| | | if (targetSchemaName == null) return; |
| | | |
| | | OracleIncrementPostGISJobContext jobContext = null; |
| | | |
| | | jobContext = (OracleIncrementPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Copy Connectivity", tStep, tStepEnd); |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | jobContext.resetProcessTime(); |
| | | jobContext.resetUpdateTime(); |
| | | } |
| | | tStep = System.currentTimeMillis(); |
| | | exetcuteIncrementConvert(jobContext, _dataPath); |
| | | |
| | | //close all open filewriter instance |
| | | jobContext.closeFeatureWriter(); |
| | | |
| | | if (isProfileMode()) { |
| | | logger.warn("Profile-Current Query Oracle Cost-" + |
| | | ((int) ((getQueryTime()) / 60000.0)) + " min - " + |
| | | (((int) ((getQueryTime()) % 60000.0)) / 1000) + " sec"); |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logger.warn("Profile-Current Process Cost-" + |
| | | ((int) ((getProcessTime()) / 60000.0)) + " min - " + |
| | | (((int) ((getProcessTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current Update Cost-" + |
| | | ((int) ((getUpdateTime()) / 60000.0)) + " min - " + |
| | | (((int) ((getUpdateTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current JobContext Process Cost-" + |
| | | ((int) ((jobContext.getProcessTime()) / 60000.0)) + " min - " + |
| | | (((int) ((jobContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); |
| | | logger.warn("Profile-Current JobContext Update Cost-" + |
| | | ((int) ((jobContext.getUpdateTime()) / 60000.0)) + " min - " + |
| | | (((int) ((jobContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); |
| | | logTimeDiff("Profile-Convert[ Increment ]", tStep, tStepEnd); |
| | | |
| | | resetQueryTime(); |
| | | resetProcessTime(); |
| | | resetUpdateTime(); |
| | | } |
| | | |
| | | jobContext.closeOracleConnection(); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; |
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | private String determineCurrentTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | // Create XGVERSIONTABLE_NAME |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | rs.close(); |
| | | if (needCreate) return null; |
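| | | // No version table means there is nothing to increment against; otherwise pick the schema |
| | | // currently flagged VSSTATUS_USING. |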
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current != -1) { |
| | | Object[] values = tmpSchemas.get(current); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | if (schemaName == null) |
| | | return "\"" + tableName + "\""; |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | /** |
| | | * CREATE TABLE CMMS_POSTDB.GEO_EXCHANGE |
| | | * ( |
| | | * ID NUMBER NOT NULL, |
| | | * TAG_LUFID NUMBER(10) NOT NULL, |
| | | * TAG_SFSC NUMBER(5) NOT NULL, |
| | | * TAG_BCOMPID NUMBER(3) NOT NULL, |
| | | * TAG_SOCCID NUMBER(5) NOT NULL, |
| | | * STATUS NUMBER(3) NOT NULL, |
| | | * IGDSELM BLOB, |
| | | * UPDATETIME DATE DEFAULT sysdate NOT NULL, |
| | | * TASKID NUMBER(10) NOT NULL, |
| | | * ISEXCHANGE NUMBER DEFAULT 0 NOT NULL |
| | | * ) |
| | | * |
| | | * STATUS column: 0 = added, 2 = edited, 3 = equipment deleted, 4 = component deleted |
| | | * ISEXCHANGE column: 0 = not yet synchronized, 1 = synchronized (such rows may instead be deleted once synchronized) |
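| | | * Rows with ISEXCHANGE <> 0 are read in UPDATETIME order and passed to the job context as transaction records. |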
| | | * |
| | | * |
| | | * @param jobContext |
| | | * @param targetSchemaName |
| | | * @throws SQLException |
| | | */ |
| | | private void exetcuteIncrementConvert(OracleIncrementPostGISJobContext jobContext, String targetSchemaName) throws SQLException { |
| | | |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | if (connection == null) { |
| | | logger.warn("Cannot Get Oracle Connection for DMMS."); |
| | | return; |
| | | } |
| | | |
| | | // SELECT COUNT(*) FROM CMMS_POSTDB.GEO_EXCHANGE WHERE ISEXCHANGE <> 0 |
| | | int exchangeCount = fetchExchangeCount(connection); |
| | | try { |
| | | processIncrementElement(jobContext); |
| | | // jobContext.setCurrentSchema(querySchema); |
| | | } finally { |
| | | } |
| | | |
| | | } |
| | | |
| | | private int fetchExchangeCount(Connection connection) throws SQLException { |
| | | // SELECT COUNT(*) FROM CMMS_POSTDB.GEO_EXCHANGE WHERE ISEXCHANGE <> 0 |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | StringBuilder sbSQL = new StringBuilder(); |
| | | sbSQL.append("SELECT COUNT(*) FROM \"CMMS_POSTDB\".\"GEO_EXCHANGE\" WHERE ISEXCHANGE <> 0"); |
| | | |
| | | int size = -1; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | if (rs.next()) { |
| | | size = (int) rs.getLong(1); |
| | | } |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | |
| | | return size; |
| | | } |
| | | |
| | | static class IncrementRecord { |
| | | Element element; |
| | | }; |
| | | |
| | | private void processIncrementElement(OracleIncrementPostGISJobContext jobContext) throws SQLException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | // SELECT TAG_LUFID, TAG_SFSC, TAG_BCOMPID, TAG_SOCCID, STATUS, TASKID, IGDSELM |
| | | // FROM CMMS_POSTDB.GEO_EXCHANGE WHERE ISEXCHANGE <> 0 ORDER BY UPDATETIME |
| | | String fetchSrcStmtFmt = "SELECT TAG_LUFID, TAG_SFSC, TAG_BCOMPID, TAG_SOCCID, STATUS, TASKID, IGDSELM " + |
| | | "FROM \"%s\".\"%s\" WHERE ISEXCHANGE <> 0 ORDER BY UPDATETIME"; |
| | | //String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" WHERE TAG_SFSC = 423 AND TAG_LUFID = 21612065 ORDER BY ROWID"; |
| | | PrintfFormat spf = new PrintfFormat(fetchSrcStmtFmt); |
| | | String fetchSrcStmt = spf.sprintf(new Object[]{"CMMS_POSTDB", "GEO_EXCHANGE"}); |
| | | Statement stmtSrc = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | |
| | | stmtSrc.setFetchSize(FETCHSIZE); |
| | | ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt); |
| | | int igdsMetaType = rsSrc.getMetaData().getColumnType(7); // IGDSELM is column 7 |
| | | while (rsSrc.next()) { |
| | | if (isProfileMode()) { |
| | | markQueryTime(); |
| | | } |
| | | ElementTransactionContext xContext = new ElementTransactionContext(); |
| | | xContext.oid = rsSrc.getInt(1); |
| | | xContext.cid = (short) rsSrc.getInt(2); |
| | | xContext.compid = (short) rsSrc.getInt(3); |
| | | xContext.occid = (short) rsSrc.getInt(4); |
| | | xContext.transcationType = rsSrc.getInt(5); |
| | | xContext.taskid = rsSrc.getInt(6); |
| | | |
| | | try { |
| | | if (xContext.transcationType > 2) { |
| | | byte[] raw = null; |
| | | if (igdsMetaType == Types.BLOB) { |
| | | BLOB blob = (BLOB) rsSrc.getBlob(7); |
| | | |
| | | try { |
| | | raw = getBytesFromBLOB(blob); |
| | | } catch (BufferOverflowException e) { |
| | | logger.warn("Wrong Element Structure-", e); |
| | | } finally { |
| | | // blob.close(); |
| | | } |
| | | } else { |
| | | raw = rsSrc.getBytes(7); |
| | | } |
| | | if (raw != null) { |
| | | Element element = fetchBinaryElement(raw); |
| | | if (isProfileMode()) { |
| | | accumulateQueryTime(); |
| | | } |
| | | xContext.element = element; |
| | | } else { |
| | | if (isProfileMode()) { |
| | | accumulateQueryTime(); |
| | | } |
| | | } |
| | | } |
| | | jobContext.putFeatureCollection(xContext); |
| | | } catch (Dgn7fileException e) { |
| | | logger.warn("Dgn7Exception", e); |
| | | } |
| | | } |
| | | |
| | | JDBCUtils.close(rsSrc); |
| | | JDBCUtils.close(stmtSrc); |
| | | } |
| | | |
| | | // Binary to Element |
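| | | // Each raw element starts with a 16-bit signature word; the element type sits in the high byte (top bit |
| | | // masked off), and the next 16-bit word holds "words to follow", so the record spans (words * 2) + 4 bytes. |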
| | | private Element fetchBinaryElement(byte[] raws) throws Dgn7fileException { |
| | | ByteBuffer buffer = ByteBuffer.wrap(raws); |
| | | buffer.order(ByteOrder.LITTLE_ENDIAN); |
| | | short signature = buffer.getShort(); |
| | | |
| | | // byte type = (byte) (buffer.get() & 0x7f); |
| | | byte type = (byte) ((signature >>> 8) & 0x007f); |
| | | |
| | | // silly Bentley says contentLength is in 2-byte words |
| | | // and ByteBuffer uses bytes. |
| | | // track the record location |
| | | int elementLength = (buffer.getShort() * 2) + 4; |
| | | ElementType recordType = ElementType.forID(type); |
| | | IElementHandler handler; |
| | | |
| | | handler = recordType.getElementHandler(); |
| | | |
| | | Element dgnElement = (Element) handler.read(buffer, signature, elementLength); |
| | | if (recordType.isComplexElement() && (elementLength < raws.length)) { |
| | | int offset = elementLength; |
| | | while (offset < (raws.length - 4)) { |
| | | buffer.position(offset); |
| | | signature = buffer.getShort(); |
| | | type = (byte) ((signature >>> 8) & 0x007f); |
| | | elementLength = (buffer.getShort() * 2) + 4; |
| | | if (raws.length < (offset + elementLength)) { |
| | | logger.debug("Length not match:" + offset + ":" + buffer.position() + ":" + buffer.limit()); |
| | | break; |
| | | } |
| | | recordType = ElementType.forID(type); |
| | | handler = recordType.getElementHandler(); |
| | | if (handler != null) { |
| | | Element subElement = (Element) handler.read(buffer, signature, elementLength); |
| | | ((ComplexElement) dgnElement).add(subElement); |
| | | offset += elementLength; |
| | | } else { |
| | | byte[] remain = new byte[buffer.remaining()]; |
| | | System.arraycopy(raws, offset, remain, 0, buffer.remaining()); |
| | | for (int i = 0; i < remain.length; i++) { |
| | | if (remain[i] != 0) { |
| | | logger.info("fetch element has some error. index=" + (offset + i) + ":value=" + remain[i]); |
| | | } |
| | | } |
| | | break; |
| | | } |
| | | } |
| | | } |
| | | |
| | | return dgnElement; |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import java.io.FileWriter; |
| | | import java.io.IOException; |
| | | import java.sql.Connection; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.ArrayList; |
| | | import java.util.Date; |
| | | import java.util.List; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import au.com.bytecode.opencsv.CSVWriter; |
| | | import au.com.bytecode.opencsv.ResultSetHelper; |
| | | import au.com.bytecode.opencsv.ResultSetHelperService; |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.ConnectivityDirectionEnum; |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | public class OracleTransformColorOwner2CSVJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleTransformColorOwner2CSVJob.class); |
| | | |
| | | public static String FETCH_TPDATA = "SELECT TPID, TPNAME FROM BASEDB.TPDATA"; |
| | | public static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1, DIR FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | public static String FETCH_FDRCOLOR = "SELECT FRREDERID, COLOR FROM BASEDB.FEEDER"; |
| | | public static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR ORDER BY TAG_SFSC"; |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final boolean useTpclidText = false; |
| | | |
| | | private static final int FETCHSIZE = 100; |
| | | private static final int COMMITSIZE = 100; |
| | | |
| | | protected static class Pair { |
| | | Object first; |
| | | Object second; |
| | | |
| | | public Pair(Object first, Object second) { |
| | | this.first = first; |
| | | this.second = second; |
| | | } |
| | | } |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("PGHOST=" + _myHost); |
| | | logger.info("PGDATBASE=" + _myDatabase); |
| | | logger.info("PGPORT=" + _myPort); |
| | | logger.info("PGSCHEMA=" + _mySchema); |
| | | logger.info("PGUSER=" + _myUsername); |
| | | logger.info("PGPASS=" + _myPassword); |
| | | logger.info("USEWKB=" + _myUseWKB); |
| | | */ |
| | | |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
| | | @Override |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) { |
| | | logger.warn("Cannot connect source oracle database."); |
| | | throw new JobExecutionException("Cannot connect source oracle database."); |
| | | } |
| | | |
| | | if (getTargetDataStore() == null) { |
| | | logger.warn("Cannot connect source postgreSQL database."); |
| | | throw new JobExecutionException("Cannot connect source postgreSQL database."); |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName; |
| | | try { |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | clearOutputDatabase(); |
| | | |
| | | logger.info("-- step:transformOracleDMMSDB --"); |
| | | targetSchemaName = determineTargetSchemaName(); |
| | | |
| | | OracleConvertPostGISJobContext jobContext = |
| | | (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | mergeConnectivityOwner(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge Connectivity Owner", tStep, tStepEnd); |
| | | } |
| | | |
| | | tStep = System.currentTimeMillis(); |
| | | mergeDynamicColor(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge ColorTable", tStep, tStepEnd); |
| | | } |
| | | |
| | | jobContext.closeOracleConnection(); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; |
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Connectivity (Connectivity) |
| | | * |
| | | * @param jobContext job context |
| | | * @throws java.sql.SQLException sql exception |
| | | */ |
| | | protected void mergeConnectivityOwner(AbstractOracleJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | String targetSchemaName = determineTargetSchemaName(); |
| | | logger.info("target schema:" + targetSchemaName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(FETCHSIZE); |
| | | |
| | | ResultSetHelper resultService = new ResultSetHelperService(); |
| | | final String[] header = new String[] { "tid", "oid", "owner", "flow" }; |
| | | CSVWriter writer = new CSVWriter(new FileWriter("featureowner.csv"), ','); |
| | | writer.writeNext(header); |
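| | | // Write one CSV row per CONNECTIVITY record, replacing the DIR column with the shape:// marker string. |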
| | | while (rs.next()) { |
| | | short dirId = (short) rs.getInt(4); |
| | | String[] values = resultService.getColumnValues(rs); |
| | | ConnectivityDirectionEnum dir = ConnectivityDirectionEnum.convertShort(dirId); |
| | | if ((ConnectivityDirectionEnum.ForwardflowON == dir) || |
| | | (ConnectivityDirectionEnum.ForwardFixflowON == dir)) { |
| | | values[3] = "shape://ccarrow"; |
| | | |
| | | } else if ((ConnectivityDirectionEnum.BackflowON == dir) || |
| | | (ConnectivityDirectionEnum.BackFixflowON == dir)) { |
| | | values[3] = "shape://rccarrow"; |
| | | } else { |
| | | values[3] = "shape://backslash"; |
| | | } |
| | | writer.writeNext(values); |
| | | } |
| | | writer.flush(); |
| | | writer.close(); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void mergeDynamicColor(OracleConvertPostGISJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | String targetSchemaName = determineTargetSchemaName(); |
| | | logger.info("target schema:" + targetSchemaName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(FETCHSIZE); |
| | | |
| | | ResultSetHelper resultService = new ResultSetHelperService(); |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | |
| | | final String[] header = new String[] { "tid", "oid", "dyncolor" }; |
| | | CSVWriter writer = new CSVWriter(new FileWriter("featurecolor.csv"), ','); |
| | | // writer.writeAll(rs, true); |
| | | writer.writeNext(header); |
| | | while (rs.next()) { |
| | | int colorId = rs.getInt(3); |
| | | String[] values = resultService.getColumnValues(rs); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | values[2] = colorText; |
| | | writer.writeNext(values); |
| | | } |
| | | writer.flush(); |
| | | writer.close(); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
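| | | /** |
| | | * Executes the given statements as one JDBC batch against the target PostGIS data store and commits |
| | | * them in a single transaction. Illustrative usage (statement text assumed): |
| | | *   ArrayList<String> stmts = new ArrayList<String>(); |
| | | *   stmts.add("UPDATE myschema.\"fsc-106-line\" SET fowner = 7 WHERE tid=106 AND oid=123"); |
| | | *   batchExecuteSQL(stmts); |
| | | */ |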
| | | private void batchExecuteSQL(ArrayList<String> sqlStmts) throws IOException { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | // ResultSet rs = null; |
| | | int[] results = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | connection.setAutoCommit(false); |
| | | stmt = connection.createStatement(); |
| | | for (String sqlStmt : sqlStmts) { |
| | | stmt.addBatch(sqlStmt); |
| | | } |
| | | results = stmt.executeBatch(); |
| | | connection.commit(); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | // JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | |
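| | | /** |
| | | * Looks up the target tables whose names start with "fsc-<cid>" in the given schema via JDBC metadata; |
| | | * returns null when no target data store is configured. Illustrative example (names assumed): for |
| | | * cid=106 it might return ["fsc-106-line", "fsc-106-point"]. |
| | | */ |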
| | | private List<String> fetchTargetTableList(String targetSchemaName, int cid) throws IOException { |
| | | ArrayList<String> result = new ArrayList<String>(); |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | String[] types = {"TABLE"}; |
| | | rs = connection.getMetaData().getTables(null, targetSchemaName, "fsc-" + cid +"%", types); |
| | | while (rs.next()) { |
| | | String tableName = rs.getString("TABLE_NAME"); |
| | | logger.info("table:" + tableName); |
| | | result.add(tableName); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | |
| | | return result; |
| | | } |
| | | |
| | | |
| | | @Override |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
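| | | /** |
| | | * (Re)creates the PostGIS target DataStore from pgProperties, defaulting MAXCONN to 5 and MINCONN to 1 |
| | | * when the job configuration does not supply them. |
| | | */ |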
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
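| | | /** |
| | | * Reads DataReposVersionManager.XGVERSIONTABLE_NAME in the configured schema and returns the schema |
| | | * whose vsstatus has the VSSTATUS_USING bit set; returns null when no schema is marked as in use and |
| | | * throws an IOException when the version table itself is missing. |
| | | */ |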
| | | private String determineTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) { |
| | | throw new IOException("cannot found " + DataReposVersionManager.XGVERSIONTABLE_NAME); |
| | | } |
| | | rs.close(); |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current != -1) { |
| | | Object[] values = tmpSchemas.get(current); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
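| | | /** |
| | | * Quotes a schema-qualified table name, e.g. (names assumed) encodeSchemaTableName("nntpc", "fsc-106-line") |
| | | * returns "nntpc"."fsc-106-line". |
| | | */ |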
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void clearOutputDatabase() { |
| | | } |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs; |
| | | |
| | | import java.io.IOException; |
| | | import java.sql.Connection; |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.ArrayList; |
| | | import java.util.Date; |
| | | import java.util.List; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext; |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisNGDataStoreFactory; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | | |
| | | /** |
| | | * Deprecated Quartz job that pushes connectivity owner and dynamic color data from the Oracle DMMS |
| | | * source into the converted PostGIS tables (fowner and dyncolor columns). |
| | | */ |
| | | @Deprecated |
| | | public class OracleTransformColorOwnerJob extends AbstractOracleDatabaseJob { |
| | | final static Log logger = LogFactory.getLog(OracleTransformColorOwnerJob.class); |
| | | |
| | | public static String FETCH_TPDATA = "SELECT TPID, TPNAME FROM BASEDB.TPDATA"; |
| | | public static String FETCH_CONNFDR = "SELECT FSC, UFID, FDR1 FROM BASEDB.CONNECTIVITY ORDER BY FSC"; |
| | | public static String FETCH_FDRCOLOR = "SELECT FRREDERID, COLOR FROM BASEDB.FEEDER"; |
| | | public static String FETCH_COLORTAB = "SELECT TAG_SFSC, TAG_LUFID, COLOR FROM OCSDB.COLOR ORDER BY TAG_SFSC"; |
| | | |
| | | private static final String PGHOST = "PGHOST"; |
| | | private static final String PGDATBASE = "PGDATBASE"; |
| | | private static final String PGPORT = "PGPORT"; |
| | | private static final String PGSCHEMA = "PGSCHEMA"; |
| | | private static final String PGUSER = "PGUSER"; |
| | | private static final String PGPASS = "PGPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
| | | private static final boolean useTpclidText = false; |
| | | |
| | | private static final int FETCHSIZE = 30; |
| | | private static final int COMMITSIZE = 100; |
| | | |
| | | protected static class Pair { |
| | | Object first; |
| | | Object second; |
| | | |
| | | public Pair(Object first, Object second) { |
| | | this.first = first; |
| | | this.second = second; |
| | | } |
| | | } |
| | | |
| | | protected static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | protected String _pgHost; |
| | | protected String _pgDatabase; |
| | | protected String _pgPort; |
| | | protected String _pgSchema; |
| | | protected String _pgUsername; |
| | | protected String _pgPassword; |
| | | protected String _pgUseWKB; |
| | | |
| | | protected Map<String, String> pgProperties; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | private long queryTime = 0; |
| | | private long queryTimeStart = 0; |
| | | |
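| | | /** |
| | | * Reads the PostGIS connection settings (PGHOST, PGDATBASE, PGPORT, PGSCHEMA, PGUSER, PGPASS, USEWKB) |
| | | * from the Quartz JobDataMap and builds the PostgisNGDataStoreFactory parameter map; a missing host, |
| | | * database, port, schema, user or password aborts the job with a JobExecutionException. |
| | | */ |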
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _pgHost = dataMap.getString(PGHOST); |
| | | _pgDatabase = dataMap.getString(PGDATBASE); |
| | | _pgPort = dataMap.getString(PGPORT); |
| | | _pgSchema = dataMap.getString(PGSCHEMA); |
| | | _pgUsername = dataMap.getString(PGUSER); |
| | | _pgPassword = dataMap.getString(PGPASS); |
| | | _pgUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("PGHOST=" + _myHost); |
| | | logger.info("PGDATBASE=" + _myDatabase); |
| | | logger.info("PGPORT=" + _myPort); |
| | | logger.info("PGSCHEMA=" + _mySchema); |
| | | logger.info("PGUSER=" + _myUsername); |
| | | logger.info("PGPASS=" + _myPassword); |
| | | logger.info("USEWKB=" + _myUseWKB); |
| | | */ |
| | | |
| | | if (_pgHost == null) { |
| | | logger.warn("PGHOST is null"); |
| | | throw new JobExecutionException("Unknown PostGIS host."); |
| | | } |
| | | if (_pgDatabase == null) { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown PostGIS database."); |
| | | } |
| | | if (_pgPort == null) { |
| | | logger.warn("PGPORT is null"); |
| | | throw new JobExecutionException("Unknown PostGIS port."); |
| | | } |
| | | if (_pgSchema == null) { |
| | | logger.warn("PGSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown PostGIS schema."); |
| | | } |
| | | if (_pgUsername == null) { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown PostGIS username."); |
| | | } |
| | | if (_pgPassword == null) { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown PostGIS password."); |
| | | } |
| | | |
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put(PostgisNGDataStoreFactory.DBTYPE.key, "postgis"); |
| | | // remote.put("charset", "UTF-8"); |
| | | remote.put(PostgisNGDataStoreFactory.HOST.key, _pgHost); |
| | | remote.put(PostgisNGDataStoreFactory.PORT.key, _pgPort); |
| | | remote.put(PostgisNGDataStoreFactory.DATABASE.key, _pgDatabase); |
| | | remote.put(PostgisNGDataStoreFactory.USER.key, _pgUsername); |
| | | remote.put(PostgisNGDataStoreFactory.PASSWD.key, _pgPassword); |
| | | // remote.put( "namespace", null); |
| | | pgProperties = remote; |
| | | } |
| | | |
| | | @Override |
| | | public Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | @Override |
| | | public void execute(JobExecutionContext context) throws JobExecutionException { |
| | | // Every job has its own job detail |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getKey().getName(); |
| | | |
| | | // Log the time the job started |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) { |
| | | logger.warn("Cannot connect source oracle database."); |
| | | throw new JobExecutionException("Cannot connect source oracle database."); |
| | | } |
| | | |
| | | if (getTargetDataStore() == null) { |
| | | logger.warn("Cannot connect source postgreSQL database."); |
| | | throw new JobExecutionException("Cannot connect source postgreSQL database."); |
| | | } |
| | | |
| | | if (isProfileMode()) { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | long t1 = System.currentTimeMillis(); |
| | | String targetSchemaName; |
| | | try { |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | clearOutputDatabase(); |
| | | |
| | | logger.info("-- step:transformOracleDMMSDB --"); |
| | | targetSchemaName = determineTargetSchemaName(); |
| | | |
| | | OracleConvertPostGISJobContext jobContext = |
| | | (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath, |
| | | isProfileMode(), isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | | long tStep = System.currentTimeMillis(); |
| | | |
| | | fetchTPData(jobContext); |
| | | logger.info("TPC DIST:" + jobContext.getDistId() + ":" + |
| | | ((jobContext.getDistName() == null) ? "NULL" : jobContext.getDistName())); |
| | | |
| | | mergeConnectivityOwner(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge Connectivity Owner", tStep, tStepEnd); |
| | | } |
| | | |
| | | tStep = System.currentTimeMillis(); |
| | | mergeDynamicColor(jobContext); |
| | | |
| | | if (isProfileMode()) { |
| | | long tStepEnd = System.currentTimeMillis(); |
| | | logTimeDiff("Profile-Merge ColorTable", tStep, tStepEnd); |
| | | } |
| | | |
| | | jobContext.closeOracleConnection(); |
| | | |
| | | long t2 = System.currentTimeMillis(); |
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; |
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); |
| | | logTimeDiff("Total ", t1, t2); |
| | | |
| | | } catch (SQLException e) { |
| | | disconnect(); |
| | | logger.warn(e.getMessage(), e); |
| | | throw new JobExecutionException("Database error. " + e.getMessage(), e); |
| | | } catch (IOException ex) { |
| | | disconnect(); |
| | | logger.warn(ex.getMessage(), ex); |
| | | throw new JobExecutionException("IO error. " + ex.getMessage(), ex); |
| | | } finally { |
| | | disconnect(); |
| | | } |
| | | logger.warn(jobName + " end at " + new Date()); |
| | | } |
| | | |
| | | /** |
| | | * Merges connectivity owner data from the Oracle source into the target PostGIS tables by issuing |
| | | * batched UPDATE statements per feature class. |
| | | * |
| | | * @param jobContext job context |
| | | * @throws java.sql.SQLException SQL exception |
| | | * @throws java.io.IOException  IO exception |
| | | */ |
| | | protected void mergeConnectivityOwner(AbstractOracleJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | String targetSchemaName = determineTargetSchemaName(); |
| | | logger.info("target schema:" + targetSchemaName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_CONNFDR); |
| | | rs.setFetchSize(50); |
| | | int lastClass = -1; |
| | | boolean changeType = false; |
| | | List<String> tables = null; |
| | | ArrayList<String> sqlBatchStmts = new ArrayList<String>(); |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int ownerId = rs.getInt(3); |
| | | if (lastClass != cid) { |
| | | logger.info("change type to :" + cid); |
| | | } |
| | | changeType = (lastClass != cid); |
| | | if (changeType) { |
| | | tables = fetchTargetTableList(targetSchemaName, cid); |
| | | if (tables == null) |
| | | logger.info("tables is null." + cid); |
| | | } |
| | | if (tables != null) { |
| | | for (String t : tables) { |
| | | String sqlStmt = generatrTargetOwnerSql(targetSchemaName, t, cid, oid, ownerId); |
| | | sqlBatchStmts.add(sqlStmt); |
| | | } |
| | | } |
| | | |
| | | if (MAX_BATCHSIZE < sqlBatchStmts.size()) { |
| | | batchExecuteSQL(sqlBatchStmts); |
| | | count += sqlBatchStmts.size(); |
| | | sqlBatchStmts.clear(); |
| | | } |
| | | lastClass = cid; |
| | | } |
| | | |
| | | if (!sqlBatchStmts.isEmpty()) { |
| | | batchExecuteSQL(sqlBatchStmts); |
| | | count += sqlBatchStmts.size(); |
| | | } |
| | | logger.info("Execute Update Count=" + count); |
| | | // } catch (SQLException e) |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
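| | | /** |
| | | * Builds the per-row owner update. Illustrative example (schema, table and ids assumed): for |
| | | * ("nntpc", "fsc-106-line", cid=106, oid=12345, ownerId=7) it returns |
| | | * UPDATE nntpc."fsc-106-line" SET fowner = 7 WHERE tid=106 AND oid=12345 |
| | | */ |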
| | | private String generatrTargetOwnerSql(String schemaName, String t, int cid, long oid, int ownerId) { |
| | | StringBuilder sb = new StringBuilder("UPDATE "); |
| | | sb.append(schemaName).append(".\"").append(t).append("\""); |
| | | sb.append(" SET fowner = ").append(ownerId); |
| | | sb.append(" WHERE tid=").append(cid); |
| | | sb.append(" AND oid=").append(oid); |
| | | return sb.toString(); |
| | | } |
| | | |
| | | private void updateTargetOwner(Connection connection, |
| | | String schemaName, String t, int cid, long oid, int ownerId) |
| | | throws SQLException, IOException { |
| | | if (connection == null) return; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | stmt.executeUpdate("UPDATE " + schemaName + "." + t + " SET fowner = " + ownerId + " WHERE oid=" + oid); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
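| | | /** |
| | | * Merges the dynamic color table into the target PostGIS tables: each row fetched by FETCH_COLORTAB is |
| | | * turned into an UPDATE statement and flushed through batchExecuteSQL once more than MAX_BATCHSIZE |
| | | * statements have accumulated; any remainder is flushed after the loop. |
| | | */ |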
| | | private void mergeDynamicColor(OracleConvertPostGISJobContext jobContext) throws SQLException, IOException { |
| | | Connection connection = jobContext.getOracleConnection(); |
| | | |
| | | boolean found = false; |
| | | ResultSet rs = null; |
| | | Statement stmt = null; |
| | | try { |
| | | String targetSchemaName = determineTargetSchemaName(); |
| | | logger.info("target schema:" + targetSchemaName); |
| | | stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); |
| | | rs = stmt.executeQuery(FETCH_COLORTAB); |
| | | rs.setFetchSize(50); |
| | | int lastClass = -1; |
| | | boolean changeType = false; |
| | | List<String> tables = null; |
| | | ArrayList<String> sqlBatchStmts = new ArrayList<String>(); |
| | | final int MAX_BATCHSIZE = 50; |
| | | int count = 0; |
| | | while (rs.next()) { |
| | | int cid = rs.getInt(1); |
| | | long oid = rs.getLong(2); |
| | | int colorId = rs.getInt(3); |
| | | if (lastClass != cid) { |
| | | logger.info("change type to :" + cid); |
| | | } |
| | | changeType = (lastClass != cid); |
| | | if (changeType) { |
| | | tables = fetchTargetTableList(targetSchemaName, cid); |
| | | if (tables == null) |
| | | logger.info("tables is null." + cid); |
| | | } |
| | | if (tables != null) { |
| | | for (String t : tables) { |
| | | String sqlStmt = generatrTargetDynamicColorSql(targetSchemaName, t, cid, oid, colorId); |
| | | sqlBatchStmts.add(sqlStmt); |
| | | } |
| | | } |
| | | if (MAX_BATCHSIZE < sqlBatchStmts.size()) { |
| | | batchExecuteSQL(sqlBatchStmts); |
| | | count += sqlBatchStmts.size(); |
| | | sqlBatchStmts.clear(); |
| | | } |
| | | lastClass = cid; |
| | | } |
| | | if (!sqlBatchStmts.isEmpty()) { |
| | | batchExecuteSQL(sqlBatchStmts); |
| | | count += sqlBatchStmts.size(); |
| | | } |
| | | logger.info("Execute Update Count=" + count); |
| | | // } catch (SQLException e) |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
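| | | /** |
| | | * Builds the per-row color update. Illustrative example (schema, table and ids assumed): for |
| | | * ("nntpc", "fsc-106-line", cid=106, oid=12345, colorId=3) it returns |
| | | * UPDATE nntpc."fsc-106-line" SET dyncolor = '<code>' WHERE tid=106 AND oid=12345 |
| | | * where <code> is DefaultColorTable.getColorCode(3). |
| | | */ |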
| | | private String generatrTargetDynamicColorSql(String schemaName, String t, int cid, long oid, int colorId) { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | StringBuilder sb = new StringBuilder("UPDATE "); |
| | | sb.append(schemaName).append(".\"").append(t).append("\""); |
| | | sb.append(" SET dyncolor = '").append(colorText).append("'"); |
| | | sb.append(" WHERE tid=").append(cid); |
| | | sb.append(" AND oid=").append(oid); |
| | | return sb.toString(); |
| | | } |
| | | |
| | | private void updateTargetDynamicColor(Connection connection, String schemaName, |
| | | String t, int cid, long oid, int colorId) { |
| | | if (connection == null) return; |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | stmt = connection.createStatement(); |
| | | String colorText = colorTable.getColorCode(colorId); |
| | | stmt.executeUpdate("UPDATE " + schemaName + "." + t + " SET dyncolor = '" + colorText + "' WHERE oid=" + oid); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } |
| | | |
| | | private void batchExecuteSQL(ArrayList<String> sqlStmts) throws IOException { |
| | | if (targetDataStore == null) return; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | // ResultSet rs = null; |
| | | int[] results = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | connection.setAutoCommit(false); |
| | | stmt = connection.createStatement(); |
| | | for (String sqlStmt : sqlStmts) { |
| | | stmt.addBatch(sqlStmt); |
| | | } |
| | | results = stmt.executeBatch(); |
| | | connection.commit(); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | // JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | |
| | | |
| | | private List<String> fetchTargetTableList(String targetSchemaName, int cid) throws IOException { |
| | | ArrayList<String> result = new ArrayList<String>(); |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | String[] types = {"TABLE"}; |
| | | rs = connection.getMetaData().getTables(null, targetSchemaName, "fsc-" + cid +"%", types); |
| | | while (rs.next()) { |
| | | String tableName = rs.getString("TABLE_NAME"); |
| | | logger.info("table:" + tableName); |
| | | result.add(tableName); |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | |
| | | return result; |
| | | } |
| | | |
| | | |
| | | @Override |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, boolean useTransform) { |
| | | return new OracleConvertPostGISJobContext(getDataPath(), |
| | | getTargetDataStore(), targetSchemaName, filterPath, profileMode, useTransform); |
| | | } |
| | | |
| | | private void logTimeDiff(String message, long tBefore, long tCurrent) { |
| | | logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + |
| | | (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | protected void createTargetDataStore() throws JobExecutionException { |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MAXCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MAXCONN.key, "5"); |
| | | } |
| | | |
| | | if (!pgProperties.containsKey(PostgisNGDataStoreFactory.MINCONN.key)) { |
| | | pgProperties.put(PostgisNGDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | if (!dataStoreFactory.canProcess(pgProperties)) { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try { |
| | | targetDataStore = dataStoreFactory.createDataStore(pgProperties); |
| | | } catch (IOException e) { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | protected void disconnect() { |
| | | super.disconnect(); |
| | | if (targetDataStore != null) { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | } |
| | | |
| | | private String determineTargetSchemaName() throws IOException { |
| | | if (targetDataStore == null) return null; |
| | | Connection connection = null; |
| | | Statement stmt = null; |
| | | ResultSet rs = null; |
| | | String targetSchema = null; |
| | | boolean needCreate = false; |
| | | try { |
| | | connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | rs = connection.getMetaData().getTables(null, _pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); |
| | | if (!rs.next()) needCreate = true; |
| | | if (needCreate) { |
| | | throw new IOException("cannot found " + DataReposVersionManager.XGVERSIONTABLE_NAME); |
| | | } |
| | | rs.close(); |
| | | rs = null; |
| | | |
| | | StringBuilder sbSQL = new StringBuilder("SELECT "); |
| | | sbSQL.append("vsschema, vsstatus FROM "); |
| | | sbSQL.append(encodeSchemaTableName(_pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); |
| | | sbSQL.append("ORDER BY vsid"); |
| | | stmt = connection.createStatement(); |
| | | rs = stmt.executeQuery(sbSQL.toString()); |
| | | ArrayList<Object[]> tmpSchemas = new ArrayList<Object[]>(); |
| | | int i = 0; |
| | | int current = -1; |
| | | while (rs.next()) { |
| | | Object[] values = new Object[2]; |
| | | values[0] = rs.getString("vsschema"); |
| | | values[1] = rs.getShort("vsstatus"); |
| | | tmpSchemas.add(values); |
| | | if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { |
| | | current = i; |
| | | } |
| | | i++; |
| | | } |
| | | |
| | | if (current != -1) { |
| | | Object[] values = tmpSchemas.get(current); |
| | | targetSchema = (String) values[0]; |
| | | } |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(rs); |
| | | JDBCUtils.close(stmt); |
| | | JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | return targetSchema; |
| | | } |
| | | |
| | | public String encodeSchemaTableName(String schemaName, String tableName) { |
| | | return "\"" + schemaName + "\".\"" + tableName + "\""; |
| | | } |
| | | |
| | | public final void accumulateQueryTime() { |
| | | queryTime += System.currentTimeMillis() - queryTimeStart; |
| | | } |
| | | |
| | | public long getQueryTime() { |
| | | return queryTime; |
| | | } |
| | | |
| | | public final void markQueryTime() { |
| | | queryTimeStart = System.currentTimeMillis(); |
| | | } |
| | | |
| | | public final void resetQueryTime() { |
| | | queryTime = 0; |
| | | } |
| | | |
| | | private void clearOutputDatabase() { |
| | | } |
| | | } |
| | |
| | | JobDetail jobDetail = context.getJobDetail(); |
| | | |
| | | // The name is defined in the job definition |
| | | String jobName = jobDetail.getName(); |
| | | String jobName = jobDetail.getKey().getName(); |
| | | String targetSchemaName = null; |
| | | |
| | | // Log the time the job started |
| | |
| | | } |
| | | |
| | | AbstractOracleJobContext jobContext = prepareJobContext(targetSchemaName, _filterPath, isProfileMode(), |
| | | isTransformed(), isEPSG3826()); |
| | | isTransformed()); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | |
| | | try { |
| | |
| | | |
| | | protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, |
| | | boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | return new OracleUpgradeJobContext(profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | return new OracleUpgradeJobContext(profileMode, useTransform); |
| | | } |
| | | |
| | | private void exetcuteConvert(AbstractOracleJobContext jobContext, |
| | |
| | |
|
| | | import java.util.Map;
|
| | |
|
| | | import com.ximple.eofms.filter.ElementDispatcher;
|
| | | import org.apache.commons.digester3.Digester;
|
| | | import org.apache.commons.digester3.binder.DigesterLoader;
|
| | | import org.apache.commons.digester3.xmlrules.FromXmlRulesModule;
|
| | | import org.apache.commons.logging.Log;
|
| | | import org.quartz.JobExecutionContext;
|
| | |
|
| | | import com.ximple.io.dgn7.Dgn7fileReader;
|
| | |
|
| | | public abstract class AbstractDgnFileJobContext {
|
| | |
|
| | | // ------------------------------ FIELDS ------------------------------
|
| | |
|
| | | /**
|
| | |
| | | private boolean _elementLogging;
|
| | | private boolean _profileMode = false;
|
| | | private boolean _useTransform = true;
|
| | | private boolean _useEPSG3826 = true;
|
| | |
|
| | | private long _processTime;
|
| | | private long _updateTime;
|
| | | private long _processTimeStart;
|
| | | private long _updateTimeStart;
|
| | |
|
| | | private short distId;
|
| | |
|
| | | // --------------------------- CONSTRUCTORS ---------------------------
|
| | |
|
| | | public AbstractDgnFileJobContext(String dataPath, boolean profileMode,
|
| | | boolean useTransform, boolean useEPSG3826) {
|
| | | boolean useTransform) {
|
| | | _dataPath = dataPath;
|
| | | _profileMode = profileMode;
|
| | | _useTransform = useTransform;
|
| | | _useEPSG3826 = useEPSG3826;
|
| | | }
|
| | |
|
| | | // --------------------- GETTER / SETTER METHODS ---------------------
|
| | |
| | | return _useTransform;
|
| | | }
|
| | |
|
| | | public boolean isEPSG3826() {
|
| | | return _useEPSG3826;
|
| | | public short getDistId() {
|
| | | return distId;
|
| | | }
|
| | |
|
| | | // -------------------------- OTHER METHODS --------------------------
|
| | | public void setDistId(short distId) {
|
| | | this.distId = distId;
|
| | | }
|
| | |
|
| | | // -------------------------- OTHER METHODS --------------------------
|
| | |
|
| | | public final void accumulateProcessTime() {
|
| | | _processTime += System.currentTimeMillis() - _processTimeStart;
|
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.oracle.OracleDataStore; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | |
| | | public abstract class AbstractOracleJobContext { |
| | | /** |
| | |
| | | " CACHE 20\n" + |
| | | " NOORDER"; |
| | | |
| | | public static String FETCH_TPDATA = "SELECT TPID, TPNAME FROM BASEDB.TPDATA"; |
| | | |
| | | /** |
| | | * |
| | | */ |
| | |
| | | * Encoding of URL path. |
| | | */ |
| | | protected static final String ENCODING = "UTF-8"; |
| | | protected OracleDataStore sourceDataStore; |
| | | protected JDBCDataStore sourceDataStore; |
| | | |
| | | protected String _dataPath; |
| | | protected Properties properties; |
| | |
| | | private Connection connection = null; |
| | | private boolean profileMode; |
| | | private boolean useTransform; |
| | | private boolean useEPSG3826; |
| | | |
| | | private short distId; |
| | | private String distName; |
| | | |
| | | private long _processTime; |
| | | private long _updateTime; |
| | | private long _processTimeStart; |
| | | private long _updateTimeStart; |
| | | |
| | | public AbstractOracleJobContext(boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | public AbstractOracleJobContext(boolean profileMode, boolean useTransform) { |
| | | this.profileMode = profileMode; |
| | | this.useTransform = useTransform; |
| | | this.useEPSG3826 = useEPSG3826; |
| | | } |
| | | |
| | | public OracleDataStore getSourceDataStore() { |
| | | public JDBCDataStore getSourceDataStore() { |
| | | return sourceDataStore; |
| | | } |
| | | |
| | | public void setSourceDataStore(DataStore sourceDataStore) { |
| | | if ((sourceDataStore != null) && (sourceDataStore instanceof OracleDataStore)) { |
| | | this.sourceDataStore = (OracleDataStore) sourceDataStore; |
| | | if ((sourceDataStore != null) && (sourceDataStore instanceof JDBCDataStore)) { |
| | | this.sourceDataStore = (JDBCDataStore) sourceDataStore; |
| | | } else { |
| | | assert sourceDataStore != null; |
| | | getLogger().warn("setSourceDataStore(datastore) need OracleDataStore but got " + |
| | |
| | | return useTransform; |
| | | } |
| | | |
| | | public boolean isEPSG3826() { |
| | | return useEPSG3826; |
| | | } |
| | | |
| | | public final void accumulateProcessTime() { |
| | | _processTime += System.currentTimeMillis() - _processTimeStart; |
| | | } |
| | |
| | | public final void resetProcessTime() { |
| | | _processTime = 0; |
| | | } |
| | | |
| | | public short getDistId() { |
| | | return distId; |
| | | } |
| | | |
| | | public void setDistId(short distId) { |
| | | this.distId = distId; |
| | | } |
| | | |
| | | public String getDistName() { |
| | | return distName; |
| | | } |
| | | |
| | | public void setDistName(String distName) { |
| | | this.distName = distName; |
| | | } |
| | | } |
| | |
| | | import org.apache.commons.logging.Log; |
| | | |
| | | public class OracleUpgradeJobContext extends AbstractOracleJobContext { |
| | | public OracleUpgradeJobContext(boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(profileMode, useTransform, useEPSG3826); |
| | | public OracleUpgradeJobContext(boolean profileMode, boolean useTransform) { |
| | | super(profileMode, useTransform); |
| | | } |
| | | |
| | | public void startTransaction() { |
New file |
| | |
| | | package com.ximple.eofms.jobs.context.edbgeo;
|
| | |
|
| | | import java.io.IOException;
|
| | | import java.math.BigDecimal;
|
| | | import java.sql.Connection;
|
| | | import java.sql.DatabaseMetaData;
|
| | | import java.sql.PreparedStatement;
|
| | | import java.sql.ResultSet;
|
| | | import java.sql.SQLException;
|
| | | import java.sql.Statement;
|
| | | import java.util.*;
|
| | |
|
| | | import com.vividsolutions.jts.geom.LinearRing;
|
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil;
|
| | | import org.geotools.data.DataSourceException;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.SchemaNotFoundException;
|
| | | import org.geotools.data.Transaction;
|
| | | import org.geotools.data.jdbc.JDBCUtils;
|
| | | import org.geotools.filter.LengthFunction;
|
| | | import org.geotools.jdbc.JDBCDataStore;
|
| | | import org.geotools.referencing.NamedIdentifier;
|
| | | import org.geotools.referencing.crs.DefaultGeographicCRS;
|
| | | import org.opengis.feature.simple.SimpleFeature;
|
| | | import org.opengis.feature.simple.SimpleFeatureType;
|
| | | import org.opengis.feature.type.AttributeDescriptor;
|
| | | import org.opengis.feature.type.AttributeType;
|
| | | import org.opengis.feature.type.GeometryDescriptor;
|
| | | import org.opengis.feature.type.PropertyType;
|
| | | import org.opengis.filter.BinaryComparisonOperator;
|
| | | import org.opengis.filter.Filter;
|
| | | import org.opengis.filter.PropertyIsLessThan;
|
| | | import org.opengis.filter.PropertyIsLessThanOrEqualTo;
|
| | | import org.opengis.filter.expression.Literal;
|
| | | import org.opengis.referencing.crs.CoordinateReferenceSystem;
|
| | |
|
| | | import com.vividsolutions.jts.geom.Geometry;
|
| | | import com.vividsolutions.jts.geom.GeometryCollection;
|
| | | import com.vividsolutions.jts.geom.LineString;
|
| | | import com.vividsolutions.jts.geom.MultiLineString;
|
| | | import com.vividsolutions.jts.geom.MultiPoint;
|
| | | import com.vividsolutions.jts.geom.MultiPolygon;
|
| | | import com.vividsolutions.jts.geom.Point;
|
| | | import com.vividsolutions.jts.geom.Polygon;
|
| | | import com.vividsolutions.jts.io.WKBWriter;
|
| | | import com.vividsolutions.jts.io.WKTWriter;
|
| | |
|
| | | import com.ximple.eofms.jobs.context.AbstractDgnFileJobContext;
|
| | | import com.ximple.eofms.util.postjts.JtsBinaryWriter;
|
| | |
|
| | | public abstract class AbstractDgnToEdbGeoJobContext extends AbstractDgnFileJobContext {
|
| | | private static Map<String, Class> GEOM_TYPE_MAP = new HashMap<String, Class>();
|
| | | private static Map<String, Class> GEOM3D_TYPE_MAP = new HashMap<String, Class>();
|
| | |
|
| | | static {
|
| | | GEOM_TYPE_MAP.put("GEOMETRY", Geometry.class);
|
| | | GEOM_TYPE_MAP.put("POINT", Point.class);
|
| | | GEOM_TYPE_MAP.put("LINESTRING", LineString.class);
|
| | | GEOM_TYPE_MAP.put("POLYGON", Polygon.class);
|
| | | GEOM_TYPE_MAP.put("MULTIPOINT", MultiPoint.class);
|
| | | GEOM_TYPE_MAP.put("MULTILINESTRING", MultiLineString.class);
|
| | | GEOM_TYPE_MAP.put("MULTIPOLYGON", MultiPolygon.class);
|
| | | GEOM_TYPE_MAP.put("GEOMETRYCOLLECTION", GeometryCollection.class);
|
| | |
|
| | | GEOM3D_TYPE_MAP.put("POINTM", Point.class);
|
| | | GEOM3D_TYPE_MAP.put("LINESTRINGM", LineString.class);
|
| | | GEOM3D_TYPE_MAP.put("POLYGONM", Polygon.class);
|
| | | GEOM3D_TYPE_MAP.put("MULTIPOINTM", MultiPoint.class);
|
| | | GEOM3D_TYPE_MAP.put("MULTILINESTRINGM", MultiLineString.class);
|
| | | GEOM3D_TYPE_MAP.put("MULTIPOLYGONM", MultiPolygon.class);
|
| | | GEOM3D_TYPE_MAP.put("GEOMETRYCOLLECTIONM", GeometryCollection.class);
|
| | | }
|
| | |
|
| | | private static Map<Class, String> CLASS_MAPPINGS = new HashMap<Class, String>();
|
| | |
|
| | | static {
|
| | | CLASS_MAPPINGS.put(String.class, "VARCHAR");
|
| | |
|
| | | CLASS_MAPPINGS.put(Boolean.class, "BOOLEAN");
|
| | |
|
| | | CLASS_MAPPINGS.put(Short.class, "SMALLINT");
|
| | | CLASS_MAPPINGS.put(Integer.class, "INTEGER");
|
| | | CLASS_MAPPINGS.put(Long.class, "BIGINT");
|
| | |
|
| | | CLASS_MAPPINGS.put(Float.class, "REAL");
|
| | | CLASS_MAPPINGS.put(Double.class, "DOUBLE PRECISION");
|
| | |
|
| | | CLASS_MAPPINGS.put(BigDecimal.class, "DECIMAL");
|
| | |
|
| | | CLASS_MAPPINGS.put(java.sql.Date.class, "DATE");
|
| | | CLASS_MAPPINGS.put(java.util.Date.class, "DATE");
|
| | | CLASS_MAPPINGS.put(java.sql.Time.class, "TIME");
|
| | | CLASS_MAPPINGS.put(java.sql.Timestamp.class, "TIMESTAMP");
|
| | | }
|
| | |
|
| | | private static Map<Class, String> GEOM_CLASS_MAPPINGS = new HashMap<Class, String>();
|
| | |
|
| | | //why don't we just stick this in with the non-geom class mappings?
|
| | | static {
|
| | | // init the inverse map
|
| | | Set keys = GEOM_TYPE_MAP.keySet();
|
| | |
|
| | | for (Object key : keys) {
|
| | | String name = (String) key;
|
| | | Class geomClass = GEOM_TYPE_MAP.get(name);
|
| | | GEOM_CLASS_MAPPINGS.put(geomClass, name);
|
| | | }
|
| | | }
|
| | |
|
| | | /**
|
| | | * Maximum string size for postgres
|
| | | */
|
| | | private static final int MAX_ALLOWED_VALUE = 10485760;
|
| | |
|
| | | // protected static final int BATCHSIZE = 2048;
|
| | | protected static final int BATCHSIZE = 256;
|
| | |
|
| | | /**
|
| | | * Well Known Text writer (from JTS).
|
| | | */
|
| | | protected static WKTWriter geometryWriter = new WKTWriter();
|
| | | protected static JtsBinaryWriter binaryWriter = new JtsBinaryWriter();
|
| | |
|
| | | private JDBCDataStore targetDataStore;
|
| | | // protected Connection connection;
|
| | | protected String targetSchema = "public";
|
| | |
|
| | | protected boolean schemaEnabled = true;
|
| | |
|
| | | public AbstractDgnToEdbGeoJobContext(String dataPath, DataStore targetDataStore, String targetSchema,
|
| | | boolean profileMode, boolean useTransform) {
|
| | | super(dataPath, profileMode, useTransform);
|
| | | if ((targetDataStore != null) && (targetDataStore instanceof JDBCDataStore)) {
|
| | | this.targetDataStore = (JDBCDataStore) targetDataStore;
|
| | | } else {
|
| | | getLogger().info("targetDataStore has wrong.");
|
| | | }
|
| | | // this.connection = connection;
|
| | | setTargetSchema(targetSchema);
|
| | | }
|
| | |
|
| | | public JDBCDataStore getTargetDataStore() {
|
| | | return targetDataStore;
|
| | | }
|
| | |
|
| | | public void setTargetDataStore(JDBCDataStore targetDataStore) {
|
| | | this.targetDataStore = targetDataStore;
|
| | | }
|
| | |
|
| | | public String getTargetSchema() {
|
| | | return targetSchema;
|
| | | }
|
| | |
|
| | | public void setTargetSchema(String schemaName) {
|
| | | targetSchema = schemaName;
|
| | | }
|
| | |
|
| | | public Connection getConnection() {
|
| | | try {
|
| | | return targetDataStore.getConnection(Transaction.AUTO_COMMIT);
|
| | |
|
| | | } catch (IOException e)
|
| | | {
|
| | | getLogger().warn(e.getMessage(), e);
|
| | | return null;
|
| | | }
|
| | | // return connection;
|
| | | }
|
| | |
|
| | | protected boolean isExistFeature(SimpleFeatureType featureType) {
|
| | | try {
|
| | | SimpleFeatureType existFeatureType = targetDataStore.getSchema(featureType.getTypeName());
|
| | | return existFeatureType != null; // && existFeatureType.equals(featureType);
|
| | | } catch (SchemaNotFoundException e) {
|
| | | return false;
|
| | | } catch (IOException e) {
|
| | | getLogger().info(e.getMessage(), e);
|
| | | return false;
|
| | | }
|
| | | }
|
| | |
|
| | | protected void deleteTable(Connection conn, String tableName) throws SQLException {
|
| | | Statement stmt = conn.createStatement();
|
| | | StringBuilder sb = new StringBuilder();
|
| | | sb.append("DELETE FROM \"");
|
| | | sb.append(getTargetSchema());
|
| | | sb.append("\".\"");
|
| | | sb.append(tableName);
|
| | | sb.append('\"');
|
| | | stmt.execute(sb.toString());
|
| | | if (!conn.getAutoCommit())
|
| | | conn.commit();
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | |
|
| | | protected void dropTable(Connection conn, String tableName) throws SQLException {
|
| | | Statement stmt = conn.createStatement();
|
| | | StringBuilder sb = new StringBuilder();
|
| | | sb.append("DROP TABLE \"");
|
| | | sb.append(getTargetSchema());
|
| | | sb.append("\".\"");
|
| | | sb.append(tableName);
|
| | | sb.append("\"");
|
| | | // sb.append(" CASCADE");
|
| | | stmt.execute(sb.toString());
|
| | | if (!conn.getAutoCommit())
|
| | | conn.commit();
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | |
|
| | | protected void dropGeometryColumn(Connection conn, String tableName, String geomField) throws SQLException {
|
| | | Statement stmt = conn.createStatement();
|
| | | StringBuilder sb = new StringBuilder();
|
| | | sb.append("SELECT \"public\".DropGeometryColumn('','");
|
| | | sb.append(tableName);
|
| | | sb.append("','");
|
| | | sb.append(geomField);
|
| | | sb.append("')");
|
| | | stmt.execute(sb.toString());
|
| | | if (!conn.getAutoCommit())
|
| | | conn.commit();
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | |
|
| | | protected String dropGeometryColumn(String dbSchema, String tableName, String geomField) {
|
| | | StringBuilder sb = new StringBuilder();
|
| | | sb.append("SELECT \"public\".DropGeometryColumn('");
|
| | | sb.append(dbSchema);
|
| | | sb.append("','");
|
| | | sb.append(tableName);
|
| | | sb.append("','");
|
| | | sb.append(geomField);
|
| | | sb.append("')");
|
| | | getLogger().info("Execute-" + sb.toString());
|
| | | return sb.toString();
|
| | | }
|
| | |
|
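| | | // Illustrative example (schema, table, column and SRID assumed): addGeometryColumn("public", "fsc-106-line",
|
| | | // geomDescriptor, 3826) for a LineString column named "geom" returns:
|
| | | //   SELECT "public".AddGeometryColumn('public','fsc-106-line','geom','3826','LINESTRING', 2);
|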
| | | private String addGeometryColumn(String dbSchema, String tableName, GeometryDescriptor geometryDescriptor, int srid) {
|
| | | StringBuilder sql;
|
| | | String typeName = getGeometrySQLTypeName(((AttributeType) geometryDescriptor.getType()).getBinding());
|
| | | if (typeName == null) {
|
| | | getLogger().warn("Error: " + geometryDescriptor.getLocalName() + " unknown type!!!");
|
| | | throw new RuntimeException("Error: " + geometryDescriptor.getLocalName() + " unknown type!!!");
|
| | | }
|
| | |
|
| | | sql = new StringBuilder("SELECT \"public\".AddGeometryColumn('");
|
| | | sql.append(dbSchema);
|
| | | sql.append("','");
|
| | | sql.append(tableName);
|
| | | sql.append("','");
|
| | | sql.append(geometryDescriptor.getLocalName());
|
| | | sql.append("','");
|
| | | sql.append(srid);
|
| | | sql.append("','");
|
| | | sql.append(typeName);
|
| | | sql.append("', 2);");
|
| | |
|
| | | //prints statement for later reuse
|
| | | return sql.toString();
|
| | | }
|
| | |
|
| | | public ArrayList<String> createNewSchemaTexts(Connection conn, SimpleFeatureType featureType) throws IOException {
|
| | | String origintableName = featureType.getTypeName();
|
| | | String tableName = origintableName.toLowerCase();
|
| | |
|
| | | ArrayList<String> result = new ArrayList<String>();
|
| | |
|
| | | List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();
|
| | |
|
| | | boolean shouldDrop = tablePresent(getTargetSchema(), tableName, conn);
|
| | |
|
| | | if (shouldDrop) {
|
| | | String sqlStr;
|
| | | for (AttributeDescriptor descriptor : descriptors) {
|
| | | if (!(descriptor instanceof GeometryDescriptor)) {
|
| | | continue;
|
| | | }
|
| | | GeometryDescriptor geometryDescriptor = (GeometryDescriptor) descriptor;
|
| | | sqlStr = dropGeometryColumn(getTargetSchema(), tableName, geometryDescriptor.getLocalName());
|
| | | getLogger().info(sqlStr);
|
| | | result.add(sqlStr);
|
| | | }
|
| | | // sqlStr = "DROP TABLE " + encodeSchemaTableName(tableName) + " CASCADE;";
|
| | | sqlStr = "DROP TABLE " + encodeSchemaTableName(tableName) + " ;";
|
| | | getLogger().info(sqlStr);
|
| | | result.add(sqlStr);
|
| | | }
|
| | |
|
| | | StringBuilder sql = new StringBuilder("CREATE TABLE ");
|
| | | sql.append(encodeSchemaTableName(tableName));
|
| | | sql.append(" ( gid serial PRIMARY KEY, ");
|
| | | sql.append(makeNonGeomSqlCreate(descriptors));
|
| | | sql.append(");");
|
| | |
|
| | | String sqlStr = sql.toString();
|
| | | getLogger().debug(sqlStr);
|
| | | result.add(sqlStr);
|
| | |
|
| | | for (AttributeDescriptor descriptor : descriptors) {
|
| | | if (!(descriptor instanceof GeometryDescriptor)) {
|
| | | continue;
|
| | | }
|
| | | GeometryDescriptor geometryDescriptor = (GeometryDescriptor) descriptor;
|
| | |
|
| | | CoordinateReferenceSystem refSys = geometryDescriptor.getCoordinateReferenceSystem();
|
| | | int SRID = getSRID(refSys);
|
| | |
|
| | | sqlStr = addGeometryColumn(getTargetSchema(), tableName, geometryDescriptor, SRID);
|
| | | getLogger().debug(sqlStr);
|
| | | result.add(sqlStr);
|
| | |
|
| | |
|
| | | String indexName = tableName.replace('-', '_');
|
| | | //also build a spatial index on each geometry column.
|
| | | sql = new StringBuilder("CREATE INDEX \"spatial_");
|
| | | sql.append(indexName);
|
| | | sql.append("_");
|
| | | sql.append(descriptor.getLocalName().toLowerCase());
|
| | | sql.append("\" ON ");
|
| | | sql.append(encodeSchemaTableName(tableName));
|
| | | sql.append(" USING GIST (");
|
| | | sql.append(encodeSchemaColumnName(descriptor.getLocalName()));
|
| | | sql.append(" );");
|
| | | // sql.append(" gist_geometry_ops);");
|
| | |
|
| | | sqlStr = sql.toString();
|
| | | getLogger().debug(sqlStr);
|
| | |
|
| | | result.add(sqlStr);
|
| | | }
|
| | | return result;
|
| | | }
|
| | |
|
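| | | // Resolves the numeric SRID from the CRS identifiers; DefaultGeographicCRS.WGS84 without identifiers
|
| | | // maps to 4326, and a null CRS or a failed lookup falls back to -1.
|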
| | | private int getSRID(CoordinateReferenceSystem refSys) {
|
| | | int SRID;
|
| | | if (refSys != null) {
|
| | | try {
|
| | | Set ident = refSys.getIdentifiers();
|
| | | if ((ident == null || ident.isEmpty()) && refSys == DefaultGeographicCRS.WGS84) {
|
| | | SRID = 4326;
|
| | | } else {
|
| | | assert ident != null;
|
| | | String code = ((NamedIdentifier) ident.toArray()[0]).getCode();
|
| | | SRID = Integer.parseInt(code);
|
| | | }
|
| | | } catch (Exception e) {
|
| | | getLogger().warn("SRID could not be determined");
|
| | | SRID = -1;
|
| | | }
|
| | | } else {
|
| | | SRID = -1;
|
| | | }
|
| | | return SRID;
|
| | | }
|
| | |
|
| | | private boolean tablePresent(String schema, String table, Connection conn) throws IOException {
|
| | | final int TABLE_NAME_COL = 3;
|
| | |
|
| | | ResultSet tables = null;
|
| | | try {
|
| | | DatabaseMetaData meta = conn.getMetaData();
|
| | | String[] tableType = {"TABLE"};
|
| | | tables = meta.getTables(null, schema, "%", tableType);
|
| | |
|
| | | while (tables.next()) {
|
| | | String tableName = tables.getString(TABLE_NAME_COL);
|
| | |
|
| | | if (allowTable(tableName) && (tableName != null)
|
| | | && (tableName.equalsIgnoreCase(table))) {
|
| | | return (true);
|
| | | }
|
| | | }
|
| | |
|
| | | return false;
|
| | | } catch (SQLException sqlException) {
|
| | | String message = "Error querying database for list of tables:"
|
| | | + sqlException.getMessage();
|
| | | throw new DataSourceException(message, sqlException);
|
| | | } finally {
|
| | | if (tables != null)
|
| | | JDBCUtils.close(tables);
|
| | | }
|
| | | }
|
| | |
|
| | | protected boolean allowTable(String tablename) {
|
| | | if (tablename.equals("geometry_columns")) {
|
| | | return false;
|
| | | } else if (tablename.startsWith("spatial_ref_sys")) {
|
| | | return false;
|
| | | }
|
| | |
|
| | | //others?
|
| | | return true;
|
| | | }
|
| | |
|
| | |
|
| | | private StringBuilder makeNonGeomSqlCreate(List<AttributeDescriptor> descriptors)
|
| | | throws IOException {
|
| | | StringBuilder buf = new StringBuilder("");
|
| | |
|
| | | for (AttributeDescriptor descriptor : descriptors) {
|
| | | String typeName;
|
| | | typeName = CLASS_MAPPINGS.get(((AttributeType) descriptor.getType()).getBinding());
|
| | | if (typeName == null) {
|
| | | typeName = GEOM_CLASS_MAPPINGS.get(((AttributeType) descriptor.getType()).getBinding());
|
| | | if (typeName != null) continue;
|
| | | }
|
| | |
|
| | | if (typeName != null) {
|
| | | if (typeName.equals("VARCHAR")) {
|
| | | int length = -1;
|
| | | List<Filter> flist = ((PropertyType) descriptor.getType()).getRestrictions();
|
| | | for (Filter f : flist) {
|
| | | if (f != null && f != Filter.EXCLUDE && f != Filter.INCLUDE &&
|
| | | (f instanceof PropertyIsLessThan || f instanceof PropertyIsLessThanOrEqualTo)) {
|
| | | try {
|
| | | BinaryComparisonOperator cf = (BinaryComparisonOperator) f;
|
| | | if (cf.getExpression1() instanceof LengthFunction) {
|
| | | length = Integer.parseInt(((Literal) cf.getExpression2()).getValue().toString());
|
| | | } else {
|
| | | if (cf.getExpression2() instanceof LengthFunction) {
|
| | | length = Integer.parseInt(((Literal) cf.getExpression1()).getValue().toString());
|
| | | }
|
| | | }
|
| | | } catch (NumberFormatException e) {
|
| | | length = 256;
|
| | | }
|
| | | } else {
|
| | | length = 256;
|
| | | }
|
| | | break;
|
| | | }
|
| | | if (length < 1) {
|
| | | getLogger().warn("FeatureType did not specify string length; defaulted to 256");
|
| | | length = 256;
|
| | | } else if (length > MAX_ALLOWED_VALUE) {
|
| | | length = MAX_ALLOWED_VALUE;
|
| | | }
|
| | | typeName = typeName + "(" + length + ")";
|
| | | }
|
| | |
|
| | | if (!descriptor.isNillable()) {
|
| | | typeName = typeName + " NOT NULL";
|
| | | }
|
| | |
|
| | | //TODO review!!! Is toString() always OK???
|
| | | Object defaultValue = descriptor.getDefaultValue();
|
| | |
|
| | | if (defaultValue != null) {
|
| | | typeName = typeName + " DEFAULT '"
|
| | | + defaultValue.toString() + "'";
|
| | | }
|
| | |
|
| | | buf.append(" \"").append(descriptor.getLocalName()).append("\" ").append(typeName).append(",");
|
| | |
|
| | | } else {
|
| | | String msg;
|
| | | if (descriptor == null) {
|
| | | msg = "AttributeType was null!";
|
| | | } else {
|
| | | msg = "Type '" + ((AttributeType) descriptor.getType()).getBinding() + "' not supported!";
|
| | | }
|
| | | throw (new IOException(msg));
|
| | | }
|
| | | }
|
| | |
|
| | | return buf.deleteCharAt(buf.length() - 1);
|
| | | }
|
| | |
|
| | | private String getGeometrySQLTypeName(Class type) {
|
| | | String res = GEOM_CLASS_MAPPINGS.get(type);
|
| | |
|
| | | if (res == null) {
|
| | | throw new RuntimeException("Unknown type name for class " + type
|
| | | + " please update GEOMETRY_MAPPINGS");
|
| | | }
|
| | |
|
| | | return res;
|
| | | }
|
| | |
|
| | | protected String getGeometryInsertText(Geometry geom, int srid) // throws IOException
|
| | | {
|
| | | if (geom == null) {
|
| | | return "null";
|
| | | }
|
| | |
|
| | | /*
|
| | | if (targetDataStore.isWKBEnabled()) {
|
| | | //String wkb = WKBEncoder.encodeGeometryHex(geom);
|
| | | String wkb = WKBWriter.bytesToHex(new WKBWriter().write(geom));
|
| | |
|
| | | if (targetDataStore.isByteaWKB()) {
|
| | | return "setSRID('" + wkb + "'::geometry," + srid + ")";
|
| | | } else {
|
| | | return "GeomFromWKB('" + wkb + "', " + srid + ")";
|
| | | }
|
| | | }
|
| | |
|
| | | String geoText = geometryWriter.write(geom);
|
| | | return "GeometryFromText('" + geoText + "', " + srid + ")";
|
| | | */
|
| | | if (geom instanceof LinearRing) {
|
| | | //postgis does not handle linear rings, convert to just a line string
|
| | | geom = geom.getFactory().createLineString(((LinearRing) geom).getCoordinateSequence());
|
| | | }
|
| | | return "ST_GeomFromText('" + geom.toText() + "', " + srid + ")";
|
| | | }
|
| | |
|
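| | | // Builds a literal INSERT for one feature. Illustrative shape (table, columns and values assumed):
|
| | | //   INSERT INTO "public"."fsc-106-line" ("geom","fowner") VALUES (ST_GeomFromText('LINESTRING (0 0, 1 1)', 3826),7)
|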
| | | protected String makeInsertSql(SimpleFeature feature, int srid) // throws IOException
|
| | | {
|
| | | SimpleFeatureType featureType = feature.getFeatureType();
|
| | |
|
| | | String tableName = encodeSchemaTableName(featureType.getTypeName());
|
| | | List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();
|
| | |
|
| | | String attrValue;
|
| | |
|
| | | StringBuilder statementSQL = new StringBuilder("INSERT INTO " + tableName + " (");
|
| | |
|
| | | // encode insertion for attributes, but remember to avoid auto-increment ones,
|
| | | // they may be included in the feature type as well
|
| | | for (AttributeDescriptor attributeType : descriptors) {
|
| | | String attName = attributeType.getLocalName();
|
| | |
|
| | | if (feature.getAttribute(attName) != null) {
|
| | | String colName = encodeSchemaColumnName(attName);
|
| | | statementSQL.append(colName).append(",");
|
| | | }
|
| | | }
|
| | |
|
| | | statementSQL.setCharAt(statementSQL.length() - 1, ')');
|
| | | statementSQL.append(" VALUES (");
|
| | |
|
| | | List<Object> attributes = feature.getAttributes();
|
| | |
|
| | | for (int i = 0; i < descriptors.size(); i++) {
|
| | | attrValue = null;
|
| | |
|
| | | if (descriptors.get(i) instanceof GeometryDescriptor) {
|
| | | // String geomName = descriptors[i].getLocalName();
|
| | | // int srid = ftInfo.getSRID(geomName);
|
| | | Geometry geometry = (Geometry) attributes.get(i);
|
| | |
|
| | | if (geometry == null) {
|
| | | attrValue = "NULL";
|
| | | } else {
|
| | | geometry.setSRID(FeatureTypeBuilderUtil.getDefaultFeatureSRID());
|
| | | attrValue = getGeometryInsertText(geometry, srid);
|
| | | }
|
| | | } else {
|
| | | if (attributes.get(i) != null) {
|
| | | attrValue = addQuotes(attributes.get(i));
|
| | | }
|
| | | }
|
| | |
|
| | | if (attrValue != null) {
|
| | | statementSQL.append(attrValue).append(",");
|
| | | }
|
| | | }
|
| | |
|
| | | statementSQL.setCharAt(statementSQL.length() - 1, ')');
|
| | |
|
| | | return (statementSQL.toString());
|
| | | }
|
| | |
|
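| | | /**
|
| | |  * Builds a parameterized INSERT statement for the feature type, with one "?" placeholder
|
| | |  * per attribute, for use with bindFeatureParameters.
|
| | |  */
|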
| | | protected String makePrepareInsertSql(SimpleFeatureType featureType) {
|
| | | String tableName = encodeSchemaTableName(featureType.getTypeName());
|
| | | List<AttributeDescriptor> attributeTypes = featureType.getAttributeDescriptors();
|
| | |
|
| | | StringBuilder statementSQL = new StringBuilder("INSERT INTO " + tableName + " (");
|
| | |
|
| | | // encode insertion for attributes, but remember to avoid auto-increment ones,
|
| | | // they may be included in the feature type as well
|
| | | for (AttributeDescriptor descriptor : attributeTypes) {
|
| | | String attName = descriptor.getName().getLocalPart();
|
| | |
|
| | | String colName = encodeSchemaColumnName(attName);
|
| | | statementSQL.append(colName).append(",");
|
| | | }
|
| | |
|
| | | statementSQL.setCharAt(statementSQL.length() - 1, ')');
|
| | | statementSQL.append(" VALUES (");
|
| | |
|
| | | for (AttributeDescriptor attributeType : attributeTypes) {
|
| | | statementSQL.append(" ? ,");
|
| | | }
|
| | |
|
| | | statementSQL.setCharAt(statementSQL.length() - 1, ')');
|
| | |
|
| | | return (statementSQL.toString());
|
| | | }
|
| | |
|
| | | protected String addQuotes(Object value) {
|
| | | String retString;
|
| | |
|
| | | if (value != null) {
|
| | | if (value instanceof Number) {
|
| | | retString = value.toString();
|
| | | } else {
|
| | | retString = "'" + doubleQuote(value) + "'";
|
| | | }
|
| | | } else {
|
| | | retString = "null";
|
| | | }
|
| | |
|
| | | return retString;
|
| | | }
|
| | |
|
| | | String doubleQuote(Object obj) {
|
| | | return obj.toString().replaceAll("'", "''");
|
| | | }
|
| | |
|
| | | protected String encodeName(String tableName) {
|
| | | return tableName;
|
| | | }
|
| | |
|
| | | protected String encodeColumnName(String colName) {
|
| | | return encodeName(colName);
|
| | | }
|
| | |
|
| | | public String encodeSchemaTableName(String tableName) {
|
| | | return schemaEnabled ? ("\"" + getTargetSchema() + "\".\"" + tableName + "\"")
|
| | | : ("\"" + tableName + "\"");
|
| | | }
|
| | |
|
| | | public String encodeSchemaColumnName(String columnName) {
|
| | | return "\"" + columnName + "\"";
|
| | | }
|
| | |
|
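| | | /**
|
| | |  * Binds the feature attributes to the prepared statement by JDBC index (1-based).
|
| | |  * Geometry attributes are written as binary via JtsBinaryWriter; other attributes are
|
| | |  * bound according to their Java binding class.
|
| | |  */
|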
| | | protected void bindFeatureParameters(PreparedStatement pstmt, SimpleFeature feature) throws SQLException {
|
| | | SimpleFeatureType featureType = feature.getFeatureType();
|
| | |
|
| | | List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();
|
| | | List<Object> attributes = feature.getAttributes();
|
| | |
|
| | | for (int i = 0; i < descriptors.size(); i++) {
|
| | | if (descriptors.get(i) instanceof GeometryDescriptor) {
|
| | | pstmt.setBytes(i + 1, binaryWriter.writeBinary((Geometry) attributes.get(i)));
|
| | | } else {
|
| | | Class<?> bindingType = ((AttributeType) descriptors.get(i).getType()).getBinding();
|
| | | if (bindingType.equals(Short.class)) {
|
| | | pstmt.setShort(i + 1, (Short) attributes.get(i));
|
| | | } else if (bindingType.equals(Integer.class)) {
|
| | | pstmt.setInt(i + 1, (Integer) attributes.get(i));
|
| | | } else if (bindingType.equals(Long.class)) {
|
| | | pstmt.setLong(i + 1, (Long) attributes.get(i));
|
| | | } else if (bindingType.equals(String.class)) {
|
| | | pstmt.setString(i + 1, (String) attributes.get(i));
|
| | | } else if (bindingType.equals(Float.class)) {
|
| | | pstmt.setFloat(i + 1, (Float) attributes.get(i));
|
| | | } else if (bindingType.equals(Double.class)) {
|
| | | pstmt.setDouble(i + 1, (Double) attributes.get(i));
|
| | | } else if (bindingType.equals(Boolean.class)) {
|
| | | pstmt.setBoolean(i + 1, (Boolean) attributes.get(i));
|
| | | } else if (bindingType.equals(BigDecimal.class)) {
|
| | | pstmt.setBigDecimal(i + 1, (BigDecimal) attributes.get(i));
|
| | | } else if (bindingType.equals(java.sql.Date.class)) {
|
| | | pstmt.setDate(i + 1, (java.sql.Date) attributes.get(i));
|
| | | } else if (bindingType.equals(java.sql.Time.class)) {
|
| | | pstmt.setTime(i + 1, (java.sql.Time) attributes.get(i));
|
| | | } else if (bindingType.equals(java.sql.Timestamp.class)) {
|
| | | pstmt.setTimestamp(i + 1, (java.sql.Timestamp) attributes.get(i));
|
| | | } else if (bindingType.equals(java.util.Date.class)) {
|
| | | java.sql.Date sDate = new java.sql.Date(((java.util.Date) attributes.get(i)).getTime());
|
| | | pstmt.setDate(i + 1, sDate);
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
New file |
| | |
| | | package com.ximple.eofms.jobs.context.edbgeo;
|
| | |
|
| | | import java.io.IOException;
|
| | | import java.math.BigDecimal;
|
| | | import java.sql.Connection;
|
| | | import java.sql.DatabaseMetaData;
|
| | | import java.sql.PreparedStatement;
|
| | | import java.sql.ResultSet;
|
| | | import java.sql.SQLException;
|
| | | import java.sql.Statement;
|
| | | import java.util.*;
|
| | |
|
| | | import com.vividsolutions.jts.geom.LinearRing;
|
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil;
|
| | | import org.geotools.data.DataSourceException;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.SchemaNotFoundException;
|
| | | import org.geotools.filter.LengthFunction;
|
| | | import org.geotools.jdbc.JDBCDataStore;
|
| | | import org.geotools.referencing.NamedIdentifier;
|
| | | import org.geotools.referencing.crs.DefaultGeographicCRS;
|
| | | import org.opengis.feature.simple.SimpleFeature;
|
| | | import org.opengis.feature.simple.SimpleFeatureType;
|
| | | import org.opengis.feature.type.AttributeDescriptor;
|
| | | import org.opengis.feature.type.AttributeType;
|
| | | import org.opengis.feature.type.GeometryDescriptor;
|
| | | import org.opengis.feature.type.PropertyType;
|
| | | import org.opengis.filter.BinaryComparisonOperator;
|
| | | import org.opengis.filter.Filter;
|
| | | import org.opengis.filter.PropertyIsLessThan;
|
| | | import org.opengis.filter.PropertyIsLessThanOrEqualTo;
|
| | | import org.opengis.filter.expression.Literal;
|
| | | import org.opengis.referencing.crs.CoordinateReferenceSystem;
|
| | |
|
| | | import com.vividsolutions.jts.geom.Geometry;
|
| | | import com.vividsolutions.jts.geom.GeometryCollection;
|
| | | import com.vividsolutions.jts.geom.LineString;
|
| | | import com.vividsolutions.jts.geom.MultiLineString;
|
| | | import com.vividsolutions.jts.geom.MultiPoint;
|
| | | import com.vividsolutions.jts.geom.MultiPolygon;
|
| | | import com.vividsolutions.jts.geom.Point;
|
| | | import com.vividsolutions.jts.geom.Polygon;
|
| | | import com.vividsolutions.jts.io.WKBWriter;
|
| | | import com.vividsolutions.jts.io.WKTWriter;
|
| | |
|
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext;
|
| | | import com.ximple.eofms.util.postjts.JtsBinaryWriter;
|
| | |
|
| | | public abstract class AbstractOracleToEdbGeoJobContext extends AbstractOracleJobContext {
|
| | | private static Map<String, Class> GEOM_TYPE_MAP = new HashMap<String, Class>();
|
| | | private static Map<String, Class> GEOM3D_TYPE_MAP = new HashMap<String, Class>();
|
| | |
|
| | | static {
|
| | | GEOM_TYPE_MAP.put("GEOMETRY", Geometry.class);
|
| | | GEOM_TYPE_MAP.put("POINT", Point.class);
|
| | | GEOM_TYPE_MAP.put("LINESTRING", LineString.class);
|
| | | GEOM_TYPE_MAP.put("POLYGON", Polygon.class);
|
| | | GEOM_TYPE_MAP.put("MULTIPOINT", MultiPoint.class);
|
| | | GEOM_TYPE_MAP.put("MULTILINESTRING", MultiLineString.class);
|
| | | GEOM_TYPE_MAP.put("MULTIPOLYGON", MultiPolygon.class);
|
| | | GEOM_TYPE_MAP.put("GEOMETRYCOLLECTION", GeometryCollection.class);
|
| | |
|
| | | GEOM3D_TYPE_MAP.put("POINTM", Point.class);
|
| | | GEOM3D_TYPE_MAP.put("LINESTRINGM", LineString.class);
|
| | | GEOM3D_TYPE_MAP.put("POLYGONM", Polygon.class);
|
| | | GEOM3D_TYPE_MAP.put("MULTIPOINTM", MultiPoint.class);
|
| | | GEOM3D_TYPE_MAP.put("MULTILINESTRINGM", MultiLineString.class);
|
| | | GEOM3D_TYPE_MAP.put("MULTIPOLYGONM", MultiPolygon.class);
|
| | | GEOM3D_TYPE_MAP.put("GEOMETRYCOLLECTIONM", GeometryCollection.class);
|
| | | }
|
| | |
|
| | | private static Map<Class, String> CLASS_MAPPINGS = new HashMap<Class, String>();
|
| | |
|
| | | static {
|
| | | CLASS_MAPPINGS.put(String.class, "VARCHAR");
|
| | |
|
| | | CLASS_MAPPINGS.put(Boolean.class, "BOOLEAN");
|
| | |
|
| | | CLASS_MAPPINGS.put(Short.class, "SMALLINT");
|
| | | CLASS_MAPPINGS.put(Integer.class, "INTEGER");
|
| | | CLASS_MAPPINGS.put(Long.class, "BIGINT");
|
| | |
|
| | | CLASS_MAPPINGS.put(Float.class, "REAL");
|
| | | CLASS_MAPPINGS.put(Double.class, "DOUBLE PRECISION");
|
| | |
|
| | | CLASS_MAPPINGS.put(BigDecimal.class, "DECIMAL");
|
| | |
|
| | | CLASS_MAPPINGS.put(java.sql.Date.class, "DATE");
|
| | | CLASS_MAPPINGS.put(java.util.Date.class, "DATE");
|
| | | CLASS_MAPPINGS.put(java.sql.Time.class, "TIME");
|
| | | CLASS_MAPPINGS.put(java.sql.Timestamp.class, "TIMESTAMP");
|
| | | }
|
| | |
|
| | | private static Map<Class, String> GEOM_CLASS_MAPPINGS = new HashMap<Class, String>();
|
| | |
|
| | | //why don't we just stick this in with the non-geom class mappings?
|
| | | static {
|
| | | // init the inverse map
|
| | | Set keys = GEOM_TYPE_MAP.keySet();
|
| | |
|
| | | for (Object key : keys) {
|
| | | String name = (String) key;
|
| | | Class geomClass = GEOM_TYPE_MAP.get(name);
|
| | | GEOM_CLASS_MAPPINGS.put(geomClass, name);
|
| | | }
|
| | | }
|
| | |
|
| | | /**
|
| | | * Maximum string size for postgres
|
| | | */
|
| | | private static final int MAX_ALLOWED_VALUE = 10485760;
|
| | |
|
| | | // protected static final int BATCHSIZE = 512;
|
| | | protected static final int BATCHSIZE = 128;
|
| | |
|
| | | /**
|
| | | * Well Known Text writer (from JTS).
|
| | | */
|
| | | protected static WKTWriter geometryWriter = new WKTWriter();
|
| | | protected static JtsBinaryWriter binaryWriter = new JtsBinaryWriter();
|
| | |
|
| | | protected boolean schemaEnabled = true;
|
| | |
|
| | | private JDBCDataStore targetDataStore;
|
| | | private String targetSchema = "public";
|
| | |
|
| | | public AbstractOracleToEdbGeoJobContext(String dataPath, DataStore targetDataStore, String targetSchema,
|
| | | boolean profileMode, boolean useTransform) {
|
| | | super(profileMode, useTransform);
|
| | | if ((targetDataStore != null) && (targetDataStore instanceof JDBCDataStore)) {
|
| | | this.targetDataStore = (JDBCDataStore) targetDataStore;
|
| | | } else {
|
| | | getLogger().info("targetDataStore has wrong.");
|
| | | }
|
| | | setDataPath(dataPath);
|
| | | setTargetSchema(targetSchema);
|
| | | }
|
| | |
|
| | | public JDBCDataStore getTargetDataStore() {
|
| | | return targetDataStore;
|
| | | }
|
| | |
|
| | | public void setTargetDataStore(JDBCDataStore targetDataStore) {
|
| | | this.targetDataStore = targetDataStore;
|
| | | }
|
| | |
|
| | | public String getTargetSchema() {
|
| | | return targetSchema;
|
| | | }
|
| | |
|
| | | public void setTargetSchema(String schemaName) {
|
| | | targetSchema = schemaName;
|
| | | }
|
| | |
|
| | | public Connection getConnection() {
|
| | | if (targetDataStore != null) {
|
| | | try {
|
| | | return targetDataStore.getDataSource().getConnection();
|
| | | } catch (SQLException e) {
|
| | | getLogger().warn(e.getMessage(), e);
|
| | | return null;
|
| | | }
|
| | | }
|
| | | return null;
|
| | | }
|
| | |
|
| | | protected boolean isExistFeature(SimpleFeatureType featureType) {
|
| | | try {
|
| | | SimpleFeatureType existFeatureType = targetDataStore.getSchema(featureType.getTypeName());
|
| | | return existFeatureType != null; // && existFeatureType.equals(featureType);
|
| | | } catch (SchemaNotFoundException e) {
|
| | | return false;
|
| | | } catch (IOException e) {
|
| | | getLogger().info(e.getMessage(), e);
|
| | | return false;
|
| | | }
|
| | | }
|
| | |
|
| | | protected void deleteTable(Connection conn, String schemaName, String tableName) throws SQLException {
|
| | | Statement stmt = conn.createStatement();
|
| | | StringBuilder sb = new StringBuilder();
|
| | | sb.append("DELETE FROM \"");
|
| | | sb.append(schemaName);
|
| | | sb.append("\".\"");
|
| | | sb.append(tableName);
|
| | | sb.append('\"');
|
| | | stmt.execute(sb.toString());
|
| | | stmt.close();
|
| | | conn.commit();
|
| | | }
|
| | |
|
| | | protected void dropTable(Connection conn, String schemaName, String tableName) throws SQLException {
|
| | | Statement stmt = conn.createStatement();
|
| | | StringBuilder sb = new StringBuilder();
|
| | | sb.append("DROP TABLE \"");
|
| | | sb.append(schemaName);
|
| | | sb.append("\".\"");
|
| | | sb.append(tableName);
|
| | | sb.append("\"");
|
| | | // sb.append(" CASCADE");
|
| | | stmt.execute(sb.toString());
|
| | | stmt.close();
|
| | | conn.commit();
|
| | | }
|
| | |
|
| | | protected void dropGeometryColumn(Connection conn, String dbSchema, String tableName, String geomField) throws SQLException {
|
| | | Statement stmt = conn.createStatement();
|
| | | StringBuilder sb = new StringBuilder();
|
| | | sb.append("SELECT \"public\".DropGeometryColumn('");
|
| | | sb.append(dbSchema);
|
| | | sb.append("','");
|
| | | sb.append(tableName);
|
| | | sb.append("','");
|
| | | sb.append(geomField);
|
| | | sb.append("')");
|
| | | stmt.execute(sb.toString());
|
| | | stmt.close();
|
| | | conn.commit();
|
| | | }
|
| | |
|
| | | protected String dropGeometryColumn(String dbSchema, String tableName, String geomField) {
|
| | | StringBuilder sb = new StringBuilder();
|
| | | sb.append("SELECT \"public\".DropGeometryColumn('");
|
| | | sb.append(dbSchema);
|
| | | sb.append("','");
|
| | | sb.append(tableName);
|
| | | sb.append("','");
|
| | | sb.append(geomField);
|
| | | sb.append("')");
|
| | | return sb.toString();
|
| | | }
|
| | |
|
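| | | /**
|
| | |  * Returns a "SELECT AddGeometryColumn(...)" statement that registers a 2D geometry
|
| | |  * column of the descriptor's geometry type with the given SRID.
|
| | |  */
|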
| | | private String addGeometryColumn(String dbSchema, String tableName, GeometryDescriptor geometryDescriptor, int srid) {
|
| | | StringBuilder sql;
|
| | | String typeName = getGeometrySQLTypeName(((PropertyType)geometryDescriptor.getType()).getBinding());
|
| | | if (typeName == null) {
|
| | | getLogger().warn("Error: " + geometryDescriptor.getLocalName() + " unknown type!!!");
|
| | | throw new RuntimeException("Error: " + geometryDescriptor.getLocalName() + " unknown type!!!");
|
| | | }
|
| | |
|
| | | sql = new StringBuilder("SELECT \"public\".AddGeometryColumn('");
|
| | | sql.append(dbSchema);
|
| | | sql.append("','");
|
| | | sql.append(tableName);
|
| | | sql.append("','");
|
| | | sql.append(geometryDescriptor.getLocalName());
|
| | | sql.append("','");
|
| | | sql.append(srid);
|
| | | sql.append("','");
|
| | | sql.append(typeName);
|
| | | sql.append("', 2);");
|
| | |
|
| | | // return the statement text so the caller can execute it later
|
| | | return sql.toString();
|
| | | }
|
| | |
|
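| | | /**
|
| | |  * Generates the DDL statements needed to (re)create the target table: drop statements for
|
| | |  * any existing geometry columns and table, a CREATE TABLE with a serial "gid" primary key,
|
| | |  * AddGeometryColumn calls, a GIST index per geometry column, and extra BTREE indexes for
|
| | |  * tables whose names start with "fsc".
|
| | |  */
|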
| | | public ArrayList<String> createNewSchemaTexts(Connection conn, SimpleFeatureType featureType) throws IOException {
|
| | | String origintableName = featureType.getTypeName();
|
| | | String tableName = origintableName.toLowerCase();
|
| | |
|
| | | ArrayList<String> result = new ArrayList<String>();
|
| | |
|
| | | List<AttributeDescriptor> attributeType = featureType.getAttributeDescriptors();
|
| | | // String dbSchema = targetDataStore.getDatabaseSchemaName();
|
| | |
|
| | | boolean shouldDrop = tablePresent(getTargetSchema(), tableName, conn);
|
| | | if (shouldDrop) {
|
| | | String sqlStr;
|
| | | for (AttributeDescriptor descriptor : attributeType) {
|
| | | if (!(descriptor instanceof GeometryDescriptor)) {
|
| | | continue;
|
| | | }
|
| | | GeometryDescriptor geomAttribute = (GeometryDescriptor) descriptor;
|
| | | sqlStr = dropGeometryColumn(getTargetSchema(), tableName, geomAttribute.getLocalName());
|
| | | getLogger().info(sqlStr);
|
| | | result.add(sqlStr);
|
| | | }
|
| | | // sqlStr = "DROP TABLE " + encodeSchemaTableName(tableName) + " CASCADE;";
|
| | | sqlStr = "DROP TABLE " + encodeSchemaTableName(tableName) + " ;";
|
| | | getLogger().info(sqlStr);
|
| | | result.add(sqlStr);
|
| | | }
|
| | |
|
| | | StringBuilder sql = new StringBuilder("CREATE TABLE ");
|
| | | sql.append(encodeSchemaTableName(tableName));
|
| | | sql.append(" ( gid serial PRIMARY KEY, ");
|
| | | sql.append(makeNonGeomSqlCreate(attributeType));
|
| | | sql.append(");");
|
| | |
|
| | | String sqlStr = sql.toString();
|
| | | getLogger().debug(sqlStr);
|
| | | result.add(sqlStr);
|
| | |
|
| | | String indexName = tableName.replace('-', '_');
|
| | |
|
| | | for (AttributeDescriptor descriptor : attributeType) {
|
| | | if (!(descriptor instanceof GeometryDescriptor)) {
|
| | | continue;
|
| | | }
|
| | | GeometryDescriptor geomDescriptor = (GeometryDescriptor) descriptor;
|
| | |
|
| | | CoordinateReferenceSystem refSys = geomDescriptor.getCoordinateReferenceSystem();
|
| | | int SRID = getSRID(refSys);
|
| | |
|
| | | sqlStr = addGeometryColumn(getTargetSchema(), tableName, geomDescriptor, SRID);
|
| | | getLogger().debug(sqlStr);
|
| | | result.add(sqlStr);
|
| | |
|
| | | //also build a spatial index on each geometry column.
|
| | | sql = new StringBuilder("CREATE INDEX \"");
|
| | | sql.append("spatial_");
|
| | | sql.append(indexName);
|
| | | sql.append("_");
|
| | | sql.append(descriptor.getLocalName().toLowerCase());
|
| | | sql.append("\" ON ");
|
| | | sql.append(encodeSchemaTableName(tableName));
|
| | | sql.append(" USING GIST (");
|
| | | sql.append(encodeSchemaColumnName(descriptor.getLocalName()));
|
| | | sql.append(" );");
|
| | | // sql.append(" gist_geometry_ops);");
|
| | |
|
| | | sqlStr = sql.toString();
|
| | | getLogger().debug(sqlStr);
|
| | | result.add(sqlStr);
|
| | | }
|
| | |
|
| | | if (origintableName.startsWith("fsc")) {
|
| | | sql = new StringBuilder("CREATE INDEX \"");
|
| | | sql.append("oid_");
|
| | | sql.append(indexName);
|
| | | sql.append("\" ON ");
|
| | | sql.append(encodeSchemaTableName(tableName));
|
| | | sql.append(" USING BTREE (\"tid\", \"oid\")");
|
| | | sqlStr = sql.toString();
|
| | | getLogger().debug(sqlStr);
|
| | | result.add(sqlStr);
|
| | |
|
| | | sql = new StringBuilder("CREATE INDEX \"");
|
| | | sql.append("lvwt_");
|
| | | sql.append(indexName);
|
| | | sql.append("\" ON ");
|
| | | sql.append(encodeSchemaTableName(tableName));
|
| | | sql.append(" USING BTREE (\"level\", \"symweight\")");
|
| | | sqlStr = sql.toString();
|
| | | getLogger().debug(sqlStr);
|
| | | result.add(sqlStr);
|
| | | }
|
| | |
|
| | | return result;
|
| | | }
|
| | |
|
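| | | /**
|
| | |  * Resolves the SRID from the coordinate reference system identifiers, returning 4326 for
|
| | |  * WGS84 without identifiers and -1 when the SRID cannot be determined.
|
| | |  */
|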
| | | private int getSRID(CoordinateReferenceSystem refSys) {
|
| | | int SRID;
|
| | | if (refSys != null) {
|
| | | try {
|
| | | Set ident = refSys.getIdentifiers();
|
| | | if ((ident == null || ident.isEmpty()) && refSys == DefaultGeographicCRS.WGS84) {
|
| | | SRID = 4326;
|
| | | } else {
|
| | | String code = ((NamedIdentifier) ident.toArray()[0]).getCode();
|
| | | SRID = Integer.parseInt(code);
|
| | | }
|
| | | } catch (Exception e) {
|
| | | getLogger().warn("SRID could not be determined");
|
| | | SRID = -1;
|
| | | }
|
| | | } else {
|
| | | SRID = -1;
|
| | | }
|
| | | return SRID;
|
| | | }
|
| | |
|
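| | | /**
|
| | |  * Checks the JDBC DatabaseMetaData table list for a table with the given name in the
|
| | |  * target schema, ignoring the PostGIS metadata tables excluded by allowTable().
|
| | |  */
|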
| | | private boolean tablePresent(String schema, String table, Connection conn) throws IOException {
|
| | | final int TABLE_NAME_COL = 3;
|
| | |
|
| | | ResultSet tables = null;
|
| | | try {
|
| | | DatabaseMetaData meta = conn.getMetaData();
|
| | | String[] tableType = {"TABLE"};
|
| | | tables = meta.getTables(null, schema, "%", tableType);
|
| | |
|
| | | while (tables.next()) {
|
| | | String tableName = tables.getString(TABLE_NAME_COL);
|
| | |
|
| | | if (allowTable(tableName) && (tableName != null)
|
| | | && (tableName.equalsIgnoreCase(table))) {
|
| | | return true;
|
| | | }
|
| | | }
|
| | |
|
| | | return false;
|
| | | } catch (SQLException sqlException) {
|
| | | // JDBCUtils.close(conn, Transaction.AUTO_COMMIT, sqlException);
|
| | | String message = "Error querying database for list of tables:"
|
| | | + sqlException.getMessage();
|
| | | throw new DataSourceException(message, sqlException);
|
| | | } finally {
|
| | | if (tables != null) try {
|
| | | tables.close();
|
| | | } catch (SQLException e) {
|
| | | }
|
| | | // JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | }
|
| | | }
|
| | |
|
| | | protected boolean allowTable(String tablename) {
|
| | | if (tablename.equals("geometry_columns")) {
|
| | | return false;
|
| | | } else if (tablename.startsWith("spatial_ref_sys")) {
|
| | | return false;
|
| | | }
|
| | |
|
| | | //others?
|
| | | return true;
|
| | | }
|
| | |
|
| | |
|
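| | | /**
|
| | |  * Builds the column definitions for the non-geometry attributes. VARCHAR lengths are taken
|
| | |  * from the attribute restrictions when available, default to 512, and are capped at
|
| | |  * MAX_ALLOWED_VALUE; NOT NULL and DEFAULT clauses follow the descriptor settings.
|
| | |  */
|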
| | | private StringBuffer makeNonGeomSqlCreate(List<AttributeDescriptor> descriptors)
|
| | | throws IOException {
|
| | | StringBuffer buf = new StringBuffer("");
|
| | |
|
| | | for (AttributeDescriptor descriptor : descriptors) {
|
| | | String typeName;
|
| | | typeName = CLASS_MAPPINGS.get(((PropertyType) descriptor.getType()).getBinding());
|
| | | if (typeName == null) {
|
| | | typeName = GEOM_CLASS_MAPPINGS.get(((PropertyType) descriptor.getType()).getBinding());
|
| | | if (typeName != null) continue;
|
| | | }
|
| | |
|
| | | if (typeName != null) {
|
| | | if (typeName.equals("VARCHAR")) {
|
| | | int length = -1;
|
| | | List<Filter> flist = ((PropertyType) descriptor.getType()).getRestrictions();
|
| | | for (Filter f : flist) {
|
| | | if (f != null && f != Filter.EXCLUDE && f != Filter.INCLUDE &&
|
| | | (f instanceof PropertyIsLessThan || f instanceof PropertyIsLessThanOrEqualTo)) {
|
| | | try {
|
| | | BinaryComparisonOperator cf = (BinaryComparisonOperator) f;
|
| | | if (cf.getExpression1() instanceof LengthFunction) {
|
| | | length = Integer.parseInt(((Literal) cf.getExpression2()).getValue().toString());
|
| | | } else {
|
| | | if (cf.getExpression2() instanceof LengthFunction) {
|
| | | length = Integer.parseInt(((Literal) cf.getExpression1()).getValue().toString());
|
| | | }
|
| | | }
|
| | | } catch (NumberFormatException e) {
|
| | | length = 512;
|
| | | }
|
| | | } else {
|
| | | length = 512;
|
| | | }
|
| | | break;
|
| | | }
|
| | |
|
| | | if (length < 1) {
|
| | | getLogger().info("FeatureType did not specify string length; defaulted to 512");
|
| | | length = 512;
|
| | | } else if (length > MAX_ALLOWED_VALUE) {
|
| | | length = MAX_ALLOWED_VALUE;
|
| | | }
|
| | | typeName = typeName + "(" + length + ")";
|
| | | }
|
| | |
|
| | | if (!descriptor.isNillable()) {
|
| | | typeName = typeName + " NOT NULL";
|
| | | }
|
| | |
|
| | | //TODO review!!! Is toString() always OK???
|
| | | Object defaultValue = descriptor.getDefaultValue();
|
| | |
|
| | | if (defaultValue != null) {
|
| | | typeName = typeName + " DEFAULT '"
|
| | | + defaultValue.toString() + "'";
|
| | | }
|
| | |
|
| | | buf.append(" \"").append(descriptor.getLocalName()).append("\" ").append(typeName).append(",");
|
| | |
|
| | | } else {
|
| | | String msg;
|
| | | if (descriptor == null) {
|
| | | msg = "AttributeType was null!";
|
| | | } else {
|
| | | msg = "Type '" + ((PropertyType) descriptor.getType()).getBinding() + "' not supported!";
|
| | | }
|
| | | throw (new IOException(msg));
|
| | | }
|
| | | }
|
| | |
|
| | | return buf.deleteCharAt(buf.length() - 1);
|
| | | }
|
| | |
|
| | | private String getGeometrySQLTypeName(Class type) {
|
| | | String res = GEOM_CLASS_MAPPINGS.get(type);
|
| | |
|
| | | if (res == null) {
|
| | | throw new RuntimeException("Unknown type name for class " + type
|
| | | + " please update GEOMETRY_MAPPINGS");
|
| | | }
|
| | |
|
| | | return res;
|
| | | }
|
| | |
|
| | | protected String getGeometryInsertText(Geometry geom, int srid) // throws IOException
|
| | | {
|
| | | if (geom == null) {
|
| | | return "null";
|
| | | }
|
| | |
|
| | | /*
|
| | | if (targetDataStore.isWKBEnabled()) {
|
| | | //String wkb = WKBEncoder.encodeGeometryHex(geom);
|
| | | String wkb = WKBWriter.bytesToHex(new WKBWriter().write(geom));
|
| | |
|
| | | if (targetDataStore.isByteaWKB()) {
|
| | | return "setSRID('" + wkb + "'::geometry," + srid + ")";
|
| | | } else {
|
| | | return "GeomFromWKB('" + wkb + "', " + srid + ")";
|
| | | }
|
| | | }
|
| | |
|
| | | String geoText = geometryWriter.write(geom);
|
| | | return "GeometryFromText('" + geoText + "', " + srid + ")";
|
| | | */
|
| | | if (geom instanceof LinearRing) {
|
| | | //postgis does not handle linear rings, convert to just a line string
|
| | | geom = geom.getFactory().createLineString(((LinearRing) geom).getCoordinateSequence());
|
| | | }
|
| | | return "ST_GeomFromText('" + geom.toText() + "', " + srid + ")";
|
| | | }
|
| | |
|
| | | protected String makeInsertSql(SimpleFeature feature, int srid) // throws IOException
|
| | | {
|
| | | SimpleFeatureType featureType = feature.getFeatureType();
|
| | |
|
| | | String tableName = encodeSchemaTableName(featureType.getTypeName());
|
| | | List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();
|
| | |
|
| | | String attrValue;
|
| | |
|
| | | StringBuilder statementSQL = new StringBuilder(512);
|
| | | statementSQL.append("INSERT INTO ").append(tableName).append(" (");
|
| | |
|
| | | // encode insertion for attributes, but remember to avoid auto-increment ones,
|
| | | // they may be included in the feature type as well
|
| | | for (AttributeDescriptor attributeDescriptor : descriptors) {
|
| | | String attName = attributeDescriptor.getLocalName();
|
| | |
|
| | | if (feature.getAttribute(attName) != null) {
|
| | | String colName = encodeSchemaColumnName(attName);
|
| | | statementSQL.append(colName).append(",");
|
| | | }
|
| | | }
|
| | |
|
| | | statementSQL.setCharAt(statementSQL.length() - 1, ')');
|
| | | statementSQL.append(" VALUES (");
|
| | |
|
| | | List<Object> attributes = feature.getAttributes();
|
| | |
|
| | | for (int i = 0; i < descriptors.size(); i++) {
|
| | | attrValue = null;
|
| | |
|
| | | if (descriptors.get(i) instanceof GeometryDescriptor) {
|
| | | // String geomName = descriptors[i].getLocalName();
|
| | | // int srid = ftInfo.getSRID(geomName);
|
| | | Geometry geometry = (Geometry) attributes.get(i);
|
| | |
|
| | | if (geometry == null) {
|
| | | attrValue = "NULL";
|
| | | } else {
|
| | | attrValue = getGeometryInsertText(geometry, srid);
|
| | | }
|
| | | } else {
|
| | | if (attributes.get(i) != null) {
|
| | | attrValue = addQuotes(attributes.get(i));
|
| | | }
|
| | | }
|
| | |
|
| | | if (attrValue != null) {
|
| | | statementSQL.append(attrValue).append(",");
|
| | | }
|
| | | }
|
| | |
|
| | | statementSQL.setCharAt(statementSQL.length() - 1, ')');
|
| | |
|
| | | return (statementSQL.toString());
|
| | | }
|
| | |
|
| | | protected String makePrepareInsertSql(SimpleFeatureType featureType) {
|
| | | String tableName = encodeSchemaTableName(featureType.getTypeName());
|
| | | List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();
|
| | |
|
| | | StringBuilder statementSQL = new StringBuilder("INSERT INTO " + tableName + " (");
|
| | |
|
| | | // encode insertion for attributes, but remember to avoid auto-increment ones,
|
| | | // they may be included in the feature type as well
|
| | | for (AttributeDescriptor descriptor : descriptors) {
|
| | | String attName = descriptor.getLocalName();
|
| | |
|
| | | String colName = encodeSchemaColumnName(attName);
|
| | | statementSQL.append(colName).append(",");
|
| | | }
|
| | |
|
| | | statementSQL.setCharAt(statementSQL.length() - 1, ')');
|
| | | statementSQL.append(" VALUES (");
|
| | |
|
| | | for (AttributeDescriptor descriptor : descriptors) {
|
| | | statementSQL.append(" ? ,");
|
| | | }
|
| | |
|
| | | statementSQL.setCharAt(statementSQL.length() - 1, ')');
|
| | |
|
| | | return (statementSQL.toString());
|
| | | }
|
| | |
|
| | | protected String addQuotes(Object value) {
|
| | | String retString;
|
| | |
|
| | | if (value != null) {
|
| | | if (value instanceof Number) {
|
| | | retString = value.toString();
|
| | | } else {
|
| | | retString = "'" + doubleQuote(value) + "'";
|
| | | }
|
| | | } else {
|
| | | retString = "null";
|
| | | }
|
| | |
|
| | | return retString;
|
| | | }
|
| | |
|
| | | String doubleQuote(Object obj) {
|
| | | return obj.toString().replaceAll("'", "''");
|
| | | }
|
| | |
|
| | | protected String encodeName(String tableName) {
|
| | | return tableName;
|
| | | }
|
| | |
|
| | | protected String encodeColumnName(String colName) {
|
| | | return encodeName(colName);
|
| | | }
|
| | |
|
| | | public String encodeSchemaTableName(String tableName) {
|
| | | return schemaEnabled ? ("\"" + getTargetSchema() + "\".\"" + tableName + "\"")
|
| | | : ("\"" + tableName + "\"");
|
| | | }
|
| | |
|
| | | public String encodeSchemaColumnName(String columnName) {
|
| | | return "\"" + columnName + "\"";
|
| | | }
|
| | |
|
| | | protected void bindFeatureParameters(PreparedStatement pstmt, SimpleFeature feature) throws SQLException {
|
| | | SimpleFeatureType featureType = feature.getFeatureType();
|
| | |
|
| | | List<AttributeDescriptor> descriptors = featureType.getAttributeDescriptors();
|
| | | List<Object> attributes = feature.getAttributes();
|
| | |
|
| | | for (int i = 0; i < descriptors.size(); i++) {
|
| | | if (descriptors.get(i) instanceof GeometryDescriptor) {
|
| | | // String wktTest = geometryWriter.write((Geometry) attributes.get(i));
|
| | | Geometry geom = (Geometry) attributes.get(i);
|
| | | geom.setSRID(FeatureTypeBuilderUtil.getDefaultFeatureSRID());
|
| | | pstmt.setBytes(i + 1, binaryWriter.writeBinary(geom));
|
| | | } else {
|
| | | Class<?> bindingType = ((AttributeType)descriptors.get(i).getType()).getBinding();
|
| | | if (bindingType.equals(Short.class)) {
|
| | | pstmt.setShort(i + 1, (Short) attributes.get(i));
|
| | | } else if (bindingType.equals(Integer.class)) {
|
| | | pstmt.setInt(i + 1, (Integer) attributes.get(i));
|
| | | } else if (bindingType.equals(Long.class)) {
|
| | | pstmt.setLong(i + 1, (Long) attributes.get(i));
|
| | | } else if (bindingType.equals(String.class)) {
|
| | | pstmt.setString(i + 1, (String) attributes.get(i));
|
| | | } else if (bindingType.equals(Float.class)) {
|
| | | pstmt.setFloat(i + 1, (Float) attributes.get(i));
|
| | | } else if (bindingType.equals(Double.class)) {
|
| | | pstmt.setDouble(i + 1, (Double) attributes.get(i));
|
| | | } else if (bindingType.equals(Boolean.class)) {
|
| | | pstmt.setBoolean(i + 1, (Boolean) attributes.get(i));
|
| | | } else if (bindingType.equals(BigDecimal.class)) {
|
| | | pstmt.setBigDecimal(i + 1, (BigDecimal) attributes.get(i));
|
| | | } else if (bindingType.equals(java.sql.Date.class)) {
|
| | | pstmt.setDate(i + 1, (java.sql.Date) attributes.get(i));
|
| | | } else if (bindingType.equals(java.sql.Time.class)) {
|
| | | pstmt.setTime(i + 1, (java.sql.Time) attributes.get(i));
|
| | | } else if (bindingType.equals(java.sql.Timestamp.class)) {
|
| | | pstmt.setTimestamp(i + 1, (java.sql.Timestamp) attributes.get(i));
|
| | | } else if (bindingType.equals(java.util.Date.class)) {
|
| | | java.sql.Date sDate = new java.sql.Date(((java.util.Date) attributes.get(i)).getTime());
|
| | | pstmt.setDate(i + 1, sDate);
|
| | | }
|
| | | }
|
| | |
|
| | | }
|
| | | }
|
| | | }
|
New file |
| | |
| | | package com.ximple.eofms.jobs.context.edbgeo;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.IOException;
|
| | | import java.io.UnsupportedEncodingException;
|
| | | import java.net.MalformedURLException;
|
| | | import java.net.URL;
|
| | | import java.util.ArrayList;
|
| | | import java.util.HashMap;
|
| | | import java.util.Iterator;
|
| | | import java.util.List;
|
| | |
|
| | | import com.ximple.eofms.util.ElementDigesterUtils;
|
| | | import org.apache.commons.digester3.Digester;
|
| | | import org.apache.commons.logging.Log;
|
| | | import org.apache.commons.logging.LogFactory;
|
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper;
|
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger;
|
| | | import org.apache.commons.transaction.util.LoggerFacade;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.FeatureWriter;
|
| | | import org.geotools.data.Transaction;
|
| | | import org.geotools.feature.SchemaException;
|
| | | import org.opengis.feature.IllegalAttributeException;
|
| | | import org.opengis.feature.simple.SimpleFeature;
|
| | | import org.opengis.feature.simple.SimpleFeatureType;
|
| | | import org.xml.sax.SAXException;
|
| | |
|
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter;
|
| | | import com.ximple.eofms.filter.ElementDispatchableFilter;
|
| | | import com.ximple.eofms.filter.ElementDispatcher;
|
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter;
|
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter;
|
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter;
|
| | | import com.ximple.io.dgn7.ComplexElement;
|
| | | import com.ximple.io.dgn7.Element;
|
| | | import com.ximple.io.dgn7.FrammeAttributeData;
|
| | | import com.ximple.io.dgn7.UserAttributeData;
|
| | |
|
| | | public class DummyFeatureConvertEdbGeoJobContext extends AbstractDgnToEdbGeoJobContext {
|
| | | static final Log logger = LogFactory.getLog(DummyFeatureConvertEdbGeoJobContext.class);
|
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger);
|
| | |
|
| | | private String dataOut = null;
|
| | |
|
| | | private HashMap<String, ArrayList<SimpleFeature>> featuresContext = new HashMap<String, ArrayList<SimpleFeature>>();
|
| | | private HashMap<String, FeatureWriter> featuresWriterContext = new HashMap<String, FeatureWriter>();
|
| | |
|
| | | private PessimisticMapWrapper txFeaturesContext;
|
| | |
|
| | | private ElementDispatcher elementDispatcher;
|
| | | private String _filterConfig;
|
| | | private boolean withIndex = false;
|
| | |
|
| | | public DummyFeatureConvertEdbGeoJobContext(String dataPath, DataStore targetDataStore, String targetSchema,
|
| | | String filterConfig, boolean profileMode,
|
| | | boolean useTransform) {
|
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform);
|
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger);
|
| | | _filterConfig = filterConfig;
|
| | | elementDispatcher = createElementDispatcher();
|
| | | }
|
| | |
|
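| | | /**
|
| | |  * Creates the ElementDispatcher from the configured filter file, falling back to the
|
| | |  * classpath resource /conf/DefaultConvertShpFilter.xml when no external file is found.
|
| | |  */
|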
| | | private ElementDispatcher createElementDispatcher() {
|
| | | try {
|
| | | URL filterURL = null;
|
| | | if (_filterConfig != null) {
|
| | | File config = new File(_filterConfig);
|
| | | if (config.exists()) {
|
| | | filterURL = config.toURI().toURL();
|
| | | }
|
| | | }
|
| | | if (filterURL == null) {
|
| | | // config = new File("conf/DefaultConvertShpFilter.xml");
|
| | | filterURL = this.getClass().getResource("/conf/DefaultConvertShpFilter.xml");
|
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml");
|
| | | }
|
| | | assert filterURL != null;
|
| | | Digester digester = ElementDigesterUtils.getElementDigester();
|
| | | return (ElementDispatcher) digester.parse(filterURL);
|
| | | } catch (UnsupportedEncodingException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (MalformedURLException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (SAXException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
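| | | /**
|
| | |  * Dispatches a DGN element to a SimpleFeature and buffers it per feature type; elements
|
| | |  * that cannot be converted are logged together with their FRAMME linkage information.
|
| | |  */
|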
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException {
|
| | | assert elementDispatcher != null;
|
| | |
|
| | | if (element == null) {
|
| | | logger.warn("Unknown Element:" + null);
|
| | | return;
|
| | | }
|
| | |
|
| | | // Determine whether the element can be converted into a feature
|
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed());
|
| | | if (feature == null) {
|
| | | FrammeAttributeData linkage =
|
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element);
|
| | | logger.warn("Unknown Element:" + element.getElementType().toString() +
|
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" +
|
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID())));
|
| | | if (element instanceof ComplexElement) {
|
| | | ComplexElement complex = (ComplexElement) element;
|
| | | logger.warn("----Complex Element size=" + complex.size());
|
| | | }
|
| | |
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) {
|
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | ArrayList<SimpleFeature> arrayList = (ArrayList<SimpleFeature>) txFeaturesContext.get(feature.getFeatureType());
|
| | | arrayList.add(feature);
|
| | | }
|
| | |
|
| | | public void startTransaction() {
|
| | | assert elementDispatcher != null;
|
| | | for (ElementDispatchableFilter filter : elementDispatcher.getRules()) {
|
| | | if (filter instanceof TypeCompIdDispatchableFilter) {
|
| | | ((TypeCompIdDispatchableFilter) filter).getCreateStrategy();
|
| | | } else if (filter instanceof TypeCompLevelIdDispatchableFilter) {
|
| | | ((TypeCompLevelIdDispatchableFilter) filter).getCreateStrategy();
|
| | | } else if (filter instanceof TypeIdDispatchableFilter) {
|
| | | ((TypeIdDispatchableFilter) filter).getCreateStrategy();
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | public void commitTransaction() {
|
| | | if (!txFeaturesContext.isEmpty()) {
|
| | | logger.debug("Transaction size = " + txFeaturesContext.size());
|
| | | //txFeaturesContext.commitTransaction();
|
| | | } else {
|
| | | logger.debug("Transaction is empty.");
|
| | | }
|
| | |
|
| | | if (!featuresContext.isEmpty()) {
|
| | | updateDataStore();
|
| | | }
|
| | | }
|
| | |
|
| | | public void rollbackTransaction() {
|
| | | //txFeaturesContext.rollbackTransaction();
|
| | | if (!featuresContext.isEmpty()) {
|
| | | updateDataStore();
|
| | | }
|
| | | }
|
| | |
|
| | | private void updateDataStore() {
|
| | | Iterator it = featuresContext.keySet().iterator();
|
| | |
|
| | | try {
|
| | | while (it.hasNext()) {
|
| | | SimpleFeatureType featureType = (SimpleFeatureType) it.next();
|
| | | logger.debug("Begin Save EdbGeo:" + featureType.getTypeName());
|
| | |
|
| | | FeatureWriter writer;
|
| | | if (featuresWriterContext.containsKey(featureType.getTypeName())) {
|
| | | writer = featuresWriterContext.get(featureType.getTypeName());
|
| | | } else {
|
| | | if (!isExistFeature(featureType)) {
|
| | | getTargetDataStore().createSchema(featureType);
|
| | | writer = getTargetDataStore().getFeatureWriter(featureType.getTypeName(), Transaction.AUTO_COMMIT);
|
| | | } else {
|
| | | writer = getTargetDataStore().getFeatureWriterAppend(featureType.getTypeName(), Transaction.AUTO_COMMIT);
|
| | | }
|
| | | featuresWriterContext.put(featureType.getTypeName(), writer);
|
| | | }
|
| | |
|
| | | ArrayList<SimpleFeature> features = featuresContext.get(featureType);
|
| | | Iterator itFeature = features.iterator();
|
| | | while (itFeature.hasNext()) {
|
| | | SimpleFeature feature = (SimpleFeature) itFeature.next();
|
| | | ((SimpleFeature) writer.next()).setAttributes(feature.getAttributes());
|
| | | }
|
| | | //writer.close();
|
| | | logger.debug("End Save EdbGeo:" + featureType.getTypeName());
|
| | | }
|
| | | featuresContext.clear();
|
| | | } catch (MalformedURLException e) {
|
| | | logger.error(e.getMessage(), e);
|
| | | } catch (IllegalAttributeException e) {
|
| | | logger.error(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | logger.error(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | public void closeFeatureWriter() throws IOException {
|
| | |
|
| | | for (FeatureWriter featureWriter : this.featuresWriterContext.values()) {
|
| | | featureWriter.close();
|
| | | }
|
| | |
|
| | | this.featuresWriterContext.clear();
|
| | | }
|
| | |
|
| | | protected FrammeAttributeData getFeatureLinkage(Element element) {
|
| | | if (!element.hasUserAttributeData())
|
| | | return null;
|
| | |
|
| | | List<UserAttributeData> usrDatas = element.getUserAttributeData();
|
| | | for (UserAttributeData anUsrData : usrDatas) {
|
| | | if (anUsrData instanceof FrammeAttributeData) {
|
| | | return (FrammeAttributeData) anUsrData;
|
| | | }
|
| | | }
|
| | | return null;
|
| | | }
|
| | |
|
| | |
|
| | | public boolean isWithIndex() {
|
| | | return withIndex;
|
| | | }
|
| | |
|
| | | public void setWithIndex(boolean withIndex) {
|
| | | this.withIndex = withIndex;
|
| | | }
|
| | |
|
| | | public Log getLogger() {
|
| | | return logger;
|
| | | }
|
| | | }
|
New file |
| | |
| | | package com.ximple.eofms.jobs.context.edbgeo;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.IOException;
|
| | | import java.io.UnsupportedEncodingException;
|
| | | import java.net.MalformedURLException;
|
| | | import java.net.URL;
|
| | | import java.util.ArrayList;
|
| | | import java.util.HashMap;
|
| | | import java.util.Iterator;
|
| | | import java.util.List;
|
| | |
|
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter;
|
| | | import com.ximple.eofms.filter.ElementDispatcher;
|
| | | import com.ximple.eofms.util.ElementDigesterUtils;
|
| | | import com.ximple.io.dgn7.ComplexElement;
|
| | | import com.ximple.io.dgn7.Element;
|
| | | import com.ximple.io.dgn7.FrammeAttributeData;
|
| | | import com.ximple.io.dgn7.UserAttributeData;
|
| | | import org.apache.commons.digester3.Digester;
|
| | | import org.apache.commons.logging.Log;
|
| | | import org.apache.commons.logging.LogFactory;
|
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper;
|
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger;
|
| | | import org.apache.commons.transaction.util.LoggerFacade;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.FeatureWriter;
|
| | | import org.geotools.data.Transaction;
|
| | | import org.opengis.feature.IllegalAttributeException;
|
| | | import org.geotools.feature.SchemaException;
|
| | | import org.opengis.feature.simple.SimpleFeature;
|
| | | import org.opengis.feature.simple.SimpleFeatureType;
|
| | | import org.xml.sax.SAXException;
|
| | |
|
| | | public class FeatureDgnConvertEdbGeoJobContext extends AbstractDgnToEdbGeoJobContext {
|
| | | static final Log logger = LogFactory.getLog(FeatureDgnConvertEdbGeoJobContext.class);
|
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger);
|
| | |
|
| | | private HashMap<String, ArrayList<SimpleFeature>> featuresContext = new HashMap<String, ArrayList<SimpleFeature>>();
|
| | | private HashMap<String, FeatureWriter> featuresWriterContext = new HashMap<String, FeatureWriter>();
|
| | |
|
| | | private PessimisticMapWrapper txFeaturesContext;
|
| | |
|
| | | private ElementDispatcher elementDispatcher;
|
| | | private String _filterConfig;
|
| | | private boolean withIndex = false;
|
| | |
|
| | | public FeatureDgnConvertEdbGeoJobContext(String dataPath, DataStore targetDataStore, String targetSchema,
|
| | | String filterConfig, boolean profileMode,
|
| | | boolean useTransform) {
|
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform);
|
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger);
|
| | | _filterConfig = filterConfig;
|
| | | elementDispatcher = createElementDispatcher();
|
| | | }
|
| | |
|
| | | private ElementDispatcher createElementDispatcher() {
|
| | | try {
|
| | | URL filterURL = null;
|
| | | if (_filterConfig != null) {
|
| | | File config = new File(_filterConfig);
|
| | | if (config.exists()) {
|
| | | filterURL = config.toURI().toURL();
|
| | | }
|
| | | }
|
| | | if (filterURL == null) {
|
| | | // config = new File("conf/DefaultConvertShpFilter.xml");
|
| | | filterURL = this.getClass().getResource("/conf/DefaultConvertShpFilter.xml");
|
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml");
|
| | | }
|
| | | assert filterURL != null;
|
| | | Digester digester = ElementDigesterUtils.getElementDigester();
|
| | | return (ElementDispatcher) digester.parse(filterURL);
|
| | | } catch (UnsupportedEncodingException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (MalformedURLException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (SAXException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException {
|
| | | assert elementDispatcher != null;
|
| | |
|
| | | if (element == null) {
|
| | | logger.warn("Unknown Element:" + null);
|
| | | return;
|
| | | }
|
| | |
|
| | | // Determine whether the element can be converted into a feature
|
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed());
|
| | | if (feature == null) {
|
| | | FrammeAttributeData linkage =
|
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element);
|
| | | logger.warn("Unknown Element:" + element.getElementType().toString() +
|
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" +
|
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID())));
|
| | | if (element instanceof ComplexElement) {
|
| | | ComplexElement complex = (ComplexElement) element;
|
| | | logger.warn("----Complex Element size=" + complex.size());
|
| | | }
|
| | |
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) {
|
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList());
|
| | | }
|
| | | ArrayList<SimpleFeature> arrayList = (ArrayList<SimpleFeature>) txFeaturesContext.get(feature.getFeatureType());
|
| | | arrayList.add(feature);
|
| | | }
|
| | |
|
| | | public void startTransaction() {
|
| | | }
|
| | |
|
| | | public void commitTransaction() {
|
| | | if (!txFeaturesContext.isEmpty()) {
|
| | | logger.debug("Transaction size = " + txFeaturesContext.size());
|
| | | //txFeaturesContext.commitTransaction();
|
| | | } else {
|
| | | logger.debug("Transaction is empty.");
|
| | | }
|
| | |
|
| | | if (!featuresContext.isEmpty()) {
|
| | | updateDataStore();
|
| | | }
|
| | | }
|
| | |
|
| | | public void rollbackTransaction() {
|
| | | //txFeaturesContext.rollbackTransaction();
|
| | | if (!featuresContext.isEmpty()) {
|
| | | updateDataStore();
|
| | | }
|
| | | }
|
| | |
|
| | | private void updateDataStore() {
|
| | | Iterator it = featuresContext.keySet().iterator();
|
| | |
|
| | | try {
|
| | | while (it.hasNext()) {
|
| | | SimpleFeatureType featureType = (SimpleFeatureType) it.next();
|
| | | logger.debug("Begin Save EdbGeo:" + featureType.getTypeName());
|
| | |
|
| | | FeatureWriter writer;
|
| | | if (featuresWriterContext.containsKey(featureType.getTypeName())) {
|
| | | writer = featuresWriterContext.get(featureType.getTypeName());
|
| | | } else {
|
| | | if (!isExistFeature(featureType)) {
|
| | | getTargetDataStore().createSchema(featureType);
|
| | | writer = getTargetDataStore().getFeatureWriter(featureType.getTypeName(),
|
| | | Transaction.AUTO_COMMIT);
|
| | | } else {
|
| | | writer = getTargetDataStore().getFeatureWriterAppend(featureType.getTypeName(),
|
| | | Transaction.AUTO_COMMIT);
|
| | | }
|
| | | featuresWriterContext.put(featureType.getTypeName(), writer);
|
| | | }
|
| | |
|
| | | ArrayList<SimpleFeature> features = featuresContext.get(featureType);
|
| | | Iterator itFeature = features.iterator();
|
| | | while (itFeature.hasNext()) {
|
| | | SimpleFeature feature = (SimpleFeature) itFeature.next();
|
| | | ((SimpleFeature) writer.next()).setAttributes(feature.getAttributes());
|
| | | }
|
| | | //writer.close();
|
| | | logger.debug("End Save EdbGeo:" + featureType.getTypeName());
|
| | | }
|
| | | featuresContext.clear();
|
| | | } catch (MalformedURLException e) {
|
| | | logger.error(e.getMessage(), e);
|
| | | } catch (IllegalAttributeException e) {
|
| | | logger.error(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | logger.error(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | public void closeFeatureWriter() {
|
| | | try {
|
| | | for (FeatureWriter featureWriter : this.featuresWriterContext.values()) {
|
| | | featureWriter.close();
|
| | | }
|
| | | } catch (IOException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | |
|
| | | this.featuresWriterContext.clear();
|
| | | }
|
| | |
|
| | | protected FrammeAttributeData getFeatureLinkage(Element element) {
|
| | | if (!element.hasUserAttributeData())
|
| | | return null;
|
| | |
|
| | | List<UserAttributeData> usrDatas = element.getUserAttributeData();
|
| | | for (UserAttributeData anUsrData : usrDatas) {
|
| | | if (anUsrData instanceof FrammeAttributeData) {
|
| | | return (FrammeAttributeData) anUsrData;
|
| | | }
|
| | | }
|
| | | return null;
|
| | | }
|
| | |
|
| | |
|
| | | public boolean isWithIndex() {
|
| | | return withIndex;
|
| | | }
|
| | |
|
| | | public void setWithIndex(boolean withIndex) {
|
| | | this.withIndex = withIndex;
|
| | | }
|
| | |
|
| | | public Log getLogger() {
|
| | | return logger;
|
| | | }
|
| | | }
|
New file |
| | |
| | | package com.ximple.eofms.jobs.context.edbgeo;
|
| | |
|
| | | import java.io.IOException;
|
| | | import java.math.BigDecimal;
|
| | | import java.math.RoundingMode;
|
| | | import java.sql.Connection;
|
| | | import java.sql.PreparedStatement;
|
| | | import java.sql.SQLException;
|
| | | import java.sql.Statement;
|
| | | import java.util.ArrayList;
|
| | | import java.util.HashMap;
|
| | | import java.util.Iterator;
|
| | | import java.util.List;
|
| | | import java.util.TreeMap;
|
| | |
|
| | | import org.apache.commons.logging.Log;
|
| | | import org.apache.commons.logging.LogFactory;
|
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger;
|
| | | import org.apache.commons.transaction.util.LoggerFacade;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.Transaction;
|
| | | import org.geotools.data.jdbc.JDBCUtils;
|
| | | import org.opengis.feature.IllegalAttributeException;
|
| | | import org.geotools.feature.SchemaException;
|
| | | import org.geotools.feature.simple.SimpleFeatureBuilder;
|
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
|
| | | import org.geotools.feature.type.FeatureTypeImpl;
|
| | | import org.geotools.geometry.jts.JTSFactoryFinder;
|
| | | import org.opengis.feature.simple.SimpleFeature;
|
| | | import org.opengis.feature.simple.SimpleFeatureType;
|
| | | import com.edb.util.PSQLException;
|
| | |
|
| | | import com.vividsolutions.jts.geom.Geometry;
|
| | | import com.vividsolutions.jts.geom.GeometryFactory;
|
| | |
|
| | | import com.ximple.eofms.util.DefaultColorTable;
|
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil;
|
| | | import com.ximple.eofms.util.GeometryConverterDecorator;
|
| | | import com.ximple.io.dgn7.ArcElement;
|
| | | import com.ximple.io.dgn7.ComplexChainElement;
|
| | | import com.ximple.io.dgn7.Element;
|
| | | import com.ximple.io.dgn7.EllipseElement;
|
| | | import com.ximple.io.dgn7.FrammeAttributeData;
|
| | | import com.ximple.io.dgn7.LineElement;
|
| | | import com.ximple.io.dgn7.LineStringElement;
|
| | | import com.ximple.io.dgn7.ShapeElement;
|
| | | import com.ximple.io.dgn7.TextElement;
|
| | | import com.ximple.io.dgn7.TextNodeElement;
|
| | | import com.ximple.io.dgn7.UserAttributeData;
|
| | |
|
| | | public class GeneralDgnConvertEdbGeoJobContext extends AbstractDgnToEdbGeoJobContext {
|
| | | static final Log logger = LogFactory.getLog(GeneralDgnConvertEdbGeoJobContext.class);
|
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger);
|
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null);
|
| | |
|
| | | private HashMap<SimpleFeatureType, ArrayList<SimpleFeature>> txFeaturesContext = new HashMap<SimpleFeatureType, ArrayList<SimpleFeature>>();
|
| | |
|
| | | private TreeMap<String, SimpleFeatureType> featureTypes = new TreeMap<String, SimpleFeatureType>();
|
| | |
|
| | | private String featureBaseName = null;
|
| | | private boolean dropTableMode = true;
|
| | |
|
| | | private int accumulate = 0;
|
| | |
|
| | | public GeneralDgnConvertEdbGeoJobContext(String dataPath, DataStore targetDataStore, String targetSchema,
|
| | | boolean profileMode, boolean useTransform) {
|
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform);
|
| | | }
|
| | |
|
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException {
|
| | | SimpleFeatureType ft = lookupFeatureType(element);
|
| | | if (ft != null) {
|
| | | boolean canLog = true;
|
| | | SimpleFeature feature = null;
|
| | | try {
|
| | | feature = createFeature(ft, element);
|
| | | } catch (ArrayIndexOutOfBoundsException e) {
|
| | | getLogger().warn(e.getMessage(), e);
|
| | | canLog = false;
|
| | | } catch (ClassCastException e) {
|
| | | getLogger().warn(e.getMessage(), e);
|
| | | }
|
| | |
|
| | | if (feature == null) {
|
| | | if (!canLog) return;
|
| | |
|
| | | if (element instanceof TextElement)
|
| | | logger.info("cannot craete feature." + element.toString() + "'" +
|
| | | ((TextElement) element).getText() + "'");
|
| | | else if (element instanceof ShapeElement)
|
| | | logger.info("cannot craete feature." + element.toString() + "'" +
|
| | | ((ShapeElement) element).getVerticeSize() + "'" +
|
| | | ((ShapeElement) element).getStartPoint());
|
| | | else if (element instanceof LineStringElement)
|
| | | logger.info("cannot craete feature." + element.toString() + "'" +
|
| | | ((LineStringElement) element).getVerticeSize() + "'" +
|
| | | ((LineStringElement) element).getStartPoint());
|
| | | else if (element instanceof ArcElement)
|
| | | logger.info("cannot craete feature." + element.toString() + "'" +
|
| | | ((ArcElement) element).getOrigin().toString() + "'" +
|
| | | ((ArcElement) element).getRotationAngle());
|
| | |
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) {
|
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | ArrayList<SimpleFeature> arrayList = txFeaturesContext.get(feature.getFeatureType());
|
| | | if (feature.getDefaultGeometry() != null && !((Geometry)feature.getDefaultGeometry()).isEmpty()) {
|
| | | arrayList.add(feature);
|
| | | accumulate++;
|
| | | }
|
| | | } else {
|
| | | logger.info("Unknown Element :" + element.getType() + ", lv=" + element.getLevelIndex());
|
| | | }
|
| | |
|
| | | if (accumulate > BATCHSIZE) {
|
| | | commitTransaction();
|
| | | }
|
| | | }
|
| | |
|
| | | // private Transaction transaction;
|
| | |
|
| | | public void startTransaction() {
|
| | | }
|
| | |
|
| | | public void commitTransaction() {
|
| | | if (!txFeaturesContext.isEmpty()) {
|
| | | logger.debug("Transaction size = " + txFeaturesContext.size());
|
| | | //txFeaturesContext.commitTransaction();
|
| | | } else {
|
| | | logger.debug("Transaction is empty.");
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.isEmpty()) {
|
| | | updateDataStore();
|
| | | }
|
| | | }
|
| | |
|
| | | public void rollbackTransaction() {
|
| | | }
|
| | |
|
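| | | /**
|
| | |  * Flushes the buffered features to the target store using one JDBC batch per feature type
|
| | |  * inside a manual-commit transaction, then resets the accumulate counter.
|
| | |  */
|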
| | | private void updateDataStore() {
|
| | | if (isProfileMode()) markUpdateTime();
|
| | | Iterator<SimpleFeatureType> it = txFeaturesContext.keySet().iterator();
|
| | | Connection conn = null;
|
| | | try {
|
| | | conn = getConnection();
|
| | | boolean autoCommit = conn.getAutoCommit();
|
| | | conn.setAutoCommit(false);
|
| | |
|
| | | while (it.hasNext()) {
|
| | | SimpleFeatureType featureType = it.next();
|
| | | logger.debug("Begin Save into EdbGeo:" + featureType.getTypeName());
|
| | |
|
| | | String bindingStmt = makePrepareInsertSql(featureType);
|
| | | ArrayList<SimpleFeature> features = txFeaturesContext.get(featureType);
|
| | | PreparedStatement pstmt = conn.prepareStatement(bindingStmt);
|
| | |
|
| | | for (SimpleFeature feature : features) {
|
| | | // currentStmt = feature;
|
| | | // Statement stmt = conn.createStatement();
|
| | | try {
|
| | | // stmt.execute(feature);
|
| | | bindFeatureParameters(pstmt, feature);
|
| | | // pstmt.execute();
|
| | | pstmt.addBatch();
|
| | | } catch (PSQLException e) {
|
| | | if (bindingStmt != null) {
|
| | | logger.error("Execute:" + bindingStmt);
|
| | | }
|
| | | logger.error(e.getServerErrorMessage());
|
| | | logger.error(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | int[] numUpdates = pstmt.executeBatch();
|
| | | for (int i = 0; i < numUpdates.length; i++) {
|
| | | if (numUpdates[i] == -2)
|
| | | logger.warn("Execution " + i + ": unknown number of rows updated");
|
| | | }
|
| | | conn.commit();
|
| | |
|
| | | JDBCUtils.close(pstmt);
|
| | | features.clear();
|
| | | logger.debug("End Save into EdbGeo:" + featureType.getTypeName());
|
| | | }
|
| | | conn.setAutoCommit(autoCommit);
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | accumulate = 0;
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.error(e.getMessage(), e);
|
| | | } finally {
|
| | | if (isProfileMode()) accumulateUpdateTime();
|
| | | }
|
| | | }
|
| | |
|
| | | public void closeFeatureWriter() {
|
| | | txFeaturesContext.clear();
|
| | | /*
|
| | | for (FeatureWriter featureWriter : this.featuresWriterContext.values())
|
| | | {
|
| | | featureWriter.close();
|
| | | }
|
| | |
|
| | | this.featuresWriterContext.clear();
|
| | | */
|
| | | }
|
| | |
|
| | | public SimpleFeatureType createPointFeatureElement(String featureName) throws SchemaException {
|
| | | if (!featureTypes.containsKey(featureName)) {
|
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalPointFeatureTypeBuilder(featureName);
|
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType();
|
| | | featureTypes.put(featureName, featureType);
|
| | | clearFeatureData(typeBuilder);
|
| | | }
|
| | | return featureTypes.get(featureName);
|
| | | }
|
| | |
|
| | | public SimpleFeatureType createPolygonFeatureElement(String featureName) throws SchemaException {
|
| | | if (!featureTypes.containsKey(featureName)) {
|
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalPolygonFeatureTypeBuilder(featureName);
|
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType();
|
| | | featureTypes.put(featureName, featureType);
|
| | | clearFeatureData(typeBuilder);
|
| | | }
|
| | | return featureTypes.get(featureName);
|
| | | }
|
| | |
|
| | | public SimpleFeatureType createLineFeatureElement(String featureName) throws SchemaException {
|
| | | if (!featureTypes.containsKey(featureName)) {
|
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalLineFeatureTypeBuilder(featureName);
|
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType();
|
| | | featureTypes.put(featureName, featureType);
|
| | | clearFeatureData(typeBuilder);
|
| | | }
|
| | | return featureTypes.get(featureName);
|
| | | }
|
| | |
|
| | | public SimpleFeatureType createMultiLineFeatureElement(String featureName) throws SchemaException {
|
| | | if (!featureTypes.containsKey(featureName)) {
|
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalMultiLineFeatureTypeBuilder(featureName);
|
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType();
|
| | | featureTypes.put(featureName, featureType);
|
| | | clearFeatureData(typeBuilder);
|
| | | }
|
| | | return featureTypes.get(featureName);
|
| | | }
|
| | |
|
| | | public SimpleFeatureType createArcFeatureElement(String featureName) throws SchemaException {
|
| | | if (!featureTypes.containsKey(featureName)) {
|
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalArcFeatureTypeBuilder(featureName);
|
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType();
|
| | | featureTypes.put(featureName, featureType);
|
| | | clearFeatureData(typeBuilder);
|
| | | }
|
| | | return featureTypes.get(featureName);
|
| | | }
|
| | |
|
| | | public SimpleFeatureType createEllipseFeatureElement(String featureName) throws SchemaException {
|
| | | if (!featureTypes.containsKey(featureName)) {
|
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalEllipseFeatureTypeBuilder(featureName);
|
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType();
|
| | | featureTypes.put(featureName, featureType);
|
| | | clearFeatureData(typeBuilder);
|
| | | }
|
| | | return featureTypes.get(featureName);
|
| | | }
|
| | |
|
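| | | // Converts a single DGN element into a SimpleFeature via the default GeometryConverterDecorator;
|
| | | // returns null when the element type is unsupported or no geometry can be produced.
|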
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element) throws IllegalAttributeException {
|
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance();
|
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter();
|
| | |
|
| | | if (element instanceof TextElement) {
|
| | | TextElement textElement = (TextElement) element;
|
| | | convertDecorator.setConverter(textElement);
|
| | |
|
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory);
|
| | | double angle = textElement.getRotationAngle();
|
| | | String content = textElement.getText();
|
| | | content = content.replace('\u0000', ' ');
|
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue();
|
| | | if (geom != null) {
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | colorTable.getColorCode(textElement.getColorIndex()),
|
| | | textElement.getFontIndex(),
|
| | | textElement.getJustification(),
|
| | | textElement.getTextHeight(),
|
| | | textElement.getTextWidth(),
|
| | | angle,
|
| | | content
|
| | | }, null);
|
| | | } else {
|
| | | logger.info("geometry is null." + element.toString());
|
| | | }
|
| | | return null;
|
| | | } else if (element instanceof TextNodeElement) {
|
| | | TextNodeElement textNodeElement = (TextNodeElement) element;
|
| | | convertDecorator.setConverter(textNodeElement);
|
| | |
|
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory);
|
| | | double angle = textNodeElement.getRotationAngle();
|
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue();
|
| | | String[] texts = textNodeElement.getTextArray();
|
| | | StringBuilder sb = new StringBuilder();
|
| | | for (String text : texts) {
|
| | | if (sb.length() != 0)
|
| | | sb.append("\n");
|
| | | String content = text.replace('\u0000', ' ');
|
| | | sb.append(content);
|
| | | }
|
| | |
|
| | | if (geom != null) {
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | colorTable.getColorCode(textNodeElement.getColorIndex()),
|
| | | textNodeElement.getFontIndex(),
|
| | | textNodeElement.getJustification(),
|
| | | textNodeElement.getTextNodeHeight(),
|
| | | textNodeElement.getTextNodeLength(),
|
| | | angle,
|
| | | sb.toString()
|
| | | }, null);
|
| | | } else {
|
| | | logger.info("geometry is null." + element.toString());
|
| | | }
|
| | | return null;
|
| | | } else if (element instanceof ShapeElement) {
|
| | | ShapeElement shapeElement = (ShapeElement) element;
|
| | | convertDecorator.setConverter(shapeElement);
|
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory);
|
| | | if (geom != null) {
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | colorTable.getColorCode(shapeElement.getColorIndex()),
|
| | | shapeElement.getWeight(),
|
| | | shapeElement.getLineStyle()
|
| | | }, null);
|
| | | } else {
|
| | | logger.info("geometry is null." + element.toString());
|
| | | }
|
| | | return null;
|
| | | } else if (element instanceof LineStringElement) {
|
| | | LineStringElement linestring = (LineStringElement) element;
|
| | | convertDecorator.setConverter(linestring);
|
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory);
|
| | | if (geom != null) {
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | colorTable.getColorCode(linestring.getColorIndex()),
|
| | | linestring.getWeight(),
|
| | | linestring.getLineStyle()
|
| | | }, null);
|
| | | }
|
| | | return null;
|
| | | } else if (element instanceof LineElement) {
|
| | | LineElement line = (LineElement) element;
|
| | | convertDecorator.setConverter(line);
|
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory);
|
| | | if (geom != null) {
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | colorTable.getColorCode(line.getColorIndex()),
|
| | | line.getWeight(),
|
| | | line.getLineStyle()
|
| | | }, null);
|
| | | }
|
| | | return null;
|
| | | } else if (element instanceof ArcElement) {
|
| | | ArcElement arcElement = (ArcElement) element;
|
| | | /*
|
| | | logger.fatal("" + arcElement.getPrimary() + ":" + arcElement.getSecondary() +
|
| | | "-" + arcElement.getStartAngle() + ":" + arcElement.getSweepAngle() + ":" +
|
| | | arcElement.getRotationAngle() + ":" + arcElement.getOrigin());
|
| | | */
|
| | | convertDecorator.setConverter(arcElement);
|
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory);
|
| | | if (geom != null) {
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | colorTable.getColorCode(arcElement.getColorIndex()),
|
| | | arcElement.getWeight(),
|
| | | arcElement.getLineStyle()
|
| | | }, null);
|
| | | }
|
| | | return null;
|
| | | } else if (element instanceof EllipseElement) {
|
| | | EllipseElement arcElement = (EllipseElement) element;
|
| | | convertDecorator.setConverter(arcElement);
|
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory);
|
| | | if (geom != null) {
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | colorTable.getColorCode(arcElement.getColorIndex()),
|
| | | arcElement.getWeight(),
|
| | | arcElement.getLineStyle()
|
| | | }, null);
|
| | | }
|
| | | return null;
|
| | | } else if (element instanceof ComplexChainElement) {
|
| | | ComplexChainElement complexChainElement = (ComplexChainElement) element;
|
| | | convertDecorator.setConverter(complexChainElement);
|
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory);
|
| | | if (geom != null) {
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | colorTable.getColorCode(complexChainElement.getColorIndex()),
|
| | | complexChainElement.getWeight(),
|
| | | complexChainElement.getLineStyle()
|
| | | }, null);
|
| | | }
|
| | | return null;
|
| | | }
|
| | | return null;
|
| | | }
|
| | |
|
| | | private String getFeatureBaseName() {
|
| | | if (featureBaseName == null) {
|
| | | String dgnname = getFilename().toLowerCase();
|
| | | int i = dgnname.lastIndexOf(".");
|
| | | if (i != -1) {
|
| | | dgnname = dgnname.substring(0, i);
|
| | | }
|
| | | featureBaseName = dgnname;
|
| | | }
|
| | | return featureBaseName;
|
| | | }
|
| | |
|
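| | | // Resolves the target feature type from the element class, using the file base name plus a
|
| | | // suffix: _P text, _R shape/ellipse, _L line, _ML complex chain, _A arc (names are lower-cased).
|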
| | | private SimpleFeatureType lookupFeatureType(Element element) throws SchemaException, IllegalAttributeException {
|
| | | String typeName;
|
| | | if (element instanceof TextElement) {
|
| | | typeName = getFeatureBaseName() + "_P";
|
| | | typeName = typeName.toLowerCase();
|
| | | if (!featureTypes.containsKey(typeName)) {
|
| | | featureTypes.put(typeName, createPointFeatureElement(typeName));
|
| | | }
|
| | | return featureTypes.get(typeName);
|
| | | } else if (element instanceof TextNodeElement) {
|
| | | typeName = getFeatureBaseName() + "_P";
|
| | | typeName = typeName.toLowerCase();
|
| | | if (!featureTypes.containsKey(typeName)) {
|
| | | featureTypes.put(typeName, createPointFeatureElement(typeName));
|
| | | }
|
| | | return featureTypes.get(typeName);
|
| | | } else if (element instanceof LineStringElement) {
|
| | | if (element instanceof ShapeElement) {
|
| | | typeName = getFeatureBaseName() + "_R";
|
| | | typeName = typeName.toLowerCase();
|
| | | if (!featureTypes.containsKey(typeName)) {
|
| | | featureTypes.put(typeName, createPolygonFeatureElement(typeName));
|
| | | }
|
| | | return featureTypes.get(typeName);
|
| | | } else {
|
| | | typeName = getFeatureBaseName() + "_L";
|
| | | typeName = typeName.toLowerCase();
|
| | | if (!featureTypes.containsKey(typeName)) {
|
| | | featureTypes.put(typeName, createLineFeatureElement(typeName));
|
| | | }
|
| | | return featureTypes.get(typeName);
|
| | | }
|
| | | } else if (element instanceof LineElement) {
|
| | | typeName = getFeatureBaseName() + "_L";
|
| | | typeName = typeName.toLowerCase();
|
| | | if (!featureTypes.containsKey(typeName)) {
|
| | | featureTypes.put(typeName, createLineFeatureElement(typeName));
|
| | | }
|
| | | return featureTypes.get(typeName);
|
| | | } else if (element instanceof ComplexChainElement) {
|
| | | typeName = getFeatureBaseName() + "_ML";
|
| | | typeName = typeName.toLowerCase();
|
| | | if (!featureTypes.containsKey(typeName)) {
|
| | | featureTypes.put(typeName, createMultiLineFeatureElement(typeName));
|
| | | }
|
| | | return featureTypes.get(typeName);
|
| | | } else if (element instanceof ArcElement) {
|
| | | typeName = getFeatureBaseName() + "_A";
|
| | | typeName = typeName.toLowerCase();
|
| | | if (!featureTypes.containsKey(typeName)) {
|
| | | featureTypes.put(typeName, createArcFeatureElement(typeName));
|
| | | }
|
| | | return featureTypes.get(typeName);
|
| | | } else if (element instanceof EllipseElement) {
|
| | | typeName = getFeatureBaseName() + "_R";
|
| | | typeName = typeName.toLowerCase();
|
| | | if (!featureTypes.containsKey(typeName)) {
|
| | | featureTypes.put(typeName, createEllipseFeatureElement(typeName));
|
| | | }
|
| | | return featureTypes.get(typeName);
|
| | | }
|
| | |
|
| | | return null;
|
| | | }
|
| | |
|
| | | protected FrammeAttributeData getFeatureLinkage(Element element) {
|
| | | if (!element.hasUserAttributeData())
|
| | | return null;
|
| | |
|
| | | List<UserAttributeData> usrDatas = element.getUserAttributeData();
|
| | | for (UserAttributeData anUsrData : usrDatas) {
|
| | | if (anUsrData instanceof FrammeAttributeData) {
|
| | | return (FrammeAttributeData) anUsrData;
|
| | | }
|
| | | }
|
| | | return null;
|
| | | }
|
| | |
|
| | | public Log getLogger() {
|
| | | return logger;
|
| | | }
|
| | |
|
| | | public boolean isDropTableMode() {
|
| | | return dropTableMode;
|
| | | }
|
| | |
|
| | | public void setDropTableMode(boolean dropTableMode) {
|
| | | this.dropTableMode = dropTableMode;
|
| | | }
|
| | |
|
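| | | // Prepares the target table for a fresh load: in drop-table mode the geometry column and the
|
| | | // table are dropped and the schema is recreated, otherwise the existing rows are deleted.
|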
| | | protected void clearFeatureData(SimpleFeatureTypeBuilder typeBuilder) throws SchemaException {
|
| | | if (isProfileMode()) markUpdateTime();
|
| | |
|
| | | String featureName = typeBuilder.getName();
|
| | | Connection conn = null;
|
| | | if (isExistFeature(typeBuilder.buildFeatureType())) {
|
| | | try {
|
| | | conn = getConnection();
|
| | | if (dropTableMode) {
|
| | | dropGeometryColumn(conn, featureName,
|
| | | ((FeatureTypeImpl)typeBuilder.buildFeatureType()).getGeometryDescriptor().getLocalName());
|
| | | dropTable(conn, featureName);
|
| | |
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, typeBuilder.buildFeatureType());
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | } else {
|
| | | deleteTable(conn, featureName);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } finally {
|
| | | if (isProfileMode()) accumulateUpdateTime();
|
| | | }
|
| | | } else {
|
| | | try {
|
| | | conn = getConnection();
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, typeBuilder.buildFeatureType());
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } finally {
|
| | | if (isProfileMode()) accumulateUpdateTime();
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
New file |
| | |
| | | package com.ximple.eofms.jobs.context.edbgeo;
|
| | |
|
| | | import java.io.IOException;
|
| | | import java.math.BigDecimal;
|
| | | import java.math.RoundingMode;
|
| | | import java.sql.Connection;
|
| | | import java.sql.PreparedStatement;
|
| | | import java.sql.SQLException;
|
| | | import java.sql.Statement;
|
| | | import java.util.ArrayList;
|
| | | import java.util.Arrays;
|
| | | import java.util.HashMap;
|
| | | import java.util.Iterator;
|
| | | import java.util.List;
|
| | |
|
| | | import org.apache.commons.logging.Log;
|
| | | import org.apache.commons.logging.LogFactory;
|
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger;
|
| | | import org.apache.commons.transaction.util.LoggerFacade;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.Transaction;
|
| | | import org.geotools.data.jdbc.JDBCUtils;
|
| | | import org.opengis.feature.IllegalAttributeException;
|
| | | import org.geotools.feature.SchemaException;
|
| | | import org.geotools.feature.simple.SimpleFeatureBuilder;
|
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
|
| | | import org.geotools.feature.type.FeatureTypeImpl;
|
| | | import org.geotools.geometry.jts.JTSFactoryFinder;
|
| | | import org.opengis.feature.simple.SimpleFeature;
|
| | | import org.opengis.feature.simple.SimpleFeatureType;
|
| | | import com.edb.util.PSQLException;
|
| | |
|
| | | import com.vividsolutions.jts.geom.Coordinate;
|
| | | import com.vividsolutions.jts.geom.Envelope;
|
| | | import com.vividsolutions.jts.geom.Geometry;
|
| | | import com.vividsolutions.jts.geom.GeometryFactory;
|
| | | import com.vividsolutions.jts.geom.Polygon;
|
| | |
|
| | | import com.ximple.eofms.util.DefaultColorTable;
|
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil;
|
| | | import com.ximple.eofms.util.GeometryConverterDecorator;
|
| | | import com.ximple.eofms.util.TPCLIDConverter;
|
| | | import com.ximple.eofms.util.TWDDatumConverter;
|
| | | import com.ximple.io.dgn7.Element;
|
| | | import com.ximple.io.dgn7.FrammeAttributeData;
|
| | | import com.ximple.io.dgn7.ShapeElement;
|
| | | import com.ximple.io.dgn7.TextElement;
|
| | | import com.ximple.io.dgn7.UserAttributeData;
|
| | |
|
| | | public class IndexDgnConvertEdbGeoJobContext extends AbstractDgnToEdbGeoJobContext {
|
| | | static final Log logger = LogFactory.getLog(IndexDgnConvertEdbGeoJobContext.class);
|
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger);
|
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null);
|
| | |
|
| | | private HashMap<SimpleFeatureType, ArrayList<SimpleFeature>> txFeaturesContext =
|
| | | new HashMap<SimpleFeatureType, ArrayList<SimpleFeature>>();
|
| | |
|
| | | private SimpleFeatureType featureType1 = null;
|
| | | private SimpleFeatureType featureType2 = null;
|
| | | private SimpleFeatureType featureType3 = null;
|
| | |
|
| | | private boolean dropTableMode = true;
|
| | | private int accumulate = 0;
|
| | |
|
| | | public IndexDgnConvertEdbGeoJobContext(String dataPath, DataStore targetDataStore, String targetSchema,
|
| | | boolean profileMode, boolean useTransform) {
|
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform);
|
| | | }
|
| | |
|
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException {
|
| | | if ((!(element instanceof TextElement)) && (!(element instanceof ShapeElement))) {
|
| | | return;
|
| | | }
|
| | |
|
| | | if ((element instanceof TextElement)) {
|
| | | putTextFeatureCollection((TextElement) element);
|
| | | }
|
| | |
|
| | | if ((element instanceof ShapeElement)) {
|
| | | putShapeFeatureCollection((ShapeElement) element);
|
| | | }
|
| | | }
|
| | |
|
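| | | // Buffers three kinds of features per text label: the index frame (createFeature), the label
|
| | | // point (createFeature2) and the quarter frames (createFeature3); a commit is triggered once
|
| | | // BATCHSIZE is exceeded.
|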
| | | protected void putTextFeatureCollection(TextElement element) throws SchemaException, IllegalAttributeException {
|
| | | SimpleFeature feature = createFeature(element);
|
| | | if (feature == null) {
|
| | | logger.info("cannot craete feature." + element.toString() + "'" +
|
| | | element.getText() + "'");
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) {
|
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | ArrayList<SimpleFeature> arrayList = txFeaturesContext.get(feature.getFeatureType());
|
| | | arrayList.add(feature);
|
| | |
|
| | | feature = createFeature2(element);
|
| | | if (feature == null) {
|
| | | logger.info("cannot craete feature2." + element.toString() + "'" +
|
| | | element.getText() + "'");
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) {
|
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | arrayList = txFeaturesContext.get(feature.getFeatureType());
|
| | | arrayList.add(feature);
|
| | |
|
| | | SimpleFeature[] features = createFeature3(element);
|
| | | if (features == null) {
|
| | | logger.info("cannot craete feature3." + element.toString() + "'" +
|
| | | element.getText() + "'");
|
| | | return;
|
| | | }
|
| | | if (!txFeaturesContext.containsKey(features[0].getFeatureType())) {
|
| | | txFeaturesContext.put(features[0].getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | arrayList = txFeaturesContext.get(features[0].getFeatureType());
|
| | | arrayList.addAll(Arrays.asList(features));
|
| | |
|
| | | accumulate++;
|
| | |
|
| | | if (accumulate > BATCHSIZE) {
|
| | | commitTransaction();
|
| | | }
|
| | | }
|
| | |
|
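| | | // Same flow as the text case, but driven by the rectangular shape element; failures are
|
| | | // logged with the TPCL id derived from the polygon centre.
|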
| | | protected void putShapeFeatureCollection(ShapeElement element) throws SchemaException, IllegalAttributeException {
|
| | | SimpleFeature feature = createFeature(element);
|
| | | if (feature == null) {
|
| | | Polygon polygon = (Polygon) element.toGeometry(geometryFactory);
|
| | | if (polygon == null) {
|
| | | logger.info("cannot craete feature." + element.toString() + "'" +
|
| | | "linear is null" + "'");
|
| | | } else {
|
| | | Coordinate pt = polygon.getEnvelopeInternal().centre();
|
| | | String id = TPCLIDConverter.CoordinateToTpclId(pt);
|
| | | logger.info("cannot craete feature." + element.toString() + "'" +
|
| | | id + "'- from pt=" + pt);
|
| | | }
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) {
|
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | ArrayList<SimpleFeature> arrayList = txFeaturesContext.get(feature.getFeatureType());
|
| | | arrayList.add(feature);
|
| | |
|
| | | feature = createFeature2(element);
|
| | | if (feature == null) {
|
| | | Polygon polygon = (Polygon) element.toGeometry(geometryFactory);
|
| | | if (polygon == null) {
|
| | | logger.info("cannot craete feature2." + element.toString() + "'" +
|
| | | "linear is null" + "'");
|
| | | } else {
|
| | | Coordinate pt = polygon.getEnvelopeInternal().centre();
|
| | | String id = TPCLIDConverter.CoordinateToTpclId(pt);
|
| | | logger.info("cannot craete feature2." + element.toString() + "'" +
|
| | | id + "'- from pt=" + pt);
|
| | | }
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) {
|
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | arrayList = txFeaturesContext.get(feature.getFeatureType());
|
| | | arrayList.add(feature);
|
| | |
|
| | | SimpleFeature[] features = createFeature3(element);
|
| | | if (features == null) {
|
| | | Polygon polygon = (Polygon) element.toGeometry(geometryFactory);
|
| | | if (polygon == null) {
|
| | | logger.info("cannot craete feature3." + element.toString() + "'" +
|
| | | "linear is null" + "'");
|
| | | } else {
|
| | | Coordinate pt = polygon.getEnvelopeInternal().centre();
|
| | | String id = TPCLIDConverter.CoordinateToTpclId(pt);
|
| | | logger.info("cannot craete feature3." + element.toString() + "'" +
|
| | | id + "'- from pt=" + pt);
|
| | | }
|
| | | return;
|
| | | }
|
| | | if (!txFeaturesContext.containsKey(features[0].getFeatureType())) {
|
| | | txFeaturesContext.put(features[0].getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | arrayList = txFeaturesContext.get(features[0].getFeatureType());
|
| | | arrayList.addAll(Arrays.asList(features));
|
| | |
|
| | | accumulate++;
|
| | |
|
| | | if (accumulate > BATCHSIZE) {
|
| | | commitTransaction();
|
| | | }
|
| | | }
|
| | |
|
| | | public void startTransaction() {
|
| | | }
|
| | |
|
| | | public void commitTransaction() {
|
| | | if (!txFeaturesContext.isEmpty()) {
|
| | | logger.debug("Transaction size = " + txFeaturesContext.size());
|
| | | } else {
|
| | | logger.debug("Transaction is empty.");
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.isEmpty()) {
|
| | | updateDataStore();
|
| | | }
|
| | | }
|
| | |
|
| | | public void rollbackTransaction() {
|
| | | }
|
| | |
|
| | | private void updateDataStore() {
|
| | | if (isProfileMode()) markUpdateTime();
|
| | |
|
| | | Iterator<SimpleFeatureType> it = txFeaturesContext.keySet().iterator();
|
| | | Connection conn = null;
|
| | | try {
|
| | | conn = getConnection();
|
| | | boolean autoCommit = conn.getAutoCommit();
|
| | | conn.setAutoCommit(false);
|
| | |
|
| | | while (it.hasNext()) {
|
| | | SimpleFeatureType featureType = it.next();
|
| | | logger.debug("Begin Save EdbGeo:" + featureType.getTypeName());
|
| | |
|
| | | String bindingStmt = makePrepareInsertSql(featureType);
|
| | | ArrayList<SimpleFeature> features = txFeaturesContext.get(featureType);
|
| | | PreparedStatement pstmt = conn.prepareStatement(bindingStmt);
|
| | |
|
| | | for (SimpleFeature feature : features) {
|
| | | // currentStmt = feature;
|
| | | // Statement stmt = conn.createStatement();
|
| | | try {
|
| | | // stmt.execute(feature);
|
| | | bindFeatureParameters(pstmt, feature);
|
| | | // pstmt.execute();
|
| | | pstmt.addBatch();
|
| | | } catch (PSQLException e) {
|
| | | if (bindingStmt != null) {
|
| | | logger.error("Execute:" + bindingStmt);
|
| | | }
|
| | | logger.error(e.getServerErrorMessage());
|
| | | logger.error(e.getMessage(), e);
|
| | | /*
|
| | | } finally {
|
| | | stmt.close();
|
| | | */
|
| | | }
|
| | | }
|
| | | /*
|
| | | if ((i % BATCHSIZE) != 0)
|
| | | {
|
| | | stmt.executeBatch();
|
| | | }
|
| | | stmt.close();
|
| | | */
|
| | | int[] numUpdates = pstmt.executeBatch();
|
| | | for (int i = 0; i < numUpdates.length; i++) {
|
| | | if (numUpdates[i] == -2)
|
| | | logger.warn("Execution " + i + ": unknown number of rows updated");
|
| | | }
|
| | | conn.commit();
|
| | |
|
| | | pstmt.close();
|
| | | features.clear();
|
| | |
|
| | | logger.debug("End Save EdbGeo:" + featureType.getTypeName());
|
| | | }
|
| | | accumulate = 0;
|
| | | conn.setAutoCommit(autoCommit);
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (PSQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.error(e.getServerErrorMessage());
|
| | | logger.error(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.error(e.getMessage(), e);
|
| | | } finally {
|
| | | if (isProfileMode()) this.accumulateUpdateTime();
|
| | | }
|
| | | }
|
| | |
|
| | | public void closeFeatureWriter() {
|
| | | txFeaturesContext.clear();
|
| | | /*
|
| | | for (FeatureWriter featureWriter : this.featuresWriterContext.values())
|
| | | {
|
| | | featureWriter.close();
|
| | | }
|
| | |
|
| | | this.featuresWriterContext.clear();
|
| | | */
|
| | | }
|
| | |
|
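| | | // Lazily creates the index feature type and its backing table; an existing table is either
|
| | | // dropped and recreated or emptied, depending on dropTableMode.
|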
| | | public void createFeatureElement(String featureName) throws SchemaException {
|
| | | if (featureType1 == null) {
|
| | | Connection conn = null;
|
| | | SimpleFeatureTypeBuilder typeBuilder1 = FeatureTypeBuilderUtil.createNormalIndexFeatureTypeBuilder(featureName);
|
| | | featureType1 = typeBuilder1.buildFeatureType();
|
| | | if (isExistFeature(featureType1)) {
|
| | | try {
|
| | | conn = getConnection();
|
| | | if (dropTableMode) {
|
| | | try {
|
| | | dropGeometryColumn(conn, featureName,
|
| | | (featureType1).getGeometryDescriptor().getLocalName());
|
| | | } catch (PSQLException e) {
|
| | | logger.debug(e.getMessage(), e);
|
| | | }
|
| | | try {
|
| | | dropTable(conn, featureName);
|
| | | } catch (PSQLException e) {
|
| | | logger.debug(e.getMessage(), e);
|
| | | }
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType1);
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | } else {
|
| | | deleteTable(conn, featureName);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | } else {
|
| | | try {
|
| | | conn = getConnection();
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType1);
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | public void createFeatureElement2(String featureName) throws SchemaException {
|
| | | if (featureType2 == null) {
|
| | | Connection conn = null;
|
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalIndexTextFeatureTypeBuilder(featureName);
|
| | | featureType2 = typeBuilder.buildFeatureType();
|
| | | if (isExistFeature(featureType2)) {
|
| | | try {
|
| | | conn = getConnection();
|
| | | if (dropTableMode) {
|
| | | dropGeometryColumn(conn, featureName,
|
| | | (featureType2).getGeometryDescriptor().getLocalName());
|
| | | dropTable(conn, featureName);
|
| | |
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType2);
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | } else {
|
| | | deleteTable(conn, featureName);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | } else {
|
| | | try {
|
| | | conn = getConnection();
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType2);
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | public void createFeatureElement3(String featureName) throws SchemaException {
|
| | | if (featureType3 == null) {
|
| | | Connection conn = null;
|
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalIndexFeatureTypeBuilder(featureName);
|
| | | featureType3 = typeBuilder.buildFeatureType();
|
| | | if (isExistFeature(featureType3)) {
|
| | | try {
|
| | | conn = getConnection();
|
| | | if (dropTableMode) {
|
| | | try {
|
| | | dropGeometryColumn(conn, featureName,
|
| | | (featureType3).getGeometryDescriptor().getLocalName());
|
| | | } catch (PSQLException e) {
|
| | | logger.debug(e.getMessage(), e);
|
| | | }
|
| | | try {
|
| | | dropTable(conn, featureName);
|
| | | } catch (PSQLException e) {
|
| | | logger.debug(e.getMessage(), e);
|
| | | }
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType3);
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | } else {
|
| | | deleteTable(conn, featureName);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | } else {
|
| | | try {
|
| | | conn = getConnection();
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType3);
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
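| | | // Builds the 1200-series index polygon either from a TPCL id text label or from a rectangular
|
| | | // shape element, reprojecting the TM2 envelope corners to EPSG:3826 or EPSG:3825.
|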
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element) throws IllegalAttributeException {
|
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance();
|
| | | if (element instanceof TextElement) {
|
| | | TextElement textElement = (TextElement) element;
|
| | | String tpclid = textElement.getText();
|
| | |
|
| | | // assumption: the original test endsWith("") is always true; an empty-id check is what the surrounding logic implies
|
| | | if (tpclid.trim().isEmpty())
|
| | | {
|
| | | return null;
|
| | | }
|
| | |
|
| | | Envelope extent = TPCLIDConverter.convertTpclIdToEnvelope(tpclid);
|
| | | Geometry geom;
|
| | | if (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) {
|
| | | geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[]
|
| | | {
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMaxX(), extent.getMinY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMaxX(), extent.getMaxY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMaxY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())),
|
| | | }), null);
|
| | | } else {
|
| | | geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[]
|
| | | {
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMinX(), extent.getMinY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMaxX(), extent.getMinY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMaxX(), extent.getMaxY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMinX(), extent.getMaxY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMinX(), extent.getMinY())),
|
| | | }), null);
|
| | | }
|
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | extent.getMinX(),
|
| | | extent.getMinY(),
|
| | | extent.getMaxX(),
|
| | | extent.getMaxY(),
|
| | | tpclid,
|
| | | colorTable.getColorCode(textElement.getColorIndex()),
|
| | | textElement.getWeight(),
|
| | | textElement.getLineStyle()
|
| | | }, null);
|
| | | } else if (element instanceof ShapeElement) {
|
| | | ShapeElement shapeElement = (ShapeElement) element;
|
| | | Geometry geomShape = shapeElement.toGeometry(geometryFactory);
|
| | | Polygon polygon = (Polygon) geomShape;
|
| | | if (polygon.isRectangle()) {
|
| | | Envelope bounds = polygon.getEnvelopeInternal();
|
| | | if (bounds.getWidth() == TPCLIDConverter.SX1200) {
|
| | | Coordinate center = bounds.centre();
|
| | | String tpclid = TPCLIDConverter.CoordinateToTpclId(center);
|
| | | if ((tpclid == null) || (tpclid.length() == 0)) {
|
| | | logger.warn("Cannot convert coordinate to tpclid-[" + center.toString() + "]");
|
| | | return null;
|
| | | }
|
| | | if (tpclid.length() > 5) {
|
| | | tpclid = tpclid.substring(0, 5);
|
| | | }
|
| | | Envelope extent = TPCLIDConverter.convertTpclIdToEnvelope(tpclid);
|
| | | Geometry geom = null;
|
| | | try {
|
| | | geom = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ?
|
| | | geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[]
|
| | | {
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMaxX(), extent.getMinY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMaxX(), extent.getMaxY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMaxY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())),
|
| | | }), null) :
|
| | | geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[]
|
| | | {
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMinX(), extent.getMinY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMaxX(), extent.getMinY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMaxX(), extent.getMaxY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMinX(), extent.getMaxY())),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(extent.getMinX(), extent.getMinY())),
|
| | | }), null));
|
| | | } catch (NullPointerException e) {
|
| | | logger.warn("TPCLIDConverter has error at [" + tpclid + "]");
|
| | | logger.warn(e.getMessage(), e);
|
| | | return null;
|
| | | }
|
| | |
|
| | | return (geom == null ? null :
|
| | | SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | extent.getMinX(),
|
| | | extent.getMinY(),
|
| | | extent.getMaxX(),
|
| | | extent.getMaxY(),
|
| | | tpclid,
|
| | | colorTable.getColorCode(shapeElement.getColorIndex()),
|
| | | shapeElement.getWeight(),
|
| | | shapeElement.getLineStyle()
|
| | | }, null));
|
| | | }
|
| | | }
|
| | | }
|
| | | return null;
|
| | | }
|
| | |
|
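| | | // Builds the frame label point: for text elements the converted text geometry is used, for
|
| | | // rectangular frames a point at the frame centre with fixed text metrics is generated.
|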
| | | public SimpleFeature createFeature2(SimpleFeatureType featureType, Element element) throws IllegalAttributeException {
|
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance();
|
| | | if (element instanceof TextElement) {
|
| | | SimpleFeature feature = null;
|
| | | TextElement txtElement = (TextElement) element;
|
| | | double angle = txtElement.getRotationAngle();
|
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue();
|
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter();
|
| | | convertDecorator.setConverter(txtElement);
|
| | | Geometry gobj = convertDecorator.toGeometry(geometryFactory);
|
| | | if (gobj != null)
|
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | gobj,
|
| | | colorTable.getColorCode(txtElement.getColorIndex()),
|
| | | txtElement.getWeight(),
|
| | | txtElement.getLineStyle(),
|
| | | txtElement.getJustification(),
|
| | | txtElement.getTextHeight(),
|
| | | txtElement.getTextWidth(),
|
| | | angle,
|
| | | txtElement.getText()
|
| | | }, null);
|
| | | return feature;
|
| | | } else if (element instanceof ShapeElement) {
|
| | | SimpleFeature feature = null;
|
| | | ShapeElement shapeElement = (ShapeElement) element;
|
| | | double angle = 0.0;
|
| | | Geometry geomShape = shapeElement.toGeometry(geometryFactory);
|
| | | Polygon polygon = (Polygon) geomShape;
|
| | | if (polygon.isRectangle()) {
|
| | | Envelope bounds = polygon.getEnvelopeInternal();
|
| | | if (bounds.getWidth() == TPCLIDConverter.SX1200) {
|
| | | Coordinate center = bounds.centre();
|
| | | String tpclid = TPCLIDConverter.CoordinateToTpclId(center);
|
| | | if (tpclid.length() > 5) {
|
| | | tpclid = tpclid.substring(0, 5);
|
| | | Coordinate pos = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ?
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(center.x, center.y)) :
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(center.x, center.y)));
|
| | | Geometry gobj = geometryFactory.createPoint(pos);
|
| | |
|
| | | if (gobj != null)
|
| | | feature = SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | gobj,
|
| | | colorTable.getColorCode(shapeElement.getColorIndex()),
|
| | | shapeElement.getWeight(),
|
| | | shapeElement.getLineStyle(),
|
| | | 0,
|
| | | 15.0,
|
| | | 15 * 5,
|
| | | angle,
|
| | | tpclid
|
| | | }, null);
|
| | | return feature;
|
| | | }
|
| | |
|
| | | }
|
| | | }
|
| | | }
|
| | | return null;
|
| | | }
|
| | |
|
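| | | // Splits the 1200-series frame into its four 600-series quadrants (sub-ids A-D from
|
| | | // intToAscii(65 + i)) and emits one index feature per quadrant.
|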
| | | public SimpleFeature[] createFeature3(SimpleFeatureType featureType, Element element) throws IllegalAttributeException {
|
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance();
|
| | | if (element instanceof TextElement) {
|
| | | TextElement textElement = (TextElement) element;
|
| | | String tpclid = textElement.getText();
|
| | | SimpleFeature[] result = new SimpleFeature[4];
|
| | |
|
| | | Envelope extent = TPCLIDConverter.convertTpclIdToEnvelope(tpclid);
|
| | |
|
| | | for (int i = 0; i < 4; i++) {
|
| | | char mapSubId = TPCLIDConverter.intToAscii(65 + i);
|
| | | int dx = (i % 2) * TPCLIDConverter.SX600;
|
| | | int dy = (i / 2) * TPCLIDConverter.SY600;
|
| | |
|
| | | Geometry geom;
|
| | |
|
| | | if (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) {
|
| | | geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[]
|
| | | {
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + TPCLIDConverter.SX600 + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + TPCLIDConverter.SX600 + dx, extent.getMaxY() - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | }), null);
|
| | | } else {
|
| | | geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[]
|
| | | {
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + TPCLIDConverter.SX600 + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + TPCLIDConverter.SX600 + dx, extent.getMaxY() - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | }), null);
|
| | | }
|
| | | Envelope innerExtent = geom.getEnvelopeInternal();
|
| | |
|
| | | result[i] = SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | innerExtent.getMinX(),
|
| | | innerExtent.getMinY(),
|
| | | innerExtent.getMaxX(),
|
| | | innerExtent.getMaxY(),
|
| | | tpclid + mapSubId,
|
| | | colorTable.getColorCode(textElement.getColorIndex()),
|
| | | textElement.getWeight(),
|
| | | textElement.getLineStyle()
|
| | | }, null);
|
| | |
|
| | | }
|
| | | return result;
|
| | | } else if (element instanceof ShapeElement) {
|
| | | ShapeElement shapeElement = (ShapeElement) element;
|
| | | Geometry geomShape = shapeElement.toGeometry(geometryFactory);
|
| | | Polygon polygon = (Polygon) geomShape;
|
| | | if (polygon.isRectangle()) {
|
| | | Envelope extent = polygon.getEnvelopeInternal();
|
| | | if (extent.getWidth() == TPCLIDConverter.SX1200) {
|
| | | SimpleFeature[] result = new SimpleFeature[4];
|
| | | Coordinate center = extent.centre();
|
| | | String tpclid = TPCLIDConverter.CoordinateToTpclId(center);
|
| | | if (tpclid.length() > 5) {
|
| | | tpclid = tpclid.substring(0, 5);
|
| | | }
|
| | | for (int i = 0; i < 4; i++) {
|
| | | char mapSubId = TPCLIDConverter.intToAscii(65 + i);
|
| | | int dx = (i % 2) * TPCLIDConverter.SX600;
|
| | | int dy = (i / 2) * TPCLIDConverter.SY600;
|
| | |
|
| | | Geometry geom = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ?
|
| | | geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[]
|
| | | {
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + TPCLIDConverter.SX600 + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + TPCLIDConverter.SX600 + dx, extent.getMaxY() - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | }), null) :
|
| | | geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[]
|
| | | {
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + TPCLIDConverter.SX600 + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + TPCLIDConverter.SX600 + dx, extent.getMaxY() - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - dy)),
|
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(
|
| | | extent.getMinX() + dx, extent.getMaxY() - TPCLIDConverter.SY600 - dy)),
|
| | | }), null));
|
| | |
|
| | | Envelope innerExtent = geom.getEnvelopeInternal();
|
| | |
|
| | | result[i] = SimpleFeatureBuilder.build(featureType, new Object[]{
|
| | | geom,
|
| | | innerExtent.getMinX(),
|
| | | innerExtent.getMinY(),
|
| | | innerExtent.getMaxX(),
|
| | | innerExtent.getMaxY(),
|
| | | tpclid + mapSubId,
|
| | | colorTable.getColorCode(shapeElement.getColorIndex()),
|
| | | shapeElement.getWeight(),
|
| | | shapeElement.getLineStyle()
|
| | | }, null);
|
| | | }
|
| | |
|
| | | return result;
|
| | | }
|
| | | }
|
| | | }
|
| | |
|
| | | return null;
|
| | | }
|
| | |
|
| | | private SimpleFeature createFeature(Element element) throws SchemaException, IllegalAttributeException {
|
| | | if (featureType1 == null) {
|
| | | String dgnname = getFilename().toLowerCase();
|
| | | int i = dgnname.lastIndexOf(".");
|
| | | if (i != -1) {
|
| | | dgnname = dgnname.substring(0, i);
|
| | | }
|
| | | createFeatureElement(dgnname.toLowerCase());
|
| | | }
|
| | | return createFeature(featureType1, element);
|
| | | }
|
| | |
|
| | | private SimpleFeature createFeature2(Element element) throws SchemaException, IllegalAttributeException {
|
| | | if (featureType2 == null) {
|
| | | String dgnname = getFilename().toLowerCase();
|
| | | int i = dgnname.lastIndexOf(".");
|
| | | if (i != -1) {
|
| | | dgnname = dgnname.substring(0, i);
|
| | | }
|
| | | dgnname = dgnname + "_p";
|
| | | createFeatureElement2(dgnname.toLowerCase());
|
| | | }
|
| | | return createFeature2(featureType2, element);
|
| | | }
|
| | |
|
| | | private SimpleFeature[] createFeature3(Element element) throws SchemaException, IllegalAttributeException {
|
| | | if (featureType3 == null) {
|
| | | String dgnname = getFilename().toLowerCase();
|
| | | int i = dgnname.lastIndexOf(".");
|
| | | if (i != -1) {
|
| | | dgnname = dgnname.substring(0, i);
|
| | | }
|
| | | dgnname = dgnname + "_s";
|
| | | createFeatureElement3(dgnname.toLowerCase());
|
| | | }
|
| | | return createFeature3(featureType3, element);
|
| | | }
|
| | |
|
| | | protected FrammeAttributeData getFeatureLinkage(Element element) {
|
| | | if (!element.hasUserAttributeData())
|
| | | return null;
|
| | |
|
| | | List<UserAttributeData> usrDatas = element.getUserAttributeData();
|
| | | for (UserAttributeData anUsrData : usrDatas) {
|
| | | if (anUsrData instanceof FrammeAttributeData) {
|
| | | return (FrammeAttributeData) anUsrData;
|
| | | }
|
| | | }
|
| | | return null;
|
| | | }
|
| | |
|
| | | public Log getLogger() {
|
| | | return logger;
|
| | | }
|
| | |
|
| | | public boolean isDropTableMode() {
|
| | | return dropTableMode;
|
| | | }
|
| | |
|
| | | public void setDropTableMode(boolean dropTableMode) {
|
| | | this.dropTableMode = dropTableMode;
|
| | | }
|
| | |
|
| | | public void clearOutputDatabase() {
|
| | | }
|
| | | }
|
New file |
| | |
| | | package com.ximple.eofms.jobs.context.edbgeo;
|
| | |
|
| | | import java.io.File;
|
| | | import java.io.IOException;
|
| | | import java.io.UnsupportedEncodingException;
|
| | | import java.net.MalformedURLException;
|
| | | import java.net.URL;
|
| | | import java.sql.BatchUpdateException;
|
| | | import java.sql.Connection;
|
| | | import java.sql.DriverManager;
|
| | | import java.sql.PreparedStatement;
|
| | | import java.sql.SQLException;
|
| | | import java.sql.Statement;
|
| | | import java.util.ArrayList;
|
| | | import java.util.HashMap;
|
| | | import java.util.Iterator;
|
| | |
|
| | | import com.edb.util.PSQLException;
|
| | | import com.vividsolutions.jts.geom.Geometry;
|
| | | import com.vividsolutions.jts.util.Assert;
|
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter;
|
| | | import com.ximple.eofms.filter.CreateFeatureTypeEventListener;
|
| | | import com.ximple.eofms.filter.ElementDispatcher;
|
| | | import com.ximple.eofms.filter.FeatureTypeEvent;
|
| | | import com.ximple.eofms.jobs.OracleElementLogger;
|
| | | import com.ximple.eofms.util.ElementDigesterUtils;
|
| | | import com.ximple.io.dgn7.ComplexElement;
|
| | | import com.ximple.io.dgn7.Element;
|
| | | import com.ximple.io.dgn7.FrammeAttributeData;
|
| | | import org.apache.commons.digester3.Digester;
|
| | | import org.apache.commons.logging.Log;
|
| | | import org.apache.commons.logging.LogFactory;
|
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger;
|
| | | import org.apache.commons.transaction.util.LoggerFacade;
|
| | | import org.geotools.data.DataStore;
|
| | | import org.geotools.data.Transaction;
|
| | | import org.geotools.data.edbgeo.PostgisDataStoreFactory;
|
| | | import org.geotools.data.jdbc.JDBCUtils;
|
| | | import org.geotools.feature.SchemaException;
|
| | | import org.geotools.feature.type.FeatureTypeImpl;
|
| | | import org.opengis.feature.simple.SimpleFeature;
|
| | | import org.opengis.feature.simple.SimpleFeatureType;
|
| | | import org.quartz.JobExecutionContext;
|
| | | import org.xml.sax.SAXException;
|
| | |
|
| | | public class OracleConvertEdbGeoJobContext extends AbstractOracleToEdbGeoJobContext
|
| | | implements CreateFeatureTypeEventListener {
|
| | | static Log logger = LogFactory.getLog(OracleConvertEdbGeoJobContext.class);
|
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger);
|
| | |
|
| | | static PostgisDataStoreFactory dataStoreFactory = new PostgisDataStoreFactory();
|
| | |
|
| | | private OracleElementLogger elmLogger = null;
|
| | |
|
| | | static {
|
| | | try {
|
| | | DriverManager.registerDriver(new oracle.jdbc.driver.OracleDriver());
|
| | | } catch (SQLException e) {
|
| | | Assert.shouldNeverReachHere(e.getMessage());
|
| | | }
|
| | | }
|
| | |
|
| | | private String _filterConfig;
|
| | |
|
| | | private ElementDispatcher elementDispatcher;
|
| | |
|
| | | private HashMap<SimpleFeatureType, ArrayList<SimpleFeature>> txFeaturesContext = new HashMap<SimpleFeatureType, ArrayList<SimpleFeature>>();
|
| | |
|
| | | private JobExecutionContext executionContext;
|
| | |
|
| | | private String currentSchema = null;
|
| | | private boolean schemaChanged = false;
|
| | | private boolean dropTableMode = true;
|
| | | private int accumulate = 0;
|
| | |
|
| | | public OracleConvertEdbGeoJobContext(String dataPath, DataStore pgDS, String targetSchema, String filterConfig,
|
| | | boolean profileMode, boolean useTransform) {
|
| | | super(dataPath, pgDS, targetSchema, profileMode, useTransform);
|
| | | _filterConfig = filterConfig;
|
| | | elementDispatcher = createElementDispatcher();
|
| | | elementDispatcher.addCreateFeatureTypeEventListener(this);
|
| | | // txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger);
|
| | | }
|
| | |
|
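| | | // Builds the element filter chain with Commons Digester, preferring the configured filter
|
| | | // file and falling back to the bundled /conf/DefaultConvertShpFilter.xml resource.
|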
| | | private ElementDispatcher createElementDispatcher() {
|
| | | try {
|
| | | URL filterURL = null;
|
| | | if (_filterConfig != null) {
|
| | | File config = new File(_filterConfig);
|
| | | if (config.exists()) {
|
| | | filterURL = config.toURI().toURL();
|
| | | }
|
| | | }
|
| | | if (filterURL == null) {
|
| | | // config = new File("conf/DefaultConvertShpFilter.xml");
|
| | | filterURL = this.getClass().getResource("/conf/DefaultConvertShpFilter.xml");
|
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml");
|
| | | }
|
| | | assert filterURL != null;
|
| | | Digester digester = ElementDigesterUtils.getElementDigester();
|
| | | return (ElementDispatcher) digester.parse(filterURL);
|
| | | } catch (UnsupportedEncodingException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (MalformedURLException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (IOException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | } catch (SAXException e) {
|
| | | logger.info(e.getMessage(), e);
|
| | | throw new RuntimeException(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
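| | | // Runs the element through the dispatcher; unmatched or empty-geometry elements are logged
|
| | | // (optionally to the element logger) and skipped, matched features are buffered until
|
| | | // BATCHSIZE forces a commit.
|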
| | | public void putFeatureCollection(Element element) {
|
| | | assert elementDispatcher != null;
|
| | | // check whether the element matches the dispatch conditions
|
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed());
|
| | | if (feature == null) {
|
| | | boolean isEmptySize = false;
|
| | | FrammeAttributeData linkage =
|
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element);
|
| | | logger.warn("Unknown Element:" + element.getElementType().toString() +
|
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" +
|
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID())));
|
| | |
|
| | | if (element instanceof ComplexElement) {
|
| | | ComplexElement complex = (ComplexElement) element;
|
| | | logger.warn("----Complex Element size=" + complex.size() + ":" +
|
| | | (linkage == null ? "NULL" : (linkage.getUfid())));
|
| | | if (complex.size() == 0)
|
| | | isEmptySize = true;
|
| | | }
|
| | |
|
| | | if (getElementLogging() && (!isEmptySize)) {
|
| | | getElementLogger().logElement(element, getCurrentSchema());
|
| | | }
|
| | | return;
|
| | | }
|
| | |
|
| | | if (((Geometry)feature.getDefaultGeometry()).isEmpty()) {
|
| | | boolean isEmptySize = false;
|
| | | FrammeAttributeData linkage =
|
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element);
|
| | | logger.warn("Empty Geom Element:" + element.getElementType().toString() +
|
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" +
|
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID())));
|
| | |
|
| | | if (element instanceof ComplexElement) {
|
| | | ComplexElement complex = (ComplexElement) element;
|
| | | logger.warn("----Complex Element size=" + complex.size() + ":" +
|
| | | (linkage == null ? "NULL" : (linkage.getUfid())));
|
| | | if (complex.size() == 0)
|
| | | isEmptySize = true;
|
| | | }
|
| | |
|
| | | if (getElementLogging() && (!isEmptySize)) {
|
| | | getElementLogger().logElement(element, getCurrentSchema());
|
| | | }
|
| | | return;
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) {
|
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList<SimpleFeature>());
|
| | | }
|
| | | ArrayList<SimpleFeature> arrayList = txFeaturesContext.get(feature.getFeatureType());
|
| | | arrayList.add(feature);
|
| | | accumulate++;
|
| | | if (accumulate > BATCHSIZE) {
|
| | | commitTransaction();
|
| | | }
|
| | | }
|
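| | |
| | | // Hedged usage sketch: how a conversion loop could feed DGN elements through
| | | // putFeatureCollection(). The elements list is hypothetical; the point is that the
| | | // BATCHSIZE check above auto-commits, so the caller only needs a final commit.
| | | void convertSketch(OracleConvertEdbGeoJobContext jobContext, List<Element> elements) {
| | |     jobContext.startTransaction();
| | |     for (Element element : elements) {
| | |         // accumulates features per feature type; commits once accumulate exceeds BATCHSIZE
| | |         jobContext.putFeatureCollection(element);
| | |     }
| | |     // flush whatever is still below the batch threshold
| | |     jobContext.commitTransaction();
| | | }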
| | |
|
| | | public void startTransaction() {
|
| | | }
|
| | |
|
| | | public void commitTransaction() {
|
| | | if (!txFeaturesContext.isEmpty()) {
|
| | | logger.debug("Transaction size = " + txFeaturesContext.size());
|
| | | //txFeaturesContext.commitTransaction();
|
| | | } else {
|
| | | logger.debug("Transaction is empty.");
|
| | | }
|
| | |
|
| | | if (!txFeaturesContext.isEmpty()) {
|
| | | updateDataStore();
|
| | | }
|
| | |
|
| | | if (this.getElementLogger() != null)
|
| | | this.getElementLogger().flashLogging();
|
| | | }
|
| | |
|
| | | public void rollbackTransaction() {
|
| | | }
|
| | |
|
| | | public void resetFeatureContext() {
|
| | | txFeaturesContext.clear();
|
| | | }
|
| | |
|
| | | private void updateDataStore() {
|
| | | if (isProfileMode()) markUpdateTime();
|
| | | Iterator<SimpleFeatureType> it = txFeaturesContext.keySet().iterator();
|
| | | Connection conn = null;
|
| | | try {
|
| | | conn = getConnection();
|
| | | boolean autoCommit = conn.getAutoCommit();
|
| | | conn.setAutoCommit(false);
|
| | |
|
| | | while (it.hasNext()) {
|
| | | SimpleFeatureType featureType = it.next();
|
| | | logger.debug("Begin Save into EdbGeo:" + featureType.getTypeName());
|
| | |
|
| | | int batchCount = 0;
|
| | | String bindingStmt = makePrepareInsertSql(featureType);
|
| | | ArrayList<SimpleFeature> features = txFeaturesContext.get(featureType);
|
| | | PreparedStatement pstmt = conn.prepareStatement(bindingStmt);
|
| | |
|
| | | for (SimpleFeature feature : features) {
|
| | | try {
|
| | | // stmt.execute(feature);
|
| | | bindFeatureParameters(pstmt, feature);
|
| | | // pstmt.executeUpdate();
|
| | | pstmt.addBatch();
|
| | | } catch (PSQLException e) {
|
| | | if (bindingStmt != null) {
|
| | | logger.error("Execute:" + bindingStmt);
|
| | | }
|
| | | logger.error(e.getServerErrorMessage());
|
| | | logger.error(e.getMessage(), e);
|
| | | } catch (ClassCastException e) {
|
| | | if (bindingStmt != null) {
|
| | | logger.error("Execute:" + bindingStmt);
|
| | | }
|
| | | for (int i = 0; i < feature.getAttributeCount(); i++) {
|
| | | logger.info("attr[" + i + "]-" + ((feature.getAttribute(i) == null) ? " NULL" :
|
| | | feature.getAttribute(i).toString()));
|
| | | }
|
| | | logger.error(e.getMessage(), e);
|
| | | }
|
| | | batchCount++;
|
| | | }
|
| | |
|
| | | int[] numUpdates = pstmt.executeBatch();
|
| | | for (int i = 0; i < numUpdates.length; i++) {
|
| | | if (numUpdates[i] == -2)
|
| | | logger.warn("Execution " + i + ": unknown number of rows updated");
|
| | | }
|
| | | conn.commit();
|
| | |
|
| | | pstmt.close();
|
| | | features.clear();
|
| | | logger.debug("End Save into EdbGeo:" + featureType.getTypeName());
|
| | | }
|
| | | conn.setAutoCommit(autoCommit);
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | accumulate = 0;
|
| | | } catch (BatchUpdateException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.error(e.getMessage(), e);
|
| | | SQLException ex = e.getNextException();
|
| | | while (ex != null) {
|
| | | // logger.warn(ex.getMessage(), ex);
|
| | | logger.warn(ex.getMessage());
|
| | | ex = ex.getNextException();
|
| | | }
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.error(e.getMessage(), e);
|
| | | } finally {
|
| | | if (isProfileMode()) accumulateUpdateTime();
|
| | | }
|
| | | }
|
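| | |
| | | // Condensed sketch of the JDBC batch-insert pattern used in updateDataStore(), assuming
| | | // insertSql already carries the placeholders produced by makePrepareInsertSql(); the rows
| | | // here are plain Object[] values, whereas the real code binds each SimpleFeature through
| | | // bindFeatureParameters().
| | | void batchInsertSketch(Connection conn, String insertSql, List<Object[]> rows) throws SQLException {
| | |     boolean previousAutoCommit = conn.getAutoCommit();
| | |     conn.setAutoCommit(false);
| | |     try (PreparedStatement pstmt = conn.prepareStatement(insertSql)) {
| | |         for (Object[] row : rows) {
| | |             for (int i = 0; i < row.length; i++) {
| | |                 pstmt.setObject(i + 1, row[i]);
| | |             }
| | |             pstmt.addBatch();
| | |         }
| | |         int[] results = pstmt.executeBatch();
| | |         for (int i = 0; i < results.length; i++) {
| | |             // -2 (SUCCESS_NO_INFO) is acceptable for bulk inserts; only EXECUTE_FAILED means trouble
| | |             if (results[i] == Statement.EXECUTE_FAILED) {
| | |                 throw new SQLException("Batch row " + i + " failed to insert");
| | |             }
| | |         }
| | |         conn.commit();
| | |     } finally {
| | |         conn.setAutoCommit(previousAutoCommit);
| | |     }
| | | }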
| | |
|
| | | public JobExecutionContext getExecutionContext() {
|
| | | return executionContext;
|
| | | }
|
| | |
|
| | | public void setExecutionContext(JobExecutionContext context) {
|
| | | executionContext = context;
|
| | | }
|
| | |
|
| | | /**
|
| | | * Closes the feature writer and releases any resources held for writing.
|
| | | *
|
| | | * @throws IOException if an I/O error occurs while closing the writer
|
| | | */
|
| | | public void closeFeatureWriter() throws IOException {
|
| | | }
|
| | |
|
| | | protected OracleElementLogger getElementLogger() {
|
| | | if (elmLogger == null) {
|
| | | elmLogger = new OracleElementLogger(getOracleConnection());
|
| | | elmLogger.setDataPath(this.getDataPath());
|
| | | }
|
| | | return elmLogger;
|
| | | }
|
| | |
|
| | | public String getCurrentSchema() {
|
| | | return currentSchema;
|
| | | }
|
| | |
|
| | | public void setCurrentSchema(String querySchema) {
|
| | | this.currentSchema = querySchema;
|
| | | this.schemaChanged = true;
|
| | | }
|
| | |
|
| | | protected Log getLogger() {
|
| | | return logger;
|
| | | }
|
| | |
|
| | | public boolean isDropTableMode() {
|
| | | return dropTableMode;
|
| | | }
|
| | |
|
| | | public void setDropTableMode(boolean dropTableMode) {
|
| | | this.dropTableMode = dropTableMode;
|
| | | }
|
| | |
|
| | | public void createFeatureTypeOccurred(FeatureTypeEvent evt) {
|
| | | try {
|
| | | createOrClearFeatureDataTable(evt.getFeatureType());
|
| | | } catch (SchemaException e) {
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | |
|
| | | protected void createOrClearFeatureDataTable(SimpleFeatureType featureType) throws SchemaException {
|
| | | String featureName = featureType.getTypeName();
|
| | | Connection conn = null;
|
| | | if (isExistFeature(featureType)) {
|
| | | try {
|
| | | conn = getConnection();
|
| | | if (dropTableMode) {
|
| | | dropGeometryColumn(conn, getTargetSchema(), featureName,
|
| | | ((FeatureTypeImpl)featureType).getGeometryDescriptor().getName().getLocalPart());
|
| | | dropTable(conn, getTargetSchema(), featureName);
|
| | |
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType);
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | stmt.execute(stmtText);
|
| | | JDBCUtils.close(stmt);
|
| | | }
|
| | | } else {
|
| | | deleteTable(conn, getTargetSchema(), featureName);
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | } else {
|
| | | String tempStmt = null;
|
| | | try {
|
| | | conn = getConnection();
|
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType);
|
| | | for (String stmtText : schemaTexts) {
|
| | | Statement stmt = conn.createStatement();
|
| | | tempStmt = stmtText;
|
| | | stmt.execute(stmtText);
|
| | | stmt.close();
|
| | | }
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | } catch (IOException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
|
| | | logger.warn("RUN--" + tempStmt);
|
| | | logger.warn(e.getMessage(), e);
|
| | | } catch (SQLException e) {
|
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e);
|
| | | logger.warn("RUN--" + tempStmt);
|
| | | logger.warn(e.getMessage(), e);
|
| | | }
|
| | | }
|
| | | }
|
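| | |
| | | // Hedged sketch of the dropTableMode branch above in plain SQL. Identifier quoting and the
| | | // geometry_columns bookkeeping handled by dropGeometryColumn()/createNewSchemaTexts() are
| | | // simplified away; the schema and table names are illustrative.
| | | void resetFeatureTableSketch(Connection conn, String schema, String table, boolean dropTable) throws SQLException {
| | |     Statement stmt = conn.createStatement();
| | |     try {
| | |         if (dropTable) {
| | |             // start from a clean slate; the caller recreates the table afterwards
| | |             stmt.execute("DROP TABLE IF EXISTS \"" + schema + "\".\"" + table + "\"");
| | |         } else {
| | |             // keep the table definition and indexes, remove only the rows
| | |             stmt.execute("DELETE FROM \"" + schema + "\".\"" + table + "\"");
| | |         }
| | |     } finally {
| | |         JDBCUtils.close(stmt);
| | |     }
| | | }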
| | |
|
| | | public boolean isSchemaChanged() {
|
| | | return schemaChanged;
|
| | | }
|
| | | }
|
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.SchemaNotFoundException; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.mysql.MySQLDataStore; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractDgnFileJobContext; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public abstract class AbstractDgnToMySQLJobContext extends AbstractDgnFileJobContext { |
| | | protected MySQLDataStore targetDataStore; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | public AbstractDgnToMySQLJobContext(String dataPath, DataStore targetDataStore, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, profileMode, useTransform, useEPSG3826); |
| | | this.targetDataStore = (MySQLDataStore) targetDataStore; |
| | | boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | this.targetDataStore = (JDBCDataStore) targetDataStore; |
| | | } |
| | | |
| | | public MySQLDataStore getTargetDataStore() { |
| | | public JDBCDataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | public void setTargetDataStore(MySQLDataStore targetDataStore) { |
| | | public void setTargetDataStore(JDBCDataStore targetDataStore) { |
| | | this.targetDataStore = targetDataStore; |
| | | } |
| | | |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.SchemaNotFoundException; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.mysql.MySQLDataStore; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public abstract class AbstractOracleToMySQLJobContext extends AbstractOracleJobContext { |
| | | protected MySQLDataStore targetDataStore; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | public AbstractOracleToMySQLJobContext(String dataPath, DataStore targetDataStore, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(profileMode, useTransform, useEPSG3826); |
| | | if ((targetDataStore != null) && (targetDataStore instanceof MySQLDataStore)) { |
| | | this.targetDataStore = (MySQLDataStore) targetDataStore; |
| | | boolean useTransform) { |
| | | super(profileMode, useTransform); |
| | | if ((targetDataStore != null) && (targetDataStore instanceof JDBCDataStore)) { |
| | | this.targetDataStore = (JDBCDataStore) targetDataStore; |
| | | } else { |
| | | getLogger().info("targetDataStore has wrong."); |
| | | } |
| | | setDataPath(dataPath); |
| | | } |
| | | |
| | | public MySQLDataStore getTargetDataStore() { |
| | | public JDBCDataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | public void setTargetDataStore(MySQLDataStore targetDataStore) { |
| | | public void setTargetDataStore(JDBCDataStore targetDataStore) { |
| | | this.targetDataStore = targetDataStore; |
| | | } |
| | | |
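| | | |
| | | // Hedged sketch of how a caller might obtain the DataStore handed to these contexts; the |
| | | // parameter values are placeholders and the "mysql" dbtype assumes the GeoTools MySQL NG |
| | | // plugin is on the classpath. |
| | | DataStore lookupMySqlStoreSketch() throws IOException { |
| | |     Map<String, Object> params = new HashMap<String, Object>(); |
| | |     params.put("dbtype", "mysql"); |
| | |     params.put("host", "localhost"); |
| | |     params.put("port", 3306); |
| | |     params.put("database", "xdgn"); |
| | |     params.put("user", "ximple"); |
| | |     params.put("passwd", "secret"); |
| | |     // returns null when no registered factory can process these parameters |
| | |     return DataStoreFinder.getDataStore(params); |
| | | } |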
| | |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class DummyFeatureConvertMySQlJobContext extends AbstractDgnToMySQLJobContext { |
| | | static final Log logger = LogFactory.getLog(DummyFeatureConvertMySQlJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final String SHPOUTPATH = "shpout"; |
| | | |
| | | private String dataOut = null; |
| | |
| | | private boolean withIndex = false; |
| | | |
| | | public DummyFeatureConvertMySQlJobContext(String dataPath, DataStore targetDataStore, String filterConfig, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform, useEPSG3826); |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | return; |
| | | } |
| | | |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class FeatureDgnConvertMySQLJobContext extends AbstractDgnToMySQLJobContext { |
| | | static final Log logger = LogFactory.getLog(FeatureDgnConvertMySQLJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | |
| | | private HashMap<String, ArrayList<SimpleFeature>> featuresContext = new HashMap<String, ArrayList<SimpleFeature>>(); |
| | | private HashMap<String, FeatureWriter> featuresWriterContext = new HashMap<String, FeatureWriter>(); |
| | |
| | | private boolean withIndex = false; |
| | | |
| | | public FeatureDgnConvertMySQLJobContext(String dataPath, DataStore targetDataStore, String filterConfig, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform, useEPSG3826); |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | } |
| | | |
| | | // Check whether the element matches the dispatch filter conditions |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public class GeneralDgnConvertMySQLJobContext extends AbstractDgnToMySQLJobContext { |
| | | static final Log logger = LogFactory.getLog(GeneralDgnConvertMySQLJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | |
| | | private String dataOut = null; |
| | | |
| | |
| | | private PessimisticMapWrapper txFeaturesContext; |
| | | private TreeMap<String, SimpleFeatureType> featureTypes = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | private GeometryConverterDecorator convertDecorator = null; |
| | | private String featureBaseName = null; |
| | | private boolean withIndex = false; |
| | | |
| | | public GeneralDgnConvertMySQLJobContext(String dataPath, DataStore targetDataStore, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | if (isEPSG3826()) |
| | | convertDecorator = new EPSG3826GeometryConverterDecorator(); |
| | | else |
| | | convertDecorator = new EPSG3825GeometryConverterDecorator(); |
| | | } |
| | | |
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException { |
| | |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | if (element instanceof TextElement) { |
| | | TextElement textElement = (TextElement) element; |
| | | convertDecorator.setConverter(textElement); |
| | |
| | | LineStringElement linestring = (LineStringElement) element; |
| | | convertDecorator.setConverter(linestring); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(linestring.getColorIndex()), |
| | | linestring.getWeight(), |
| | | linestring.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof LineElement) { |
| | | LineElement line = (LineElement) element; |
| | | convertDecorator.setConverter(line); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(line.getColorIndex()), |
| | | line.getWeight(), |
| | | line.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof ArcElement) { |
| | | ArcElement arcElement = (ArcElement) element; |
| | |
| | | */ |
| | | convertDecorator.setConverter(arcElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | arcElement.getWeight(), |
| | | arcElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof EllipseElement) { |
| | | EllipseElement arcElement = (EllipseElement) element; |
| | | convertDecorator.setConverter(arcElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | arcElement.getWeight(), |
| | | arcElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof ComplexChainElement) { |
| | | ComplexChainElement complexChainElement = (ComplexChainElement) element; |
| | | convertDecorator.setConverter(complexChainElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(complexChainElement.getColorIndex()), |
| | | complexChainElement.getWeight(), |
| | | complexChainElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } |
| | | return null; |
| | |
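| | | |
| | | // Hedged sketch of the attribute layout assumed by the createFeature() branches above; |
| | | // the type and attribute names are illustrative rather than the schema the job writes. |
| | | SimpleFeatureType buildLineTypeSketch() { |
| | |     SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder(); |
| | |     tb.setName("DGN_LINE"); |
| | |     tb.add("geom", LineString.class);        // default geometry attribute |
| | |     tb.add("symbol", String.class);          // colorTable.getColorCode(...) |
| | |     tb.add("symbolweight", Integer.class); |
| | |     tb.add("symbolstyle", Integer.class); |
| | |     return tb.buildFeatureType(); |
| | | } |
| | | |
| | | SimpleFeature buildLineFeatureSketch(SimpleFeatureType featureType, Geometry geom, |
| | |                                      String color, int weight, int style) { |
| | |     // same attribute ordering as the Object[] arrays passed to SimpleFeatureBuilder.build() above |
| | |     return SimpleFeatureBuilder.build(featureType, new Object[]{geom, color, weight, style}, null); |
| | | } |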
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public class IndexDgnConvertMySQLJobContext extends AbstractDgnToMySQLJobContext { |
| | | static final Log logger = LogFactory.getLog(IndexDgnConvertMySQLJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | |
| | | protected GeometryConverterDecorator convertDecorator; |
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | |
| | | private HashMap<String, ArrayList<SimpleFeature>> featuresContext = new HashMap<String, ArrayList<SimpleFeature>>(); |
| | | private HashMap<String, FeatureWriter> featuresWriterContext = new HashMap<String, FeatureWriter>(); |
| | |
| | | private SimpleFeatureType featureType3 = null; |
| | | |
| | | public IndexDgnConvertMySQLJobContext(String dataPath, DataStore targetDataStore, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | if (isEPSG3826()) { |
| | | convertDecorator = new EPSG3826GeometryConverterDecorator(); |
| | | } else { |
| | | convertDecorator = new EPSG3825GeometryConverterDecorator(); |
| | | } |
| | | } |
| | | |
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException { |
| | |
| | | String tpclid = textElement.getText(); |
| | | |
| | | Envelope extent = TPCLIDConverter.convertTpclIdToEnvelope(tpclid); |
| | | Geometry geom = (isEPSG3826() ? |
| | | Geometry geom = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())), |
| | |
| | | TextElement txtElement = (TextElement) element; |
| | | double angle = txtElement.getRotationAngle(); |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | SimpleFeature feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | convertDecorator.toGeometry(geometryFactory), |
| | | geom, |
| | | colorTable.getColorCode(txtElement.getColorIndex()), |
| | | txtElement.getWeight(), |
| | | txtElement.getLineStyle(), |
| | |
| | | int dx = (i % 2) * TPCLIDConverter.SX600; |
| | | int dy = (i / 2) * TPCLIDConverter.SY600; |
| | | |
| | | Geometry geom = (isEPSG3826() ? |
| | | Geometry geom = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate( |
| | |
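| | | |
| | | // Hedged sketch of turning a TPCLID tile Envelope into a closed JTS polygon the way the |
| | | // branches above do; the TWDDatumConverter reprojection step is omitted here, so the |
| | | // corner coordinates pass through unchanged. |
| | | Polygon envelopeToPolygonSketch(GeometryFactory factory, Envelope extent) { |
| | |     Coordinate[] ring = new Coordinate[]{ |
| | |         new Coordinate(extent.getMinX(), extent.getMinY()), |
| | |         new Coordinate(extent.getMaxX(), extent.getMinY()), |
| | |         new Coordinate(extent.getMaxX(), extent.getMaxY()), |
| | |         new Coordinate(extent.getMinX(), extent.getMaxY()), |
| | |         new Coordinate(extent.getMinX(), extent.getMinY())   // repeat the first point to close the ring |
| | |     }; |
| | |     return factory.createPolygon(factory.createLinearRing(ring), null); |
| | | } |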
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.mysql.MySQLDataStoreFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.quartz.JobExecutionContext; |
| | |
| | | // private String _convertElementIn = null; |
| | | |
| | | public OracleConvertMySQLJobContext(String dataPath, DataStore oraDS, String filterConfig, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, oraDS, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, oraDS, profileMode, useTransform); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | |
| | | public void putFeatureCollection(Element element) { |
| | | assert elementDispatcher != null; |
| | | // Check whether the element matches the dispatch filter conditions |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | boolean isEmptySize = false; |
| | | FrammeAttributeData linkage = |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.SchemaNotFoundException; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.oracle.OracleDataStore; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractDgnFileJobContext; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public abstract class AbstractDgnToOraSDOJobContext extends AbstractDgnFileJobContext { |
| | | protected OracleDataStore targetDataStore; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | public AbstractDgnToOraSDOJobContext(String dataPath, DataStore targetDataStore, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, profileMode, useTransform, useEPSG3826); |
| | | this.targetDataStore = (OracleDataStore) targetDataStore; |
| | | boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | this.targetDataStore = (JDBCDataStore) targetDataStore; |
| | | } |
| | | |
| | | public OracleDataStore getTargetDataStore() { |
| | | public JDBCDataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | public void setTargetDataStore(OracleDataStore targetDataStore) { |
| | | public void setTargetDataStore(JDBCDataStore targetDataStore) { |
| | | this.targetDataStore = targetDataStore; |
| | | } |
| | | |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.SchemaNotFoundException; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.oracle.OracleDataStore; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public abstract class AbstractOracleToOraSDOJobContext extends AbstractOracleJobContext { |
| | | protected boolean profileMode = false; |
| | | protected OracleDataStore targetDataStore; |
| | | protected JDBCDataStore targetDataStore; |
| | | |
| | | public AbstractOracleToOraSDOJobContext(String dataPath, DataStore targetDataStore, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(profileMode, useTransform, useEPSG3826); |
| | | if ((targetDataStore != null) && (targetDataStore instanceof OracleDataStore)) { |
| | | this.targetDataStore = (OracleDataStore) targetDataStore; |
| | | boolean useTransform) { |
| | | super(profileMode, useTransform); |
| | | if ((targetDataStore != null) && (targetDataStore instanceof JDBCDataStore)) { |
| | | this.targetDataStore = (JDBCDataStore) targetDataStore; |
| | | } else { |
| | | getLogger().info("targetDataStore has wrong."); |
| | | } |
| | | setDataPath(dataPath); |
| | | } |
| | | |
| | | public OracleDataStore getTargetDataStore() { |
| | | public JDBCDataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | public void setTargetDataStore(OracleDataStore targetDataStore) { |
| | | public void setTargetDataStore(JDBCDataStore targetDataStore) { |
| | | this.targetDataStore = targetDataStore; |
| | | } |
| | | |
| | |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class DummyFeatureConvertOraSDOJobContext extends AbstractDgnToOraSDOJobContext { |
| | | static final Log logger = LogFactory.getLog(DummyFeatureConvertOraSDOJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final String SHPOUTPATH = "shpout"; |
| | | |
| | | private String dataOut = null; |
| | |
| | | private boolean withIndex = false; |
| | | |
| | | public DummyFeatureConvertOraSDOJobContext(String dataPath, DataStore targetDataStore, String filterConfig, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform, useEPSG3826); |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | } |
| | | |
| | | // Check whether the element matches the dispatch filter conditions |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class FeatureDgnConvertOraSDOJobContext extends AbstractDgnToOraSDOJobContext { |
| | | static final Log logger = LogFactory.getLog(FeatureDgnConvertOraSDOJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | |
| | | private HashMap<String, ArrayList<SimpleFeature>> featuresContext = new HashMap<String, ArrayList<SimpleFeature>>(); |
| | | private HashMap<String, FeatureWriter> featuresWriterContext = new HashMap<String, FeatureWriter>(); |
| | |
| | | private boolean withIndex = false; |
| | | |
| | | public FeatureDgnConvertOraSDOJobContext(String dataPath, DataStore targetDataStore, String filterConfig, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform, useEPSG3826); |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | return; |
| | | } |
| | | |
| | | // Check whether the element matches the dispatch filter conditions |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public class GeneralDgnConvertOraSDOJobContext extends AbstractDgnToOraSDOJobContext { |
| | | static final Log logger = LogFactory.getLog(GeneralDgnConvertOraSDOJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | |
| | | private String dataOut = null; |
| | | |
| | |
| | | private PessimisticMapWrapper txFeaturesContext; |
| | | private TreeMap<String, SimpleFeatureType> featureTypes = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | private GeometryConverterDecorator convertDecorator = null; |
| | | private String featureBaseName = null; |
| | | private boolean withIndex = false; |
| | | |
| | | public GeneralDgnConvertOraSDOJobContext(String dataPath, DataStore targetDataStore, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | if (isEPSG3826()) |
| | | convertDecorator = new EPSG3826GeometryConverterDecorator(); |
| | | else |
| | | convertDecorator = new EPSG3825GeometryConverterDecorator(); |
| | | } |
| | | |
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException { |
| | |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | if (element instanceof TextElement) { |
| | | TextElement textElement = (TextElement) element; |
| | | convertDecorator.setConverter(textElement); |
| | |
| | | LineStringElement linestring = (LineStringElement) element; |
| | | convertDecorator.setConverter(linestring); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(linestring.getColorIndex()), |
| | | linestring.getWeight(), |
| | | linestring.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof LineElement) { |
| | | LineElement line = (LineElement) element; |
| | | convertDecorator.setConverter(line); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(line.getColorIndex()), |
| | | line.getWeight(), |
| | | line.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof ArcElement) { |
| | | ArcElement arcElement = (ArcElement) element; |
| | |
| | | */ |
| | | convertDecorator.setConverter(arcElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | arcElement.getWeight(), |
| | | arcElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof EllipseElement) { |
| | | EllipseElement arcElement = (EllipseElement) element; |
| | | convertDecorator.setConverter(arcElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | arcElement.getWeight(), |
| | | arcElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof ComplexChainElement) { |
| | | ComplexChainElement complexChainElement = (ComplexChainElement) element; |
| | | convertDecorator.setConverter(complexChainElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(complexChainElement.getColorIndex()), |
| | | complexChainElement.getWeight(), |
| | | complexChainElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } |
| | | return null; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public class IndexDgnConvertOraSDOJobContext extends AbstractDgnToOraSDOJobContext { |
| | | static final Log logger = LogFactory.getLog(IndexDgnConvertOraSDOJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | |
| | | protected GeometryConverterDecorator convertDecorator; |
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | |
| | | private HashMap<String, ArrayList<SimpleFeature>> featuresContext = new HashMap<String, ArrayList<SimpleFeature>>(); |
| | | private HashMap<String, FeatureWriter> featuresWriterContext = new HashMap<String, FeatureWriter>(); |
| | |
| | | private SimpleFeatureType featureType3 = null; |
| | | |
| | | public IndexDgnConvertOraSDOJobContext(String dataPath, DataStore targetDataStore, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, targetDataStore, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | if (isEPSG3826()) { |
| | | convertDecorator = new EPSG3826GeometryConverterDecorator(); |
| | | } else { |
| | | convertDecorator = new EPSG3825GeometryConverterDecorator(); |
| | | } |
| | | } |
| | | |
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException { |
| | |
| | | String tpclid = textElement.getText(); |
| | | |
| | | Envelope extent = TPCLIDConverter.convertTpclIdToEnvelope(tpclid); |
| | | Geometry geom = (isEPSG3826() ? |
| | | Geometry geom = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())), |
| | |
| | | |
| | | public SimpleFeature createFeature2(SimpleFeatureType featureType, Element element) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | if (element instanceof TextElement) { |
| | | TextElement txtElement = (TextElement) element; |
| | | double angle = txtElement.getRotationAngle(); |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | convertDecorator.setConverter(txtElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | SimpleFeature feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | convertDecorator.toGeometry(geometryFactory), |
| | | geom, |
| | | colorTable.getColorCode(txtElement.getColorIndex()), |
| | | txtElement.getWeight(), |
| | | txtElement.getLineStyle(), |
| | |
| | | int dx = (i % 2) * TPCLIDConverter.SX600; |
| | | int dy = (i / 2) * TPCLIDConverter.SY600; |
| | | |
| | | Geometry geom = (isEPSG3826() ? |
| | | Geometry geom = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate( |
| | |
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.vividsolutions.jts.util.Assert; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.jobs.OracleElementLogger; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.oracle.OracleDataStoreFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.data.oracle.OracleNGDataStoreFactory; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.util.Assert; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.jobs.OracleElementLogger; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | |
| | | public class OracleConvertOraSDOJobContext extends AbstractOracleToOraSDOJobContext { |
| | | static Log logger = LogFactory.getLog(OracleConvertOraSDOJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | |
| | | static OracleDataStoreFactory dataStoreFactory = new OracleDataStoreFactory(); |
| | | static OracleNGDataStoreFactory dataStoreFactory = new OracleNGDataStoreFactory(); |
| | | |
| | | private OracleElementLogger elmLogger = null; |
| | | |
| | |
| | | // private String _convertElementIn = null; |
| | | |
| | | public OracleConvertOraSDOJobContext(String dataPath, DataStore oraDS, String filterConfig, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, oraDS, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, oraDS, profileMode, useTransform); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | |
| | | public void putFeatureCollection(Element element) { |
| | | assert elementDispatcher != null; |
| | | // Check whether the element is empty and dispatch it to a feature |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | boolean isEmptySize = false; |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | |
| | | executionContext = context; |
| | | } |
| | | |
| | | /** |
| | | * Close all feature writers held by this context. |
| | | * |
| | | * @throws IOException if an I/O error occurs while closing a writer |
| | | */ |
| | | public void closeFeatureWriter() throws IOException { |
| | | |
| | | for (FeatureWriter featureWriter : this.featuresWriterContext.values()) { |
| | |
| | | import java.sql.ResultSet; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.*; |
| | | import java.util.ArrayList; |
| | | import java.util.HashMap; |
| | | import java.util.List; |
| | | import java.util.Map; |
| | | import java.util.Set; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryCollection; |
| | | import com.vividsolutions.jts.geom.LineString; |
| | | import com.vividsolutions.jts.geom.LinearRing; |
| | | import com.vividsolutions.jts.geom.MultiLineString; |
| | | import com.vividsolutions.jts.geom.MultiPoint; |
| | | import com.vividsolutions.jts.geom.MultiPolygon; |
| | | import com.vividsolutions.jts.geom.Point; |
| | | import com.vividsolutions.jts.geom.Polygon; |
| | | import com.ximple.eofms.jobs.context.AbstractDgnFileJobContext; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.postjts.JtsBinaryWriter; |
| | | import org.geotools.data.DataSourceException; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.SchemaNotFoundException; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisDataStore; |
| | | import org.geotools.filter.LengthFunction; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.geotools.referencing.NamedIdentifier; |
| | | import org.geotools.referencing.crs.DefaultGeographicCRS; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.opengis.feature.type.AttributeDescriptor; |
| | | import org.opengis.feature.type.AttributeType; |
| | | import org.opengis.feature.type.GeometryDescriptor; |
| | | import org.opengis.feature.type.PropertyType; |
| | | import org.opengis.filter.BinaryComparisonOperator; |
| | |
| | | import org.opengis.filter.expression.Literal; |
| | | import org.opengis.referencing.crs.CoordinateReferenceSystem; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryCollection; |
| | | import com.vividsolutions.jts.geom.LineString; |
| | | import com.vividsolutions.jts.geom.MultiLineString; |
| | | import com.vividsolutions.jts.geom.MultiPoint; |
| | | import com.vividsolutions.jts.geom.MultiPolygon; |
| | | import com.vividsolutions.jts.geom.Point; |
| | | import com.vividsolutions.jts.geom.Polygon; |
| | | import com.vividsolutions.jts.io.WKBWriter; |
| | | import com.vividsolutions.jts.io.WKTWriter; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractDgnFileJobContext; |
| | | import com.ximple.eofms.util.postjts.JtsBinaryWriter; |
| | | |
| | | public abstract class AbstractDgnToPostGISJobContext extends AbstractDgnFileJobContext { |
| | | |
| | | private static Map<String, Class> GEOM_TYPE_MAP = new HashMap<String, Class>(); |
| | | private static Map<String, Class> GEOM3D_TYPE_MAP = new HashMap<String, Class>(); |
| | | |
| | |
| | | /** |
| | | * Well Known Text writer (from JTS). |
| | | */ |
| | | protected static WKTWriter geometryWriter = new WKTWriter(); |
| | | // protected static WKTWriter geometryWriter = new WKTWriter(); |
| | | protected static JtsBinaryWriter binaryWriter = new JtsBinaryWriter(); |
| | | |
| | | private PostgisDataStore targetDataStore; |
| | | private JDBCDataStore targetDataStore; |
| | | // protected Connection connection; |
| | | protected String targetSchema = "public"; |
| | | |
| | | protected boolean schemaEnabled = true; |
| | | |
| | | public AbstractDgnToPostGISJobContext(String dataPath, DataStore targetDataStore, String targetSchema, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, profileMode, useTransform, useEPSG3826); |
| | | if ((targetDataStore != null) && (targetDataStore instanceof PostgisDataStore)) { |
| | | this.targetDataStore = (PostgisDataStore) targetDataStore; |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | if ((targetDataStore != null) && (targetDataStore instanceof JDBCDataStore)) { |
| | | this.targetDataStore = (JDBCDataStore) targetDataStore; |
| | | } else { |
| | | getLogger().info("targetDataStore has wrong."); |
| | | } |
| | |
| | | setTargetSchema(targetSchema); |
| | | } |
| | | |
| | | public PostgisDataStore getTargetDataStore() { |
| | | public JDBCDataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | public void setTargetDataStore(PostgisDataStore targetDataStore) { |
| | | public void setTargetDataStore(JDBCDataStore targetDataStore) { |
| | | this.targetDataStore = targetDataStore; |
| | | } |
| | | |
| | |
| | | |
| | | public void setTargetSchema(String schemaName) { |
| | | targetSchema = schemaName; |
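| | | // JDBCDataStore.setDatabaseSchema(...) makes later type lookups and generated SQL qualify tables with this schema. |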
| | | targetDataStore.setDatabaseSchema(targetSchema); |
| | | } |
| | | |
| | | public Connection getConnection() { |
| | | try { |
| | | return targetDataStore.getConnection(Transaction.AUTO_COMMIT); |
| | | |
| | | } catch (IOException e) |
| | | { |
| | | getLogger().warn(e.getMessage(), e); |
| | |
| | | } catch (SchemaNotFoundException e) { |
| | | return false; |
| | | } catch (IOException e) { |
| | | getLogger().info(e.getMessage(), e); |
| | | if (e.getMessage().indexOf("Schema") != -1) |
| | | getLogger().info(e.getMessage(), e); |
| | | else |
| | | getLogger().info(e.getMessage()); |
| | | return false; |
| | | } |
| | | } |
| | |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | |
| | | protected void dropTable(Connection conn, String tableName) throws SQLException { |
| | | protected boolean dropTable(Connection conn, String tableName) throws SQLException { |
| | | Statement stmt = conn.createStatement(); |
| | | StringBuilder sb = new StringBuilder(); |
| | | sb.append("DROP TABLE \""); |
| | |
| | | sb.append(tableName); |
| | | sb.append("\""); |
| | | // sb.append(" CASCADE"); |
| | | stmt.execute(sb.toString()); |
| | | if (!conn.getAutoCommit()) |
| | | conn.commit(); |
| | | JDBCUtils.close(stmt); |
| | | |
| | | try { |
| | | stmt.execute(sb.toString()); |
| | | } catch (SQLException e) { |
| | | getLogger().warn("Execute-" + sb.toString()); |
| | | getLogger().warn(e.getMessage(), e); |
| | | return false; |
| | | } finally { |
| | | if (!conn.getAutoCommit()) |
| | | conn.commit(); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | |
| | | return true; |
| | | } |
| | | |
| | | protected void dropGeometryColumn(Connection conn, String tableName, String geomField) throws SQLException { |
| | | protected boolean dropGeometryColumn(Connection conn, String dbSchema, String tableName, String geomField) throws SQLException { |
| | | Statement stmt = conn.createStatement(); |
| | | StringBuilder sb = new StringBuilder(); |
| | | sb.append("SELECT \"public\".DropGeometryColumn('','"); |
| | | sb.append("SELECT \"public\".DropGeometryColumn('"); |
| | | sb.append(dbSchema); |
| | | sb.append("','"); |
| | | sb.append(tableName); |
| | | sb.append("','"); |
| | | sb.append(geomField); |
| | | sb.append("')"); |
| | | stmt.execute(sb.toString()); |
| | | if (!conn.getAutoCommit()) |
| | | conn.commit(); |
| | | JDBCUtils.close(stmt); |
| | | try { |
| | | stmt.execute(sb.toString()); |
| | | } catch (SQLException e) { |
| | | getLogger().warn("Execute-" + sb.toString()); |
| | | getLogger().warn(e.getMessage(), e); |
| | | return false; |
| | | } finally { |
| | | if (!conn.getAutoCommit()) |
| | | conn.commit(); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | |
| | | return true; |
| | | } |
| | | |
| | | protected String dropGeometryColumn(String dbSchema, String tableName, String geomField) { |
| | |
| | | |
| | | private String addGeometryColumn(String dbSchema, String tableName, GeometryDescriptor geometryDescriptor, int srid) { |
| | | StringBuilder sql; |
| | | String typeName = getGeometrySQLTypeName(geometryDescriptor.getType().getBinding()); |
| | | String typeName = getGeometrySQLTypeName((geometryDescriptor.getType()).getBinding()); |
| | | if (typeName == null) { |
| | | getLogger().warn("Error: " + geometryDescriptor.getLocalName() + " unknown type!!!"); |
| | | throw new RuntimeException("Error: " + geometryDescriptor.getLocalName() + " unknown type!!!"); |
| | |
| | | sql.append(encodeSchemaTableName(tableName)); |
| | | sql.append(" USING GIST ("); |
| | | sql.append(encodeSchemaColumnName(descriptor.getLocalName())); |
| | | sql.append(" gist_geometry_ops);"); |
| | | sql.append(" );"); |
| | | // sql.append(" gist_geometry_ops);"); |
| | | |
| | | sqlStr = sql.toString(); |
| | | getLogger().debug(sqlStr); |
| | |
| | | if (descriptor == null) { |
| | | msg = "AttributeType was null!"; |
| | | } else { |
| | | msg = "Type '" + descriptor.getType().getBinding() + "' not supported!"; |
| | | msg = "Type '" + ((AttributeType) descriptor.getType()).getBinding() + "' not supported!"; |
| | | } |
| | | throw (new IOException(msg)); |
| | | } |
| | |
| | | return "null"; |
| | | } |
| | | |
| | | /* |
| | | if (targetDataStore.isWKBEnabled()) { |
| | | //String wkb = WKBEncoder.encodeGeometryHex(geom); |
| | | String wkb = WKBWriter.bytesToHex(new WKBWriter().write(geom)); |
| | |
| | | } |
| | | |
| | | String geoText = geometryWriter.write(geom); |
| | | |
| | | return "GeometryFromText('" + geoText + "', " + srid + ")"; |
| | | */ |
| | | if (geom instanceof LinearRing) { |
| | | //postgis does not handle linear rings, convert to just a line string |
| | | geom = geom.getFactory().createLineString(((LinearRing) geom).getCoordinateSequence()); |
| | | } |
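| | | // Resulting literal (illustrative): ST_GeomFromText('LINESTRING (250000 2544000, 250600 2544600)', 3826) |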
| | | return "ST_GeomFromText('" + geom.toText() + "', " + srid + ")"; |
| | | } |
| | | |
| | | protected String makeInsertSql(SimpleFeature feature, int srid) // throws IOException |
| | |
| | | // String geomName = descriptors[i].getLocalName(); |
| | | // int srid = ftInfo.getSRID(geomName); |
| | | Geometry geometry = (Geometry) attributes.get(i); |
| | | geometry.setSRID(FeatureTypeBuilderUtil.getDefaultFeatureSRID()); |
| | | |
| | | if (geometry == null) { |
| | | attrValue = "NULL"; |
| | |
| | | |
| | | for (int i = 0; i < descriptors.size(); i++) { |
| | | if (descriptors.get(i) instanceof GeometryDescriptor) { |
| | | Geometry geom = (Geometry) attributes.get(i); |
| | | geom.setSRID(FeatureTypeBuilderUtil.getDefaultFeatureSRID()); |
| | | pstmt.setBytes(i + 1, binaryWriter.writeBinary((Geometry) attributes.get(i))); |
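| | | // The geometry is bound as bytes from JtsBinaryWriter (assumed to emit PostGIS EWKB carrying the SRID set above). |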
| | | } else { |
| | | if (descriptors.get(i).getType().getBinding().equals(Short.class)) { |
| | | Class<?> bindingType = descriptors.get(i).getType().getBinding(); |
| | | if (bindingType.equals(Short.class)) { |
| | | pstmt.setShort(i + 1, (Short) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Integer.class)) { |
| | | pstmt.setInt(i + 1, (Short) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Long.class)) { |
| | | } else if (bindingType.equals(Integer.class)) { |
| | | pstmt.setInt(i + 1, (Integer) attributes.get(i)); |
| | | } else if (bindingType.equals(Long.class)) { |
| | | pstmt.setLong(i + 1, (Long) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(String.class)) { |
| | | } else if (bindingType.equals(String.class)) { |
| | | pstmt.setString(i + 1, (String) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Float.class)) { |
| | | } else if (bindingType.equals(Float.class)) { |
| | | pstmt.setFloat(i + 1, (Float) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Double.class)) { |
| | | } else if (bindingType.equals(Double.class)) { |
| | | pstmt.setDouble(i + 1, (Double) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Boolean.class)) { |
| | | } else if (bindingType.equals(Boolean.class)) { |
| | | pstmt.setBoolean(i + 1, (Boolean) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(BigDecimal.class)) { |
| | | } else if (bindingType.equals(BigDecimal.class)) { |
| | | pstmt.setBigDecimal(i + 1, (BigDecimal) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(java.sql.Date.class)) { |
| | | } else if (bindingType.equals(java.sql.Date.class)) { |
| | | pstmt.setDate(i + 1, (java.sql.Date) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(java.sql.Time.class)) { |
| | | } else if (bindingType.equals(java.sql.Time.class)) { |
| | | pstmt.setTime(i + 1, (java.sql.Time) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(java.sql.Timestamp.class)) { |
| | | } else if (bindingType.equals(java.sql.Timestamp.class)) { |
| | | pstmt.setTimestamp(i + 1, (java.sql.Timestamp) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(java.util.Date.class)) { |
| | | } else if (bindingType.equals(java.util.Date.class)) { |
| | | java.sql.Date sDate = new java.sql.Date(((java.util.Date) attributes.get(i)).getTime()); |
| | | pstmt.setDate(i + 1, sDate); |
| | | } |
| | |
| | | import java.sql.Statement; |
| | | import java.util.*; |
| | | |
| | | import com.vividsolutions.jts.geom.LinearRing; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import org.geotools.data.DataSourceException; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.SchemaNotFoundException; |
| | | import org.geotools.data.postgis.PostgisDataStore; |
| | | import org.geotools.filter.LengthFunction; |
| | | import org.geotools.jdbc.JDBCDataStore; |
| | | import org.geotools.referencing.NamedIdentifier; |
| | | import org.geotools.referencing.crs.DefaultGeographicCRS; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.opengis.feature.type.AttributeDescriptor; |
| | | import org.opengis.feature.type.AttributeType; |
| | | import org.opengis.feature.type.GeometryDescriptor; |
| | | import org.opengis.feature.type.PropertyType; |
| | | import org.opengis.filter.BinaryComparisonOperator; |
| | |
| | | /** |
| | | * Well Known Text writer (from JTS). |
| | | */ |
| | | protected static WKTWriter geometryWriter = new WKTWriter(); |
| | | // protected static WKTWriter geometryWriter = new WKTWriter(); |
| | | protected static JtsBinaryWriter binaryWriter = new JtsBinaryWriter(); |
| | | |
| | | protected boolean schemaEnabled = true; |
| | | |
| | | private PostgisDataStore targetDataStore; |
| | | private JDBCDataStore targetDataStore; |
| | | private String targetSchema = "public"; |
| | | |
| | | public AbstractOracleToPostGISJobContext(String dataPath, DataStore targetDataStore, String targetSchema, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(profileMode, useTransform, useEPSG3826); |
| | | if ((targetDataStore != null) && (targetDataStore instanceof PostgisDataStore)) { |
| | | this.targetDataStore = (PostgisDataStore) targetDataStore; |
| | | boolean profileMode, boolean useTransform) { |
| | | super(profileMode, useTransform); |
| | | if ((targetDataStore != null) && (targetDataStore instanceof JDBCDataStore)) { |
| | | this.targetDataStore = (JDBCDataStore) targetDataStore; |
| | | } else { |
| | | getLogger().info("targetDataStore has wrong."); |
| | | } |
| | |
| | | setTargetSchema(targetSchema); |
| | | } |
| | | |
| | | public PostgisDataStore getTargetDataStore() { |
| | | public JDBCDataStore getTargetDataStore() { |
| | | return targetDataStore; |
| | | } |
| | | |
| | | public void setTargetDataStore(PostgisDataStore targetDataStore) { |
| | | public void setTargetDataStore(JDBCDataStore targetDataStore) { |
| | | this.targetDataStore = targetDataStore; |
| | | } |
| | | |
| | |
| | | |
| | | public void setTargetSchema(String schemaName) { |
| | | targetSchema = schemaName; |
| | | targetDataStore.setDatabaseSchema(targetSchema); |
| | | } |
| | | |
| | | public Connection getConnection() { |
| | |
| | | } catch (SchemaNotFoundException e) { |
| | | return false; |
| | | } catch (IOException e) { |
| | | getLogger().info(e.getMessage(), e); |
| | | if (e.getMessage().indexOf("Schema") != -1) |
| | | getLogger().info(e.getMessage(), e); |
| | | else |
| | | getLogger().info(e.getMessage()); |
| | | return false; |
| | | } |
| | | } |
| | |
| | | conn.commit(); |
| | | } |
| | | |
| | | protected void dropTable(Connection conn, String schemaName, String tableName) throws SQLException { |
| | | protected boolean dropTable(Connection conn, String schemaName, String tableName) throws SQLException { |
| | | Statement stmt = conn.createStatement(); |
| | | StringBuilder sb = new StringBuilder(); |
| | | sb.append("DROP TABLE \""); |
| | |
| | | sb.append(tableName); |
| | | sb.append("\""); |
| | | // sb.append(" CASCADE"); |
| | | stmt.execute(sb.toString()); |
| | | stmt.close(); |
| | | conn.commit(); |
| | | try { |
| | | stmt.execute(sb.toString()); |
| | | } catch (SQLException e) { |
| | | getLogger().warn("Execute-" + sb.toString()); |
| | | getLogger().warn(e.getMessage(), e); |
| | | return false; |
| | | } finally { |
| | | stmt.close(); |
| | | conn.commit(); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | protected void dropGeometryColumn(Connection conn, String dbSchema, String tableName, String geomField) throws SQLException { |
| | | protected boolean dropGeometryColumn(Connection conn, String dbSchema, String tableName, String geomField) throws SQLException { |
| | | Statement stmt = conn.createStatement(); |
| | | StringBuilder sb = new StringBuilder(); |
| | | sb.append("SELECT \"public\".DropGeometryColumn('"); |
| | |
| | | sb.append("','"); |
| | | sb.append(geomField); |
| | | sb.append("')"); |
| | | stmt.execute(sb.toString()); |
| | | stmt.close(); |
| | | conn.commit(); |
| | | |
| | | try { |
| | | stmt.execute(sb.toString()); |
| | | } catch (SQLException e) { |
| | | getLogger().warn("Execute-" + sb.toString()); |
| | | getLogger().warn(e.getMessage(), e); |
| | | return false; |
| | | } finally { |
| | | stmt.close(); |
| | | conn.commit(); |
| | | } |
| | | return true; |
| | | } |
| | | |
| | | protected String dropGeometryColumn(String dbSchema, String tableName, String geomField) { |
| | |
| | | sql.append(encodeSchemaTableName(tableName)); |
| | | sql.append(" USING GIST ("); |
| | | sql.append(encodeSchemaColumnName(descriptor.getLocalName())); |
| | | sql.append(" gist_geometry_ops);"); |
| | | sql.append(" );"); |
| | | // sql.append(" gist_geometry_ops);"); |
| | | |
| | | sqlStr = sql.toString(); |
| | | getLogger().debug(sqlStr); |
| | |
| | | } |
| | | |
| | | if (length < 1) { |
| | | getLogger().warn("FeatureType did not specify string length; defaulted to 256"); |
| | | getLogger().info("FeatureType did not specify string length; defaulted to 512 :" + |
| | | descriptor.getLocalName()); |
| | | length = 512; |
| | | } else if (length > MAX_ALLOWED_VALUE) { |
| | | length = MAX_ALLOWED_VALUE; |
| | |
| | | return "null"; |
| | | } |
| | | |
| | | /* |
| | | if (targetDataStore.isWKBEnabled()) { |
| | | //String wkb = WKBEncoder.encodeGeometryHex(geom); |
| | | String wkb = WKBWriter.bytesToHex(new WKBWriter().write(geom)); |
| | |
| | | } |
| | | |
| | | String geoText = geometryWriter.write(geom); |
| | | |
| | | return "GeometryFromText('" + geoText + "', " + srid + ")"; |
| | | */ |
| | | if (geom instanceof LinearRing) { |
| | | //postgis does not handle linear rings, convert to just a line string |
| | | geom = geom.getFactory().createLineString(((LinearRing) geom).getCoordinateSequence()); |
| | | } |
| | | return "ST_GeomFromText('" + geom.toText() + "', " + srid + ")"; |
| | | } |
| | | |
| | | protected String makeInsertSql(SimpleFeature feature, int srid) // throws IOException |
| | |
| | | |
| | | for (int i = 0; i < descriptors.size(); i++) { |
| | | if (descriptors.get(i) instanceof GeometryDescriptor) { |
| | | // String wktTest = geometryWriter.write((Geometry) attributes.get(i)); |
| | | Geometry geom = (Geometry) attributes.get(i); |
| | | geom.setSRID(FeatureTypeBuilderUtil.getDefaultFeatureSRID()); |
| | | pstmt.setBytes(i + 1, binaryWriter.writeBinary((Geometry) attributes.get(i))); |
| | | } else { |
| | | if (descriptors.get(i).getType().getBinding().equals(Short.class)) { |
| | | Class<?> bindingType = ((AttributeType)descriptors.get(i).getType()).getBinding(); |
| | | if (bindingType.equals(Short.class)) { |
| | | pstmt.setShort(i + 1, (Short) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Integer.class)) { |
| | | pstmt.setInt(i + 1, (Short) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Long.class)) { |
| | | } else if (bindingType.equals(Integer.class)) { |
| | | pstmt.setInt(i + 1, (Integer) attributes.get(i)); |
| | | } else if (bindingType.equals(Long.class)) { |
| | | pstmt.setLong(i + 1, (Long) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(String.class)) { |
| | | } else if (bindingType.equals(String.class)) { |
| | | pstmt.setString(i + 1, (String) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Float.class)) { |
| | | } else if (bindingType.equals(Float.class)) { |
| | | pstmt.setFloat(i + 1, (Float) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Double.class)) { |
| | | } else if (bindingType.equals(Double.class)) { |
| | | pstmt.setDouble(i + 1, (Double) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(Boolean.class)) { |
| | | } else if (bindingType.equals(Boolean.class)) { |
| | | pstmt.setBoolean(i + 1, (Boolean) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(BigDecimal.class)) { |
| | | } else if (bindingType.equals(BigDecimal.class)) { |
| | | pstmt.setBigDecimal(i + 1, (BigDecimal) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(java.sql.Date.class)) { |
| | | } else if (bindingType.equals(java.sql.Date.class)) { |
| | | pstmt.setDate(i + 1, (java.sql.Date) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(java.sql.Time.class)) { |
| | | } else if (bindingType.equals(java.sql.Time.class)) { |
| | | pstmt.setTime(i + 1, (java.sql.Time) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(java.sql.Timestamp.class)) { |
| | | } else if (bindingType.equals(java.sql.Timestamp.class)) { |
| | | pstmt.setTimestamp(i + 1, (java.sql.Timestamp) attributes.get(i)); |
| | | } else if (descriptors.get(i).getType().getBinding().equals(java.util.Date.class)) { |
| | | } else if (bindingType.equals(java.util.Date.class)) { |
| | | java.sql.Date sDate = new java.sql.Date(((java.util.Date) attributes.get(i)).getTime()); |
| | | pstmt.setDate(i + 1, sDate); |
| | | } |
| | |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class DummyFeatureConvertPostGISJobContext extends AbstractDgnToPostGISJobContext { |
| | | static final Log logger = LogFactory.getLog(DummyFeatureConvertPostGISJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | |
| | | private String dataOut = null; |
| | | |
| | |
| | | |
| | | public DummyFeatureConvertPostGISJobContext(String dataPath, DataStore targetDataStore, String targetSchema, |
| | | String filterConfig, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | } |
| | | |
| | | // Check whether the element is empty and dispatch it to a feature |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class FeatureDgnConvertPostGISJobContext extends AbstractDgnToPostGISJobContext { |
| | | static final Log logger = LogFactory.getLog(FeatureDgnConvertPostGISJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | |
| | | private HashMap<String, ArrayList<SimpleFeature>> featuresContext = new HashMap<String, ArrayList<SimpleFeature>>(); |
| | | private HashMap<String, FeatureWriter> featuresWriterContext = new HashMap<String, FeatureWriter>(); |
| | |
| | | |
| | | public FeatureDgnConvertPostGISJobContext(String dataPath, DataStore targetDataStore, String targetSchema, |
| | | String filterConfig, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | return; |
| | | } |
| | | |
| | | // Check whether the element is empty and dispatch it to a feature |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | import java.util.List; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.postgresql.util.PSQLException; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.io.dgn7.ArcElement; |
| | |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.io.dgn7.TextNodeElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.postgresql.util.PSQLException; |
| | | |
| | | public class GeneralDgnConvertPostGISJobContext extends AbstractDgnToPostGISJobContext { |
| | | static final Log logger = LogFactory.getLog(GeneralDgnConvertPostGISJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
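| | | // JTSFactoryFinder obtains the GeometryFactory through GeoTools' factory lookup instead of constructing one directly. |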
| | | |
| | | private HashMap<SimpleFeatureType, ArrayList<SimpleFeature>> txFeaturesContext = new HashMap<SimpleFeatureType, ArrayList<SimpleFeature>>(); |
| | | |
| | | private TreeMap<String, SimpleFeatureType> featureTypes = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | private GeometryConverterDecorator convertDecorator = null; |
| | | private String featureBaseName = null; |
| | | private boolean dropTableMode = true; |
| | | |
| | | private int accumulate = 0; |
| | | |
| | | public GeneralDgnConvertPostGISJobContext(String dataPath, DataStore targetDataStore, String targetSchema, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform, useEPSG3826); |
| | | if (isEPSG3826()) |
| | | convertDecorator = new EPSG3826GeometryConverterDecorator(); |
| | | else |
| | | convertDecorator = new EPSG3825GeometryConverterDecorator(); |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform); |
| | | } |
| | | |
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException { |
| | |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | logger.error(e.getMessage(), e); |
| | | Exception nextE = e.getNextException(); |
| | | if (nextE != null) { |
| | | logger.error("getNextException:" + nextE.getMessage(), nextE); |
| | | } |
| | | } finally { |
| | | if (isProfileMode()) accumulateUpdateTime(); |
| | | } |
| | |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalPointFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | featureTypes.put(featureName, featureType); |
| | | clearFeatureData(typeBuilder); |
| | | clearFeatureData(featureName); |
| | | } |
| | | return featureTypes.get(featureName); |
| | | } |
| | |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalPolygonFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | featureTypes.put(featureName, featureType); |
| | | clearFeatureData(typeBuilder); |
| | | clearFeatureData(featureName); |
| | | } |
| | | return featureTypes.get(featureName); |
| | | } |
| | |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalLineFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | featureTypes.put(featureName, featureType); |
| | | clearFeatureData(typeBuilder); |
| | | clearFeatureData(featureName); |
| | | } |
| | | return featureTypes.get(featureName); |
| | | } |
| | |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalMultiLineFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | featureTypes.put(featureName, featureType); |
| | | clearFeatureData(typeBuilder); |
| | | clearFeatureData(featureName); |
| | | } |
| | | return featureTypes.get(featureName); |
| | | } |
| | |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalArcFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | featureTypes.put(featureName, featureType); |
| | | clearFeatureData(typeBuilder); |
| | | clearFeatureData(featureName); |
| | | } |
| | | return featureTypes.get(featureName); |
| | | } |
| | |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalEllipseFeatureTypeBuilder(featureName); |
| | | SimpleFeatureType featureType = typeBuilder.buildFeatureType(); |
| | | featureTypes.put(featureName, featureType); |
| | | clearFeatureData(typeBuilder); |
| | | clearFeatureData(featureName); |
| | | } |
| | | return featureTypes.get(featureName); |
| | | } |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | |
| | | if (element instanceof TextElement) { |
| | | TextElement textElement = (TextElement) element; |
| | | convertDecorator.setConverter(textElement); |
| | |
| | | LineStringElement linestring = (LineStringElement) element; |
| | | convertDecorator.setConverter(linestring); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(linestring.getColorIndex()), |
| | | linestring.getWeight(), |
| | | linestring.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof LineElement) { |
| | | LineElement line = (LineElement) element; |
| | | convertDecorator.setConverter(line); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(line.getColorIndex()), |
| | | line.getWeight(), |
| | | line.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof ArcElement) { |
| | | ArcElement arcElement = (ArcElement) element; |
| | |
| | | */ |
| | | convertDecorator.setConverter(arcElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | arcElement.getWeight(), |
| | | arcElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof EllipseElement) { |
| | | EllipseElement arcElement = (EllipseElement) element; |
| | | convertDecorator.setConverter(arcElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | arcElement.getWeight(), |
| | | arcElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof ComplexChainElement) { |
| | | ComplexChainElement complexChainElement = (ComplexChainElement) element; |
| | | convertDecorator.setConverter(complexChainElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(complexChainElement.getColorIndex()), |
| | | complexChainElement.getWeight(), |
| | | complexChainElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } |
| | | return null; |
| | |
| | | this.dropTableMode = dropTableMode; |
| | | } |
| | | |
| | | protected void clearFeatureData(SimpleFeatureTypeBuilder typeBuilder) throws SchemaException { |
| | | protected void clearFeatureData(String featureTypeName) throws SchemaException { |
| | | if (isProfileMode()) markUpdateTime(); |
| | | |
| | | String featureName = typeBuilder.getName(); |
| | | Connection conn = null; |
| | | if (isExistFeature(typeBuilder.buildFeatureType())) { |
| | | |
| | | SimpleFeatureType featureType = featureTypes.get(featureTypeName); |
| | | String featureName = featureType.getName().getLocalPart(); |
| | | String currentSQL = null; |
| | | if (isExistFeature(featureType)) { |
| | | try { |
| | | conn = getConnection(); |
| | | if (dropTableMode) { |
| | | dropGeometryColumn(conn, featureName, |
| | | typeBuilder.buildFeatureType().getGeometryDescriptor().getLocalName()); |
| | | dropGeometryColumn(conn, getTargetSchema(), featureName, |
| | | (featureType).getGeometryDescriptor().getLocalName()); |
| | | dropTable(conn, featureName); |
| | | |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, typeBuilder.buildFeatureType()); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | currentSQL = stmtText; |
| | | stmt.execute(stmtText); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } else { |
| | | deleteTable(conn, featureName); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (isProfileMode()) accumulateUpdateTime(); |
| | | } |
| | | } else { |
| | | try { |
| | | conn = getConnection(); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, typeBuilder.buildFeatureType()); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | stmt.execute(stmtText); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (isProfileMode()) accumulateUpdateTime(); |
| | | } |
| | | } |
| | |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.postgresql.util.PSQLException; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.Envelope; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | import com.vividsolutions.jts.geom.Polygon; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.TPCLIDConverter; |
| | |
| | | import com.ximple.io.dgn7.ShapeElement; |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.postgresql.util.PSQLException; |
| | | |
| | | public class IndexDgnConvertPostGISJobContext extends AbstractDgnToPostGISJobContext { |
| | | |
| | | static final Log logger = LogFactory.getLog(IndexDgnConvertPostGISJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | protected GeometryConverterDecorator convertDecorator; |
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | |
| | | private HashMap<SimpleFeatureType, ArrayList<SimpleFeature>> txFeaturesContext = |
| | | new HashMap<SimpleFeatureType, ArrayList<SimpleFeature>>(); |
| | | |
| | | private SimpleFeatureTypeBuilder typeBuilderPnt = null; |
| | | private SimpleFeatureTypeBuilder typeBuilderRect = null; |
| | | private SimpleFeatureTypeBuilder typeBuilderSmallRect = null; |
| | | private HashMap<String, SimpleFeatureType> typesMapping = new HashMap<String, SimpleFeatureType>(); |
| | | |
| | | private SimpleFeatureType featureType = null; |
| | | private SimpleFeatureType featureType1 = null; |
| | | private SimpleFeatureType featureType2 = null; |
| | | private SimpleFeatureType featureType3 = null; |
| | | |
| | |
| | | private int accumulate = 0; |
| | | |
| | | public IndexDgnConvertPostGISJobContext(String dataPath, DataStore targetDataStore, String targetSchema, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform, useEPSG3826); |
| | | if (isEPSG3826()) { |
| | | convertDecorator = new EPSG3826GeometryConverterDecorator(); |
| | | } else { |
| | | convertDecorator = new EPSG3825GeometryConverterDecorator(); |
| | | } |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, targetDataStore, targetSchema, profileMode, useTransform); |
| | | } |
| | | |
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException { |
| | |
| | | protected void putTextFeatureCollection(TextElement element) throws SchemaException, IllegalAttributeException { |
| | | SimpleFeature feature = createFeature(element); |
| | | if (feature == null) { |
| | | logger.info("cannot craete feature." + element.toString() + "'" + |
| | | logger.info("cannot craete feature. " + element.toString() + "'" + |
| | | element.getText() + "'"); |
| | | return; |
| | | } |
| | |
| | | |
| | | feature = createFeature2(element); |
| | | if (feature == null) { |
| | | logger.info("cannot craete feature2." + element.toString() + "'" + |
| | | logger.info("cannot craete feature2. " + element.toString() + "'" + |
| | | element.getText() + "'"); |
| | | return; |
| | | } |
| | |
| | | if (feature == null) { |
| | | Polygon polygon = (Polygon) element.toGeometry(geometryFactory); |
| | | if (polygon == null) { |
| | | logger.info("cannot craete feature." + element.toString() + "'" + |
| | | logger.info("cannot craete feature. " + element.toString() + "'" + |
| | | "linear is null" + "'"); |
| | | } else { |
| | | Coordinate pt = polygon.getEnvelopeInternal().centre(); |
| | | String id = TPCLIDConverter.CoordinateToTpclId(pt); |
| | | logger.info("cannot craete feature." + element.toString() + "'" + |
| | | logger.info("cannot craete feature. " + element.toString() + "'" + |
| | | id + "'- from pt=" + pt); |
| | | } |
| | | return; |
| | |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | logger.error(e.getMessage(), e); |
| | | Exception nextE = e.getNextException(); |
| | | if (nextE != null) { |
| | | logger.error("getNextException:" + nextE.getMessage(), nextE); |
| | | } |
| | | } finally { |
| | | if (isProfileMode()) this.accumulateUpdateTime(); |
| | | } |
| | |
| | | */ |
| | | } |
| | | |
| | | public SimpleFeatureType createFeatureElement(String featureName) throws SchemaException { |
| | | if (typeBuilderRect == null) { |
| | | public void createFeatureElement(String featureName) throws SchemaException { |
| | | if (featureType1 == null) { |
| | | Connection conn = null; |
| | | typeBuilderRect = FeatureTypeBuilderUtil.createNormalIndexFeatureTypeBuilder(featureName); |
| | | if (isExistFeature(typeBuilderRect.buildFeatureType())) { |
| | | SimpleFeatureTypeBuilder typeBuilder1 = FeatureTypeBuilderUtil.createNormalIndexFeatureTypeBuilder(featureName); |
| | | featureType1 = typeBuilder1.buildFeatureType(); |
| | | String currentSQL = null; |
| | | |
| | | if (isExistFeature(featureType1)) { |
| | | try { |
| | | conn = getConnection(); |
| | | if (dropTableMode) { |
| | | try { |
| | | dropGeometryColumn(conn, featureName, |
| | | typeBuilderRect.buildFeatureType().getGeometryDescriptor().getLocalName()); |
| | | dropGeometryColumn(conn, getTargetSchema(), featureName, |
| | | (featureType1).getGeometryDescriptor().getLocalName()); |
| | | } catch (PSQLException e) { |
| | | logger.debug(e.getMessage(), e); |
| | | } |
| | |
| | | } catch (PSQLException e) { |
| | | logger.debug(e.getMessage(), e); |
| | | } |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, |
| | | typeBuilderRect.buildFeatureType()); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType1); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | currentSQL = stmtText; |
| | | stmt.execute(stmtText); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } else { |
| | | deleteTable(conn, featureName); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } else { |
| | | try { |
| | | conn = getConnection(); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, |
| | | typeBuilderRect.buildFeatureType()); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType1); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | stmt.execute(stmtText); |
| | | currentSQL = stmtText; |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | } |
| | | return typeBuilderRect.buildFeatureType(); |
| | | } |
| | | |
| | | public SimpleFeatureType createFeatureElement2(String featureName) throws SchemaException { |
| | | if (typeBuilderPnt == null) { |
| | | public void createFeatureElement2(String featureName) throws SchemaException { |
| | | if (featureType2 == null) { |
| | | Connection conn = null; |
| | | typeBuilderPnt = FeatureTypeBuilderUtil.createNormalIndexTextFeatureTypeBuilder(featureName); |
| | | if (isExistFeature(typeBuilderPnt.buildFeatureType())) { |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalIndexTextFeatureTypeBuilder(featureName); |
| | | featureType2 = typeBuilder.buildFeatureType(); |
| | | String currentSQL = null; |
| | | if (isExistFeature(featureType2)) { |
| | | try { |
| | | conn = getConnection(); |
| | | if (dropTableMode) { |
| | | dropGeometryColumn(conn, featureName, |
| | | typeBuilderPnt.buildFeatureType().getGeometryDescriptor().getLocalName()); |
| | | dropGeometryColumn(conn, getTargetSchema(), featureName, |
| | | (featureType2).getGeometryDescriptor().getLocalName()); |
| | | dropTable(conn, featureName); |
| | | |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, |
| | | typeBuilderPnt.buildFeatureType()); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType2); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | currentSQL = stmtText; |
| | | stmt.execute(stmtText); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } else { |
| | | deleteTable(conn, featureName); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } else { |
| | | try { |
| | | conn = getConnection(); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, |
| | | typeBuilderPnt.buildFeatureType()); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType2); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | currentSQL = stmtText; |
| | | stmt.execute(stmtText); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | } |
| | | return typeBuilderPnt.buildFeatureType(); |
| | | } |
| | | |
| | | public SimpleFeatureType createFeatureElement3(String featureName) throws SchemaException { |
| | | if (typeBuilderSmallRect == null) { |
| | | public void createFeatureElement3(String featureName) throws SchemaException { |
| | | if (featureType3 == null) { |
| | | Connection conn = null; |
| | | typeBuilderSmallRect = FeatureTypeBuilderUtil.createNormalIndexFeatureTypeBuilder(featureName); |
| | | if (isExistFeature(typeBuilderSmallRect.buildFeatureType())) { |
| | | SimpleFeatureTypeBuilder typeBuilder = FeatureTypeBuilderUtil.createNormalIndexFeatureTypeBuilder(featureName); |
| | | featureType3 = typeBuilder.buildFeatureType(); |
| | | String currentSQL = null; |
| | | if (isExistFeature(featureType3)) { |
| | | try { |
| | | conn = getConnection(); |
| | | if (dropTableMode) { |
| | | try { |
| | | dropGeometryColumn(conn, featureName, |
| | | typeBuilderSmallRect.buildFeatureType().getGeometryDescriptor().getLocalName()); |
| | | dropGeometryColumn(conn, getTargetSchema(), featureName, |
| | | (featureType3).getGeometryDescriptor().getLocalName()); |
| | | } catch (PSQLException e) { |
| | | logger.debug(e.getMessage(), e); |
| | | } |
| | |
| | | } catch (PSQLException e) { |
| | | logger.debug(e.getMessage(), e); |
| | | } |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, typeBuilderSmallRect.buildFeatureType()); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType3); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | currentSQL = stmtText; |
| | | stmt.execute(stmtText); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } else { |
| | | deleteTable(conn, featureName); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } else { |
| | | try { |
| | | conn = getConnection(); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, |
| | | typeBuilderSmallRect.buildFeatureType()); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType3); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | currentSQL = stmtText; |
| | | stmt.execute(stmtText); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | if (currentSQL != null) |
| | | logger.warn("executeSQL:" + currentSQL); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } |
| | | } |
| | | return typeBuilderSmallRect.buildFeatureType(); |
| | | } |
| | | |
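| | | The three createFeatureElement variants above repeat the same loop that executes the DDL returned by createNewSchemaTexts and remembers the statement that failed. A minimal sketch of just that shared step, assuming nothing beyond plain JDBC (the DDL strings are whatever createNewSchemaTexts produces): |
| | | |
| | | import java.sql.Connection; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.List; |
| | | |
| | | final class SchemaDdlRunner { |
| | |     private SchemaDdlRunner() {} |
| | | |
| | |     // Executes each schema DDL statement in order; on failure the offending SQL is |
| | |     // attached to the exception, mirroring the currentSQL bookkeeping above. |
| | |     static void runSchemaTexts(Connection conn, List<String> schemaTexts) throws SQLException { |
| | |         for (String stmtText : schemaTexts) { |
| | |             try (Statement stmt = conn.createStatement()) { |
| | |                 stmt.execute(stmtText); |
| | |             } catch (SQLException e) { |
| | |                 throw new SQLException("executeSQL:" + stmtText, e); |
| | |             } |
| | |         } |
| | |     } |
| | | } |
| | | |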
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element) throws IllegalAttributeException { |
| | |
| | | |
| | | Envelope extent = TPCLIDConverter.convertTpclIdToEnvelope(tpclid); |
| | | Geometry geom; |
| | | if (isEPSG3826()) { |
| | | if (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) { |
| | | geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())), |
| | |
| | | Envelope extent = TPCLIDConverter.convertTpclIdToEnvelope(tpclid); |
| | | Geometry geom = null; |
| | | try { |
| | | geom = (isEPSG3826() ? |
| | | geom = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())), |
| | |
| | | TextElement txtElement = (TextElement) element; |
| | | double angle = txtElement.getRotationAngle(); |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | Geometry gobj = convertDecorator.toGeometry(geometryFactory); |
| | | if (gobj != null) |
| | |
| | | String tpclid = TPCLIDConverter.CoordinateToTpclId(center); |
| | | if (tpclid.length() > 5) { |
| | | tpclid = tpclid.substring(0, 5); |
| | | Coordinate pos = (isEPSG3826() ? |
| | | Coordinate pos = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(center.x, center.y)) : |
| | | TWDDatumConverter.fromTM2ToEPSG3825(new Coordinate(center.x, center.y))); |
| | | Geometry gobj = geometryFactory.createPoint(pos); |
| | |
| | | |
| | | Geometry geom; |
| | | |
| | | if (isEPSG3826()) { |
| | | if (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) { |
| | | geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate( |
| | |
| | | int dx = (i % 2) * TPCLIDConverter.SX600; |
| | | int dy = (i / 2) * TPCLIDConverter.SY600; |
| | | |
| | | Geometry geom = (isEPSG3826() ? |
| | | Geometry geom = (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826 ? |
| | | geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate( |
| | |
| | | } |
| | | |
| | | private SimpleFeature createFeature(Element element) throws SchemaException, IllegalAttributeException { |
| | | if (featureType == null) { |
| | | if (featureType1 == null) { |
| | | String dgnname = getFilename().toLowerCase(); |
| | | int i = dgnname.lastIndexOf("."); |
| | | if (i != -1) { |
| | | dgnname = dgnname.substring(0, i); |
| | | } |
| | | featureType = createFeatureElement(dgnname.toLowerCase()); |
| | | createFeatureElement(dgnname.toLowerCase()); |
| | | } |
| | | return createFeature(featureType, element); |
| | | return createFeature(featureType1, element); |
| | | } |
| | | |
| | | private SimpleFeature createFeature2(Element element) throws SchemaException, IllegalAttributeException { |
| | |
| | | dgnname = dgnname.substring(0, i); |
| | | } |
| | | dgnname = dgnname + "_p"; |
| | | featureType2 = createFeatureElement2(dgnname.toLowerCase()); |
| | | createFeatureElement2(dgnname.toLowerCase()); |
| | | } |
| | | return createFeature2(featureType2, element); |
| | | } |
| | |
| | | dgnname = dgnname.substring(0, i); |
| | | } |
| | | dgnname = dgnname + "_s"; |
| | | featureType3 = createFeatureElement3(dgnname.toLowerCase()); |
| | | createFeatureElement3(dgnname.toLowerCase()); |
| | | } |
| | | return createFeature3(featureType3, element); |
| | | } |
New file |
| | |
| | | package com.ximple.eofms.jobs.context.postgis; |
| | | |
| | | import org.geotools.data.DataStore; |
| | | |
| | | public class OracleConvertGeoServerContext extends OracleConvertPostGISJobContext { |
| | | |
| | | public OracleConvertGeoServerContext(String dataPath, DataStore pgDS, String targetSchema, String filterConfig, boolean profileMode, boolean useTransform) { |
| | | super(dataPath, pgDS, targetSchema, filterConfig, profileMode, useTransform); |
| | | } |
| | | } |
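| | | |
| | | A hypothetical construction sketch for the new OracleConvertGeoServerContext above; the arguments mirror the superclass signature and would normally come from the job configuration, so every literal below is a placeholder: |
| | | |
| | | import org.geotools.data.DataStore; |
| | | |
| | | import com.ximple.eofms.jobs.context.postgis.OracleConvertGeoServerContext; |
| | | |
| | | final class GeoServerContextExample { |
| | |     private GeoServerContextExample() {} |
| | | |
| | |     // Builds the GeoServer-flavoured context; behaviour is inherited unchanged |
| | |     // from OracleConvertPostGISJobContext. |
| | |     static OracleConvertGeoServerContext create(DataStore pgDS) { |
| | |         return new OracleConvertGeoServerContext( |
| | |                 "/tmp/dgnjobs",   // dataPath (placeholder) |
| | |                 pgDS,             // target PostGIS DataStore |
| | |                 "public",         // targetSchema (placeholder) |
| | |                 null,             // filterConfig: null lets the dispatcher fall back to its default rules |
| | |                 false,            // profileMode |
| | |                 true);            // useTransform |
| | |     } |
| | | } |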
| | |
| | | import java.util.Iterator; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.vividsolutions.jts.util.Assert; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.CreateFeatureTypeEventListener; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.FeatureTypeEvent; |
| | | import com.ximple.eofms.jobs.OracleElementLogger; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.data.postgis.PostgisDataStoreFactory; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | |
| | | import org.quartz.JobExecutionContext; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.util.Assert; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.CreateFeatureTypeEventListener; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.FeatureTypeEvent; |
| | | import com.ximple.eofms.jobs.OracleElementLogger; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | |
| | | public class OracleConvertPostGISJobContext extends AbstractOracleToPostGISJobContext |
| | | implements CreateFeatureTypeEventListener { |
| | | static Log logger = LogFactory.getLog(OracleConvertPostGISJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | |
| | | static PostgisDataStoreFactory dataStoreFactory = new PostgisDataStoreFactory(); |
| | | // static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | private OracleElementLogger elmLogger = null; |
| | | |
| | |
| | | private int accumulate = 0; |
| | | |
| | | public OracleConvertPostGISJobContext(String dataPath, DataStore pgDS, String targetSchema, String filterConfig, |
| | | boolean profileMode, boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, pgDS, targetSchema, profileMode, useTransform, useEPSG3826); |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, pgDS, targetSchema, profileMode, useTransform); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | | elementDispatcher.addCreateFeatureTypeEventListener(this); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | public void putFeatureCollection(Element element) { |
| | | assert elementDispatcher != null; |
| | | // Check whether the element matches the dispatch filter conditions |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | boolean isEmptySize = false; |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | |
| | | } |
| | | logger.error(e.getServerErrorMessage()); |
| | | logger.error(e.getMessage(), e); |
| | | } catch (NullPointerException e) { |
| | | if (bindingStmt != null) { |
| | | logger.error("Execute:" + bindingStmt); |
| | | } |
| | | logger.error(feature.toString()); |
| | | logger.error(e.getMessage(), e); |
| | | } catch (ClassCastException e) { |
| | | if (bindingStmt != null) { |
| | | logger.error("Execute:" + bindingStmt); |
| | |
| | | } catch (BatchUpdateException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | logger.error(e.getMessage(), e); |
| | | SQLException ex; |
| | | while ((ex = e.getNextException()) != null) { |
| | | // logger.warn(ex.getMessage(), ex); |
| | | logger.warn(ex.getMessage()); |
| | | } |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | logger.error(e.getMessage(), e); |
| | |
| | | conn = getConnection(); |
| | | if (dropTableMode) { |
| | | dropGeometryColumn(conn, getTargetSchema(), featureName, |
| | | featureType.getGeometryDescriptor().getName().getLocalPart()); |
| | | (featureType).getGeometryDescriptor().getName().getLocalPart()); |
| | | dropTable(conn, getTargetSchema(), featureName); |
| | | |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType); |
| | |
| | | } else { |
| | | deleteTable(conn, getTargetSchema(), featureName); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } else { |
| | | String tempStmt = null; |
New file |
| | |
| | | package com.ximple.eofms.jobs.context.postgis; |
| | | |
| | | import java.io.File; |
| | | import java.io.IOException; |
| | | import java.io.UnsupportedEncodingException; |
| | | import java.net.MalformedURLException; |
| | | import java.net.URL; |
| | | import java.sql.BatchUpdateException; |
| | | import java.sql.Connection; |
| | | import java.sql.DriverManager; |
| | | import java.sql.PreparedStatement; |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.util.ArrayList; |
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.util.Assert; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.CreateFeatureTypeEventListener; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.FeatureTypeEvent; |
| | | import com.ximple.eofms.jobs.OracleElementLogger; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.jdbc.JDBCUtils; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.postgresql.util.PSQLException; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | public class OracleIncrementPostGISJobContext extends AbstractOracleToPostGISJobContext |
| | | implements CreateFeatureTypeEventListener { |
| | | |
| | | static Log logger = LogFactory.getLog(OracleIncrementPostGISJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | |
| | | // static PostgisNGDataStoreFactory dataStoreFactory = new PostgisNGDataStoreFactory(); |
| | | |
| | | private OracleElementLogger elmLogger = null; |
| | | |
| | | static { |
| | | try { |
| | | DriverManager.registerDriver(new oracle.jdbc.driver.OracleDriver()); |
| | | } catch (SQLException e) { |
| | | Assert.shouldNeverReachHere(e.getMessage()); |
| | | } |
| | | } |
| | | |
| | | private String _filterConfig; |
| | | |
| | | private ElementDispatcher elementDispatcher; |
| | | |
| | | private HashMap<SimpleFeatureType, ArrayList<SimpleFeature>> txFeaturesContext = new HashMap<SimpleFeatureType, ArrayList<SimpleFeature>>(); |
| | | |
| | | private JobExecutionContext executionContext; |
| | | |
| | | private String currentSchema = null; |
| | | private boolean schemaChanged = false; |
| | | private boolean dropTableMode = true; |
| | | private int accumulate = 0; |
| | | |
| | | public static class ElementTransactionContext { |
| | | public int transcationType; |
| | | public short cid; |
| | | public int oid; |
| | | public short compid; |
| | | public short occid; |
| | | public int taskid; |
| | | public Element element; |
| | | public int result; |
| | | }; |
| | | |
| | | public OracleIncrementPostGISJobContext(String dataPath, DataStore pgDS, String targetSchema, String filterConfig, |
| | | boolean profileMode, boolean useTransform) { |
| | | super(dataPath, pgDS, targetSchema, profileMode, useTransform); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | | elementDispatcher.addCreateFeatureTypeEventListener(this); |
| | | // txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | } |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | | if (config.exists()) { |
| | | filterURL = config.toURI().toURL(); |
| | | } |
| | | } |
| | | if (filterURL == null) { |
| | | // config = new File("conf/DefaultConvertShpFilter.xml"); |
| | | filterURL = this.getClass().getResource("/conf/DefaultConvertShpFilter.xml"); |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new RuntimeException(e.getMessage(), e); |
| | | } catch (MalformedURLException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new RuntimeException(e.getMessage(), e); |
| | | } catch (IOException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new RuntimeException(e.getMessage(), e); |
| | | } catch (SAXException e) { |
| | | logger.info(e.getMessage(), e); |
| | | throw new RuntimeException(e.getMessage(), e); |
| | | } |
| | | } |
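| | | |
| | | The filter lookup in createElementDispatcher above prefers an external file and otherwise falls back to the bundled /conf/DefaultConvertShpFilter.xml resource; a minimal sketch of just that resolution step: |
| | | |
| | | import java.io.File; |
| | | import java.net.MalformedURLException; |
| | | import java.net.URL; |
| | | |
| | | final class FilterConfigResolver { |
| | |     private FilterConfigResolver() {} |
| | | |
| | |     // Prefers an existing external filter file; otherwise returns the bundled default |
| | |     // rules (may be null if the resource is missing, which the caller asserts against). |
| | |     static URL resolve(String filterConfig) throws MalformedURLException { |
| | |         if (filterConfig != null) { |
| | |             File config = new File(filterConfig); |
| | |             if (config.exists()) { |
| | |                 return config.toURI().toURL(); |
| | |             } |
| | |         } |
| | |         return FilterConfigResolver.class.getResource("/conf/DefaultConvertShpFilter.xml"); |
| | |     } |
| | | } |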
| | | |
| | | /** |
| | | * STATUS field: 0 = insert, 2 = edit, 3 = delete equipment, 4 = delete component |
| | | * @param context |
| | | */ |
| | | public void putFeatureCollection(ElementTransactionContext context) { |
| | | assert elementDispatcher != null; |
| | | if (context == null) { |
| | | logger.warn("putFeatureCollection context is null"); |
| | | return; |
| | | } |
| | | if (context.transcationType == 0) { |
| | | // insert Element |
| | | putFeatureCollection(context.element); |
| | | } else if (context.transcationType == 2) { |
| | | // Update Element |
| | | } else if (context.transcationType == 3) { |
| | | // Remove Whole Feature |
| | | } else if (context.transcationType == 4) { |
| | | // Remove Feature Part |
| | | } |
| | | } |
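| | | |
| | | A hypothetical caller sketch for the STATUS dispatch above; only type 0 (insert) is wired through in this changeset, and the field spelling follows the ElementTransactionContext declaration as-is: |
| | | |
| | | import com.ximple.eofms.jobs.context.postgis.OracleIncrementPostGISJobContext; |
| | | import com.ximple.io.dgn7.Element; |
| | | |
| | | final class IncrementDispatchExample { |
| | |     private IncrementDispatchExample() {} |
| | | |
| | |     // Wraps a newly read element into a transaction context and hands it to the job context. |
| | |     static void dispatchInsert(OracleIncrementPostGISJobContext jobContext, Element element, int taskId) { |
| | |         OracleIncrementPostGISJobContext.ElementTransactionContext tx = |
| | |                 new OracleIncrementPostGISJobContext.ElementTransactionContext(); |
| | |         tx.transcationType = 0; // 0 = insert; 2/3/4 (edit/delete) are not handled yet above |
| | |         tx.taskid = taskId; |
| | |         tx.element = element; |
| | |         jobContext.putFeatureCollection(tx); |
| | |     } |
| | | } |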
| | | |
| | | protected void putFeatureCollection(Element element) { |
| | | assert elementDispatcher != null; |
| | | // Check whether the element matches the dispatch filter conditions |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | boolean isEmptySize = false; |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size() + ":" + |
| | | (linkage == null ? "NULL" : (linkage.getUfid()))); |
| | | if (complex.size() == 0) |
| | | isEmptySize = true; |
| | | } |
| | | |
| | | if (getElementLogging() && (!isEmptySize)) { |
| | | getElementLogger().logElement(element, getCurrentSchema()); |
| | | } |
| | | return; |
| | | } |
| | | |
| | | if (((Geometry)feature.getDefaultGeometry()).isEmpty()) { |
| | | boolean isEmptySize = false; |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Empty Geom Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size() + ":" + |
| | | (linkage == null ? "NULL" : (linkage.getUfid()))); |
| | | if (complex.size() == 0) |
| | | isEmptySize = true; |
| | | } |
| | | |
| | | if (getElementLogging() && (!isEmptySize)) { |
| | | getElementLogger().logElement(element, getCurrentSchema()); |
| | | } |
| | | return; |
| | | } |
| | | |
| | | if (!txFeaturesContext.containsKey(feature.getFeatureType())) { |
| | | txFeaturesContext.put(feature.getFeatureType(), new ArrayList<SimpleFeature>()); |
| | | } |
| | | ArrayList<SimpleFeature> arrayList = txFeaturesContext.get(feature.getFeatureType()); |
| | | arrayList.add(feature); |
| | | accumulate++; |
| | | if (accumulate > BATCHSIZE) { |
| | | commitTransaction(); |
| | | } |
| | | } |
| | | |
| | | public void startTransaction() { |
| | | } |
| | | |
| | | public void commitTransaction() { |
| | | if (!txFeaturesContext.isEmpty()) { |
| | | logger.debug("Transaction size = " + txFeaturesContext.size()); |
| | | //txFeaturesContext.commitTransaction(); |
| | | } else { |
| | | logger.debug("Transaction is empty."); |
| | | } |
| | | |
| | | if (!txFeaturesContext.isEmpty()) { |
| | | updateDataStore(); |
| | | } |
| | | |
| | | if (this.getElementLogger() != null) |
| | | this.getElementLogger().flashLogging(); |
| | | } |
| | | |
| | | public void rollbackTransaction() { |
| | | } |
| | | |
| | | public void resetFeatureContext() { |
| | | txFeaturesContext.clear(); |
| | | } |
| | | |
| | | private void updateDataStore() { |
| | | if (isProfileMode()) markUpdateTime(); |
| | | Iterator<SimpleFeatureType> it = txFeaturesContext.keySet().iterator(); |
| | | Connection conn = null; |
| | | try { |
| | | conn = getConnection(); |
| | | boolean autoCommit = conn.getAutoCommit(); |
| | | conn.setAutoCommit(false); |
| | | |
| | | while (it.hasNext()) { |
| | | SimpleFeatureType featureType = it.next(); |
| | | logger.debug("Begin Save into PostGIS:" + featureType.getTypeName()); |
| | | |
| | | int batchCount = 0; |
| | | String bindingStmt = makePrepareInsertSql(featureType); |
| | | ArrayList<SimpleFeature> features = txFeaturesContext.get(featureType); |
| | | PreparedStatement pstmt = conn.prepareStatement(bindingStmt); |
| | | |
| | | for (SimpleFeature feature : features) { |
| | | try { |
| | | // stmt.execute(feature); |
| | | bindFeatureParameters(pstmt, feature); |
| | | // pstmt.executeUpdate(); |
| | | pstmt.addBatch(); |
| | | } catch (PSQLException e) { |
| | | if (bindingStmt != null) { |
| | | logger.error("Execute:" + bindingStmt); |
| | | } |
| | | logger.error(e.getServerErrorMessage()); |
| | | logger.error(e.getMessage(), e); |
| | | } catch (NullPointerException e) { |
| | | if (bindingStmt != null) { |
| | | logger.error("Execute:" + bindingStmt); |
| | | } |
| | | logger.error(feature.toString()); |
| | | logger.error(e.getMessage(), e); |
| | | } catch (ClassCastException e) { |
| | | if (bindingStmt != null) { |
| | | logger.error("Execute:" + bindingStmt); |
| | | } |
| | | for (int i = 0; i < feature.getAttributeCount(); i++) { |
| | | logger.info("attr[" + i + "]-" + ((feature.getAttribute(i) == null) ? " NULL" : |
| | | feature.getAttribute(i).toString())); |
| | | } |
| | | logger.error(e.getMessage(), e); |
| | | } |
| | | batchCount++; |
| | | } |
| | | |
| | | int[] numUpdates = pstmt.executeBatch(); |
| | | for (int i = 0; i < numUpdates.length; i++) { |
| | | if (numUpdates[i] == -2) |
| | | logger.warn("Execution " + i + ": unknown number of rows updated"); |
| | | } |
| | | conn.commit(); |
| | | |
| | | pstmt.close(); |
| | | features.clear(); |
| | | logger.debug("End Save into PostGIS:" + featureType.getTypeName()); |
| | | } |
| | | conn.setAutoCommit(autoCommit); |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | accumulate = 0; |
| | | } catch (BatchUpdateException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | logger.error(e.getMessage(), e); |
| | | SQLException ex; |
| | | while ((ex = e.getNextException()) != null) { |
| | | // logger.warn(ex.getMessage(), ex); |
| | | logger.warn(ex.getMessage()); |
| | | } |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | logger.error(e.getMessage(), e); |
| | | } finally { |
| | | if (isProfileMode()) accumulateUpdateTime(); |
| | | } |
| | | } |
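| | | |
| | | updateDataStore above batches one PreparedStatement per feature type inside a manual transaction; a stripped-down sketch of that JDBC pattern with the feature binding replaced by a generic setter (plain JDBC only, no GeoTools): |
| | | |
| | | import java.sql.Connection; |
| | | import java.sql.PreparedStatement; |
| | | import java.sql.SQLException; |
| | | import java.util.List; |
| | | |
| | | final class BatchInsertSketch { |
| | |     private BatchInsertSketch() {} |
| | | |
| | |     // Inserts all rows in one batch inside a single transaction, rolling back on failure |
| | |     // and restoring the connection's previous auto-commit mode. |
| | |     static void insertBatch(Connection conn, String insertSql, List<Object[]> rows) throws SQLException { |
| | |         boolean autoCommit = conn.getAutoCommit(); |
| | |         conn.setAutoCommit(false); |
| | |         try (PreparedStatement pstmt = conn.prepareStatement(insertSql)) { |
| | |             for (Object[] row : rows) { |
| | |                 for (int i = 0; i < row.length; i++) { |
| | |                     pstmt.setObject(i + 1, row[i]); // stands in for bindFeatureParameters(...) |
| | |                 } |
| | |                 pstmt.addBatch(); |
| | |             } |
| | |             pstmt.executeBatch(); |
| | |             conn.commit(); |
| | |         } catch (SQLException e) { |
| | |             conn.rollback(); |
| | |             throw e; |
| | |         } finally { |
| | |             conn.setAutoCommit(autoCommit); |
| | |         } |
| | |     } |
| | | } |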
| | | |
| | | public JobExecutionContext getExecutionContext() { |
| | | return executionContext; |
| | | } |
| | | |
| | | public void setExecutionContext(JobExecutionContext context) { |
| | | executionContext = context; |
| | | } |
| | | |
| | | /** |
| | | * Closes the feature writer. |
| | | * |
| | | * @throws IOException if an I/O error occurs |
| | | */ |
| | | public void closeFeatureWriter() throws IOException { |
| | | } |
| | | |
| | | protected OracleElementLogger getElementLogger() { |
| | | if (elmLogger == null) { |
| | | elmLogger = new OracleElementLogger(getOracleConnection()); |
| | | elmLogger.setDataPath(this.getDataPath()); |
| | | } |
| | | return elmLogger; |
| | | } |
| | | |
| | | public String getCurrentSchema() { |
| | | return currentSchema; |
| | | } |
| | | |
| | | public void setCurrentSchema(String querySchema) { |
| | | this.currentSchema = querySchema; |
| | | this.schemaChanged = true; |
| | | } |
| | | |
| | | protected Log getLogger() { |
| | | return logger; |
| | | } |
| | | |
| | | public boolean isDropTableMode() { |
| | | return dropTableMode; |
| | | } |
| | | |
| | | public void setDropTableMode(boolean dropTableMode) { |
| | | this.dropTableMode = dropTableMode; |
| | | } |
| | | |
| | | public void createFeatureTypeOccurred(FeatureTypeEvent evt) { |
| | | try { |
| | | createOrClearFeatureDataTable(evt.getFeatureType()); |
| | | } catch (SchemaException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | |
| | | protected void createOrClearFeatureDataTable(SimpleFeatureType featureType) throws SchemaException { |
| | | String featureName = featureType.getTypeName(); |
| | | Connection conn = null; |
| | | if (isExistFeature(featureType)) { |
| | | try { |
| | | conn = getConnection(); |
| | | if (dropTableMode) { |
| | | dropGeometryColumn(conn, getTargetSchema(), featureName, |
| | | (featureType).getGeometryDescriptor().getName().getLocalPart()); |
| | | dropTable(conn, getTargetSchema(), featureName); |
| | | |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | stmt.execute(stmtText); |
| | | JDBCUtils.close(stmt); |
| | | } |
| | | } else { |
| | | deleteTable(conn, getTargetSchema(), featureName); |
| | | } |
| | | } catch (IOException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | logger.warn(e.getMessage(), e); |
| | | } finally { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } |
| | | } else { |
| | | String tempStmt = null; |
| | | try { |
| | | conn = getConnection(); |
| | | ArrayList<String> schemaTexts = createNewSchemaTexts(conn, featureType); |
| | | for (String stmtText : schemaTexts) { |
| | | Statement stmt = conn.createStatement(); |
| | | tempStmt = stmtText; |
| | | stmt.execute(stmtText); |
| | | stmt.close(); |
| | | } |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | } catch (IOException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null); |
| | | logger.warn("RUN--" + tempStmt); |
| | | logger.warn(e.getMessage(), e); |
| | | } catch (SQLException e) { |
| | | JDBCUtils.close(conn, Transaction.AUTO_COMMIT, e); |
| | | logger.warn("RUN--" + tempStmt); |
| | | logger.warn(e.getMessage(), e); |
| | | } |
| | | } |
| | | } |
| | | |
| | | public boolean isSchemaChanged() { |
| | | return schemaChanged; |
| | | } |
| | | } |
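| | | |
| | | A hypothetical end-to-end usage sketch for OracleIncrementPostGISJobContext; the DataStore, schema names and the list of transaction contexts are placeholders supplied by the surrounding Quartz job: |
| | | |
| | | import java.io.IOException; |
| | | import java.util.List; |
| | | |
| | | import org.geotools.data.DataStore; |
| | | |
| | | import com.ximple.eofms.jobs.context.postgis.OracleIncrementPostGISJobContext; |
| | | |
| | | final class IncrementJobSketch { |
| | |     private IncrementJobSketch() {} |
| | | |
| | |     static void run(DataStore pgDS, List<OracleIncrementPostGISJobContext.ElementTransactionContext> txs) |
| | |             throws IOException { |
| | |         OracleIncrementPostGISJobContext ctx = new OracleIncrementPostGISJobContext( |
| | |                 "/tmp/dgnjobs", pgDS, "public", null, false, true); // placeholder paths and flags |
| | |         ctx.setCurrentSchema("SPATIALDB"); // placeholder source schema used for element logging |
| | |         ctx.startTransaction(); |
| | |         for (OracleIncrementPostGISJobContext.ElementTransactionContext tx : txs) { |
| | |             ctx.putFeatureCollection(tx); // dispatches on the STATUS code carried in tx |
| | |         } |
| | |         ctx.commitTransaction(); // flushes accumulated features into PostGIS |
| | |         ctx.closeFeatureWriter(); |
| | |     } |
| | | } |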
| | |
| | | |
| | | public abstract class AbstractDgnToShapefileJobContext extends AbstractDgnFileJobContext { |
| | | public AbstractDgnToShapefileJobContext(String dataPath, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | } |
| | | |
| | | public abstract String getDataOutPath(); |
| | |
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | import java.util.TimeZone; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.data.shapefile.indexed.IndexType; |
| | | import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.filter.TypeCompIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeCompLevelIdDispatchableFilter; |
| | | import com.ximple.eofms.filter.TypeIdDispatchableFilter; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class DummyFeatureConvertShpJobContext extends AbstractDgnToShapefileJobContext { |
| | | static final Log logger = LogFactory.getLog(DummyFeatureConvertShpJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final String SHPOUTPATH = "shpout"; |
| | | |
| | | private String dataOut = null; |
| | |
| | | private boolean withIndex = false; |
| | | |
| | | public DummyFeatureConvertShpJobContext(String dataPath, String filterConfig, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | } |
| | | |
| | | // Check whether the element matches the dispatch filter conditions |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | if (featuresWriterContext.containsKey(featureType.getTypeName())) { |
| | | writer = featuresWriterContext.get(featureType.getTypeName()); |
| | | } else { |
| | | ShapefileDataStore shapefileDataStore; |
| | | boolean existFile = sfile.exists(); |
| | | |
| | | if (!withIndex) { |
| | | shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL(), |
| | | true, Charset.forName("UTF-8")); |
| | | } else { |
| | | shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(), |
| | | null, true, true, IndexType.QIX, Charset.forName("UTF-8")); |
| | | ShapefileDataStore shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL()); |
| | | /* |
| | | if(namespace != null) { |
| | | store.setNamespaceURI(namespace.toString()); |
| | | } |
| | | */ |
| | | shapefileDataStore.setMemoryMapped(true); |
| | | // store.setBufferCachingEnabled(cacheMemoryMaps); |
| | | shapefileDataStore.setCharset(Charset.forName("UTF-8")); |
| | | shapefileDataStore.setTimeZone(TimeZone.getDefault()); |
| | | shapefileDataStore.setIndexed(withIndex); |
| | | shapefileDataStore.setIndexCreationEnabled(withIndex); |
| | | |
| | | if (!existFile) { |
| | | shapefileDataStore.createSchema(featureType); |
| | |
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | import java.util.TimeZone; |
| | | |
| | | import org.apache.commons.digester.Digester; |
| | | import org.apache.commons.digester.xmlrules.DigesterLoader; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.data.shapefile.indexed.IndexType; |
| | | import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | |
| | | public class FeatureDgnConvertShpJobContext extends AbstractDgnToShapefileJobContext { |
| | | |
| | | static final Log logger = LogFactory.getLog(FeatureDgnConvertShpJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | |
| | | static final String SHPOUTPATH = "shpout"; |
| | | |
| | | private String dataOut = null; |
| | |
| | | private boolean withIndex = false; |
| | | |
| | | public FeatureDgnConvertShpJobContext(String dataPath, String filterConfig, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL rulesURL = ElementDispatcher.class.getResource("ElementDispatcherRules.xml"); |
| | | assert rulesURL != null; |
| | | Digester digester = DigesterLoader.createDigester(rulesURL); |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | } |
| | | |
| | | // Check whether the element matches the dispatch filter conditions |
| | | SimpleFeature feature = elementDispatcher.execute(element, isTransformed(), isEPSG3826()); |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | | logger.warn("----Complex Element size=" + complex.size()); |
| | |
| | | if (featuresWriterContext.containsKey(featureType.getTypeName())) { |
| | | writer = featuresWriterContext.get(featureType.getTypeName()); |
| | | } else { |
| | | ShapefileDataStore shapefileDataStore = null; |
| | | boolean existFile = sfile.exists(); |
| | | |
| | | if (!withIndex) { |
| | | shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL(), |
| | | true, Charset.forName("UTF-8")); |
| | | } else { |
| | | shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(), |
| | | null, true, true, IndexType.QIX, Charset.forName("UTF-8")); |
| | | ShapefileDataStore shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL()); |
| | | /* |
| | | if(namespace != null) { |
| | | shapefileDataStore.setNamespaceURI(namespace.toString()); |
| | | } |
| | | */ |
| | | shapefileDataStore.setMemoryMapped(true); |
| | | // shapefileDataStore.setBufferCachingEnabled(cacheMemoryMaps); |
| | | shapefileDataStore.setCharset(Charset.forName("UTF-8")); |
| | | shapefileDataStore.setTimeZone(TimeZone.getDefault()); |
| | | shapefileDataStore.setIndexed(withIndex); |
| | | shapefileDataStore.setIndexCreationEnabled(withIndex); |
| | | |
| | | if (!existFile) { |
| | | shapefileDataStore.createSchema(featureType); |
| | |
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | import java.util.TimeZone; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.data.shapefile.indexed.IndexType; |
| | | import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.io.dgn7.ArcElement; |
| | |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.io.dgn7.TextNodeElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public class GeneralDgnConvertShpJobContext extends AbstractDgnToShapefileJobContext { |
| | | static final Log logger = LogFactory.getLog(GeneralDgnConvertShpJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = new GeometryFactory(); |
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | public static final String SHPOUTPATH = "shpout"; |
| | | |
| | | private String dataOut = null; |
| | |
| | | private PessimisticMapWrapper txFeaturesContext; |
| | | private TreeMap<String, SimpleFeatureType> featureTypes = new TreeMap<String, SimpleFeatureType>(); |
| | | |
| | | private GeometryConverterDecorator convertDecorator = null; |
| | | private String featureBaseName = null; |
| | | private boolean withIndex = false; |
| | | |
| | | public GeneralDgnConvertShpJobContext(String dataPath, boolean profileMode, |
| | | boolean useTransform, boolean useEPSG3826) { |
| | | super(dataPath, profileMode, useTransform, useEPSG3826); |
| | | boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | if (isEPSG3826()) |
| | | convertDecorator = new EPSG3826GeometryConverterDecorator(); |
| | | else |
| | | convertDecorator = new EPSG3825GeometryConverterDecorator(); |
| | | } |
| | | |
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException { |
| | |
| | | if (featuresWriterContext.containsKey(featureType.getTypeName())) { |
| | | writer = featuresWriterContext.get(featureType.getTypeName()); |
| | | } else { |
| | | ShapefileDataStore shapefileDataStore = null; |
| | | boolean existFile = sfile.exists(); |
| | | |
| | | if (!withIndex) { |
| | | shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL(), |
| | | true, Charset.forName("UTF-8")); |
| | | } else { |
| | | shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(), |
| | | null, true, true, IndexType.QIX, Charset.forName("UTF-8")); |
| | | ShapefileDataStore shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL()); |
| | | /* |
| | | if(namespace != null) { |
| | | shapefileDataStore.setNamespaceURI(namespace.toString()); |
| | | } |
| | | */ |
| | | shapefileDataStore.setMemoryMapped(true); |
| | | // shapefileDataStore.setBufferCachingEnabled(cacheMemoryMaps); |
| | | shapefileDataStore.setCharset(Charset.forName("UTF-8")); |
| | | shapefileDataStore.setTimeZone(TimeZone.getDefault()); |
| | | shapefileDataStore.setIndexed(withIndex); |
| | | shapefileDataStore.setIndexCreationEnabled(withIndex); |
| | | |
| | | if (!existFile) { |
| | | shapefileDataStore.createSchema(featureType); |
| | |
| | | |
| | | public SimpleFeature createFeature(SimpleFeatureType featureType, Element element) throws IllegalAttributeException { |
| | | DefaultColorTable colorTable = (DefaultColorTable) DefaultColorTable.getInstance(); |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | if (element instanceof TextElement) { |
| | | TextElement textElement = (TextElement) element; |
| | | convertDecorator.setConverter(textElement); |
| | |
| | | LineStringElement linestring = (LineStringElement) element; |
| | | convertDecorator.setConverter(linestring); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(linestring.getColorIndex()), |
| | | linestring.getWeight(), |
| | | linestring.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof LineElement) { |
| | | LineElement line = (LineElement) element; |
| | | convertDecorator.setConverter(line); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(line.getColorIndex()), |
| | | line.getWeight(), |
| | | line.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof ArcElement) { |
| | | ArcElement arcElement = (ArcElement) element; |
| | |
| | | */ |
| | | convertDecorator.setConverter(arcElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | arcElement.getWeight(), |
| | | arcElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof EllipseElement) { |
| | | EllipseElement arcElement = (EllipseElement) element; |
| | | convertDecorator.setConverter(arcElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(arcElement.getColorIndex()), |
| | | arcElement.getWeight(), |
| | | arcElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } else if (element instanceof ComplexChainElement) { |
| | | ComplexChainElement complexChainElement = (ComplexChainElement) element; |
| | | convertDecorator.setConverter(complexChainElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | if (geom != null) { |
| | | return SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(complexChainElement.getColorIndex()), |
| | | complexChainElement.getWeight(), |
| | | complexChainElement.getLineStyle() |
| | | }, null); |
| | | } |
| | | return null; |
| | | } |
| | | return null; |
| | |
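| | | // Illustrative sketch (not part of the original source): how a SimpleFeature returned by |
| | | // createFeature(featureType, element) above could be appended to an open shapefile |
| | | // FeatureWriter, as the surrounding job contexts do. Class, method, and parameter names |
| | | // here are assumptions; FeatureWriter/SimpleFeature imports are taken from the fragment below. |
| | | class FeatureAppendSketch { |
| | | static void appendFeature(FeatureWriter<SimpleFeatureType, SimpleFeature> writer, |
| | | SimpleFeature feature) throws java.io.IOException { |
| | | if (feature == null) { |
| | | return; // the element had no convertible geometry |
| | | } |
| | | SimpleFeature target = writer.next(); // advance the writer to a new blank record |
| | | target.setAttributes(feature.getAttributes()); // copy geometry and symbology attributes |
| | | writer.write(); // persist the record to the shapefile |
| | | } |
| | | } |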
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | import java.util.List; |
| | | import java.util.TimeZone; |
| | | |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | | import org.apache.commons.transaction.util.CommonsLoggingLogger; |
| | | import org.apache.commons.transaction.util.LoggerFacade; |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.data.shapefile.indexed.IndexType; |
| | | import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; |
| | | import org.geotools.feature.SchemaException; |
| | | |
| | | import com.vividsolutions.jts.geom.Coordinate; |
| | | import com.vividsolutions.jts.geom.Envelope; |
| | | import com.vividsolutions.jts.geom.Geometry; |
| | | import com.vividsolutions.jts.geom.GeometryFactory; |
| | | |
| | | import com.ximple.eofms.util.DefaultColorTable; |
| | | import com.ximple.eofms.util.EPSG3825GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.EPSG3826GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.FeatureTypeBuilderUtil; |
| | | import com.ximple.eofms.util.GeometryConverterDecorator; |
| | | import com.ximple.eofms.util.TPCLIDConverter; |
| | |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import com.ximple.io.dgn7.TextElement; |
| | | import com.ximple.io.dgn7.UserAttributeData; |
| | | import org.geotools.feature.simple.SimpleFeatureBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.geometry.jts.JTSFactoryFinder; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | |
| | | public class IndexDgnConvertShpJobContext extends AbstractDgnToShapefileJobContext { |
| | | static final Log logger = LogFactory.getLog(IndexDgnConvertShpJobContext.class); |
| | | static final LoggerFacade sLogger = new CommonsLoggingLogger(logger); |
| | | static final GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(null); |
| | | public static final String SHPOUTPATH = "shpout"; |
| | | |
| | | protected GeometryConverterDecorator convertDecorator; |
| | | |
| | | private String dataOut = null; |
| | | |
| | |
| | | private int accumulate = 0; |
| | | |
| | | public IndexDgnConvertShpJobContext(String dataPath, boolean profileMode, |
| | | boolean useTransform) { |
| | | super(dataPath, profileMode, useTransform); |
| | | txFeaturesContext = new PessimisticMapWrapper(featuresContext, sLogger); |
| | | if (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) { |
| | | convertDecorator = new EPSG3826GeometryConverterDecorator(); |
| | | } else { |
| | | convertDecorator = new EPSG3825GeometryConverterDecorator(); |
| | | } |
| | | } |
| | | |
| | | public void putFeatureCollection(Element element) throws IllegalAttributeException, SchemaException { |
| | |
| | | ShapefileDataStore shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL(), |
| | | true, Charset.forName("UTF-8")); |
| | | */ |
| | | ShapefileDataStore shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(), |
| | | null, true, true, IndexType.QIX, Charset.forName("UTF-8")); |
| | | /* |
| | | if(namespace != null) { |
| | | shapefileDataStore.setNamespaceURI(namespace.toString()); |
| | | } |
| | | */ |
| | | shapefileDataStore.setMemoryMapped(true); |
| | | // shapefileDataStore.setBufferCachingEnabled(cacheMemoryMaps); |
| | | shapefileDataStore.setCharset(Charset.forName("UTF-8")); |
| | | shapefileDataStore.setTimeZone(TimeZone.getDefault()); |
| | | shapefileDataStore.setIndexed(true); |
| | | shapefileDataStore.setIndexCreationEnabled(true); |
| | | shapefileDataStore.createSchema(featureType); |
| | | writer = shapefileDataStore.getFeatureWriter(featureType.getTypeName(), Transaction.AUTO_COMMIT); |
| | | featuresWriterContext.put(featureType.getTypeName(), writer); |
| | |
| | | |
| | | Envelope extent = TPCLIDConverter.convertTpclIdToEnvelope(tpclid); |
| | | Geometry geom; |
| | | if (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) { |
| | | geom = geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate(extent.getMinX(), extent.getMinY())), |
| | |
| | | TextElement txtElement = (TextElement) element; |
| | | double angle = txtElement.getRotationAngle(); |
| | | angle = BigDecimal.valueOf(angle).setScale(3, RoundingMode.HALF_UP).doubleValue(); |
| | | GeometryConverterDecorator convertDecorator = FeatureTypeBuilderUtil.lookupDefaultGeometryConverter(); |
| | | convertDecorator.setConverter(txtElement); |
| | | Geometry geom = convertDecorator.toGeometry(geometryFactory); |
| | | SimpleFeature feature = SimpleFeatureBuilder.build(featureType, new Object[]{ |
| | | geom, |
| | | colorTable.getColorCode(txtElement.getColorIndex()), |
| | | txtElement.getWeight(), |
| | | txtElement.getLineStyle(), |
| | |
| | | int dy = (i / 2) * TPCLIDConverter.SY600; |
| | | |
| | | Geometry geom; |
| | | if (FeatureTypeBuilderUtil.getDefaultFeatureSRID() == 3826) { |
| | | geom = geometryFactory.createPolygon(geometryFactory.createLinearRing(new Coordinate[] |
| | | { |
| | | TWDDatumConverter.fromTM2ToEPSG3826(new Coordinate( |
| | |
| | | import java.util.HashMap; |
| | | import java.util.Iterator; |
| | | import java.util.Properties; |
| | | import java.util.TimeZone; |
| | | |
| | | import com.vividsolutions.jts.util.Assert; |
| | | import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter; |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import com.ximple.eofms.jobs.OracleElementLogger; |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.util.ElementDigesterUtils; |
| | | import com.ximple.io.dgn7.ComplexElement; |
| | | import com.ximple.io.dgn7.Element; |
| | | import com.ximple.io.dgn7.FrammeAttributeData; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.apache.commons.transaction.memory.PessimisticMapWrapper; |
| | |
| | | import org.geotools.data.FeatureWriter; |
| | | import org.geotools.data.Transaction; |
| | | import org.geotools.data.shapefile.ShapefileDataStore; |
| | | import org.geotools.data.shapefile.indexed.IndexType; |
| | | import org.geotools.data.shapefile.indexed.IndexedShapefileDataStore; |
| | | import org.opengis.feature.IllegalAttributeException; |
| | | import org.opengis.feature.simple.SimpleFeature; |
| | | import org.opengis.feature.simple.SimpleFeatureType; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.xml.sax.SAXException; |
| | | |
| | | |
| | | |
| | | public class OracleConvertShapefilesJobContext extends AbstractOracleJobContext { |
| | |
| | | private boolean withIndex = false; |
| | | |
| | | public OracleConvertShapefilesJobContext(String filterConfig, boolean profileMode, |
| | | boolean useTransform) { |
| | | super(profileMode, useTransform); |
| | | properties = new Properties(); |
| | | _filterConfig = filterConfig; |
| | | elementDispatcher = createElementDispatcher(); |
| | |
| | | |
| | | private ElementDispatcher createElementDispatcher() { |
| | | try { |
| | | URL filterURL = null; |
| | | if (_filterConfig != null) { |
| | | File config = new File(_filterConfig); |
| | |
| | | // filterURL = this.getClass().getResource("/conf/ConvertShpFilterForLevel.xml"); |
| | | } |
| | | assert filterURL != null; |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); |
| | | return (ElementDispatcher) digester.parse(filterURL); |
| | | } catch (UnsupportedEncodingException e) { |
| | | logger.info(e.getMessage(), e); |
| | |
| | | public void putFeatureCollection(Element element) { |
| | | assert elementDispatcher != null; |
| | | // Determine whether the element maps to a feature via the configured filters |
| | | SimpleFeature feature = elementDispatcher.execute(element, getDistId(), isTransformed()); |
| | | if (feature == null) { |
| | | boolean isEmptySize = false; |
| | | FrammeAttributeData linkage = |
| | | AbstractFLinkageDispatchableFilter.getFeatureLinkage(element); |
| | | logger.warn("Unknown Element:" + element.getElementType().toString() + |
| | | ":type=" + element.getType() + ":lv=" + element.getLevelIndex() + ":id=" + |
| | | (linkage == null ? "NULL" : (linkage.getFsc() + "|" + linkage.getComponentID()))); |
| | | (linkage == null ? "NULL" : "FSC=" + (linkage.getFsc() + "|COMPID=" + linkage.getComponentID()))); |
| | | |
| | | if (element instanceof ComplexElement) { |
| | | ComplexElement complex = (ComplexElement) element; |
| | |
| | | if (featuresWriterContext.containsKey(featureType.getTypeName())) { |
| | | writer = featuresWriterContext.get(featureType.getTypeName()); |
| | | } else { |
| | | ShapefileDataStore shapefileDataStore; |
| | | boolean existFile = sfile.exists(); |
| | | |
| | | if (!withIndex) { |
| | | shapefileDataStore = new ShapefileDataStore(sfile.toURI().toURL(), |
| | | true, Charset.forName("UTF-8")); |
| | | } else { |
| | | shapefileDataStore = new IndexedShapefileDataStore(sfile.toURI().toURL(), |
| | | null, true, true, IndexType.QIX, Charset.forName("UTF-8")); |
| | | /* |
| | | if(namespace != null) { |
| | | shapefileDataStore.setNamespaceURI(namespace.toString()); |
| | | } |
| | | */ |
| | | shapefileDataStore.setMemoryMapped(true); |
| | | // shapefileDataStore.setBufferCachingEnabled(cacheMemoryMaps); |
| | | shapefileDataStore.setCharset(Charset.forName("UTF-8")); |
| | | shapefileDataStore.setTimeZone(TimeZone.getDefault()); |
| | | shapefileDataStore.setIndexed(withIndex); |
| | | shapefileDataStore.setIndexCreationEnabled(withIndex); |
| | | |
| | | if (!existFile) { |
| | | shapefileDataStore.createSchema(featureType); |
| | |
| | | * BinConverter |
| | | * User: Ulysses |
| | | * Date: 2007/9/17 |
| | | * Time: 01:13:13 AM |
| | | */ |
| | | public class BinConverter { |
| | | // our table for binhex conversion |
| | |
| | | * Bits |
| | | * User: Ulysses |
| | | * Date: 2007/6/17 |
| | | * Time: 01:16:39 |
| | | */ |
| | | public class Bits { |
| | | // -- Unsafe access -- |
| | |
| | | * ByteArrayCompressor |
| | | * User: Ulysses |
| | | * Date: 2007/6/15 |
| | | * Time: 02:21:00 PM |
| | | * To change this template use File | Settings | File Templates. |
| | | */ |
| | | public final class ByteArrayCompressor { |
New file |
| | |
| | | package com.ximple.eofms.util; |
| | | |
| | | /** |
| | | * Connectivity flow-direction codes used by the network model. |
| | | * <p> |
| | | * A packed short value carries the direction code in its low byte and a weight in its |
| | | * high byte; the static helpers below decode both parts. |
| | | * </p> |
| | | * @author Ulysses |
| | | * @since 1.1.0 |
| | | */ |
| | | public enum ConnectivityDirectionEnum { |
| | | Noneflow((short) 0), |
| | | |
| | | ForwardflowON((short) 0x01), |
| | | |
| | | BackflowON((short) 0x02), |
| | | |
| | | ForwardflowOFF((short) 0x03), |
| | | |
| | | BackflowOFF((short) 0x04), |
| | | |
| | | ForwardFixflowON((short) 0x05), |
| | | |
| | | ForwardFixflowOFF((short) 0x06), |
| | | |
| | | BackFixflowON((short) 0x07), |
| | | |
| | | BackFixflowOFF((short) 0x08), |
| | | |
| | | Breakflow((short) 0x62), |
| | | |
| | | Nondeterminate((short) 0x63), |
| | | |
| | | PhaseAON((short) 0x64), // 100 |
| | | |
| | | PhaseBON((short) 0xC8), // 200 |
| | | |
| | | PhaseCON((short) 0x190); // 400 |
| | | |
| | | private final short value; |
| | | |
| | | ConnectivityDirectionEnum(short value) { |
| | | this.value = value; |
| | | } |
| | | |
| | | public short getValue() { |
| | | return this.value; |
| | | } |
| | | |
| | | public static ConnectivityDirectionEnum convertShort(short val) { |
| | | if (val == 0) |
| | | return Noneflow; |
| | | if (val == 0x1) |
| | | return ForwardflowON; |
| | | if (val == 0x2) |
| | | return BackflowON; |
| | | if (val == 0x3) |
| | | return ForwardflowOFF; |
| | | if (val == 0x4) |
| | | return BackflowOFF; |
| | | if (val == 0x5) |
| | | return ForwardFixflowON; |
| | | if (val == 0x6) |
| | | return ForwardFixflowOFF; |
| | | if (val == 0x7) |
| | | return BackFixflowON; |
| | | if (val == 0x8) |
| | | return BackFixflowOFF; |
| | | if (val == 0x62) |
| | | return Breakflow; |
| | | return Nondeterminate; |
| | | } |
| | | |
| | | public static boolean isOn(short val) { |
| | | short dir = (short) (val & 0x00ff); |
| | | switch (dir) { |
| | | case 0x0: |
| | | case 0x1: |
| | | case 0x2: |
| | | case 0x5: |
| | | case 0x7: |
| | | return true; |
| | | } |
| | | |
| | | return false; |
| | | } |
| | | |
| | | public static boolean isOff(short val) { |
| | | return (!isOn(val)); |
| | | } |
| | | |
| | | public static boolean isForward(short val) { |
| | | short dir = (short) (val & 0x00ff); |
| | | switch (dir) { |
| | | case 0x1: |
| | | case 0x3: |
| | | case 0x5: |
| | | case 0x6: |
| | | return true; |
| | | } |
| | | |
| | | return false; |
| | | } |
| | | |
| | | public static boolean isBackward(short val) { |
| | | short dir = (short) (val & 0x00ff); |
| | | switch (dir) { |
| | | case 0x2: |
| | | case 0x4: |
| | | case 0x7: |
| | | case 0x8: |
| | | return true; |
| | | } |
| | | |
| | | return false; |
| | | } |
| | | |
| | | public static boolean isFixDirection(short val) { |
| | | short dir = (short) (val & 0x00ff); |
| | | switch (dir) { |
| | | case 0x5: |
| | | case 0x6: |
| | | case 0x7: |
| | | case 0x8: |
| | | return true; |
| | | } |
| | | |
| | | return false; |
| | | } |
| | | |
| | | public static boolean isNondeterminate(short val) { |
| | | short dir = (short) (val & 0x00ff); |
| | | return (dir == 0x63); |
| | | } |
| | | |
| | | public static short getWeight(short val) { |
| | | short weight = (short) (val & 0xff00); |
| | | return (short) (weight >>> 8); |
| | | } |
| | | |
| | | @Override |
| | | public String toString() { |
| | | boolean isA = (this.value & PhaseAON.getValue()) != 0; |
| | | boolean isB = (this.value & PhaseBON.getValue()) != 0; |
| | | boolean isC = (this.value & PhaseCON.getValue()) != 0; |
| | | String phase = "" + (isA ? "A" : "") + (isB ? "B" : "") //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$ |
| | | + (isC ? "C" : ""); //$NON-NLS-1$ //$NON-NLS-2$ |
| | | if (phase.length() != 0) |
| | | phase = "[" + phase + "]"; //$NON-NLS-1$ //$NON-NLS-2$ |
| | | |
| | | if (value == 0) |
| | | return "Noneflow" + phase; //$NON-NLS-1$ |
| | | if (value == 0x1) |
| | | return "ForwardflowON" + phase; //$NON-NLS-1$ |
| | | if (value == 0x2) |
| | | return "BackflowON" + phase; //$NON-NLS-1$ |
| | | if (value == 0x3) |
| | | return "ForwardflowOFF" + phase; //$NON-NLS-1$ |
| | | if (value == 0x4) |
| | | return "BackflowOFF" + phase; //$NON-NLS-1$ |
| | | if (value == 0x5) |
| | | return "ForwardFixflowON" + phase; //$NON-NLS-1$ |
| | | if (value == 0x6) |
| | | return "ForwardFixflowOFF" + phase; //$NON-NLS-1$ |
| | | if (value == 0x7) |
| | | return "BackFixflowON" + phase; //$NON-NLS-1$ |
| | | if (value == 0x8) |
| | | return "BackFixflowOFF" + phase; //$NON-NLS-1$ |
| | | if (value == 0x62) |
| | | return "Breakflow" + phase; //$NON-NLS-1$ |
| | | return "Nondeterminate"; //$NON-NLS-1$ |
| | | } |
| | | |
| | | /* |
| | | * |
| | | * enum DirectionType { Noneflow = 0, ForwardflowON = 0x01, BackflowON = |
| | | * 0x02, ForwardflowOFF = 0x03, BackflowOFF = 0x04, ForwardFixflowON = 0x05, |
| | | * ForwardFixflowOFF = 0x06, BackFixflowON = 0x07, BackFixflowOFF = 0x08, |
| | | * Breakflow = 0x62, Nondeterminate = 0x63, PhaseAON = 0x64, // 100 PhaseBON |
| | | * = 0xC8, // 200 PhaseCON = 0x190 // 400 }; |
| | | * |
| | | * enum OperationType { OFF = 0, ON = 0x01, NSTATUS = 0x10 }; |
| | | * |
| | | * |
| | | * enum tagNetworkEdgeFlowType { FLOWUNKNOWN = 0x0000, // unknown FLOWFORWARD |
| | | * = 0x0001, // forward FLOWBACKWARD = 0x0002, // backward FLOWTWOWAY = |
| | | * 0x0003, // two-way FLOWBREAK = 0x00F0, // break FLOWUNDELEGATED = 0x00FF, |
| | | * // undelegated |
| | | * |
| | | * FLOWWT_1 = 0x0100, // A FLOWWT_2 = 0x0200, // B FLOWWT_3 = 0x0400, // C |
| | | * FLOWWT_4 = 0x0800, // unused |
| | | * |
| | | * FLOWON = 0x1000, // "On" FLOWOFF = 0x0000, // "Off" |
| | | * |
| | | * FLOWFIX = 0x2000, // Fix Dir |
| | | * |
| | | * FLOWONMASK = 0xF000, FLOWWTMASK = 0x0F00, FLOWWAYMASK = 0x00FF }; |
| | | * |
| | | * enum tagNetworkEdgeOStatusType { OPSTATUS_OFF = 0x0000, OPSTATUS_ON = |
| | | * 0x0001, BOUNDSTATUS = 0x0010, OPSTATUSMASK = 0x000F }; |
| | | * |
| | | * enum tagPhaseMask { PhaseUnknown = 0x0, PhaseA = 0x1, PhaseB = 0x2, |
| | | * PhaseC = 0x4, PhaseN = 0x8 }; |
| | | * |
| | | * BYTE CNetworkSnapshot::marshalDIRFromDirection(short wFlag) { // BYTE |
| | | * bRet = 0xFF; bool bON; bool bFix; if ((wFlag & CNetworkEdge::FLOWONMASK) |
| | | * == CNetworkEdge::FLOWON) bON = true; else bON = false; if ((wFlag & |
| | | * CNetworkEdge::FLOWONMASK) == CNetworkEdge::FLOWFIX) bFix = true; else |
| | | * bFix = false; |
| | | * |
| | | * switch (wFlag & CNetworkEdge::FLOWWAYMASK) { case |
| | | * CNetworkEdge::FLOWFORWARD: if (bFix) if (bON) return |
| | | * CFacility::ForwardFixflowON ; else return CFacility::ForwardFixflowOFF; |
| | | * else if (bON) return CFacility::ForwardflowON; else return |
| | | * CFacility::ForwardflowOFF; |
| | | * |
| | | * case CNetworkEdge::FLOWBACKWARD: if (bFix) if (bON) return |
| | | * CFacility::BackFixflowON; else return CFacility::BackFixflowOFF; else if |
| | | * (bON) return CFacility::BackflowON; else return CFacility::BackflowOFF; |
| | | * |
| | | * case CNetworkEdge::FLOWUNKNOWN: case CNetworkEdge::FLOWTWOWAY: return |
| | | * CFacility::Noneflow; |
| | | * |
| | | * case CNetworkEdge::FLOWBREAK: return CFacility::Breakflow; |
| | | * |
| | | * case CNetworkEdge::FLOWUNDELEGATED: return CFacility::Nondeterminate; |
| | | * |
| | | * default: break; } |
| | | * |
| | | * return 0xFF; } |
| | | * |
| | | * short CNetworkSnapshot::marshalDirectionFromDIR(WORD bFlag) { short wRet |
| | | * = CNetworkEdge::FLOWUNDELEGATED; switch (bFlag) { case |
| | | * CFacility::Noneflow: wRet = CNetworkEdge::FLOWUNKNOWN; break; case |
| | | * CFacility::ForwardflowON: wRet = CNetworkEdge::FLOWFORWARD | |
| | | * CNetworkEdge::FLOWON; break; case CFacility::BackflowON: wRet = |
| | | * CNetworkEdge::FLOWBACKWARD | CNetworkEdge::FLOWON; break; case |
| | | * CFacility::ForwardflowOFF: wRet = CNetworkEdge::FLOWFORWARD; break; case |
| | | * CFacility::BackflowOFF: wRet = CNetworkEdge::FLOWBACKWARD; break; case |
| | | * CFacility::ForwardFixflowON: wRet = CNetworkEdge::FLOWFORWARD | |
| | | * CNetworkEdge::FLOWON | CNetworkEdge::FLOWFIX; break; case |
| | | * CFacility::ForwardFixflowOFF: wRet = CNetworkEdge::FLOWFORWARD | |
| | | * CNetworkEdge::FLOWFIX; break; case CFacility::BackFixflowON: wRet = |
| | | * CNetworkEdge::FLOWBACKWARD | CNetworkEdge::FLOWON | |
| | | * CNetworkEdge::FLOWFIX; break; case CFacility::BackFixflowOFF: wRet = |
| | | * CNetworkEdge::FLOWBACKWARD | CNetworkEdge::FLOWFIX; break; case |
| | | * CFacility::Breakflow: wRet = CNetworkEdge::FLOWBREAK; break; case |
| | | * CFacility::Nondeterminate: wRet = CNetworkEdge::FLOWUNDELEGATED; break; |
| | | * default: break; } return wRet; } |
| | | * |
| | | * short CNetworkSnapshot::marshalPhaseToMask(short wPhase) { short |
| | | * wMaskValue = 0; |
| | | * |
| | | * if (wPhase == 1) { wMaskValue |= CNetworkEdge::PhaseA | |
| | | * CNetworkEdge::PhaseB | CNetworkEdge::PhaseC; return wMaskValue; } |
| | | * |
| | | * if (wPhase > 16) { wMaskValue = CNetworkEdge::PhaseN; wPhase -= 16; } |
| | | * |
| | | * if (wPhase > 10) { wMaskValue |= CNetworkEdge::PhaseA | |
| | | * CNetworkEdge::PhaseB | CNetworkEdge::PhaseC; return wMaskValue; } |
| | | * |
| | | * switch (wPhase) { case 5: // AB case 6: // BA wMaskValue |= |
| | | * CNetworkEdge::PhaseA | CNetworkEdge::PhaseB; break; case 7: // BC case 8: |
| | | * // CB wMaskValue |= CNetworkEdge::PhaseB | CNetworkEdge::PhaseC; break; |
| | | * case 9: // CA case 10: // AC wMaskValue |= CNetworkEdge::PhaseA | |
| | | * CNetworkEdge::PhaseC; break; case 2: // A wMaskValue |= |
| | | * CNetworkEdge::PhaseA; break; case 3: // B wMaskValue |= |
| | | * CNetworkEdge::PhaseB; break; case 4: // C wMaskValue |= |
| | | * CNetworkEdge::PhaseC; break; default: wMaskValue |= |
| | | * CNetworkEdge::PhaseUnknown; break; } |
| | | * |
| | | * return wMaskValue; } |
| | | */ |
| | | } |
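| | | // Usage sketch (illustrative only, not part of the original source): decoding a packed |
| | | // connectivity value with the helpers above. The sample value 0x0305 is hypothetical: |
| | | // its high byte carries the weight (3) and its low byte the direction code (0x05). |
| | | final class ConnectivityDirectionDemo { |
| | | public static void main(String[] args) { |
| | | short packed = 0x0305; |
| | | ConnectivityDirectionEnum dir = |
| | | ConnectivityDirectionEnum.convertShort((short) (packed & 0x00ff)); |
| | | System.out.println(dir.name()); // ForwardFixflowON |
| | | System.out.println(ConnectivityDirectionEnum.isOn(packed)); // true |
| | | System.out.println(ConnectivityDirectionEnum.isForward(packed)); // true |
| | | System.out.println(ConnectivityDirectionEnum.getWeight(packed)); // 3 |
| | | } |
| | | } |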
New file |
| | |
| | | package com.ximple.eofms.util; |
| | | |
| | | import com.ximple.eofms.filter.ElementDispatcher; |
| | | import org.apache.commons.digester3.Digester; |
| | | import org.apache.commons.digester3.binder.DigesterLoader; |
| | | import org.apache.commons.digester3.xmlrules.FromXmlRulesModule; |
| | | |
| | | public class ElementDigesterUtils { |
| | | protected static Digester digester = null; |
| | | protected static DigesterLoader loader = DigesterLoader.newLoader( new FromXmlRulesModule() { |
| | | @Override |
| | | protected void loadRules() { |
| | | loadXMLRules(ElementDispatcher.class.getResource("ElementDispatcherRules.xml")); |
| | | } |
| | | |
| | | } ); |
| | | |
| | | static { |
| | | digester = loader.newDigester(); |
| | | } |
| | | |
| | | public static Digester getElementDigester() { |
| | | return digester; |
| | | } |
| | | } |
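| | | // Usage sketch (illustrative only, not part of the original source): building an |
| | | // ElementDispatcher from a filter configuration through the shared digester, mirroring |
| | | // createElementDispatcher() in OracleConvertShapefilesJobContext. The config path is hypothetical. |
| | | class ElementDigesterUsageSketch { |
| | | static ElementDispatcher loadDispatcher(java.io.File filterConfig) throws Exception { |
| | | Digester digester = ElementDigesterUtils.getElementDigester(); // preloaded with ElementDispatcherRules.xml |
| | | return (ElementDispatcher) digester.parse(filterConfig.toURI().toURL()); |
| | | } |
| | | } |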
| | |
| | | package com.ximple.eofms.util; |
| | | |
| | | import com.vividsolutions.jts.geom.*; |
| | | import java.util.ArrayList; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.log4j.LogManager; |
| | | import org.apache.log4j.Logger; |
| | | import org.geotools.feature.AttributeTypeBuilder; |
| | | import org.geotools.feature.simple.SimpleFeatureTypeBuilder; |
| | | import org.geotools.referencing.CRS; |
| | | import org.opengis.feature.type.AttributeDescriptor; |
| | | import org.opengis.feature.type.AttributeType; |
| | | import org.opengis.feature.type.GeometryType; |
| | | |
| | | import org.opengis.referencing.FactoryException; |
| | | import org.opengis.referencing.crs.CoordinateReferenceSystem; |
| | | import org.opengis.referencing.operation.MathTransform; |
| | | |
| | | public final class FeatureTypeBuilderUtil { |
| | | protected static GeometryFactory _geomFactory = new GeometryFactory(); |
| | | |
| | | protected static final String DEFAULTNAMESPACE = "http://www.ximple.com.tw/tpc/"; |
| | | |
| | | private static final Logger LOGGER = LogManager.getLogger(FeatureTypeBuilderUtil.class); |
| | | private static boolean notAllowNull = false; |
| | | private static boolean allowNull = true; |
| | | |
| | | public static boolean isNotAllowNull() { |
| | | return notAllowNull; |
| | |
| | | } |
| | | |
| | | protected static GeometryType createGeometryType(AttributeTypeBuilder attrBuilder, |
| | | String name, Class clazz, boolean isNillable, |
| | | CoordinateReferenceSystem crs) { |
| | | return createGeometryType(attrBuilder, name, clazz, isNillable, crs, null); |
| | | } |
| | | |
| | | protected static GeometryType createGeometryType(AttributeTypeBuilder attrBuilder, |
| | | String name, Class clazz, boolean isNillable, |
| | | CoordinateReferenceSystem crs, |
| | | Object defaultValue) { |
| | | attrBuilder.setName(name); |
| | | attrBuilder.setBinding(clazz); |
| | | attrBuilder.setNillable(isNillable); |
| | | if (defaultValue != null) attrBuilder.setDefaultValue(defaultValue); |
| | | if (crs != null) attrBuilder.setCRS(crs); |
| | | return attrBuilder.buildGeometryType(); |
| | | } |
| | | |
| | |
| | | |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "PointType", Point.class, false))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | "geom", createGeometryType(attrBuilder, "PointType", Point.class, false, lookupCRS(defaultSRID)))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("font", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "font", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("just", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "just", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("height", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "height", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("width", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "width", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("angle", Float.class, notAllowNull, 1, (float) 0.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "angle", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("context", String.class, notAllowNull, 254, "")); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "context", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 254, ""))); |
| | | |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | | } |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", LineString.class, true)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "LineType", LineString.class, true))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | "geom", createGeometryType(attrBuilder, "LineType", LineString.class, true, lookupCRS(defaultSRID)))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | | } |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", MultiLineString.class, true)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "MultiLineStringType", MultiLineString.class, true))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | "geom", createGeometryType(attrBuilder, "MultiLineStringType", MultiLineString.class, true, lookupCRS(defaultSRID)))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | | } |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Polygon.class, true)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "PolygonType", Polygon.class, true))); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | "geom", createGeometryType(attrBuilder, "PolygonType", Polygon.class, true, lookupCRS(defaultSRID)))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | | } |
| | | |
| | | public static SimpleFeatureTypeBuilder createNormalArcFeatureTypeBuilder(String featureName) { |
| | | return createNormalLineFeatureTypeBuilder(featureName); |
| | | /* |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilder.newInstance(featureName); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Geometry.class, false)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, false)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, false)); |
| | | return typeBuilder; |
| | | */ |
| | | } |
| | | |
| | | public static SimpleFeatureTypeBuilder createNormalEllipseFeatureTypeBuilder(String featureName) { |
| | | return createNormalPolygonFeatureTypeBuilder(featureName); |
| | | /* |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilder.newInstance(featureName); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Geometry.class, false)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, false)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, false)); |
| | | return typeBuilder; |
| | | */ |
| | | } |
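| | | // Illustrative only (not part of the original source): materializing one of the builders |
| | | // above into a concrete feature type. The feature name "LINE58" is a made-up example and |
| | | // assumes the builder was named inside createNormalLineFeatureTypeBuilder(featureName). |
| | | static org.opengis.feature.simple.SimpleFeatureType buildExampleLineType() { |
| | | SimpleFeatureTypeBuilder builder = createNormalLineFeatureTypeBuilder("LINE58"); |
| | | return builder.buildFeatureType(); // assembles the geometry and symbology descriptors added above |
| | | } |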
| | | |
| | | public static SimpleFeatureTypeBuilder createNormalIndexFeatureTypeBuilder(String featureName) { |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Polygon.class, true)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "PolygonType", Polygon.class, true))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("x1", Double.class, notAllowNull, 1, 0.0)); |
| | | "geom", createGeometryType(attrBuilder, "PolygonType", Polygon.class, true, lookupCRS(defaultSRID)))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "x1", createAttributeType(attrBuilder, "DoubleType", Double.class, notAllowNull, 1, 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("y1", Double.class, notAllowNull, 1, 0.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "y1", createAttributeType(attrBuilder, "DoubleType", Double.class, notAllowNull, 1, 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("x2", Double.class, notAllowNull, 1, 0.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "x2", createAttributeType(attrBuilder, "DoubleType", Double.class, notAllowNull, 1, 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("y2", Double.class, notAllowNull, 1, 0.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "y2", createAttributeType(attrBuilder, "DoubleType", Double.class, notAllowNull, 1, 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("tpclid", String.class, notAllowNull, 12, "")); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "tpclid", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Point.class, true)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "PointType", Point.class, true))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | "geom", createGeometryType(attrBuilder, "PointType", Point.class, true, lookupCRS(defaultSRID)))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("just", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "just", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("height", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "height", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("width", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "width", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("angle", Float.class, notAllowNull, 1, (float) 0.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "angle", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("tpclid", String.class, notAllowNull, 12, "")); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "tpclid", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Point.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "PointType", Point.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("tid", Short.class, notAllowNull)); |
| | | "geom", createGeometryType(attrBuilder, "PointType", Point.class, notAllowNull, lookupCRS(defaultSRID)))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "did", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "tid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("oid", Long.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "oid", createAttributeType(attrBuilder, "LongType", Long.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("cid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "cid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("lid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "lid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("level", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "level", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("just", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "just", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("height", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "height", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("width", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "width", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("angle", Float.class, notAllowNull, 1, (float) 0.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "angle", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("context", String.class, true, 254)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "context", createAttributeType(attrBuilder, "StringType", String.class, true, 254))); |
| | | "context", createAttributeType(attrBuilder, "StringType", String.class, allowNull, 254, ""))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Point.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "PointType", Point.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("tid", Short.class, notAllowNull)); |
| | | "geom", createGeometryType(attrBuilder, "PolygonType", Polygon.class, notAllowNull, lookupCRS(defaultSRID)))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "did", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "tid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("oid", Long.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "oid", createAttributeType(attrBuilder, "LongType", Long.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("cid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "cid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("lid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "lid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("level", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "level", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("just", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "just", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("height", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "height", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("width", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "width", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("angle", Float.class, notAllowNull, 1, (float) 0.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "angle", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symbol", String.class, notAllowNull, 20)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symbol", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 20))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "origin", createGeometryType(attrBuilder, "PointType", Point.class, notAllowNull, lookupCRS(defaultSRID)))); |
| | | |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Polygon.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "PolygonType", Polygon.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("tid", Short.class, notAllowNull)); |
| | | "geom", createGeometryType(attrBuilder, "PolygonType", Polygon.class, notAllowNull, lookupCRS(defaultSRID)))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "did", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "tid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("oid", Long.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "oid", createAttributeType(attrBuilder, "LongType", Long.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("cid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "cid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("lid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "lid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("level", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "level", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | | } |
| | | |
| | | public static SimpleFeatureTypeBuilder createLineFeatureTypeBuilder(String featureName) { |
| | | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", LineString.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "LineStringType", LineString.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("tid", Short.class, notAllowNull)); |
| | | "geom", createGeometryType(attrBuilder, "LineStringType", LineString.class, notAllowNull, lookupCRS(defaultSRID)))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "did", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "tid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("oid", Long.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "oid", createAttributeType(attrBuilder, "LongType", Long.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("cid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "cid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("lid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "lid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("level", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "level", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | | } |
| | | |
| | | public static SimpleFeatureTypeBuilder createMultiLineFeatureTypeBuilder(String featureName) { |
| | | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", MultiLineString.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "MultiLineStringType", MultiLineString.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("tid", Short.class, notAllowNull)); |
| | | "geom", createGeometryType(attrBuilder, "MultiLineStringType", MultiLineString.class, notAllowNull, lookupCRS(defaultSRID)))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "did", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "tid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("oid", Long.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "oid", createAttributeType(attrBuilder, "LongType", Long.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("cid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "cid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("lid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "lid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("level", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "level", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | | } |
| | | |
| | | public static SimpleFeatureTypeBuilder createEllipseFeatureTypeBuilder(String featureName) { |
| | | return createPolygonFeatureTypeBuilder(featureName); |
| | | /* |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilder.newInstance(featureName); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Geometry.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("tid", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("oid", Long.class)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("cid", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("lid", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("level", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull)); |
| | | return typeBuilder; |
| | | */ |
| | | } |
| | | |
| | | public static SimpleFeatureTypeBuilder createArcFeatureTypeBuilder(String featureName) { |
| | | return createLineFeatureTypeBuilder(featureName); |
| | | /* |
| | | FeatureTypeBuilder typeBuilder = FeatureTypeBuilder.newInstance(featureName); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", Geometry.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("tid", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("oid", Long.class)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("cid", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("lid", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("level", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull)); |
| | | typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull)); |
| | | return typeBuilder; |
| | | */ |
| | | } |
| | | |
| | | public static SimpleFeatureTypeBuilder createMultiSymbolFeatureTypeBuilder(String featureName) { |
| | |
| | | AttributeTypeBuilder attrBuilder; |
| | | ArrayList<AttributeDescriptor> attrDescs = new ArrayList<AttributeDescriptor>(); |
| | | |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("geom", MultiPoint.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "geom", createGeometryType(attrBuilder, "MultiPointType", MultiPoint.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("tid", Short.class, notAllowNull)); |
| | | "geom", createGeometryType(attrBuilder, "MultiPointType", MultiPoint.class, notAllowNull, lookupCRS(defaultSRID)))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "did", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "tid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("oid", Long.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "oid", createAttributeType(attrBuilder, "LongType", Long.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("cid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "cid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("lid", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "lid", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("level", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "level", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symcolor", String.class, notAllowNull, 12, "")); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "dyncolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | */ |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symcolor", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 12, ""))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symweight", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symweight", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symstyle", Short.class, notAllowNull, 1, (short) 0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symstyle", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("just", Short.class, notAllowNull)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "just", createAttributeType(attrBuilder, "ShortType", Short.class, notAllowNull, 1, (short) 0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("height", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "height", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("width", Float.class, notAllowNull, 1, (float) 1.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "width", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 1.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("angle", Float.class, notAllowNull, 1, (float) 0.0)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "angle", createAttributeType(attrBuilder, "FloatType", Float.class, notAllowNull, 1, (float) 0.0))); |
| | | // typeBuilder.addType(AttributeTypeFactory.newAttributeType("symbol", String.class, notAllowNull, 20)); |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "symbol", createAttributeType(attrBuilder, "StringType", String.class, notAllowNull, 20))); |
| | | /* |
| | | attrBuilder = new AttributeTypeBuilder(); |
| | | attrDescs.add(attrBuilder.buildDescriptor( |
| | | "fowner", createAttributeType(attrBuilder, "IntegerType", Integer.class, allowNull, 1, -1))); |
| | | */ |
| | | |
| | | typeBuilder.addAll(attrDescs); |
| | | return typeBuilder; |
| | | } |
| | | |
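| | | // Default SRID: EPSG:3826 (TWD97 zone 121); see initializeFactories() below. |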
| | | static int defaultSRID = 3826; |
| | | |
| | | static TreeMap<Integer, CoordinateReferenceSystem> crsMap = null; |
| | | static TreeMap<Integer, GeometryConverterDecorator> geometryConverterMap = null; |
| | | |
| | | static void initializeFactories() { |
| | | try { |
| | | crsMap = new TreeMap<Integer, CoordinateReferenceSystem>(); |
| | | CoordinateReferenceSystem wgs84 = CRS.decode("EPSG:4326"); |
| | | crsMap.put(4326, wgs84); |
| | | CoordinateReferenceSystem epsg3825 = CRS.decode("EPSG:3825"); // TWD97-ZONE 119 |
| | | crsMap.put(3825, epsg3825); |
| | | CoordinateReferenceSystem epsg3826 = CRS.decode("EPSG:3826"); // TWD97-ZONE 121 |
| | | crsMap.put(3826, epsg3826); |
| | | CoordinateReferenceSystem epsg3827 = CRS.decode("EPSG:3827"); // TWD67-ZONE 119 |
| | | crsMap.put(3827, epsg3827); |
| | | CoordinateReferenceSystem epsg3828 = CRS.decode("EPSG:3828"); // TWD67-ZONE 121 |
| | | crsMap.put(3828, epsg3828); |
| | | } catch (FactoryException e) { |
| | | LOGGER.warn(e.getMessage(), e); |
| | | } |
| | | |
| | | geometryConverterMap = new TreeMap<Integer, GeometryConverterDecorator>(); |
| | | geometryConverterMap.put(3825, new EPSG3825GeometryConverterDecorator()); |
| | | geometryConverterMap.put(3826, new EPSG3826GeometryConverterDecorator()); |
| | | } |
| | | |
| | | public static CoordinateReferenceSystem lookupCRS(int SRID) { |
| | | if (crsMap == null) initializeFactories(); |
| | | return crsMap.get(SRID); |
| | | } |
| | | |
| | | public static GeometryConverterDecorator lookupGeometryConverter(int SRID) { |
| | | if (geometryConverterMap == null) initializeFactories(); |
| | | return geometryConverterMap.get(SRID); |
| | | } |
| | | |
| | | public static GeometryConverterDecorator lookupDefaultGeometryConverter() { |
| | | return lookupGeometryConverter(defaultSRID); |
| | | } |
| | | |
| | | public static int getDefaultFeatureSRID() { |
| | | return defaultSRID; |
| | | } |
| | | |
| | | public static void setDefaultFeatureSRID(int SRID) { |
| | | initializeFactories(); |
| | | defaultSRID = SRID; |
| | | } |
| | | |
| | | public static MathTransform getTWD67ToTWD97Zone191Transform() throws FactoryException { |
| | | CoordinateReferenceSystem sourceCRS = lookupCRS(3827); |
| | | CoordinateReferenceSystem targetCRS = lookupCRS(3825); |
| | | |
| | | MathTransform transform = CRS.findMathTransform(sourceCRS, targetCRS, true); |
| | | return transform; |
| | | } |
| | | |
| | | public static MathTransform getTWD67ToTWD97Zone121Transform() throws FactoryException { |
| | | CoordinateReferenceSystem sourceCRS = lookupCRS(3828); |
| | | CoordinateReferenceSystem targetCRS = lookupCRS(3826); |
| | | |
| | | MathTransform transform = CRS.findMathTransform(sourceCRS, targetCRS, true); |
| | | // Geometry targetGeometry = JTS.transform( sourceGeometry, transform); |
| | | return transform; |
| | | } |
| | | } |
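As a usage illustration (a minimal sketch, not part of the repository): assuming the enclosing utility class is FeatureTypeBuilderUtil, as the FeatureTypeBuilderUtilTest.java path below suggests, the static builders and datum-shift helpers above could be exercised roughly as follows with the stock GeoTools/JTS APIs. The feature-type name and coordinates are placeholder values.

    import com.vividsolutions.jts.geom.Coordinate;
    import com.vividsolutions.jts.geom.Geometry;
    import com.vividsolutions.jts.geom.GeometryFactory;
    import org.geotools.geometry.jts.JTS;
    import org.opengis.feature.simple.SimpleFeatureType;
    import org.opengis.referencing.operation.MathTransform;

    public class FeatureTypeBuilderUtilUsage {
        public static void main(String[] args) throws Exception {
            // Build a line feature type; its geometry column is bound to the default
            // CRS (EPSG:3826, TWD97 zone 121) via lookupCRS(defaultSRID).
            // "ELINE" is a placeholder feature name.
            SimpleFeatureType lineType =
                    FeatureTypeBuilderUtil.createLineFeatureTypeBuilder("ELINE").buildFeatureType();
            System.out.println(lineType.getGeometryDescriptor().getCoordinateReferenceSystem().getName());

            // Reproject a TWD67 zone 121 point (EPSG:3828) into TWD97 zone 121 (EPSG:3826),
            // following the commented JTS.transform() hint in the class above.
            MathTransform tx = FeatureTypeBuilderUtil.getTWD67ToTWD97Zone121Transform();
            GeometryFactory gf = new GeometryFactory();
            Geometry twd67 = gf.createPoint(new Coordinate(250000.0, 2700000.0));
            Geometry twd97 = JTS.transform(twd67, tx);
            System.out.println(twd97);
        }
    }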
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/GeomUtil.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/LangUtil.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/TPCLIDConverter.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/TWDDatumConverter.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/XGeosConfigDigesterUtils.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/postjts/JTSShape.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/postjts/JtsGisWrapper.java
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/util/postjts/JtsWrapper.java
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/filter/ElementDispatcherRules.xml
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/filter/FeatureClassificationRules.xml
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/filter/digester-rules-3.0.dtd
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/geoserver/config/DefaultXGeosDataConfigRules.xml
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/geoserver/config/digester-rules-3.0.dtd
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/geoserver/config/xgeosdataconfig-1.xml
xdgnjobs/ximple-spatialjob/src/main/resources/com/ximple/eofms/geoserver/config/xgeosdataconfig.xml
xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/filter/ElementDispatcherTest.java
xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/rest/GeoManagerEncoderTest.java
xdgnjobs/ximple-spatialjob/src/test/java/com/ximple/eofms/util/FeatureTypeBuilderUtilTest.java |