From 30b82d6d6fca0b8a0a5ac5a15b01cc96ba126e02 Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Tue, 19 Mar 2019 14:53:16 -0500 Subject: [PATCH 01/12] Change the FID field to ID --- Converters/Option1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Converters/Option1.py b/Converters/Option1.py index 6462a2d..eea0974 100644 --- a/Converters/Option1.py +++ b/Converters/Option1.py @@ -49,7 +49,7 @@ def translateCDB(cDBRoot,ogrPath, removeShapefile): geoPackageFile = shapefile[0:-3] + "gpkg" if(os.path.getsize(shapefile)>0): #'-t_srs', 'EPSG:4326', '-s_srs', 'EPSG:4326', - subprocess.call([ogrPath,'-f', 'GPKG', geoPackageFile,shapefile]) + subprocess.call([ogrPath, '-f', 'GPKG', geoPackageFile, "-lco", "FID=id", shapefile]) print(shapefile + ' -> ' + geoPackageFile) if(removeShapefile): converter.removeShapeFile(shapefile) From 972c19d063ed756d1eed90e00077b3c5d4fbe4e1 Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Thu, 21 Mar 2019 17:38:05 -0500 Subject: [PATCH 02/12] Fix the FID field to be id Fix floating point numbers to convert correctly --- Converters/Option1d.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Converters/Option1d.py b/Converters/Option1d.py index b3d7ceb..5e1bb16 100644 --- a/Converters/Option1d.py +++ b/Converters/Option1d.py @@ -122,7 +122,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): fieldIndexes[fieldName] = fieldIdx fieldIdx += 1 else: - outLayer = gpkgFile.CreateLayer(outLayerName,srs,geom_type=layerDefinition.GetGeomType()) + outLayer = gpkgFile.CreateLayer(outLayerName,srs,geom_type=layerDefinition.GetGeomType(),options=["FID=id"]) # Add fields for i in range(layerDefinition.GetFieldCount()): @@ -193,7 +193,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): continue fieldTypeCode = ogr.OFTString if(isinstance(fieldValue,float)): - fieldTypeCode = ogr.OFSTFloat32 + fieldTypeCode = ogr.OFTReal if(isinstance(fieldValue,int)): fieldTypeCode = ogr.OFTInteger if(isinstance(fieldValue,bool)): From 5d09607a487aeb582f45dd242171399af3de5d2e Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Fri, 22 Mar 2019 09:53:19 -0500 Subject: [PATCH 03/12] Fix crashes for shapefiles without a class-level dbf file --- Converters/Option1d.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/Converters/Option1d.py b/Converters/Option1d.py index 5e1bb16..17d826e 100644 --- a/Converters/Option1d.py +++ b/Converters/Option1d.py @@ -105,6 +105,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): outLayerName = getOutputLayerName(shpFilename) ogrDriver = ogr.GetDriverByName("GPKG") + #print(" Creating file " + gpkgFilename) gpkgFile = ogrDriver.CreateDataSource(gpkgFilename) if(gpkgFile == None): @@ -217,8 +218,6 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): #Copy the geometry and attributes outFeature.SetFrom(inFeature) - cnamValue = inFeature.GetField('CNAM') - fclassRecord = fClassRecords[cnamValue] outFeature.SetField(fieldIndexes["_DATASET_CODE"], layerComponents['datasetcode']) outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_1"], layerComponents['selector1']) outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_2"], layerComponents['selector2']) @@ -226,11 +225,17 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): outFeature.SetField(fieldIndexes["_UREF"], layerComponents['uref']) outFeature.SetField(fieldIndexes["_RREF"], layerComponents['rref']) - #flatten attributes from the feature class 
attributes table - if(cnamValue in fClassRecords.keys()): - fclassFields = fClassRecords[cnamValue] - for field in fclassFields.keys(): - outFeature.SetField(fieldIndexes[field],fclassFields[field]) + #flatten attributes from the feature class attributes table, if a CNAM attribute exists + try: + cnamValue = inFeature.GetField('CNAM') + fclassRecord = fClassRecords[cnamValue] + if(cnamValue in fClassRecords.keys()): + fclassFields = fClassRecords[cnamValue] + for field in fclassFields.keys(): + outFeature.SetField(fieldIndexes[field],fclassFields[field]) + except: + #print(" File does not contain the CNAM attribute") + cnamValue = "" #write the feature outLayer.CreateFeature(outFeature) @@ -281,13 +286,13 @@ def convertShapeFile(shpFilename, cdbInputDir, cdbOutputDir): #Read all the feature records from the DBF at once (using GDAL) #copyFeaturesFromShapeToGeoPackage(shpFilename,outputGeoPackageFile) - fClassRecords = converter.readDBF(fcAttrName) + #fClassRecords = converter.readDBF(fcAttrName) #Read Featureclass records featureTableName = converter.getFeatureAttrTableName(shpFilename) copyFeaturesFromShapeToGeoPackage(shpFilename,outputGeoPackageFile) #convertSHP(sqliteCon,shpFilename,outputGeoPackageFile, fClassRecords, True) sqliteCon = sqlite3.connect(outputGeoPackageFile) - if(createExtendedAttributesTable(sqliteCon,shpFilename)): + if(0 and createExtendedAttributesTable(sqliteCon,shpFilename)): dbfTableName = getExtendedAttrTableName(shpFilename) RelatedTables.createRTESchema(sqliteCon) relationship = RelatedTables.Relationship() From 83bbee9f24fc5cb7b135917a65c7157ed64d91f6 Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Wed, 27 Mar 2019 16:40:17 -0500 Subject: [PATCH 04/12] Convert logical fields as text, since true/false can be many values and need a better conversion to be a boolean --- Converters/Option1d.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Converters/Option1d.py b/Converters/Option1d.py index 17d826e..eb3a1b0 100644 --- a/Converters/Option1d.py +++ b/Converters/Option1d.py @@ -197,8 +197,9 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): fieldTypeCode = ogr.OFTReal if(isinstance(fieldValue,int)): fieldTypeCode = ogr.OFTInteger - if(isinstance(fieldValue,bool)): - fieldTypeCode = ogr.OFSTBoolean + #DBase logical fields can have multiple values for true and false, best converted as text + #if(isinstance(fieldValue,bool)): + # fieldTypeCode = ogr.OFSTBoolean fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) From 3c1a3aabf9b7111e13dabf9cd575d2a8c88c793b Mon Sep 17 00:00:00 2001 From: ryanfranz <47611904+ryanfranz@users.noreply.github.com> Date: Thu, 4 Apr 2019 22:46:10 -0500 Subject: [PATCH 05/12] Update README.md Added converter procedure --- README.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/README.md b/README.md index 98a650d..ef69cb8 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,26 @@ _use at your own risk_. Please contact kbentley@cognitics.net with any questions, comments, pull requests, etc. 
+## Converter Procedure + +* Software Installations + * Download and install python 3.7 (I think the script was tested with 3.6, but 3.7 has worked good) + * https://www.python.org/ftp/python/3.7.2/python-3.7.2-amd64.exe + * Download and unzip GDAL executable somewhere on disk (if you don’t already have it) + * http://download.gisinternals.com/sdk/downloads/release-1900-x64-gdal-2-4-0-mapserver-7-2-2.zip + * Download and install the GDAL python package + * http://download.gisinternals.com/sdk/downloads/release-1900-x64-gdal-2-4-0-mapserver-7-2-2/GDAL-2.4.0.win-amd64-py3.7.msi + * Download and install the dbfread python library + * https://files.pythonhosted.org/packages/4c/94/51349e43503e30ed7b4ecfe68a8809cdb58f722c0feb79d18b1f1e36fe74/dbfread-2.0.7-py2.py3-none-any.whl + * python -m pip install _pathToDownloadedFileAbove_ + * There is probably easier ways to install these python packages, but our work internet breaks the easier methods that auto download and install +* Running the converter + * Add the GDAL bin directory in the path (first, in case other programs have older versions of GDAL) + * set PATH=_gdalBinDir_;%PATH% + * set GDAL_DATA=_gdalBinDir_\gdal-data +* Run the conversion + * pathToConverters\Option1d.py _PathToInputCDB_ _PathToOutputCDB_ + --- Copyright 2018, US Army Geospatial Center, Leidos Inc., and Cognitics Inc. From 34226b6df27dcfe102bc4851469e187c27558fa5 Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Wed, 17 Apr 2019 16:39:02 -0500 Subject: [PATCH 06/12] Cleanup Option3 and Option4 to work in a similar manner to Option1d (input dir and output dir) Fixed the setting of class-level attributes, fixed boolean and float fields Various cleanups to make these files almost identical --- Converters/Option3.py | 82 +++++++++++++++++--------------- Converters/Option4.py | 106 +++++++++++++++++++++++------------------- 2 files changed, 103 insertions(+), 85 deletions(-) diff --git a/Converters/Option3.py b/Converters/Option3.py index b6246f5..7780844 100644 --- a/Converters/Option3.py +++ b/Converters/Option3.py @@ -38,6 +38,9 @@ if version_num < 2020300: sys.exit('ERROR: Python bindings of GDAL 2.2.3 or later required due to GeoPackage performance issues.') +def cleanPath(path): + cleanPath = path.replace("\\",'/') + return cleanPath def convertTable(gpkgFile, sqliteCon, datasetName, shpFilename, selector, fclassSelector, extAttrSelector): featureCount = 0 @@ -64,9 +67,9 @@ def convertTable(gpkgFile, sqliteCon, datasetName, shpFilename, selector, fclas extendedAttrTableName = base.replace(selector,extAttrSelector) extendedAttrTableName = extendedAttrTableName[:-4] extendedAttrFields = [] - if(os.path.isfile(dbfFilename)): - opendbf = True - extendedAttrFields = convertDBF(sqliteCon,dbfFilename,extendedAttrTableName, 'Feature Extended Attributes') + #if(os.path.isfile(dbfFilename)): + # opendbf = True + # extendedAttrFields = convertDBF(sqliteCon,dbfFilename,extendedAttrTableName, 'Feature Extended Attributes') shpFields = [] featureCount = convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName,fClassRecords) @@ -103,7 +106,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): base,ext = os.path.splitext(filenameOnly) dataSource = ogr.Open(shpFilename) if(dataSource==None): - # print("Unable to open " + shpFilename) + print("Unable to open " + shpFilename) return 0 layer = dataSource.GetLayer(0) if(layer == None): @@ -136,7 +139,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIndexes[fieldName] = fieldIdx fieldIdx 
+= 1 else: - outLayer = gpkgFile.CreateLayer(outLayerName,srs,geom_type=layerDefinition.GetGeomType()) + outLayer = gpkgFile.CreateLayer(outLayerName,srs,geom_type=layerDefinition.GetGeomType(),options=["FID=id"]) # Add fields for i in range(layerDefinition.GetFieldCount()): @@ -200,19 +203,19 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIndexes[fieldName] = fieldIdx fieldIdx += 1 - #create fields for featureClass Attributes - + #Create fields for featureClass Attributes for recordCNAM, row in fClassRecords.items(): for fieldName,fieldValue in row.items(): if(fieldName in convertedFields): continue fieldTypeCode = ogr.OFTString if(isinstance(fieldValue,float)): - fieldTypeCode = ogr.OFSTFloat32 + fieldTypeCode = ogr.OFTReal if(isinstance(fieldValue,int)): fieldTypeCode = ogr.OFTInteger - if(isinstance(fieldValue,bool)): - fieldTypeCode = ogr.OFSTBoolean + #DBase logical fields can have multiple values for true and false, best converted as text + #if(isinstance(fieldValue,bool)): + # fieldTypeCode = ogr.OFSTBoolean fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) @@ -232,34 +235,37 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): #Copy the geometry and attributes outFeature.SetFrom(inFeature) - cnamValue = inFeature.GetField('CNAM') - fclassRecord = fClassRecords[cnamValue] outFeature.SetField(fieldIndexes["_DATASET_CODE"], filenameParts[1]) outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_1"], filenameParts[2]) outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_2"], filenameParts[3]) outFeature.SetField(fieldIndexes["_LOD"], filenameParts[4]) outFeature.SetField(fieldIndexes["_UREF"], filenameParts[5]) outFeature.SetField(fieldIndexes["_RREF"], filenameParts[6]) - - ''' - # set the output features to match the input features - for i in range(layerDefinition.GetFieldCount()): - # Look for CNAM to link to the fClassRecord fields - fieldName = layerDefinition.GetFieldDefn(i).GetNameRef() - if(fieldName in ("_DATASET_CODE","_COMPONENT_SELECTOR_1","_COMPONENT_SELECTOR_2","_LOD","_UREF","_RREF")): - continue - if(fieldName in fclassRecord): - fieldValue = fclassRecord[fieldName] - if((fclassRecord != None) and (fieldName in fclassRecord)): - outFeature.SetField(fieldIndexes[fieldName], fieldValue) - else: - outFeature.SetField(fieldIndexes[fieldName],inFeature.GetField(i)) - ''' + + #flatten attributes from the feature class attributes table, if a CNAM attribute exists + try: + cnamValue = inFeature.GetField('CNAM') + fclassRecord = fClassRecords[cnamValue] + for i in range(layerDefinition.GetFieldCount()): + # Look for CNAM to link to the fClassRecord fields + fieldName = layerDefinition.GetFieldDefn(i).GetNameRef() + if(fieldName in ("_DATASET_CODE","_COMPONENT_SELECTOR_1","_COMPONENT_SELECTOR_2","_LOD","_UREF","_RREF")): + continue + if(fieldName in fclassRecord): + fieldValue = fclassRecord[fieldName] + if((fclassRecord != None) and (fieldName in fclassRecord)): + outFeature.SetField(fieldIndexes[fieldName], fieldValue) + else: + outFeature.SetField(fieldIndexes[fieldName],inFeature.GetField(i)) + except: + #print(" File does not contain the CNAM attribute") + cnamValue = "" + #write the feature outLayer.CreateFeature(outFeature) outFeature = None inFeature = layer.GetNextFeature() - + return featureCount #Return a dictionary of dictionaries @@ -363,10 +369,18 @@ def translateCDB(cDBRoot, outputRootDirectory): selector2 = base[18:22] # strip out the .shp shapename = base[0:-4] + print(" 
Processing file " + shapename) # Create a geotile geopackage fullGpkgPath = subdir + ".gpkg" + print(" Output file " + datasetName + ".gpkg") #Use the same directory structure, but a different root directory. fullGpkgPath = fullGpkgPath.replace(cDBRoot,outputRootDirectory) + + # Make whatever directories we need for the output file. + parentDirectory = os.path.dirname(cleanPath(fullGpkgPath)) + if not os.path.exists(parentDirectory): + os.makedirs(parentDirectory) + gpkgFile = None if(fullGpkgPath in datasourceDict.keys()): gpkgFile = datasourceDict[fullGpkgPath] @@ -385,7 +399,7 @@ def translateCDB(cDBRoot, outputRootDirectory): featureClassAttrTableName = "" extendedAttrTableName = "" dbfFilename = shapefile - print(dbfFilename) + #print(dbfFilename) # If it's a polygon (T005) # T006 Polygon feature class attributes @@ -419,19 +433,13 @@ def translateCDB(cDBRoot, outputRootDirectory): gpkgFile.CommitTransaction() -if(len(sys.argv)!=3 and len(sys.argv)!=2): - print("Usage: Option3.py ") +if(len(sys.argv) != 3): + print("Usage: Option3.py ") print("Example:") print("Option3.py F:\GeoCDB\Option3 F:\GeoCDB\Option3_output") - exit() cDBRoot = sys.argv[1] outputDirectory = sys.argv[2] -sys.path.append(cDBRoot) -if((cDBRoot[-1:]!='\\') and (cDBRoot[-1:]!='/')): - cDBRoot = cDBRoot + '/' -import generateMetaFiles -#generateMetaFiles.generateMetaFiles(cDBRoot) translateCDB(cDBRoot,outputDirectory) \ No newline at end of file diff --git a/Converters/Option4.py b/Converters/Option4.py index f704130..df5d5dc 100644 --- a/Converters/Option4.py +++ b/Converters/Option4.py @@ -38,8 +38,12 @@ if version_num < 2020300: sys.exit('ERROR: Python bindings of GDAL 2.2.3 or later required due to GeoPackage performance issues.') +def cleanPath(path): + cleanPath = path.replace("\\",'/') + return cleanPath + def convertTable(gpkgFile, sqliteCon, datasetName, shpFilename, selector, fclassSelector, extAttrSelector): - featureCount = 0; + featureCount = 0 dbfFilename = shpFilename base = os.path.basename(dbfFilename) featureTableName = base[:-4] @@ -63,9 +67,9 @@ def convertTable(gpkgFile, sqliteCon, datasetName, shpFilename, selector, fclas extendedAttrTableName = base.replace(selector,extAttrSelector) extendedAttrTableName = extendedAttrTableName[:-4] extendedAttrFields = [] - if(os.path.isfile(dbfFilename)): - opendbf = True - extendedAttrFields = convertDBF(sqliteCon,dbfFilename,extendedAttrTableName, 'Feature Extended Attributes') + #if(os.path.isfile(dbfFilename)): + # opendbf = True + # extendedAttrFields = convertDBF(sqliteCon,dbfFilename,extendedAttrTableName, 'Feature Extended Attributes') shpFields = [] featureCount = convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName,fClassRecords) @@ -102,7 +106,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): base,ext = os.path.splitext(filenameOnly) dataSource = ogr.Open(shpFilename) if(dataSource==None): - # print("Unable to open " + shpFilename) + print("Unable to open " + shpFilename) return 0 layer = dataSource.GetLayer(0) if(layer == None): @@ -120,12 +124,13 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): uref = filenameParts[5] rref = filenameParts[6] + #Create the layer if it doesn't already exist. 
outLayerName = datasetName + "_" + componentSelector1 + "_" + componentSelector2 outLayer = gpkgFile.GetLayerByName(outLayerName) - fieldIndexes = {} fieldIdx = 0 + fieldIndexes = {} if(outLayer!=None): outputLayerDefinition = outLayer.GetLayerDefn() for i in range(outputLayerDefinition.GetFieldCount()): @@ -134,7 +139,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIndexes[fieldName] = fieldIdx fieldIdx += 1 else: - outLayer = gpkgFile.CreateLayer(outLayerName,srs,geom_type=layerDefinition.GetGeomType()) + outLayer = gpkgFile.CreateLayer(outLayerName,srs,geom_type=layerDefinition.GetGeomType(),options=["FID=id"]) # Add fields for i in range(layerDefinition.GetFieldCount()): @@ -198,19 +203,19 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIndexes[fieldName] = fieldIdx fieldIdx += 1 - #create fields for featureClass Attributes - + #Create fields for featureClass Attributes for recordCNAM, row in fClassRecords.items(): for fieldName,fieldValue in row.items(): if(fieldName in convertedFields): continue fieldTypeCode = ogr.OFTString if(isinstance(fieldValue,float)): - fieldTypeCode = ogr.OFSTFloat32 + fieldTypeCode = ogr.OFTReal if(isinstance(fieldValue,int)): fieldTypeCode = ogr.OFTInteger - if(isinstance(fieldValue,bool)): - fieldTypeCode = ogr.OFSTBoolean + #DBase logical fields can have multiple values for true and false, best converted as text + #if(isinstance(fieldValue,bool)): + # fieldTypeCode = ogr.OFSTBoolean fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) @@ -229,29 +234,33 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): outFeature = ogr.Feature(layerDefinition) #Copy the geometry and attributes outFeature.SetFrom(inFeature) - cnamValue = inFeature.GetField('CNAM') - fclassRecord = fClassRecords[cnamValue] + outFeature.SetField(fieldIndexes["_DATASET_CODE"], filenameParts[1]) outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_1"], filenameParts[2]) outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_2"], filenameParts[3]) outFeature.SetField(fieldIndexes["_LOD"], filenameParts[4]) outFeature.SetField(fieldIndexes["_UREF"], filenameParts[5]) outFeature.SetField(fieldIndexes["_RREF"], filenameParts[6]) - # set the output features to match the input features - ''' - # set the output features to match the input features - for i in range(layerDefinition.GetFieldCount()): - # Look for CNAM to link to the fClassRecord fields - fieldName = layerDefinition.GetFieldDefn(i).GetNameRef() - if(fieldName in ("_DATASET_CODE","_COMPONENT_SELECTOR_1","_COMPONENT_SELECTOR_2","_LOD","_UREF","_RREF")): - continue - if(fieldName in fclassRecord): - fieldValue = fclassRecord[fieldName] - if((fclassRecord != None) and (fieldName in fclassRecord)): - outFeature.SetField(fieldIndexes[fieldName], fieldValue) - else: - outFeature.SetField(fieldIndexes[fieldName],inFeature.GetField(i)) - ''' + + #flatten attributes from the feature class attributes table, if a CNAM attribute exists + try: + cnamValue = inFeature.GetField('CNAM') + fclassRecord = fClassRecords[cnamValue] + for i in range(layerDefinition.GetFieldCount()): + # Look for CNAM to link to the fClassRecord fields + fieldName = layerDefinition.GetFieldDefn(i).GetNameRef() + if(fieldName in ("_DATASET_CODE","_COMPONENT_SELECTOR_1","_COMPONENT_SELECTOR_2","_LOD","_UREF","_RREF")): + continue + if(fieldName in fclassRecord): + fieldValue = fclassRecord[fieldName] + if((fclassRecord != None) and (fieldName in 
fclassRecord)): + outFeature.SetField(fieldIndexes[fieldName], fieldValue) + else: + outFeature.SetField(fieldIndexes[fieldName],inFeature.GetField(i)) + except: + #print(" File does not contain the CNAM attribute") + cnamValue = "" + #write the feature outLayer.CreateFeature(outFeature) outFeature = None @@ -333,7 +342,7 @@ def convertDBF(sqliteCon,dbfFilename,dbfTableName,tableDescription): cursor.execute("COMMIT TRANSACTION") return convertedFields -def translateCDB(cDBRoot, removeShapefile): +def translateCDB(cDBRoot, outputRootDirectory): sys.path.append(cDBRoot) import shapeindex datasourceDict = {} @@ -360,8 +369,17 @@ def translateCDB(cDBRoot, removeShapefile): selector2 = base[18:22] # strip out the .shp shapename = base[0:-4] + print(" Processing file " + shapename) # Create a geotile geopackage fullGpkgPath = subdir + "/" + datasetName + ".gpkg" + print(" Output file " + datasetName + ".gpkg") + #Use the same directory structure, but a different root directory. + fullGpkgPath = fullGpkgPath.replace(cDBRoot,outputRootDirectory) + + # Make whatever directories we need for the output file. + parentDirectory = os.path.dirname(cleanPath(fullGpkgPath)) + if not os.path.exists(parentDirectory): + os.makedirs(parentDirectory) gpkgFile = None if(fullGpkgPath in datasourceDict.keys()): @@ -392,7 +410,7 @@ def translateCDB(cDBRoot, removeShapefile): featureClassAttrTableName = "" extendedAttrTableName = "" dbfFilename = shapefile - print(dbfFilename) + #print(dbfFilename) # If it's a polygon (T005) # T006 Polygon feature class attributes @@ -420,27 +438,19 @@ def translateCDB(cDBRoot, removeShapefile): # T020 Polygon Figure Extended-level attributes elif(selector2=='T009'): featureCount = convertTable(gpkgFile,sqliteCon,datasetName,dbfFilename,selector2,'T010','T020') + if(featureCount>0): print("Translated " + str(featureCount) + " features.") gpkgFile.CommitTransaction() - if(removeShapefile): - converter.removeShapefile(cDBRoot + shapefile) -if(len(sys.argv)!=3 and len(sys.argv)!=2): - print("Usage: Option4.py [remove-shapefiles]") +if(len(sys.argv) != 3): + print("Usage: Option4.py ") print("Example:") - print("Option4.py F:\GeoCDB\Option4") - print("\n-or-\n") - print("Option4.py F:\GeoCDB\Option4 remove-shapefiles") + print("Option4.py F:\GeoCDB\Option4 F:\GeoCDB\Option4_output") + exit() + + cDBRoot = sys.argv[1] -removeShapefile = False -if((len(sys.argv)==3) and sys.argv[2]=="remove-shapefiles"): - removeShapefile = True - -sys.path.append(cDBRoot) -if((cDBRoot[-1:]!='\\') and (cDBRoot[-1:]!='/')): - cDBRoot = cDBRoot + '/' -import generateMetaFiles -generateMetaFiles.generateMetaFiles(cDBRoot) -translateCDB(cDBRoot,removeShapefile) \ No newline at end of file +outputDirectory = sys.argv[2] +translateCDB(cDBRoot,outputDirectory) \ No newline at end of file From ae78d515d97fbe15dff6d407222526e4c5b8e802 Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Wed, 17 Apr 2019 17:29:53 -0500 Subject: [PATCH 07/12] Fix the building of the file lists --- Converters/Option3.py | 5 +++-- Converters/Option4.py | 6 ++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/Converters/Option3.py b/Converters/Option3.py index 7780844..121a98c 100644 --- a/Converters/Option3.py +++ b/Converters/Option3.py @@ -344,7 +344,8 @@ def convertDBF(sqliteCon,dbfFilename,dbfTableName,tableDescription): def translateCDB(cDBRoot, outputRootDirectory): sys.path.append(cDBRoot) - import shapeindex + import generateMetaFiles + shapeFiles = generateMetaFiles.generateMetaFiles(cDBRoot) datasourceDict = {} 
ogrDriver = ogr.GetDriverByName("GPKG") # Look for the Tiles Directory @@ -352,7 +353,7 @@ def translateCDB(cDBRoot, outputRootDirectory): # For each whole Longitude # Create N45W120.gpkg # Walk the subdirectory below this - for shapefile in shapeindex.shapeFiles: + for shapefile in shapeFiles: fileparts = shapefile.split('\\') subdir = "" for i in range(len(fileparts)-3): diff --git a/Converters/Option4.py b/Converters/Option4.py index df5d5dc..b9f57fc 100644 --- a/Converters/Option4.py +++ b/Converters/Option4.py @@ -344,7 +344,8 @@ def convertDBF(sqliteCon,dbfFilename,dbfTableName,tableDescription): def translateCDB(cDBRoot, outputRootDirectory): sys.path.append(cDBRoot) - import shapeindex + import generateMetaFiles + shapeFiles = generateMetaFiles.generateMetaFiles(cDBRoot) datasourceDict = {} ogrDriver = ogr.GetDriverByName("GPKG") # Look for the Tiles Directory @@ -352,7 +353,7 @@ def translateCDB(cDBRoot, outputRootDirectory): # For each whole Longitude # Create N45W120.gpkg # Walk the subdirectory below this - for shapefile in shapeindex.shapeFiles: + for shapefile in shapeFiles: fileparts = shapefile.split('\\') subdir = "" for i in range(len(fileparts)-4): @@ -443,6 +444,7 @@ def translateCDB(cDBRoot, outputRootDirectory): print("Translated " + str(featureCount) + " features.") gpkgFile.CommitTransaction() + if(len(sys.argv) != 3): print("Usage: Option4.py ") print("Example:") From 1dd7439667037adfb0f371826090b0b87b106781 Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Wed, 24 Apr 2019 17:06:48 -0500 Subject: [PATCH 08/12] Fix the name of the table in the GeoPackage --- Converters/Option1d.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/Converters/Option1d.py b/Converters/Option1d.py index eb3a1b0..1775565 100644 --- a/Converters/Option1d.py +++ b/Converters/Option1d.py @@ -47,15 +47,17 @@ def cleanPath(path): def getOutputLayerName(shpFilename): filenameOnly = os.path.basename(shpFilename) - filenameParts = filenameOnly.split("_") + baseName,ext = os.path.splitext(filenameOnly) + filenameParts = baseName.split("_") datasetCode = filenameParts[1] datasetName = filenameParts[-4] componentSelector1 = filenameParts[2] componentSelector2 = filenameParts[3] lod = filenameParts[4] uref = filenameParts[5] + rref = filenameParts[6] #Create the layer if it doesn't already exist. 
- outLayerName = datasetName + "_" + lod + "_" + componentSelector1 + "_" + componentSelector2 + outLayerName = datasetCode + "_" + componentSelector1 + "_" + componentSelector2 + "_" + lod + "_" + uref + "_" + rref return outLayerName def getFilenameComponents(shpFilename): @@ -105,7 +107,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): outLayerName = getOutputLayerName(shpFilename) ogrDriver = ogr.GetDriverByName("GPKG") - #print(" Creating file " + gpkgFilename) + print(" Creating file " + gpkgFilename) gpkgFile = ogrDriver.CreateDataSource(gpkgFilename) if(gpkgFile == None): From d394d6f6b09abe163c0043e77d4d832c4b6053cf Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Thu, 25 Apr 2019 16:25:55 -0500 Subject: [PATCH 09/12] Modified to store the dataset, component selectors, lod, row and column as integers to facilitate faster SQL queries --- Converters/Option1d.py | 27 +++++++++++++++------------ Converters/Option3.py | 35 ++++++++++++++++++++++------------- Converters/Option4.py | 35 ++++++++++++++++++++++------------- 3 files changed, 59 insertions(+), 38 deletions(-) diff --git a/Converters/Option1d.py b/Converters/Option1d.py index 1775565..3352855 100644 --- a/Converters/Option1d.py +++ b/Converters/Option1d.py @@ -66,17 +66,20 @@ def getFilenameComponents(shpFilename): baseName,ext = os.path.splitext(filenameOnly) filenameParts = baseName.split("_") datasetCode = filenameParts[1] - components['datasetcode'] = datasetCode + components['datasetcode'] = int(datasetCode[1:]) componentSelector1 = filenameParts[2] - components['selector1'] = componentSelector1 + components['selector1'] = int(componentSelector1[1:]) componentSelector2 = filenameParts[3] - components['selector2'] = componentSelector2 + components['selector2'] = int(componentSelector2[1:]) lod = filenameParts[4] - components['lod'] = lod + if (lod[:2] == "LC"): + components['lod'] = -int(lod[2:]) + else: + components['lod'] = int(lod[1:]) uref = filenameParts[5] - components['uref'] = uref + components['uref'] = int(uref[1:]) rref = filenameParts[6] - components['rref'] = rref + components['rref'] = int(rref[1:]) return components @@ -142,7 +145,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): # Add the LOD and UXX fields fieldName = "_DATASET_CODE" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -150,7 +153,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): fieldIdx += 1 fieldName = "_COMPONENT_SELECTOR_1" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -158,7 +161,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): fieldIdx += 1 fieldName = "_COMPONENT_SELECTOR_2" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -166,7 +169,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): fieldIdx += 1 fieldName = "_LOD" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -174,7 +177,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): fieldIdx += 1 fieldName = "_UREF" - fieldTypeCode = 
ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -182,7 +185,7 @@ def copyFeaturesFromShapeToGeoPackage(shpFilename, gpkgFilename): fieldIdx += 1 fieldName = "_RREF" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) diff --git a/Converters/Option3.py b/Converters/Option3.py index 121a98c..4999179 100644 --- a/Converters/Option3.py +++ b/Converters/Option3.py @@ -122,8 +122,17 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): componentSelector2 = filenameParts[3] lod = filenameParts[4] uref = filenameParts[5] - rref = filenameParts[6] + rref = filenameParts[6] + datasetCodeInt = int(datasetCode[1:]) + componentSelector1Int = int(componentSelector1[1:]) + componentSelector2Int = int(componentSelector2[1:]) + if (lod[:2] == "LC"): + lodInt = -int(lod[2:]) + else: + lodInt = int(lod[1:]) + urefInt = int(uref[1:]) + rrefInt = int(rref[1:]) #Create the layer if it doesn't already exist. outLayerName = datasetName + "_" + lod + "_" + componentSelector1 + "_" + componentSelector2 @@ -156,7 +165,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): # Add the LOD and UXX fields fieldName = "_DATASET_CODE" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -164,7 +173,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_COMPONENT_SELECTOR_1" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -172,7 +181,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_COMPONENT_SELECTOR_2" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -180,7 +189,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_LOD" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -188,7 +197,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_UREF" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -196,7 +205,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_RREF" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -235,12 +244,12 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): #Copy the geometry and attributes outFeature.SetFrom(inFeature) - outFeature.SetField(fieldIndexes["_DATASET_CODE"], filenameParts[1]) - outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_1"], filenameParts[2]) - outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_2"], 
filenameParts[3]) - outFeature.SetField(fieldIndexes["_LOD"], filenameParts[4]) - outFeature.SetField(fieldIndexes["_UREF"], filenameParts[5]) - outFeature.SetField(fieldIndexes["_RREF"], filenameParts[6]) + outFeature.SetField(fieldIndexes["_DATASET_CODE"], datasetCodeInt) + outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_1"], componentSelector1Int) + outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_2"], componentSelector2Int) + outFeature.SetField(fieldIndexes["_LOD"], lodInt) + outFeature.SetField(fieldIndexes["_UREF"], urefInt) + outFeature.SetField(fieldIndexes["_RREF"], rrefInt) #flatten attributes from the feature class attributes table, if a CNAM attribute exists try: diff --git a/Converters/Option4.py b/Converters/Option4.py index b9f57fc..37c484c 100644 --- a/Converters/Option4.py +++ b/Converters/Option4.py @@ -122,8 +122,17 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): componentSelector2 = filenameParts[3] lod = filenameParts[4] uref = filenameParts[5] - rref = filenameParts[6] + rref = filenameParts[6] + datasetCodeInt = int(datasetCode[1:]) + componentSelector1Int = int(componentSelector1[1:]) + componentSelector2Int = int(componentSelector2[1:]) + if (lod[:2] == "LC"): + lodInt = -int(lod[2:]) + else: + lodInt = int(lod[1:]) + urefInt = int(uref[1:]) + rrefInt = int(rref[1:]) #Create the layer if it doesn't already exist. outLayerName = datasetName + "_" + componentSelector1 + "_" + componentSelector2 @@ -156,7 +165,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): # Add the LOD and UXX fields fieldName = "_DATASET_CODE" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -164,7 +173,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_COMPONENT_SELECTOR_1" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -172,7 +181,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_COMPONENT_SELECTOR_2" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -180,7 +189,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_LOD" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -188,7 +197,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_UREF" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -196,7 +205,7 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): fieldIdx += 1 fieldName = "_RREF" - fieldTypeCode = ogr.OFTString + fieldTypeCode = ogr.OFTInteger fieldDef = ogr.FieldDefn(fieldName,fieldTypeCode) outLayer.CreateField(fieldDef) convertedFields.append(fieldName) @@ -235,12 +244,12 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): #Copy the geometry and attributes outFeature.SetFrom(inFeature) - 
outFeature.SetField(fieldIndexes["_DATASET_CODE"], filenameParts[1]) - outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_1"], filenameParts[2]) - outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_2"], filenameParts[3]) - outFeature.SetField(fieldIndexes["_LOD"], filenameParts[4]) - outFeature.SetField(fieldIndexes["_UREF"], filenameParts[5]) - outFeature.SetField(fieldIndexes["_RREF"], filenameParts[6]) + outFeature.SetField(fieldIndexes["_DATASET_CODE"], datasetCodeInt) + outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_1"], componentSelector1Int) + outFeature.SetField(fieldIndexes["_COMPONENT_SELECTOR_2"], componentSelector2Int) + outFeature.SetField(fieldIndexes["_LOD"], lodInt) + outFeature.SetField(fieldIndexes["_UREF"], urefInt) + outFeature.SetField(fieldIndexes["_RREF"], rrefInt) #flatten attributes from the feature class attributes table, if a CNAM attribute exists try: From 70074b81ba673bbb9a86d8bdce9a82dc7975b1b6 Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Mon, 29 Apr 2019 11:41:06 -0500 Subject: [PATCH 10/12] Changed to not set the table attributes unless some features exist, so that an empyt class-level attribute file doesn't prevent attributes from being created --- Converters/Option3.py | 4 ++++ Converters/Option4.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/Converters/Option3.py b/Converters/Option3.py index 4999179..370b276 100644 --- a/Converters/Option3.py +++ b/Converters/Option3.py @@ -112,6 +112,10 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): if(layer == None): print("Unable to read layer from " + shpFilename) return 0 + inFeature = layer.GetNextFeature() + if inFeature is None: + # If no features, the class-level attributes don't get created correctly, so don't create the attribute definitions + return 0 layerDefinition = layer.GetLayerDefn() srs = osr.SpatialReference() srs.ImportFromEPSG(4326) diff --git a/Converters/Option4.py b/Converters/Option4.py index 37c484c..087b726 100644 --- a/Converters/Option4.py +++ b/Converters/Option4.py @@ -112,6 +112,10 @@ def convertSHP(sqliteCon,shpFilename,gpkgFile,datasetName, fClassRecords): if(layer == None): print("Unable to read layer from " + shpFilename) return 0 + inFeature = layer.GetNextFeature() + if inFeature is None: + # If no features, the class-level attributes don't get created correctly, so don't create the attribute definitions + return 0 layerDefinition = layer.GetLayerDefn() srs = osr.SpatialReference() srs.ImportFromEPSG(4326) From 8744fc8bba8e8e0d5505bb70cdf07d2a688952d4 Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Mon, 29 Apr 2019 17:51:16 -0500 Subject: [PATCH 11/12] Close the GeoPackage file, so that the spatial indices and extents are populated into the table --- Converters/Option3.py | 5 +++++ Converters/Option4.py | 7 ++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/Converters/Option3.py b/Converters/Option3.py index 370b276..21b5e4a 100644 --- a/Converters/Option3.py +++ b/Converters/Option3.py @@ -446,6 +446,11 @@ def translateCDB(cDBRoot, outputRootDirectory): print("Translated " + str(featureCount) + " features.") gpkgFile.CommitTransaction() + # Close the GeoPackage files + for gpkgPath in datasourceDict.keys(): + datasourceDict[gpkgPath] = None; + + if(len(sys.argv) != 3): print("Usage: Option3.py ") diff --git a/Converters/Option4.py b/Converters/Option4.py index 087b726..6bb3e16 100644 --- a/Converters/Option4.py +++ b/Converters/Option4.py @@ -1,4 +1,4 @@ -''' +''' Copyright 2018, US Army Geospatial Center, 
Leidos Inc., and Cognitics Inc. Developed as a joint work by The Army Geospatial Center, Leidos Inc., @@ -457,6 +457,11 @@ def translateCDB(cDBRoot, outputRootDirectory): print("Translated " + str(featureCount) + " features.") gpkgFile.CommitTransaction() + # Close the GeoPackage files + for gpkgPath in datasourceDict.keys(): + datasourceDict[gpkgPath] = None; + + if(len(sys.argv) != 3): print("Usage: Option4.py ") From 38b74de7339990ba7ef9afbb668ea9bd0f2d642b Mon Sep 17 00:00:00 2001 From: Ryan Franz Date: Tue, 30 Apr 2019 12:07:26 -0500 Subject: [PATCH 12/12] Add creation of a search index for file parts that are not in the table name --- Converters/Option3.py | 20 +++++++++++++++++++- Converters/Option4.py | 20 +++++++++++++++++++- 2 files changed, 38 insertions(+), 2 deletions(-) diff --git a/Converters/Option3.py b/Converters/Option3.py index 21b5e4a..2cd036c 100644 --- a/Converters/Option3.py +++ b/Converters/Option3.py @@ -406,7 +406,8 @@ def translateCDB(cDBRoot, outputRootDirectory): continue gpkgFile.StartTransaction() - sqliteCon = sqlite3.connect(fullGpkgPath) + #sqliteCon = sqlite3.connect(fullGpkgPath) + sqliteCon = None #gpkgFileName = subdir + "\\" + lat + "\\" + lon + "\\" + dataset + ".gpkg" featureTableName = base @@ -446,7 +447,24 @@ def translateCDB(cDBRoot, outputRootDirectory): print("Translated " + str(featureCount) + " features.") gpkgFile.CommitTransaction() + # Get the table names + print("Creating search index tables") + for gpkgPath in datasourceDict.keys(): + sqliteCon = sqlite3.connect(gpkgPath) + sql = "SELECT table_name from gpkg_contents" + fkCursor = sqliteCon.cursor() + fkCursor.execute(sql) + rows = fkCursor.fetchall() + for row in rows: + #print(row[0]) + sql = "CREATE INDEX '" + row[0] + "_idx' on '" + row[0] + "' (_UREF, _RREF)" + fkCursor.execute(sql) + #print(sql); + + + # Close the GeoPackage files + print("Closing GeoPackage files") for gpkgPath in datasourceDict.keys(): datasourceDict[gpkgPath] = None; diff --git a/Converters/Option4.py b/Converters/Option4.py index 6bb3e16..46b42ab 100644 --- a/Converters/Option4.py +++ b/Converters/Option4.py @@ -406,7 +406,8 @@ def translateCDB(cDBRoot, outputRootDirectory): continue gpkgFile.StartTransaction() - sqliteCon = sqlite3.connect(fullGpkgPath) + #sqliteCon = sqlite3.connect(fullGpkgPath) + sqliteCon = None #gpkgFileName = subdir + "\\" + lat + "\\" + lon + "\\" + dataset + ".gpkg" featureTableName = base @@ -457,7 +458,24 @@ def translateCDB(cDBRoot, outputRootDirectory): print("Translated " + str(featureCount) + " features.") gpkgFile.CommitTransaction() + # Get the table names + print("Creating search index tables") + for gpkgPath in datasourceDict.keys(): + sqliteCon = sqlite3.connect(gpkgPath) + sql = "SELECT table_name from gpkg_contents" + fkCursor = sqliteCon.cursor() + fkCursor.execute(sql) + rows = fkCursor.fetchall() + for row in rows: + #print(row[0]) + sql = "CREATE INDEX '" + row[0] + "_idx' on '" + row[0] + "' (_LOD, _UREF, _RREF)" + fkCursor.execute(sql) + #print(sql); + + + # Close the GeoPackage files + print("Closing GeoPackage files") for gpkgPath in datasourceDict.keys(): datasourceDict[gpkgPath] = None;
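
---

Note on using the output: the last two patches store the tile address (`_LOD`, `_UREF`, `_RREF`) as integers and create a `<table>_idx` index over those columns, so a consumer can pull one CDB tile out of a geocell GeoPackage without scanning the whole layer. Below is a minimal query sketch, assuming a GeoPackage produced by Option4.py; the file name `N45W120.gpkg` and the tile address values are hypothetical, while the `gpkg_contents` lookup and the indexed integer columns mirror what the converter writes.

```python
import sqlite3

# Assumed output file from Option4.py (one GeoPackage per geocell directory).
con = sqlite3.connect("N45W120.gpkg")
cur = con.cursor()

# Enumerate the feature tables the converter registered in gpkg_contents,
# the same table the converter itself queries when building the indices.
cur.execute("SELECT table_name FROM gpkg_contents")
tables = [row[0] for row in cur.fetchall()]

# Count the features in one tile (LOD 3, row 5, column 7).  Because the tile
# address is stored as integers and covered by "<table>_idx", SQLite can
# resolve the WHERE clause from the index instead of a full-table scan.
for table in tables:
    cur.execute(
        'SELECT count(*) FROM "{}" WHERE _LOD = ? AND _UREF = ? AND _RREF = ?'.format(table),
        (3, 5, 7),
    )
    print(table, cur.fetchone()[0])

con.close()
```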