diff --git a/BatchProcessBuildingsTo3D.py b/BatchProcessBuildingsTo3D.py
new file mode 100644
index 0000000..480a105
--- /dev/null
+++ b/BatchProcessBuildingsTo3D.py
@@ -0,0 +1,71 @@
+__author__ = 'geof7015 esri'
+
+import arcpy
+import os
+import time
+
+arcpy.env.overwriteOutput = True
+
+# TODO Geof7015 or DJARRARD integrate the following with clipRasterToPolyExtrudeTin gp tool once complete
+
+buildingFootprints = \
+    r"C:\Users\geof7015\PycharmProjects\FeatureExtraction\testData\buildingFootprints\buildings.gdb\Buildings_Subset"
+
+scratchGDB = r'C:\Users\geof7015\PycharmProjects\FeatureExtraction\testData\buildingFootprints\scratch.gdb'
+
+buildings = os.path.join(scratchGDB, "Buildings")
+Field = 'OBJECTID'
+
+arcpy.AddMessage("Copying Building Footprints")
+arcpy.CopyFeatures_management(buildingFootprints, buildings)
+
+arcpy.AddMessage("Adding Fields Required to Procedurally Model 3D Buildings")
+arcpy.AddField_management(buildings, "TEXT", "TEXT", None, None, 12, "Text", "true", "false", None)
+
+buildingList = []
+buildingCursor = arcpy.da.SearchCursor(buildings, [Field])
+
+result = arcpy.GetCount_management(buildings)
+count = int(result.getOutput(0))
+
+arcpy.AddMessage("Building Footprints detected... " + str(count))
+
+# Set the progress bar (progressor)
+arcpy.SetProgressor("step", "Extracting Building Info From LiDAR", 0, count, 1)
+
+for row in buildingCursor:
+    buildingList.append(row[0])
+
+    ######################
+    # Begin GP Tool Here #
+    ######################
+
+    # TODO Geof7015 or DJARRARD integrate clipRasterToPolyExtrudeTin gp tool here:
+
+    arcpy.AddMessage("Processing Building Footprint {0}".format(row[0]))
+    arcpy.CalculateField_management(buildings, "TEXT", "!OBJECTID!", "PYTHON_9.3", None)
+
+    # Update the progressor position
+    arcpy.SetProgressorPosition()
+
+# Wait for 1 second
+time.sleep(1)
+arcpy.AddMessage("\n" + "Complete Feature Extraction of " + str(count) + " Buildings")
+# Wait for 4 seconds
+time.sleep(4)
+arcpy.AddMessage("\n" + "You are now a 3D Jedi!")
+# Wait for 4 seconds
+time.sleep(4)
+arcpy.AddMessage("\n" + "What Are You Waiting For?")
+# Wait for 2 seconds
+time.sleep(2)
+arcpy.AddMessage("\n" + "Apply a CityEngine .rpk in ArcGIS Pro to the Building Footprints")
+# Wait for 2 seconds
+time.sleep(2)
+arcpy.AddMessage("\n" + "Or export them to CityEngine to Design in 3D Context!")
diff --git a/BuildingExtractionTools.tbx b/BuildingExtractionTools.tbx
new file mode 100644
index 0000000..a3fd9e6
Binary files /dev/null and b/BuildingExtractionTools.tbx differ
diff --git a/BuildingFootprintsFromLiDAR.py b/BuildingFootprintsFromLiDAR.py
new file mode 100644
index 0000000..a037593
--- /dev/null
+++ b/BuildingFootprintsFromLiDAR.py
@@ -0,0 +1,178 @@
+__author__ = 'geof7015'
+# This Script Extracts and Cleans Building Footprints from Classified LiDAR
+
+import arcpy
+import os
+from arcpy.sa import *
+
+arcpy.env.overwriteOutput = True
+
+global inLASD
+
+ProductionMode = True  # Set to True for ArcGIS Pro GP Tool Use
+if ProductionMode:
+    inLASD = arcpy.GetParameterAsText(0)
+    sr = arcpy.GetParameterAsText(1)
+    OutputFootprints = arcpy.GetParameterAsText(2)
+    scratchGDB = arcpy.env.scratchGDB
+else:
+    inLASD = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\las.lasd'
+    sr = 
"PROJCS['NAD_1983_HARN_StatePlane_Colorado_North_FIPS_0501_Feet',GEOGCS['GCS_North_American_1983_HARN',DATUM['D_North_American_1983_HARN',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',3000000.000316083],PARAMETER['False_Northing',999999.999996],PARAMETER['Central_Meridian',-105.5],PARAMETER['Standard_Parallel_1',39.71666666666667],PARAMETER['Standard_Parallel_2',40.78333333333333],PARAMETER['Latitude_Of_Origin',39.33333333333334],UNIT['Foot_US',0.3048006096012192]]" + scratchGDB = arcpy.env.scratchGDB + OutputFootprints = os.path.join(scratchGDB, "Footprints") + +######################## +# Set Global Variables # +######################## +global lasList +global ptFileInfoFile +global ptFileInfoList +global ptSpacing +global avgPtSpacing +global mp_list + + +################## +# Define Modules # +################## + +def findLasDatasetStatisticsfilePaths(file): + file_object = open(file, 'r') + lines = file_object.readlines() + file_object.close() + cleanLines = [] + for line in lines: + if len(line) > 1: + path = line.split(",")[0] + if os.path.isabs(path) is True and path not in cleanLines: + cleanLines.append(path) + return cleanLines + + +# Create Lists with LiDAR Statistical Information. Pt Spacing etc... Process only used in other modules. +def obtainLiDARInfo(inLASD, lasList): + if arcpy.Exists(inLASD): + arcpy.AddMessage("Calculating Necessary Statistics for Feature Extraction Process") + lasDatasetStatsText = os.path.join(scratchGDB, "lasDatasetStatsText.txt") + if arcpy.Exists(lasDatasetStatsText): + arcpy.Delete_management(lasDatasetStatsText) + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", + "DECIMAL_POINT") + ptFileInfoFile = os.path.join(scratchGDB, 'ptFileInfoFile') + if arcpy.Exists(ptFileInfoFile): + arcpy.Delete_management(ptFileInfoFile) + arcpy.PointFileInformation_3d(lasList, ptFileInfoFile, "LAS", None, sr, "false", "false", "DECIMAL_POINT", + "false", "false") + + rows = arcpy.SearchCursor(ptFileInfoFile, fields="FileName; Pt_Spacing; Z_Min; Z_Max", + sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max") + # Iterate through the rows in the cursor and store the "FileName; Pt_Spacing; Z_Min; Z_Max" + # "FileName; Pt_Spacing; Z_Min; Z_Max" + ptFileInfoList = [] + PtSpacing = [] + for row in rows: + formattedfields = ("{0}, {1}, {2}, {3}".format( + row.getValue("FileName"), + row.getValue("Pt_Spacing"), + row.getValue("Z_Min"), + row.getValue("Z_Max"))) + ptFileInfoList.append(formattedfields) + ptspacinglist = float("{0}".format( + row.getValue("Pt_Spacing"))) + PtSpacing.append(ptspacinglist) + print(ptFileInfoList) + print(PtSpacing) + avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing)) + print(avgPtSpacing) + if arcpy.Exists(ptFileInfoFile): + arcpy.Delete_management(ptFileInfoFile) + return ptFileInfoFile, ptFileInfoList, PtSpacing, avgPtSpacing + +################ +# Begin Script # +################ + +if arcpy.Exists(inLASD): + lasDatasetStatsText = os.path.join(scratchGDB, "lasDatasetStatsText.txt") + if arcpy.Exists(lasDatasetStatsText): + arcpy.Delete_management(lasDatasetStatsText) + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", "DECIMAL_POINT") + filenames = findLasDatasetStatisticsfilePaths(lasDatasetStatsText) + + if len(filenames) == 0: + arcpy.AddMessage("1 LAS file detected in LASD DATASET") + else: + arcpy.AddMessage("{0} LAS files 
detected in LASD DATASET".format(len(filenames))) + + # Process lasList into Esri GP tool friendly input format + newstr = str(filenames)[1:-1].replace("', ", ";") + lasList = '"' + newstr.replace("'", "") + '"' + + avgPtSpacing = obtainLiDARInfo(inLASD, lasList)[3] + + if arcpy.Exists("BuildingLASD"): + arcpy.Delete_management("BuildingLASD") + arcpy.MakeLasDatasetLayer_management(inLASD, "BuildingLASD", 6, None, "true", "true", "true", "false", None, + "false") + arcpy.AddMessage("Created LASD Layer with Building Class Only") +else: + arcpy.AddError("LASD Dataset Does not exist or is corrupt") + exit() + +arcpy.AddMessage("Converting Point-Cloud to Raster to Extract Footprint Outlines") +BuildingPointStatsRaster = os.path.join(scratchGDB, "PtStatsRaster") +if arcpy.Exists(BuildingPointStatsRaster): + arcpy.Delete_management(BuildingPointStatsRaster) +arcpy.LasPointStatsAsRaster_management("BuildingLASD", BuildingPointStatsRaster, "INTENSITY_RANGE", "CELLSIZE", + avgPtSpacing) +arcpy.AddMessage("Building Outline Raster Created from LASD Dataset") + +BCRaster = os.path.join(scratchGDB, "BCRaster") +if arcpy.Exists(BCRaster): + arcpy.Delete_management(BCRaster) +BCRasterProcess = arcpy.sa.BoundaryClean(BuildingPointStatsRaster, "ASCEND", "true") +BCRasterProcess.save(BCRaster) +arcpy.AddMessage("Filled No-Data Holes in Building Outline Raster") + +ConRaster = os.path.join(scratchGDB, "ConRaster") +if arcpy.Exists(ConRaster): + arcpy.Delete_management(ConRaster) +conRasterProcess = Con(BCRaster, 1, 0, "Value >= 0") +conRasterProcess.save(ConRaster) +arcpy.AddMessage("Converted Building Outline Raster to Raster Value of 1") + +arcpy.AddMessage("Begin Converting Building Footprint Raster to Polygon Outline") +Raster2Poly = os.path.join(scratchGDB, "Raster2Poly") +if arcpy.Exists(Raster2Poly): + arcpy.Delete_management(Raster2Poly) +arcpy.RasterToPolygon_conversion(ConRaster, Raster2Poly, "true", "Value") + +arcpy.AddMessage("Begin Regularizing Building Footprints") +BuildingFootprints = OutputFootprints +arcpy.RegularizeBuildingFootprint_3d(Raster2Poly, BuildingFootprints, "ANY_ANGLE", avgPtSpacing, avgPtSpacing, 0.25, + 1.5, 0.1, 1000000) + + +arcpy.AddMessage("Begin Removing Erroneous Geometry in Building Footprints") +BuildingFootprintsCleaned = OutputFootprints + "Cleaned" +arcpy.EliminatePolygonPart_management(BuildingFootprints, BuildingFootprintsCleaned, "AREA", + "600 SquareFeet", 0, "true") + +arcpy.AddMessage("Extraction Complete. Removing Intermediate Data. DO NOT STOP PROCESS") + +############################ +# Delete Intermediate Data # +############################ + +if arcpy.Exists("BuildingLASD"): + arcpy.Delete_management("BuildingLASD") +if arcpy.Exists(BuildingPointStatsRaster): + arcpy.Delete_management(BuildingPointStatsRaster) +if arcpy.Exists(BCRaster): + arcpy.Delete_management(BCRaster) +if arcpy.Exists(ConRaster): + arcpy.Delete_management(ConRaster) +if arcpy.Exists(Raster2Poly): + arcpy.Delete_management(Raster2Poly) + +arcpy.AddMessage("Intermediate Data Deleted. 
You are Free to Close GP Tool") diff --git a/CleanupUtil.rpk b/CleanupUtil.rpk new file mode 100644 index 0000000..18f3749 Binary files /dev/null and b/CleanupUtil.rpk differ diff --git a/CreateMultipatchBuilding.py b/CreateMultipatchBuilding.py new file mode 100644 index 0000000..0754977 --- /dev/null +++ b/CreateMultipatchBuilding.py @@ -0,0 +1,936 @@ +#------------------------------------------------------------------------------- +# Name: CreateMultipatchBuilding.py +# Purpose: process for extracting LOD2 multipatch building from Classified & +# Unclassifierd LiDAR. +# +# Building Footprints and LiDAR in .las Folder, .las file or +# .las dataset are required as inputs. +# a spatial reference output folder must be designated as well as +# the LiDAR building class codes. +# +# Author: Geoff Taylor, Joe McGlinchy & Dennis Jarrard +# +# Created: 28/09/2015 +# Copyright: (c) Esri 2015 +# Licence: Internal Use Only! +#------------------------------------------------------------------------------- + + +import arcpy +import os +import os.path +import tempfile +import glob +from datetime import datetime + +arcpy.env.overwriteOutput = True +arcpy.CheckOutExtension('spatial') +arcpy.CheckOutExtension('3d') + +inLASD = r'' +inLAS = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\LAS\329.las' +DTMRaster = r'' # C:\workspace\data\testdata\bh12TVK1800084000.img +DSMRaster = r'' # 'C:\workspace\data\testdata\hh12TVK1800084000.img' +buildingFootprints = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\Data.gdb\Building329_small' +sr = "PROJCS['NAD_1983_HARN_StatePlane_Colorado_North_FIPS_0501_Feet',GEOGCS['GCS_North_American_1983_HARN',DATUM['D_North_American_1983_HARN',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',3000000.000316083],PARAMETER['False_Northing',999999.999996],PARAMETER['Central_Meridian',-105.5],PARAMETER['Standard_Parallel_1',39.71666666666667],PARAMETER['Standard_Parallel_2',40.78333333333333],PARAMETER['Latitude_Of_Origin',39.33333333333334],UNIT['Foot_US',0.3048006096012192]]" +outputWS = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\Workspace.gdb' + +#inLASD = r'' +#inLAS = r'E:\3D_City_Data\United States\North Carolina\Charlotte\AIC\LiDAR' +#DTMRaster = r'' # C:\workspace\data\testdata\bh12TVK1800084000.img +#DSMRaster = r'' # 'C:\workspace\data\testdata\hh12TVK1800084000.img' +#buildingFootprints = r'E:\3D_City_Data\United States\North Carolina\Charlotte\AIC\Data.gdb\BuildingFootprints' +#sr = "PROJCS['NAD_1983_StatePlane_North_Carolina_FIPS_3200_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',2000000.002616666],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-79.0],PARAMETER['Standard_Parallel_1',34.33333333333334],PARAMETER['Standard_Parallel_2',36.16666666666666],PARAMETER['Latitude_Of_Origin',33.75],UNIT['Foot_US',0.3048006096012192]]" +#outputWS = r'E:\3D_City_Data\United States\North Carolina\Charlotte\AIC\Workspace1.gdb' + +scratchGDB = arcpy.env.scratchGDB +tempFolder = tempfile.mkdtemp() + +buildingClassCode = 6 +groundClassCode = 2 + +groundReturn = "" +buildingReturn = "Last Return" + +beginOnFeatureNumber = 0 +pointSpacingCorrectionFactor = 0.75 + +# For Point-Cloud to raster process +interpolateBetweenPoints = True 
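For context, these spacing and interpolation settings feed the raster cell-size math used later in this script. A minimal sketch of that relationship (the numbers are hypothetical; pointSpace stands in for the average point spacing that obtainLiDARInfo() computes further down):

    pointSpace = 2.0  # hypothetical average LiDAR point spacing, in map units
    cellSize = pointSpace * pointSpacingCorrectionFactor  # 2.0 * 0.75 = 1.5
    # cellSize is what gets passed as the CELLSIZE argument of
    # arcpy.LasDatasetToRaster_conversion(); a factor below 1.0 bins slightly
    # finer than the raw point spacing, trading NoData holes for roof detail.
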
+rasterExtractionApproach = True + +# For Raster Only Extraction Approach + +#Currently Disconnected... +reduceTesselations = True + +## TODO Check and ensure Con raster in_memory bug is resolved in arcpy python v 3.4 before enabling! +optimizeRaster = True +optimizeRasterFactor = 0.5 + +############### +# Definitions # +############### + + +def createlasdataset(inLAS, sr): + global inLASD + inLASD = os.path.join(tempFolder, "LASDataSet.lasd") + if arcpy.Exists(inLASD): + arcpy.Delete_management(inLASD) + arcpy.CreateLasDataset_management(inLAS, inLASD, False, "", sr, "COMPUTE_STATS") + if arcpy.Exists(inLASD): + arcpy.AddMessage("LASD File Created @ Location: " + inLASD) + return inLASD + # for multiples: return inLASD,output2,output3,etc... + else: + arcpy.AddMessage("Could Not Create LASD DataSet. Check LAS inputs for errors") + + +def findLasDatasetStatisticsfilePaths(file): + file_object = open(file, 'r') + lines = file_object.readlines() + file_object.close() + cleanLines = [] + for line in lines: + if len(line) > 1: + path = line.split(",")[0] + if os.path.isabs(path) is True and path not in cleanLines: + cleanLines.append(path) + return cleanLines + + +# Create Lists with LiDAR Statistical Information. Pt Spacing etc... Process only used in other modules. +def obtainLiDARInfo(inLASD,lasList): + if arcpy.Exists(inLASD): + arcpy.AddMessage("Calculating Necessary Statistics for Feature Extraction Process") + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + if arcpy.Exists(lasDatasetStatsText): + arcpy.Delete_management(lasDatasetStatsText) + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", + "DECIMAL_POINT") + + # TODO DJARRARD obtain a LiDAR file from list and parse the point_spacing to building footprints. + # TODO DJARRARD if multiple LiDAR tiles overlap building footprints then point_spacing = pt_spacing_average + #if recursivelyCreateAndClipRastersFromLasd: + #pass + + # run arcpy.PointFileInfo_3d on the single tile (no recursion) + ptFileInfoFile = os.path.join(outputWS, 'ptFileInfoFile') + if arcpy.Exists(ptFileInfoFile): + arcpy.Delete_management(ptFileInfoFile) + arcpy.PointFileInformation_3d(lasList, ptFileInfoFile, "LAS", None, sr, "false", "false", "DECIMAL_POINT", + "false", "false") + + rows = arcpy.SearchCursor(ptFileInfoFile, + fields="FileName; Pt_Spacing; Z_Min; Z_Max", + sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max") + # Iterate through the rows in the cursor and store the + # "FileName; Pt_Spacing; Z_Min; Z_Max" + ptFileInfoList = [] + PtSpacing = [] + for row in rows: + formattedfields = ("{0}, {1}, {2}, {3}".format( + row.getValue("FileName"), + row.getValue("Pt_Spacing"), + row.getValue("Z_Min"), + row.getValue("Z_Max"))) + ptFileInfoList.append(formattedfields) + ptspacinglist = float("{0}".format(row.getValue("Pt_Spacing"))) + PtSpacing.append(ptspacinglist) + print(ptFileInfoList) + print(PtSpacing) + avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing)) + print(avgPtSpacing) + return ptFileInfoFile, ptFileInfoList, PtSpacing, avgPtSpacing + + +# Omitted in current process (for Surface Rasters Only) +# Raster process will be added for Non-LiDAR DSM version of tool when Con(Raster) GP tool in_memory bug is resolved. 
+def interpolateBetweenLasPts(LrDSM): + # Run raster interpolation algorithm on LiDAR derived rasters if interpolateAdditionalPoints is True & <> Recursive + TimesRaster = os.path.join(tempFolder, "TimesRaster.tif") + if arcpy.Exists(TimesRaster): + arcpy.Delete_management(TimesRaster) + arcpy.Times_3d(LrDSM, 100, TimesRaster) + arcpy.AddMessage("Times Raster Complete") + + IntegerRaster = os.path.join(tempFolder, "IntRaster.tif") + if arcpy.Exists(IntegerRaster): + arcpy.Delete_management(IntegerRaster) + arcpy.Int_3d(TimesRaster, IntegerRaster) + arcpy.AddMessage("Integer Raster Complete") + + BoundaryCleanRaster = os.path.join(tempFolder, "BoundaryClean.tif") + if arcpy.Exists(BoundaryCleanRaster): + arcpy.Delete_management(BoundaryCleanRaster) + BC = arcpy.sa.BoundaryClean(IntegerRaster, "NO_SORT", "true") + BC.save(BoundaryCleanRaster) + arcpy.AddMessage("BoundaryClean Raster Complete") + + FloatRaster = os.path.join(tempFolder, "FloatRaster.tif") + if arcpy.Exists(FloatRaster): + arcpy.Delete_management(FloatRaster) + arcpy.Float_3d(BoundaryCleanRaster, FloatRaster) + arcpy.AddMessage("Float Raster Complete") + + if arcpy.Exists(LrDSM): + arcpy.Delete_management(LrDSM) + arcpy.Divide_3d(FloatRaster, 100, LrDSM) + arcpy.AddMessage("Divide Raster Complete") + return LrDSM + + +# Omitted in current process (for Surface Rasters Only) +# Raster process will be added for Non-LiDAR DSM version of tool when Con(Raster) GP tool in_memory bug is resolved. +def slopedAreaRasters(SlopeRaster, slopedAreasNullRaster): + # TODO Fix Memory Leak 1 + slopedAreasRaster = os.path.join(tempFolder, "slopedAreasRaster.tif") + if arcpy.Exists(slopedAreasRaster): + arcpy.Delete_management(slopedAreasRaster) + slopedAreasRasterProcess = arcpy.sa.Con(SlopeRaster, 1, 0, "VALUE >= 20") + slopedAreasRasterProcess.save(slopedAreasRaster) + # TODO Fix Memory Leak 2 + if arcpy.Exists(slopedAreasNullRaster): + arcpy.Delete_management(slopedAreasNullRaster) + slopedAreasNullRasterProcess = arcpy.sa.SetNull(slopedAreasRaster, 1, "Value = 0") + slopedAreasNullRasterProcess.save(slopedAreasNullRaster) + + arcpy.Delete_management(slopedAreasRaster) + + return slopedAreasNullRaster + + +# Omitted in current process (for Surface Rasters Only) +# Raster process will be added for Non-LiDAR DSM version of tool when Con(Raster) GP tool in_memory bug is resolved. +def reduceTesselationProcess(LrDSM, SlopedAreasPolygonBuffered): + SlopeRaster = os.path.join(tempFolder, "SlopeRaster.tif") + if arcpy.Exists(SlopeRaster): + arcpy.Delete_management(SlopeRaster) + arcpy.Slope_3d(LrDSM, SlopeRaster, "DEGREE", 1) + + slopedAreasNullRaster = os.path.join(tempFolder, "slopedAreasNullRaster.tif") + slopedAreaRasters(SlopeRaster=SlopeRaster, slopedAreasNullRaster=slopedAreasNullRaster) + + SlopedAreasPolygon = os.path.join(tempFolder, "SlopedAreasPolygon.shp") + if arcpy.Exists(SlopedAreasPolygon): + arcpy.Delete_management(SlopedAreasPolygon) + arcpy.RasterToPolygon_conversion(slopedAreasNullRaster, SlopedAreasPolygon, "false", "Value") + + if arcpy.Exists(SlopedAreasPolygonBuffered): + arcpy.Delete_management(SlopedAreasPolygonBuffered) + arcpy.Buffer_analysis(SlopedAreasPolygon, SlopedAreasPolygonBuffered, "2 Feet", "FULL", "ROUND", "ALL", None, "PLANAR") + + arcpy.Delete_management(slopedAreasNullRaster) + + return SlopedAreasPolygonBuffered + + +# Automatically removes Artifacts from point-clouds. 
+# Resolves issues where building sides were triangulated and other geometric flaws +def cleanupArtifacts(single_bldg_pts, single_bldg_pts_cleaned): + + arcpy.Near3D_3d(single_bldg_pts, single_bldg_pts, str(1.4 * pointSpace), "LOCATION", "ANGLE", "DELTA") + + bldgpts = os.path.join("in_memory", "bldgPoints") + arcpy.MakeFeatureLayer_management(single_bldg_pts, bldgpts) + + arcpy.SelectLayerByAttribute_management(bldgpts, "NEW_SELECTION", "NEAR_DELTZ >= -1 Or NEAR_ANG_V <> 0") + + arcpy.CopyFeatures_management(bldgpts, single_bldg_pts_cleaned) + print("Artifacts Removed") + if arcpy.Exists(bldgpts): + arcpy.Delete_management(bldgpts) + return single_bldg_pts_cleaned + + +# assigns nearest point value using planar distance to building footprint points from an input point feature class. +def interpolatePointsToBoundary(input_bldg_points, input_bldg_fp, output_bldg_points_with_border): + + # explode input multipoint FC to single part + # it is understood the input point FC will be multipoint. Need to convert to single part features + #single_bldg_pts = os.path.join(outputWS, "singlepts") + single_bldg_pts = os.path.join("in_memory", "singlepts") + arcpy.MultipartToSinglepart_management(input_bldg_points, single_bldg_pts) + print("created Single Pts") + + # Cleanup Artifacts + # single_bldg_pts_cleaned = os.path.join(outputWS, "single_bldg_pts_cleaned") + single_bldg_pts_cleaned = os.path.join("in_memory", "single_bldg_pts_cleaned") + cleanupArtifacts(single_bldg_pts=single_bldg_pts, single_bldg_pts_cleaned=single_bldg_pts_cleaned) + print("artifacts Cleaned") + + # add geometry attributes + arcpy.AddGeometryAttributes_management(Input_Features=single_bldg_pts_cleaned, Geometry_Properties="POINT_X_Y_Z_M", + Length_Unit="", Area_Unit="", Coordinate_System="") + print("added attr to geometry") + + # process the building footprint + footprintBuffer = os.path.join("in_memory", "footprintBuffer") + arcpy.Buffer_analysis(input_bldg_fp, footprintBuffer, "0.5 Feet", "FULL", "FLAT", "NONE", None, "GEODESIC") + + # convert to line + # bldg_line = os.path.join(outputWS, "bldgline") + bldg_line = os.path.join("in_memory", "bldgline") + arcpy.FeatureToLine_management(in_features=footprintBuffer, out_feature_class=bldg_line, cluster_tolerance=None, + attributes="NO_ATTRIBUTES") + if arcpy.Exists(footprintBuffer): + arcpy.Delete_management(footprintBuffer) + + # Densify + arcpy.Densify_edit(in_features=bldg_line, densification_method="DISTANCE", distance="1 Feet", + max_deviation="0.33 Feet", max_angle="10") + + # convert to points + # bldg_ln_pts = os.path.join(outputWS, "bldglinepts") + bldg_ln_pts = os.path.join("in_memory", "bldglinepts") + arcpy.FeatureVerticesToPoints_management(in_features=bldg_line, out_feature_class=bldg_ln_pts, point_location="ALL") + + # use Near tool to identify point FID from building points to the boundary points + arcpy.Near_analysis(in_features=bldg_ln_pts, near_features=single_bldg_pts_cleaned, search_radius="5 Feet", + location="NO_LOCATION", angle="NO_ANGLE", method="PLANAR") + + # now, grab the NEARI_FID field and assign that feature's z-value to the building footprint point z value + arcpy.AddField_management(bldg_ln_pts, "z_val", "DOUBLE") + tbl_fp = arcpy.da.FeatureClassToNumPyArray(bldg_ln_pts, ["NEAR_FID"]) + tbl_pts = arcpy.da.FeatureClassToNumPyArray(single_bldg_pts_cleaned, ["POINT_Z"]) + + # update the z_val attribute + with arcpy.da.UpdateCursor(bldg_ln_pts, ["z_val"]) as Pointsc: + for i, row in enumerate(Pointsc): + fid = tbl_fp[i][0] + row[0] = 
tbl_pts[fid-1][0] + # print(row[0]) + Pointsc.updateRow(row) + + # convert to 3D and copy + #bldg_ln_pts_z = os.path.join(outputWS, "bldg_ln_pts_Z") + bldg_ln_pts_z = os.path.join("in_memory", "bldg_ln_pts_Z") + arcpy.FeatureTo3DByAttribute_3d(bldg_ln_pts, bldg_ln_pts_z, "z_val") + + # pointsMerged = os.path.join("in_memory", "pointsMerged") + arcpy.Merge_management([bldg_ln_pts_z, single_bldg_pts_cleaned], output_bldg_points_with_border) + + # Remove Intermediate Data + if arcpy.Exists(single_bldg_pts): + arcpy.Delete_management(single_bldg_pts) + if arcpy.Exists(bldg_line): + arcpy.Delete_management(bldg_line) + if arcpy.Exists(bldg_ln_pts): + arcpy.Delete_management(bldg_ln_pts) + if arcpy.Exists(bldg_ln_pts_z): + arcpy.Delete_management(bldg_ln_pts_z) + + return output_bldg_points_with_border + + +def extractMultipatchFromPts(fullextent, row): + try: + arcpy.env.extent = fullextent + # get raster extent + geom = row[0] + print("geom = ", geom) + + # copy the feature temporarily + #tp = os.path.join("in_memory", "tp{0}".format(i)) + tp = os.path.join(outputWS, "tp{0}".format(i)) + #tp = os.path.join("in_memory", "tp{0}".format(i)) + arcpy.CopyFeatures_management(geom, tp) + + extentgeom = arcpy.Describe(tp) + arcpy.env.mask = tp + print("extentgeom = ", extentgeom) + extent = "{0} {1} {2} {3}".format(extentgeom.extent.XMin, extentgeom.extent.YMin, extentgeom.extent.XMax, extentgeom.extent.YMax) + print("extent = ", extent) + arcpy.env.extent = extent + + print("Begin Raster Creation Process") + + DTM = os.path.join("in_memory", "DTM") + # Delete terrain rasters if existing. + if arcpy.Exists(DTM): + arcpy.Delete_management(DTM) + + arcpy.LasDatasetToRaster_conversion("DTMLASD", DTM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created DTM Raster at location: " + DTM) + + arcpy.env.mask = tp + + arcpy.env.extent = extent + + # clip the DTM + print('clipping DTM') + dtmClipRast = os.path.join("in_memory", 'tempDEMclip{0}'.format(i + beginOnFeatureNumber)) + arcpy.Clip_management(DTM, extent, dtmClipRast, tp, "true", "false") + # convert DEM to Int + #dtmClipRastInt = Int(dtmClipRast) + + # add Min Height to Building Footprints + print('determining Minimum Building Elevation') + arcpy.AddField_management(tp, "ID", "SHORT", None, None, None, "ID", "true", "true", None) + arcpy.CalculateField_management(tp, "ID", 1, "PYTHON_9.3", None) + minMaxElevTable = os.path.join("in_memory", "minMaxElevTable") + arcpy.sa.ZonalStatisticsAsTable(tp, "ID", DTM, minMaxElevTable, "true", "MIN_MAX_MEAN") + arcpy.JoinField_management(tp, "ID", minMaxElevTable, "ID", "MIN;MAX") + + if arcpy.Exists(DTM): + arcpy.Delete_management(DTM) + + # then, move building footprints to MIN Z Height + #out_poly3d = os.path.join("in_memory", "out_poly3d") + out_poly3d = os.path.join(outputWS, "out_poly3d_{0}".format(i)) + arcpy.FeatureTo3DByAttribute_3d(tp, out_poly3d, "MIN", "") + + # multipoint = os.path.join("in_memory", "multipoint") + #multipoint = os.path.join("in_memory", "multipoint") + multipoint = os.path.join(outputWS, "multipoint{0}".format(i)) + if arcpy.Exists(multipoint): + arcpy.Delete_management(multipoint) + arcpy.LASToMultipoint_3d(lasList, multipoint, pointSpacing, buildingClassCode, buildingReturn, None, sr, "las", + 1, "false") + print("Las to Multipoint complete") + + roofPoints = os.path.join(outputWS, "roofPoints{0}".format(i)) + if arcpy.Exists(roofPoints): + arcpy.Delete_management(roofPoints) + arcpy.Clip_analysis(multipoint, tp, 
roofPoints, None) + + # Delete Mulipoint shp + #if arcpy.Exists(multipoint): + # arcpy.Delete_management(multipoint) + + # Interpolate Points to Boundary + buildingInsideAndBorderPoints = os.path.join(outputWS, "buildingBorderPoints{0}".format(i)) + interpolatePointsToBoundary(input_bldg_points=roofPoints, input_bldg_fp=tp, + output_bldg_points_with_border=buildingInsideAndBorderPoints) + + if arcpy.Exists(roofPoints): + arcpy.Delete_management(roofPoints) + roofPoints = os.path.join("in_memory", "roofPoints") + + arcpy.Dissolve_management(buildingInsideAndBorderPoints, roofPoints, None, None, "true", "false") + + #if arcpy.Exists(buildingInsideAndBorderPoints): + # arcpy.Delete_management(buildingInsideAndBorderPoints) + + # TODO: Resolve issue where roof-tin will not process + # Check to ensure that paths can have spaces. may be the problem. + roofTin = os.path.join(tempFolder, "roofTin") + arcpy.CreateTin_3d(roofTin, sr, "{0} Shape.Z Mass_Points ".format(roofPoints), "DELAUNAY") + print("roof Tin Created") + + #if arcpy.Exists(roofPoints): + # arcpy.Delete_management(roofPoints) + + # make ground TIN + gnd_feats_tin = "{} Shape.Z Hard_Clip ;".format(out_poly3d) + out_gnd_tin = os.path.join(tempFolder, "gndTin") + arcpy.CreateTin_3d(out_gnd_tin, sr, gnd_feats_tin, "DELAUNAY") + + # extrude polygon between TINs + print('creating Multipatch') + this_MP = os.path.join(outputWS, "bldgMP_{0}".format(i)) + arcpy.ExtrudeBetween_3d(roofTin, out_gnd_tin, out_poly3d, this_MP) + + # add feature name to list + mp_list.append(this_MP) + + # Delete Unnecessary files + #arcpy.Delete_management(tp) + arcpy.Delete_management(minMaxElevTable) + #arcpy.Delete_management(out_poly3d) + arcpy.Delete_management(dtmClipRast) + arcpy.Delete_management(out_gnd_tin) + arcpy.Delete_management(roofTin) + arcpy.Delete_management(out_poly3d) + arcpy.Delete_management(buildingInsideAndBorderPoints) + del row, tp + + print("Multipatch {0} Process complete @ ".format(i + beginOnFeatureNumber), str(datetime.now())) + + + # TODO Geoff7015 Incorporate Cleanup Building CGA from Geof7015 rule into tool here: + ''' every multipatch building must have LiDAR point spacing as a attribute and "Units: feet/meters + will need to update CGA cleanup rules settings with a conditional calculator operation where + it leverages these attributes and changes the cleanupGeometry operations optimally based on input features + final output will be two file geodatabases. 
one with original buildings and other with cleaned.''' + ''' Other Cleanup Utility tools/processes may be required to optimize building faces and roof geometries''' + except: + print("Unable to process feature {0}".format(i + beginOnFeatureNumber)) + print("Multipatch {0} Process failed @ ".format(i + beginOnFeatureNumber), str(datetime.now())) + + +def extractMultipatchRasterToPts(fullextent, row): + try: + arcpy.env.extent = fullextent + # get raster extent + geom = row[0] + print("geom = ", geom) + + # copy the feature temporarily + #tp = os.path.join("in_memory", "tp{0}".format(i)) + tp = os.path.join(outputWS, "tp{0}".format(i)) + #tp = os.path.join("in_memory", "tp{0}".format(i)) + arcpy.CopyFeatures_management(geom, tp) + + extentgeom = arcpy.Describe(tp) + arcpy.env.mask = tp + print("extentgeom = ", extentgeom) + extent = "{0} {1} {2} {3}".format(extentgeom.extent.XMin, extentgeom.extent.YMin, extentgeom.extent.XMax, extentgeom.extent.YMax) + print("extent = ", extent) + arcpy.env.extent = extent + + print("Begin Raster Creation Process") + + DTM = os.path.join("in_memory", "DTM") + # Delete terrain rasters if existing. + if arcpy.Exists(DTM): + arcpy.Delete_management(DTM) + + arcpy.LasDatasetToRaster_conversion("DTMLASD", DTM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created DTM Raster at location: " + DTM) + + # Set mask to building footprint geometry outline + arcpy.env.mask = tp + + # clip the DTM + print('clipping DTM') + dtmClipRast = os.path.join("in_memory", 'tempDEMclip{0}'.format(i + beginOnFeatureNumber)) + arcpy.Clip_management(DTM, extent, dtmClipRast, tp, "true", "false") + # convert DEM to Int + #dtmClipRastInt = Int(dtmClipRast) + + # add Min Height to Building Footprints + print('determining Minimum Building Elevation') + arcpy.AddField_management(tp, "ID", "SHORT", None, None, None, "ID", "true", "true", None) + arcpy.CalculateField_management(tp, "ID", 1, "PYTHON_9.3", None) + minMaxElevTable = os.path.join("in_memory", "minMaxElevTable") + arcpy.sa.ZonalStatisticsAsTable(tp, "ID", DTM, minMaxElevTable, "true", "MIN_MAX_MEAN") + arcpy.JoinField_management(tp, "ID", minMaxElevTable, "ID", "MIN;MAX") + + # Delete the DTM Raster + if arcpy.Exists(DTM): + arcpy.Delete_management(DTM) + + # then, move building footprints to MIN Z Height + #out_poly3d = os.path.join("in_memory", "out_poly3d") + out_poly3d = os.path.join("in_memory", "out_poly3d_{0}".format(i)) + arcpy.FeatureTo3DByAttribute_3d(tp, out_poly3d, "MIN", "") + + # Create DSM Raster + LrDSM = os.path.join("in_memory", "LrDSM{0}".format(i)) + # Delete terrain rasters if existing. 
+ if arcpy.Exists(LrDSM): + arcpy.Delete_management(LrDSM) + + if optimizeRaster: + arcpy.LasDatasetToRaster_conversion("LRDSMLASD", LrDSM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing * optimizeRasterFactor, heightValue) + print("Created LrDSM Raster at location: " + LrDSM) + + interpolateBetweenLasPts(LrDSM) + + if not optimizeRaster: + arcpy.LasDatasetToRaster_conversion("LRDSMLASD", LrDSM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + + #nbr = arcpy.sa.NbrRectangle(3, 3, "CELL") + + # Filter LrDSM PointCloud to remove Artifacts + LrDSMFilter = os.path.join("in_memory", "LrDSMFilter{0}".format(i)) + if arcpy.Exists(LrDSMFilter): + arcpy.Delete_management(LrDSMFilter) + + filterOut = arcpy.sa.Filter(LrDSM, "LOW", "DATA") + filterOut.save(LrDSMFilter) + + if arcpy.Exists(LrDSM): + arcpy.Delete_management(LrDSM) + + # Remove Artifact Pits + fillRaster = os.path.join("in_memory", "fillRaster{0}".format(i)) + if arcpy.Exists(fillRaster): + arcpy.Delete_management(fillRaster) + outputFill = arcpy.sa.Fill(LrDSMFilter, None) + outputFill.save(fillRaster) + + if arcpy.Exists(LrDSMFilter): + arcpy.Delete_management(LrDSMFilter) + + # slopedAreaRasters(SlopeRaster, slopedAreasNullRaster): + + # convert raster to points + print('converting raster to points') + extractedPoints = os.path.join("in_memory", "extractedPoints{0}".format(i)) + if arcpy.Exists(extractedPoints): + arcpy.Delete_management(extractedPoints) + arcpy.RasterToPoint_conversion(fillRaster, extractedPoints, "Value") + + if arcpy.Exists(fillRaster): + arcpy.Delete_management(fillRaster) + + # convert points to 3D + roofPoints = os.path.join("in_memory", "roofPoints{0}".format(i)) + arcpy.FeatureTo3DByAttribute_3d(in_features=extractedPoints, out_feature_class=roofPoints, + height_field="grid_code", to_height_field="") + + if arcpy.Exists(extractedPoints): + arcpy.Delete_management(extractedPoints) + + # Interpolate Points to Boundary + buildingInsideAndBorderPoints = os.path.join("in_memory", "buildingBorderPoints{0}".format(i)) + interpolatePointsToBoundary(input_bldg_points=roofPoints, input_bldg_fp=tp, + output_bldg_points_with_border=buildingInsideAndBorderPoints) + + if arcpy.Exists(roofPoints): + arcpy.Delete_management(roofPoints) + roofPoints = os.path.join("in_memory", "roofPoints{0}".format(i)) + + arcpy.Dissolve_management(buildingInsideAndBorderPoints, roofPoints, None, None, "true", "false") + print("Dissolved Points") + + #if arcpy.Exists(buildingInsideAndBorderPoints): + # arcpy.Delete_management(buildingInsideAndBorderPoints) + + # TODO: Resolve issue where roof-tin will not process + # Check to ensure that paths can have spaces. may be the problem. 
+ roofTin = os.path.join(tempFolder, "roofTin{0}".format(i)) + arcpy.CreateTin_3d(roofTin, sr, "{0} Shape.Z Mass_Points ".format(roofPoints), "DELAUNAY") + print("roof Tin Created") + + #if arcpy.Exists(roofPoints): + # arcpy.Delete_management(roofPoints) + + # make ground TIN + gnd_feats_tin = "{} Shape.Z Hard_Clip ;".format(out_poly3d) + out_gnd_tin = os.path.join(tempFolder, "gndTin") + arcpy.CreateTin_3d(out_gnd_tin, sr, gnd_feats_tin, "DELAUNAY") + + # extrude polygon between TINs + print('creating Multipatch') + this_MP = os.path.join(outputWS, "bldgMP_{0}".format(i)) + arcpy.ExtrudeBetween_3d(roofTin, out_gnd_tin, out_poly3d, this_MP) + + # add feature name to list + mp_list.append(this_MP) + + # Delete Unnecessary files + arcpy.Delete_management(tp) + arcpy.Delete_management(minMaxElevTable) + arcpy.Delete_management(out_poly3d) + arcpy.Delete_management(dtmClipRast) + arcpy.Delete_management(out_gnd_tin) + arcpy.Delete_management(roofTin) + arcpy.Delete_management(out_poly3d) + arcpy.Delete_management(buildingInsideAndBorderPoints) + + print("Multipatch {0} Process complete @ ".format(i + beginOnFeatureNumber), str(datetime.now())) + + + # TODO Geoff7015 Incorporate Cleanup Building CGA from Geof7015 rule into tool here: + ''' every multipatch building must have LiDAR point spacing as a attribute and "Units: feet/meters + will need to update CGA cleanup rules settings with a conditional calculator operation where + it leverages these attributes and changes the cleanupGeometry operations optimally based on input features + final output will be two file geodatabases. one with original buildings and other with cleaned.''' + ''' Other Cleanup Utility tools/processes may be required to optimize building faces and roof geometries''' + except: + print("Unable to process feature {0}".format(i + beginOnFeatureNumber)) + print("Multipatch {0} Process failed @ ".format(i + beginOnFeatureNumber), str(datetime.now())) + +def extractMultipatchFromRaster(fullextent, row): + try: + arcpy.env.extent = fullextent + # get raster extent + geom = row[0] + print("geom = ", geom) + + # copy the feature temporarily + #tp = os.path.join("in_memory", "tp{0}".format(i)) + tp = os.path.join(outputWS, "tp{0}".format(i)) + #tp = os.path.join("in_memory", "tp{0}".format(i)) + arcpy.CopyFeatures_management(geom, tp) + + extentgeom = arcpy.Describe(tp) + arcpy.env.mask = tp + print("extentgeom = ", extentgeom) + extent = "{0} {1} {2} {3}".format(extentgeom.extent.XMin, extentgeom.extent.YMin, extentgeom.extent.XMax, extentgeom.extent.YMax) + print("extent = ", extent) + arcpy.env.extent = extent + + print("Begin Raster Creation Process") + + DTM = DTMRaster + + # Set mask to building footprint geometry outline + arcpy.env.mask = tp + + # clip the DTM + print('clipping DTM') + dtmClipRast = os.path.join("in_memory", 'tempDEMclip{0}'.format(i + beginOnFeatureNumber)) + arcpy.Clip_management(DTM, extent, dtmClipRast, tp, "true", "false") + + # add Min Height to Building Footprints + print('determining Minimum Building Elevation') + arcpy.AddField_management(tp, "ID", "SHORT", None, None, None, "ID", "true", "true", None) + arcpy.CalculateField_management(tp, "ID", 1, "PYTHON_9.3", None) + minMaxElevTable = os.path.join("in_memory", "minMaxElevTable") + arcpy.sa.ZonalStatisticsAsTable(tp, "ID", DTM, minMaxElevTable, "true", "MIN_MAX_MEAN") + arcpy.JoinField_management(tp, "ID", minMaxElevTable, "ID", "MIN;MAX") + + # Delete the DTM Raster + if arcpy.Exists(dtmClipRast): + arcpy.Delete_management(dtmClipRast) + + # 
then, move building footprints to MIN Z Height + #out_poly3d = os.path.join("in_memory", "out_poly3d") + out_poly3d = os.path.join("in_memory", "out_poly3d_{0}".format(i)) + arcpy.FeatureTo3DByAttribute_3d(tp, out_poly3d, "MIN", "") + + # Create DSM Raster + LrDSM = DSMRaster + + # clip the DTM + print('clipping DTM') + LrDSMClipRast = os.path.join("in_memory", 'tempLrDSMclip{0}'.format(i + beginOnFeatureNumber)) + arcpy.Clip_management(LrDSM, extent, dtmClipRast, tp, "true", "false") + + # Filter LrDSM PointCloud to remove Artifacts + LrDSMFilter = os.path.join("in_memory", "LrDSMFilter{0}".format(i)) + if arcpy.Exists(LrDSMFilter): + arcpy.Delete_management(LrDSMFilter) + + filterOut = arcpy.sa.Filter(LrDSMClipRast, "LOW", "DATA") + filterOut.save(LrDSMFilter) + + if arcpy.Exists(LrDSMClipRast): + arcpy.Delete_management(LrDSMClipRast) + + # Remove Artifact Pits + fillRaster = os.path.join("in_memory", "fillRaster{0}".format(i)) + if arcpy.Exists(fillRaster): + arcpy.Delete_management(fillRaster) + outputFill = arcpy.sa.Fill(LrDSMFilter, None) + outputFill.save(fillRaster) + + if arcpy.Exists(LrDSMFilter): + arcpy.Delete_management(LrDSMFilter) + + # slopedAreaRasters(SlopeRaster, slopedAreasNullRaster): + + # convert raster to points + print('converting raster to points') + extractedPoints = os.path.join("in_memory", "extractedPoints{0}".format(i)) + if arcpy.Exists(extractedPoints): + arcpy.Delete_management(extractedPoints) + arcpy.RasterToPoint_conversion(fillRaster, extractedPoints, "Value") + + if arcpy.Exists(fillRaster): + arcpy.Delete_management(fillRaster) + + # convert points to 3D + roofPoints = os.path.join("in_memory", "roofPoints{0}".format(i)) + arcpy.FeatureTo3DByAttribute_3d(in_features=extractedPoints, out_feature_class=roofPoints, + height_field="grid_code", to_height_field="") + + if arcpy.Exists(extractedPoints): + arcpy.Delete_management(extractedPoints) + + # Interpolate Points to Boundary + buildingInsideAndBorderPoints = os.path.join("in_memory", "buildingBorderPoints{0}".format(i)) + interpolatePointsToBoundary(input_bldg_points=roofPoints, input_bldg_fp=tp, + output_bldg_points_with_border=buildingInsideAndBorderPoints) + + if arcpy.Exists(roofPoints): + arcpy.Delete_management(roofPoints) + roofPoints = os.path.join("in_memory", "roofPoints{0}".format(i)) + + arcpy.Dissolve_management(buildingInsideAndBorderPoints, roofPoints, None, None, "true", "false") + print("Dissolved Points") + + #if arcpy.Exists(buildingInsideAndBorderPoints): + # arcpy.Delete_management(buildingInsideAndBorderPoints) + + # TODO: Resolve issue where roof-tin will not process + # Check to ensure that paths can have spaces. may be the problem. 
+ roofTin = os.path.join(tempFolder, "roofTin{0}".format(i)) + arcpy.CreateTin_3d(roofTin, sr, "{0} Shape.Z Mass_Points ".format(roofPoints), "DELAUNAY") + print("roof Tin Created") + + #if arcpy.Exists(roofPoints): + # arcpy.Delete_management(roofPoints) + + # make ground TIN + gnd_feats_tin = "{} Shape.Z Hard_Clip ;".format(out_poly3d) + out_gnd_tin = os.path.join(tempFolder, "gndTin") + arcpy.CreateTin_3d(out_gnd_tin, sr, gnd_feats_tin, "DELAUNAY") + + # extrude polygon between TINs + print('creating Multipatch') + this_MP = os.path.join(outputWS, "bldgMP_{0}".format(i)) + arcpy.ExtrudeBetween_3d(roofTin, out_gnd_tin, out_poly3d, this_MP) + + # add feature name to list + mp_list.append(this_MP) + + # Delete Unnecessary files + arcpy.Delete_management(tp) + arcpy.Delete_management(minMaxElevTable) + arcpy.Delete_management(out_poly3d) + arcpy.Delete_management(dtmClipRast) + arcpy.Delete_management(out_gnd_tin) + arcpy.Delete_management(roofTin) + arcpy.Delete_management(out_poly3d) + arcpy.Delete_management(buildingInsideAndBorderPoints) + + print("Multipatch {0} Process complete @ ".format(i + beginOnFeatureNumber), str(datetime.now())) + + + # TODO Geoff7015 Incorporate Cleanup Building CGA from Geof7015 rule into tool here: + ''' every multipatch building must have LiDAR point spacing as a attribute and "Units: feet/meters + will need to update CGA cleanup rules settings with a conditional calculator operation where + it leverages these attributes and changes the cleanupGeometry operations optimally based on input features + final output will be two file geodatabases. one with original buildings and other with cleaned.''' + ''' Other Cleanup Utility tools/processes may be required to optimize building faces and roof geometries''' + except: + print("Unable to process feature {0}".format(i + beginOnFeatureNumber)) + print("Multipatch {0} Process failed @ ".format(i + beginOnFeatureNumber), str(datetime.now())) +############## +# Begin Code # +############## +print("Starting Process at ", str(datetime.now())) +# If LiDAR Input is a LASD DataSet then count number of LAS files in LAS Dataset and List LAS files as input 4 GP tools +if arcpy.Exists(inLASD): + arcpy.AddMessage("detected LASD Dataset as input: " + inLASD) + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", "DECIMAL_POINT") + filenames = findLasDatasetStatisticsfilePaths(lasDatasetStatsText) + + if len(filenames) == 0: + arcpy.AddMessage("1 LAS file detected in LASD DATASET") + else: + arcpy.AddMessage("{0} LAS files detected in LASD DATASET".format(len(filenames))) + + # Process lasList into Esri GP tool friendly input format + newstr = str(filenames)[1:-1].replace("', ", ";") + lasList = '"' + newstr.replace("'", "") + '"' + +# If the LiDAR Input is a a single LAS then return 1 of LAS files and format file to string for GP tools input. +if inLAS.lower().endswith('.las') and ";" not in inLAS: + arcpy.AddMessage("1 LAS file detected") + lasList = '"' + inLAS + '"' + +# If the LiDAR Input is a string of LAS files then count number of LAS files and create List LAS files input 4 GP tools. +if inLAS.lower().endswith('.las') and ";" in inLAS: + numberLASFiles = (inLAS.count(';')+1) + arcpy.AddMessage(str(numberLASFiles) + " LAS file detected") + lasList = '"' + inLAS + '"' + +# If the LiDAR Input is a LAS Directory then count number of LAS files and create List of LAS files as input 4 GP tools. 
+if os.path.isdir(inLAS):
+    # Find all LAS files in the input folder. Optionally search recursively.
+    recursive = True
+    lasList = []
+    if recursive:
+        for root, dirs, files in os.walk(inLAS):
+            for file in files:
+                if file.endswith(".las") and file not in lasList:
+                    lasList.append(os.path.join(root, file))
+    else:
+        for file in os.listdir(inLAS):
+            if file.endswith(".las") and file not in lasList:
+                lasList.append(os.path.join(inLAS, file))
+
+    # Print the number of LAS files
+    if len(lasList) == 1:
+        arcpy.AddMessage("1 LAS file detected in Directory")
+    else:
+        arcpy.AddMessage("{0} LAS files detected in Directory".format(len(lasList)))
+
+    # Process lasList into Esri GP tool friendly input format
+    newstr = str(lasList)[1:-1].replace("', ", ";")
+    lasList = '"' + newstr.replace("'", "") + '"'
+
+# Create a LAS dataset from the LAS files when the input is not already a LASD.
+if inLAS.lower().endswith('.las') or os.path.isdir(inLAS):
+    createlasdataset(inLAS=inLAS, sr=sr)
+
+
+DTMLASD = "DTMLASD"
+LRDSMLASD = "LRDSMLASD"
+
+if arcpy.Exists(DTMLASD):
+    arcpy.Delete_management(DTMLASD)
+if arcpy.Exists(LRDSMLASD):
+    arcpy.Delete_management(LRDSMLASD)
+arcpy.MakeLasDatasetLayer_management(inLASD, DTMLASD, str(groundClassCode), groundReturn, "", "", "", "", "", "")
+arcpy.MakeLasDatasetLayer_management(inLASD, LRDSMLASD, str(buildingClassCode), buildingReturn, "", "", "", "", "", "")
+if arcpy.Exists(DTMLASD) and arcpy.Exists(LRDSMLASD):
+    arcpy.AddMessage("LASD Layers Created")
+else:
+    arcpy.AddMessage("Could Not Create LASD Layers")
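The slice-and-replace trick above turns a Python list into the quoted, semicolon-delimited string that the Esri GP tools expect. A more direct equivalent, shown as a minimal sketch with hypothetical paths:

    filenames = [r'C:\data\tile1.las', r'C:\data\tile2.las']  # hypothetical tile paths
    lasList = '"' + ";".join(filenames) + '"'
    print(lasList)  # "C:\data\tile1.las;C:\data\tile2.las"

Either form yields the same input string; join() just avoids round-tripping through repr().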
+
+# Selector determining whether to interpolate additional points for TIN creation. Helps with terrible LiDAR.
+''' if interpolateAdditionalPoints is enabled then input the correct heightValue & valueField for raster processing '''
+''' Determine the correct point spacing settings based on raster processing algorithm requirements '''
+
+# TODO make point spacing support recursion; obtainLiDARInfo(inLASD, lasList)[3] is currently a placeholder
+''' calculate the average pt spacing of the LiDAR tiles the building footprints intersect '''
+pointSpace = obtainLiDARInfo(inLASD, lasList)[3]
+pointSpacing = pointSpace * pointSpacingCorrectionFactor
+heightValue = 1
+valueField = "FLOAT"
+
+result = arcpy.GetCount_management(buildingFootprints)
+FootprintCount = int(result.getOutput(0))
+print("number of building footprints to process = " + str(FootprintCount))
+
+fullextent = arcpy.Describe(buildingFootprints).extent
+
+# create list for multipatch features
+mp_list = []
+
+# make search cursor for footprint polygons
+fields = ["SHAPE@"]
+with arcpy.da.SearchCursor(buildingFootprints, fields) as sc:
+    for i, row in enumerate(sc):
+        if (i + beginOnFeatureNumber) < FootprintCount:
+            print("on BuildingFootprint {0}".format(i + beginOnFeatureNumber) + " of " + str(FootprintCount))
+            # if i is a multiple of 50, compact the geodatabases
+            if not i % 50:
+                print("Began Compacting GDB @ ", str(datetime.now()))
+                arcpy.Compact_management(outputWS)
+                arcpy.Compact_management(scratchGDB)
+                print("Complete Compacting GDB @ ", str(datetime.now()))
+            # Route each footprint to exactly one extraction approach
+            if arcpy.Exists(DTMRaster) and arcpy.Exists(DSMRaster):
+                extractMultipatchFromRaster(fullextent=fullextent, row=row)
+            elif rasterExtractionApproach:
+                extractMultipatchRasterToPts(fullextent=fullextent, row=row)
+            else:
+                extractMultipatchFromPts(fullextent=fullextent, row=row)
+
+
+# merge the MultiPatches into a single FC
+outputMerge = os.path.join(outputWS, 'outputMergeMP')
+arcpy.Merge_management(mp_list, outputMerge)
+
+# TODO DJARRARD: delete all buildingMP* files that exist in the output workspace
+# Delete Individual Multipatch Buildings
+'''if arcpy.Exists(os.path.join(outputWS, "bldgMP_0")):
+    for fc in arcpy.ListFeatureClasses("bldgMP*", "MULTIPATCH", outputWS):
+        arcpy.Delete_management(fc)'''
+
+if arcpy.Exists(DTMLASD):
+    arcpy.Delete_management(DTMLASD)
+if arcpy.Exists(LRDSMLASD):
+    arcpy.Delete_management(LRDSMLASD)
+
+print("Finished Process at ", str(datetime.now()))
diff --git a/LOD1BuildingsFromLiDAR.py b/LOD1BuildingsFromLiDAR.py
new file mode 100644
index 0000000..b645e1c
--- /dev/null
+++ b/LOD1BuildingsFromLiDAR.py
@@ -0,0 +1,166 @@
+__author__ = 'geof7015'
+
+import arcpy
+import os
+
+
+#inLASD = r'E:\3D_City_Data\United States\North Carolina\Charlotte\AIC\InnovationCorridorLiDAR.lasd'
+#outputDerivatives = r'E:\3D_City_Data\United States\North Carolina\Charlotte\AIC\Workspace.gdb'
+#buildingFootprint = r'E:\3D_City_Data\United States\North Carolina\Charlotte\AIC\Data.gdb\BuildingFootprints_test'
+
+inLASD = r'E:\3D_City_Data\United States\North Carolina\Charlotte\Source\LiDAR\Charlotte.lasd'
+outputDerivatives = r'E:\3D_City_Data\United States\North Carolina\Charlotte\AIC\Workspace.gdb'
+buildingFootprint = r'E:\3D_City_Data\United States\North Carolina\Charlotte\Source\BuildingFootprints\Buildings.shp'
+
+pointSpacing = 2
+pointSpacingFactor = 3
+
+buildingClassCode = 6
+buildingReturn = "Last Return"
+
+groundClassCode = 2
+groundReturn = ""
+
+DeleteIntermediateData = False
+
+
+################
+# Begin Script #
+################
+
+# Create initial raster derivatives.
+
+# Specify initial LiDAR derivative raster settings
+DTMLASD = "DTMLASD"
+LRDSMLASD = "LRDSMLASD"
+heightValue = 1
+valueField = "FLOAT"
+pointSpacingEquation = pointSpacing * pointSpacingFactor
+
+# Create LAS dataset layers for ground and buildings as inputs to raster tools.
+if arcpy.Exists(DTMLASD):
+    arcpy.Delete_management(DTMLASD)
+if arcpy.Exists(LRDSMLASD):
+    arcpy.Delete_management(LRDSMLASD)
+arcpy.MakeLasDatasetLayer_management(inLASD, DTMLASD, str(groundClassCode), groundReturn, "", "", "", "", "", "")
+arcpy.MakeLasDatasetLayer_management(inLASD, LRDSMLASD, str(buildingClassCode) + ";" + str(groundClassCode), buildingReturn, "", "", "", "", "", "")
+
+# Create Building Height Raster
+arcpy.AddMessage("Beginning processing of Building Height Raster. "
+                 "This process may take a few hours for Large Datasets")
+BuildingHeightRaster = os.path.join(outputDerivatives, "BuildingHeightRaster")
+# Delete Building Height Raster if existing
+if arcpy.Exists(BuildingHeightRaster):
+    arcpy.Delete_management(BuildingHeightRaster)
+# arcpy.LasPointStatsAsRaster_management(LRDSMLASD, BuildingHeightRaster, "Z_RANGE", "CELLSIZE", pointSpacingEquation)
+arcpy.LasDatasetToRaster_conversion(LRDSMLASD, BuildingHeightRaster, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR",
+                                    valueField, "CELLSIZE", pointSpacingEquation, heightValue)
+print("Created DSM/Building Height Raster")
+
+# Remove Artifacts from Building Height Raster
+arcpy.AddMessage("Removing Potential Artifacts from Building Height Raster")
+BuildingHtFilter = os.path.join(outputDerivatives, "BuildingHtFilter")
+Filter_raster = arcpy.sa.Filter(BuildingHeightRaster, "LOW", "true")
+Filter_raster.save(BuildingHtFilter)
+
+# Create Digital Elevation Model from LiDAR
+arcpy.AddMessage("Beginning processing of Terrain Raster. "
+                 "This process may take a few hours for Large Datasets")
+DTM = os.path.join(outputDerivatives, "DTM")
+# Delete Terrain Raster if existing
+if arcpy.Exists(DTM):
+    arcpy.Delete_management(DTM)
+arcpy.LasDatasetToRaster_conversion(DTMLASD, DTM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", valueField,
+                                    "CELLSIZE", pointSpacingEquation, heightValue)
+print("Created DTM/Terrain Raster")
+
+# Delete LAS dataset layers to free system memory
+if arcpy.Exists(DTMLASD):
+    arcpy.Delete_management(DTMLASD)
+if arcpy.Exists(LRDSMLASD):
+    arcpy.Delete_management(LRDSMLASD)
+
+# Begin Data Attribution Process
+
+# Copy building footprints to GDB
+''' attempting to keep all mathematical processes in a table in_memory before joining to the footprint at the end of the process '''
+buildingFootprintsCopy = os.path.join(outputDerivatives, "BuildingFootprintsCopy")
+if arcpy.Exists(buildingFootprintsCopy):
+    arcpy.Delete_management(buildingFootprintsCopy)
+arcpy.CopyFeatures_management(buildingFootprint, buildingFootprintsCopy)
+
+
+def createID(buildingFootprints):
+    # Determine if the "ID" field already exists
+
+    # List of field names to be added
+    to_add = ["ID"]
+
+    # Create a list of existing field names
+    fieldList = arcpy.ListFields(buildingFootprints)
+    fieldName = [f.name for f in fieldList]
+
+    for field in to_add:
+        if field in fieldName:
+            print("ID Already Exists!")
+        else:
+            # Add a sequential ID to the Building Footprints
+            arcpy.AddMessage("Adding Unique Sequential IDs to Building Footprints")
+            codeblock = """rec = 0
+def autoIncrement():
+    global rec
+    pStart = 1  # adjust start value, if required
+    pInterval = 1  # adjust interval value, if required
+    if rec == 0:
+        rec = pStart
+    else:
+        rec = rec + pInterval
+    return rec"""
+            arcpy.AddField_management(buildingFootprints, "ID", "LONG", None, None, None, "ID", "true", "true", None)
+            arcpy.CalculateField_management(buildingFootprints, "ID", "autoIncrement()", "PYTHON_9.3", codeblock)
+
+
+createID(buildingFootprints=buildingFootprintsCopy)
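For reference, the same sequential ID can be written without a CalculateField code block by using an update cursor. A minimal sketch (hypothetical helper; assumes the "ID" field has already been added):

    def createIDWithCursor(fc):
        # Write 1, 2, 3, ... into the ID field, row by row.
        with arcpy.da.UpdateCursor(fc, ["ID"]) as cursor:
            for i, row in enumerate(cursor, start=1):
                row[0] = i
                cursor.updateRow(row)
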
+arcpy.AddMessage("Calculating Ground Elevation for Building Footprints") +# Calculate Min Elevation +ElevTable = os.path.join("in_memory", "minMaxElevTable") +arcpy.sa.ZonalStatisticsAsTable(buildingFootprintsCopy, "ID", DTM, ElevTable, "true", "MINIMUM") +arcpy.AddField_management(ElevTable, "baseElevation", "DOUBLE", None, None, None, None, "true", "false", None) +arcpy.CalculateField_management(ElevTable, "baseElevation", "!MIN!", "PYTHON_9.3", None) + +arcpy.AddMessage("Calculating Building Roof Height for Building Footprints") + +# Calculate MaxElevation +MaxElevTable = os.path.join("in_memory", "MaxElevTable") +arcpy.sa.ZonalStatisticsAsTable(buildingFootprintsCopy, "ID", BuildingHtFilter, MaxElevTable, "true", "MAXIMUM") +arcpy.JoinField_management(ElevTable, "ID", MaxElevTable, "ID", "MAX") +arcpy.AddField_management(ElevTable, "MaxElevation", "DOUBLE", None, None, None, None, "true", "false", None) +arcpy.CalculateField_management(ElevTable, "MaxElevation", "!MAX!", "PYTHON_9.3", None) + +if arcpy.Exists(MaxElevTable): + arcpy.Delete_management(MaxElevTable) + +# Calculate totalHeight +arcpy.AddField_management(ElevTable, "totalHeight", "DOUBLE", None, None, None, None, "true", "false", None) +arcpy.CalculateField_management(ElevTable, "totalHeight", "!MaxElevation! - !baseElevation!", "PYTHON_9.3", None) + +# Merge Calculations to Building Footprints +arcpy.JoinField_management(buildingFootprintsCopy, "ID", ElevTable, "ID", "baseElevation;MaxElevation;totalHeight") + +if arcpy.Exists(ElevTable): + arcpy.Delete_management(ElevTable) + +arcpy.AddMessage("Z Enabling Building Footprints") +# then, move building footprints to MIN Z Height +BuildingFootprintsFinal = os.path.join(outputDerivatives, "buildingFootprints") +if arcpy.Exists(BuildingFootprintsFinal): + arcpy.Delete_management(BuildingFootprintsFinal) +arcpy.FeatureTo3DByAttribute_3d(buildingFootprintsCopy, BuildingFootprintsFinal, "baseElevation", "") + + +# Delete Intermediate Data +if DeleteIntermediateData: + arcpy.AddMessage("Deleting Intermediate Data") + if arcpy.Exists(DTM): + arcpy.Delete_management(DTM) + if arcpy.Exists(BuildingHeightRaster): + arcpy.Delete_management(BuildingHeightRaster) + if arcpy.Exists(BuildingHtFilter): + arcpy.Delete_management(BuildingHtFilter) + if arcpy.Exists(buildingFootprintsCopy): + arcpy.Delete_management(buildingFootprintsCopy) + +arcpy.AddMessage("Process Complete") +arcpy.AddMessage("Building Footprints Saved to: " + BuildingFootprintsFinal) +if not DeleteIntermediateData: + arcpy.AddMessage("Derivative Data Saved to: " + outputDerivatives) diff --git a/README.md b/README.md new file mode 100644 index 0000000..6132a3f --- /dev/null +++ b/README.md @@ -0,0 +1,2 @@ +# 3D_buildings +extracting buildings from lidar diff --git a/V3_delivery/V3_Delivery/BuildingTreeIdentification.tbx b/V3_delivery/V3_Delivery/BuildingTreeIdentification.tbx new file mode 100644 index 0000000..e2a4672 Binary files /dev/null and b/V3_delivery/V3_Delivery/BuildingTreeIdentification.tbx differ diff --git a/V3_delivery/V3_Delivery/ExtractBuildingsTreesAutomation_V3.py b/V3_delivery/V3_Delivery/ExtractBuildingsTreesAutomation_V3.py new file mode 100644 index 0000000..8ad9ff8 --- /dev/null +++ b/V3_delivery/V3_Delivery/ExtractBuildingsTreesAutomation_V3.py @@ -0,0 +1,2028 @@ +# --------------------------------------------------------------------------- +# Name: ExtractBuildingsTreesAutomation.py +# Purpose: +# Usage: +# Description: +# Author: Yiqun Xie, Joseph McGlinchy +# Organization: Esri Inc. 
+
+import sys
+import os
+import inspect
+import traceback
+import math
+import multiprocessing
+import time
+import shutil
+# import resourceLogger
+import arcpy
+from arcpy.sa import *
+
+
+def extract_buildings_trees(pp_params):
+    # In order to invoke this method as a separate process using python multiprocessing,
+    # import arcpy and other site packages
+    import arcpy
+    import sys
+    import os
+    import traceback
+    import time
+    try:
+        start = time.time()
+        # check out Spatial Analyst license
+        if arcpy.CheckExtension("Spatial") == "Available":
+            arcpy.CheckOutExtension("Spatial")
+        else:
+            arcpy.AddMessage("\n*** No Spatial Analyst License Available. "
+                             "Exiting extract_buildings_trees for OID: {0} ***".format(pp_params[1]))
+            sys.exit()
+        arcpy.env.overwriteOutput = True
+        # Unpack pp_params
+        out_folder_path = pp_params[0]
+        # arcpy.AddMessage("\n out_folder_path: {0}".format(out_folder_path))
+        oid = pp_params[1]
+        arcpy.AddMessage("\n** Processing oid: {0} **\n".format(oid))
+        # set a flag to indicate if the input data type is LAS or DSM/DTM rasters
+        las_input = False
+        if pp_params[3] == "ALL RETURNS" or pp_params[3] == "LAST RETURNS":
+            las_input = True
+            lasd = pp_params[2]
+            # arcpy.AddMessage("\n lasd: {0}".format(lasd))
+            las_return_type = pp_params[3]
+            # arcpy.AddMessage("\n las_return_type: {0}".format(las_return_type))
+            arcpy.CheckOutExtension('3D')
+        else:
+            dsm_md = pp_params[2]
+            # arcpy.AddMessage("\n dsm_md: {0}".format(dsm_md))
+            dtm_md = pp_params[3]
+            # arcpy.AddMessage("\n dtm_md: {0}".format(dtm_md))
+        xmin_str = pp_params[4]
+        # arcpy.AddMessage("\n xmin_str: {0}".format(xmin_str))
+        xmax_str = pp_params[5]
+        # arcpy.AddMessage("\n xmax_str: {0}".format(xmax_str))
+        ymin_str = pp_params[6]
+        # arcpy.AddMessage("\n ymin_str: {0}".format(ymin_str))
+        ymax_str = pp_params[7]
+        # arcpy.AddMessage("\n ymax_str: {0}".format(ymax_str))
+        featureextraction = pp_params[8]
+        # arcpy.AddMessage("\n featureextraction: {0}".format(featureextraction))
+        elevation_meter_scalefactor_str = pp_params[9]
+        # arcpy.AddMessage("\n elevation_meter_scalefactor_str: {0}".format(elevation_meter_scalefactor_str))
+        use_pos_terrain_method = pp_params[10]
+        # arcpy.AddMessage("\n use_pos_terrain_method: {0}".format(use_pos_terrain_method))
+        delete_intermediate_files = pp_params[11]
+        # arcpy.AddMessage("\n delete_intermediate_files: {0}".format(delete_intermediate_files))
+        height_path = pp_params[12]
+        # arcpy.AddMessage("\n height_path: {0}".format(height_path))
+        las_point_spacing = pp_params[13]  # only utilized when las_input = True
+        # arcpy.AddMessage("\n las_point_spacing: {0}".format(las_point_spacing))
+
+        # Convert some passed parameters to float (all parameters were passed as strings)
+        elevation_meter_scalefactor = float(elevation_meter_scalefactor_str)
+        xmin = float(xmin_str)
+        xmax = float(xmax_str)
+        ymin = float(ymin_str)
+        ymax = float(ymax_str)
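+
+        # For reference, pp_params is assumed (from the unpacking above) to be packed as:
+        #   [out_folder_path, oid,
+        #    lasd | dsm_md, las_return_type | dtm_md,
+        #    xmin, xmax, ymin, ymax,
+        #    featureextraction, elevation_meter_scalefactor,
+        #    use_pos_terrain_method, delete_intermediate_files,
+        #    height_path, las_point_spacing]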
+
+        # Each fishnet section gets a separate directory (named after the OID) to contain results
+        out_subfolder_path = os.path.join(out_folder_path, oid)
+
+        # The results file gdb will store trees and buildings
+        results_gdb_name = r"Results.gdb"
+        # Entire path of the file gdb
+        results_file_gdb_path = os.path.join(out_subfolder_path, results_gdb_name)
+
+        trees_output = ""
+        if "TREES" in featureextraction.upper():
+            trees_output = os.path.join(results_file_gdb_path, r"trees_to_merge" + oid)
+        buildings_output = os.path.join(results_file_gdb_path, r"buildings_to_merge" + oid)
+
+        # arcpy.env.workspace = out_subfolder_path
+        # arcpy.env.workspace = "in_memory"
+        if not os.path.exists(out_subfolder_path):
+            arcpy.AddMessage("Creating Results sub-Folder: " + out_subfolder_path)
+            os.makedirs(out_subfolder_path)
+        elif arcpy.Exists(buildings_output):
+            # if the desired outputs already exist (buildings_to_merge* and possibly trees_to_merge*),
+            # then don't continue, since this run has been re-started
+            if "TREES" in featureextraction.upper():
+                # If the trees_to_merge* feature class exists then this oid has already been processed
+                if arcpy.Exists(trees_output):
+                    arcpy.AddMessage("OID {0} already processed...skipping".format(oid))
+                    return
+            else:
+                arcpy.AddMessage("OID {0} already processed...skipping".format(oid))
+                return
+
+        # If the results file gdb doesn't exist, then create it
+        if not os.path.exists(results_file_gdb_path):
+            # arcpy.AddMessage("Creating Results File GDB: {0}".format(results_file_gdb_path))
+            arcpy.CreateFileGDB_management(out_subfolder_path, results_gdb_name, out_version="CURRENT")
+
+        # Create a scratch file gdb for intermediate file output
+        scratch_gdb_name = r"TempWorkArea.gdb"
+        # Entire path of the file gdb
+        scratch_file_gdb_path = os.path.join(out_subfolder_path, scratch_gdb_name)
+        # If the file gdb doesn't exist, then create it
+        if not os.path.exists(scratch_file_gdb_path):
+            # arcpy.AddMessage("Creating Scratch File GDB: {0}\n".format(scratch_file_gdb_path))
+            arcpy.CreateFileGDB_management(out_subfolder_path, scratch_gdb_name, out_version="CURRENT")
+
+        # send intermediate files to the scratch file gdb
+        arcpy.env.workspace = scratch_file_gdb_path
+
+        # clip dsm raster from either LAS Dataset or DSM Mosaic Dataset
+        clip_dsm_raster = os.path.join(out_subfolder_path, "dsm{}.tif".format(oid))
+        # arcpy.AddMessage("\nDSM clip_raster: {0}".format(clip_dsm_raster))
+        # Extend the clip footprint by 1 unit in each direction so resulting features overlap
+        fishnet_rectangle = "{} {} {} {}".format(xmin - 1, ymin - 1, xmax + 1, ymax + 1)
+        if not arcpy.Exists(clip_dsm_raster):
+            if las_input:
+                if las_return_type == "ALL RETURNS":
+                    return_val = ""
+                else:
+                    # Last Return is better for defining buildings
+                    return_val = "'Last Return'"
+                # use LAS Class codes 0-6 to create the DSM
+                # 0 - Never Classified, 1 - Unassigned, 2 - Ground, 3 - Low Vegetation,
+                # 4 - Medium Vegetation, 5 - High Vegetation, 6 - Building
+                arcpy.MakeLasDatasetLayer_management(lasd, out_layer="DSM_LASD_Layer",
+                                                     class_code="0;1;2;3;4;5;6",
+                                                     return_values=return_val, no_flag="true",
+                                                     synthetic="true", keypoint="true",
+                                                     withheld="false", surface_constraints="")
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from MakeLasDatasetLayer of DSM is: \n{0}".format(messages))
+                arcpy.env.extent = fishnet_rectangle
+                arcpy.LasDatasetToRaster_conversion("DSM_LASD_Layer", clip_dsm_raster, value_field="ELEVATION",
+                                                    interpolation_type="BINNING MAXIMUM NATURAL_NEIGHBOR",
+                                                    data_type="FLOAT", sampling_type="CELLSIZE",
+                                                    sampling_value=las_point_spacing, z_factor="1")
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from LasDatasetToRaster of DSM is: \n{0}".format(messages))
+            else:
+                # Input is DSM and DTM Mosaic datasets
+                try:
+                    arcpy.Clip_management(dsm_md, fishnet_rectangle, clip_dsm_raster, "#", "-3.40282346639e+038")
+                except:
+                    arcpy.AddMessage("No data in DSM clip of area with oid: {0}".format(oid))
+                    return
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from Clip of DSM is: \n{0}".format(messages))
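+
+        # Worked example of the padded extent built above (hypothetical values):
+        # with xmin=1000, ymin=2000, xmax=1500, ymax=2600 the fishnet_rectangle
+        # string becomes "999 1999 1501 2601", so neighboring cells overlap by
+        # 2 map units; features spanning cell borders are dissolved/merged later.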
+
+        # clip dtm raster from either LAS Dataset or DTM Mosaic Dataset
+        clip_dtm_raster = os.path.join(out_subfolder_path, "dtm{}.tif".format(oid))
+        # arcpy.AddMessage("\nDTM clip_raster: {0}".format(clip_dtm_raster))
+        if not arcpy.Exists(clip_dtm_raster):
+            if las_input:
+                # use LAS Class codes 2 and 8 to create the DTM
+                # 2 - Ground, 8 - Model Key
+                arcpy.MakeLasDatasetLayer_management(lasd, out_layer="DTM_LASD_Layer", class_code="2;8",
+                                                     return_values="", no_flag="true", synthetic="true",
+                                                     keypoint="true", withheld="false", surface_constraints="")
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from MakeLasDatasetLayer of DTM is: \n{0}".format(messages))
+                arcpy.env.extent = fishnet_rectangle
+                arcpy.LasDatasetToRaster_conversion("DTM_LASD_Layer", clip_dtm_raster, value_field="ELEVATION",
+                                                    interpolation_type="BINNING AVERAGE NATURAL_NEIGHBOR",
+                                                    data_type="FLOAT", sampling_type="CELLSIZE",
+                                                    sampling_value=las_point_spacing, z_factor="1")
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from LasDatasetToRaster of DTM is: \n{0}".format(messages))
+            else:
+                # Input is DSM and DTM Mosaic datasets
+                try:
+                    arcpy.Clip_management(dtm_md, fishnet_rectangle, clip_dtm_raster, "#", "-3.40282346639e+038")
+                except:
+                    arcpy.AddMessage("No data in DTM clip of area with oid: {0}".format(oid))
+                    if delete_intermediate_files == "true":
+                        remove_rasters(out_subfolder_path)
+                        remove_shapefiles(out_subfolder_path)
+                        arcpy.env.workspace = ""  # so we can successfully delete scratch_file_gdb_path
+                        remove_filegdb(scratch_file_gdb_path)
+                    return
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from Clip of DTM is: \n{0}".format(messages))
+
+        trees_output = ""
+        if "TREES" in featureextraction.upper():
+            trees_output = os.path.join(results_file_gdb_path, r"trees_to_merge" + oid)
+        buildings_output = os.path.join(results_file_gdb_path, r"buildings_to_merge" + oid)
+
+        # Minus - compute the difference to get heights relative to the ground
+        # Note the Height raster is put into a separate folder (height_path)
+        # (Used to create a Mosaic Dataset later in the zonal stats step)
+        diff_ori = os.path.join(height_path, "Minus_img" + oid + r".tif")
+        arcpy.gp.Minus_sa(clip_dsm_raster, clip_dtm_raster, diff_ori)
+
+        # Divide by a scale factor to convert all heights to meters
+        # divide_minus1 = os.path.join(out_subfolder_path, "Divide_Minus1" + oid)
+        flt_meter_elev_unit = float(elevation_meter_scalefactor)
+        divide_minus1 = Raster(diff_ori) / flt_meter_elev_unit
+        # (flt_meter_elev_unit is a plain Python float, so no arcpy Delete is needed for it)
+
+        # create a raster with all floating point values of 1.0
+        # Note: Done because SetNull & Con won't take a constant 1.0 value for the false raster parameter
+        flt_raster_one = divide_minus1 / divide_minus1
+
+        # setnull_divi1 = os.path.join(out_subfolder_path, "SetNull_Divi1" + oid)
+        setnull_divi1 = 
SetNull(divide_minus1, flt_raster_one, "VALUE<2") + + arcpy.Delete_management(flt_raster_one) + + # check to see if any heights are > 2 meters, and if not, then we're done + if setnull_divi1.maximum <= 0: + arcpy.AddMessage("No Buildings or Trees were identified in region {0}".format(oid)) + # print "No Buildings or Trees were identified in region: " + oid + if delete_intermediate_files == "true": + remove_rasters(out_subfolder_path) + remove_shapefiles(out_subfolder_path) + arcpy.env.workspace = "" # so we can successfully delete scratch_file_gdb_path + remove_filegdb(scratch_file_gdb_path) + return + # Is Null - create a mask to indicate the areas where heights are < 2 (1: heights < 2, 0: heights >= 2) + isnull_newma1 = os.path.join(out_subfolder_path, "IsNullnewm" + oid) + arcpy.gp.IsNull_sa(setnull_divi1, isnull_newma1) + + # Make Raster Layer from the previously created mask + mask_null = "mask_null" + oid + arcpy.MakeRasterLayer_management(isnull_newma1, mask_null, "", "", "") + + # Select Layer By Attribute - Select those pixels in the layer where heights are < 2 + arcpy.SelectLayerByAttribute_management(mask_null, "NEW_SELECTION", "VALUE=1") + + # Euclidean Distance - disBD represents the distance of each 'tall' pixel to the closest 'short' pixel + # If a tree exists, the center (trunk) of the tree will have a larger value (it's toward the center) + # If a building, the center of the building will have a larger value + disbd = os.path.join(out_subfolder_path, "disBD" + oid) + arcpy.gp.EucDistance_sa(mask_null, disbd, "", mask_null, "") + arcpy.Delete_management(mask_null) + + # Set Null - set all ZERO (and very small distance) values to NoData + disbdnull = os.path.join(out_subfolder_path, "disBDnull" + oid) + arcpy.gp.SetNull_sa(disbd, disbd, disbdnull, "VALUE<0.0001") + + # Negate the Distances to create wells (as opposed to peaks) + # Now the peaks we have in the middle of the objects become basins and the boundary pixels become ridges + rdsm = os.path.join(out_subfolder_path, "Negatediff" + oid) + arcpy.gp.Negate_sa(disbdnull, rdsm) + + # Flow Direction + flowdir_filt1 = os.path.join(out_subfolder_path, "FlowDirFlt" + oid) + arcpy.gp.FlowDirection_sa(rdsm, flowdir_filt1, "NORMAL", "") + + # Basin + basin_flowdi5 = os.path.join(out_subfolder_path, "BasinFlowD" + oid + r".tif") + arcpy.gp.Basin_sa(flowdir_filt1, basin_flowdi5) + + # Times + diff = os.path.join(out_subfolder_path, "diff" + oid) + arcpy.gp.Times_sa(divide_minus1, setnull_divi1, diff) + arcpy.Delete_management(divide_minus1) + + # Focal Statistics + focalst_diff1 = os.path.join(out_subfolder_path, "FocalStdif" + oid) + arcpy.gp.FocalStatistics_sa(diff, focalst_diff1, "Rectangle 3 3 CELL", "MEAN", "DATA") + + # Times + mean_diff = os.path.join(out_subfolder_path, "mean_diff" + oid) + arcpy.gp.Times_sa(focalst_diff1, setnull_divi1, mean_diff) + arcpy.Delete_management(setnull_divi1) + + # Minus + diff_minus_avg = os.path.join(out_subfolder_path, "diffMinusA" + oid) + arcpy.gp.Minus_sa(diff, mean_diff, diff_minus_avg) + + # Send the output to a different file name so results can be compared for the 2 methods + if not use_pos_terrain_method == "true": + # use the 'slope' method by default + positive = os.path.join(out_subfolder_path, "Slope" + oid) + arcpy.gp.Slope_sa(diff_minus_avg, positive, "DEGREE", "1") + else: + # the 'Positive Terrains' method + input_raster_or_constant_value_2 = "0.3" + positive = os.path.join(out_subfolder_path, "GreaterMin" + oid) + arcpy.gp.GreaterThan_sa(diff_minus_avg, 
input_raster_or_constant_value_2, positive) + + # Set output coordinate system (need this to keep it from changing in Zonal Stats step) + desc = arcpy.Describe(clip_dtm_raster) + sr = desc.spatialReference + arcpy.env.outputCoordinateSystem = sr + linear_unit = sr.linearUnitName.upper() + # arcpy.AddMessage("Spatial ref of DSM clipped raster is: \n{0}\n".format(sr.exportToString())) + + # Zonal Statistics + # Output a .tif file to avoid modification of SRS (bug which applies only to GRID) + zonalst_basi4 = os.path.join(out_subfolder_path, "ZonalStBas" + oid + r".tif") + arcpy.gp.ZonalStatistics_sa(basin_flowdi5, "VALUE", positive, zonalst_basi4, "MEAN", "DATA") + + # Iso Cluster Unsupervised Classification + # arcpy.AddMessage("Classifying...") + isocluster2 = os.path.join(out_subfolder_path, "isocluster" + oid) + # Write out a permanent signature file to avoid conflicts with other concurrent processes + iso_sig_file = os.path.join(out_subfolder_path, r"iso_sig" + oid + r".gsg") + # wrap this in a try block in case zero (0) classes are found (this happens in desert areas) + try: + arcpy.gp.IsoClusterUnsupervisedClassification_sa(zonalst_basi4, "2", isocluster2, "20", "10", iso_sig_file) + # messages = arcpy.GetMessages() + # arcpy.AddMessage("\nResults from IsoClusterUnsupervisedClassification_sa of oid {0} are:" + # " \n{1}\n".format(oid, messages)) + + except: + arcpy.AddMessage("No Buildings or Trees were identified in region {0}".format(oid)) + if delete_intermediate_files == "true": + remove_rasters(out_subfolder_path) + remove_shapefiles(out_subfolder_path) + arcpy.env.workspace = "" # so we can successfully delete scratch_file_gdb_path + remove_filegdb(scratch_file_gdb_path) + del desc, sr + return + # check actual number of classes obtained + raster_iso = arcpy.Raster(isocluster2) + buildingsfound = True + if int(raster_iso.maximum) == int(raster_iso.minimum): + # Only one class found = assumed to be trees for now, but this might change + arcpy.AddMessage("No buildings were identified in region {0}".format(oid)) + # arcpy.AddMessage("No records will be output to: {0}".format(buildings_output)) + buildingsfound = False + + # Always look for buildings regardless if asked for, since we look for trees where no buildings exist + if buildingsfound: + # Set Null + setnullbd = os.path.join(out_subfolder_path, "SetNullbd" + oid) + arcpy.gp.SetNull_sa(isocluster2, isocluster2, setnullbd, "VALUE>1") + + # Raster to Polygon + bdiso = os.path.join(scratch_file_gdb_path, "bdiso" + oid) + arcpy.RasterToPolygon_conversion(setnullbd, bdiso, "NO_SIMPLIFY", "VALUE") + + # Add Geometry Attributes + arcpy.AddGeometryAttributes_management(bdiso, + "AREA;PERIMETER_LENGTH", "METERS", "SQUARE_METERS", "") + # Select + buildings_sel = "in_memory\\building_area50" + oid + if "FOOT" in linear_unit: + # xy linear units are foot or foot_us + arcpy.Select_analysis(bdiso, buildings_sel, "\"POLY_AREA\" >= 538.19") + else: + # xy linear units are meter + arcpy.Select_analysis(bdiso, buildings_sel, "\"POLY_AREA\" >= 50") + + # Add Field + if len(arcpy.ListFields(buildings_sel, "ratio")) < 1: + arcpy.AddField_management(buildings_sel, "ratio", "DOUBLE", "", "", "", "", + "NULLABLE", "NON_REQUIRED", "") + # Calculate Field + arcpy.CalculateField_management(buildings_sel, "ratio", "!POLY_AREA! 
/ !PERIMETER!", + "PYTHON_9.3", "") + # Select + # bd_075 = "in_memory\\bd_075" + bd_075 = os.path.join(scratch_file_gdb_path, "bd_075" + oid) + arcpy.Select_analysis(buildings_sel, bd_075, "\"ratio\" >=0.75") + arcpy.Delete_management(buildings_sel) + + # Aggregate Polygons + # bdagg_tbl = "in_memory\\tbl" + bdagg_tbl = os.path.join(scratch_file_gdb_path, "bdagg_tbl" + oid) + arcpy.AggregatePolygons_cartography(bd_075, buildings_output, "1.5 Meters", "50 SquareMeters", + "250 SquareMeters", "ORTHOGONAL", "", bdagg_tbl) + + # Repair the building geometries in case they have self intersecting geometries + arcpy.RepairGeometry_management(buildings_output, "DELETE_NULL") + # Note: can do zonal stats here but buildings along borders are dissolved later, + # which invalidates the gathered stats for those particular buildings + # zonalstats(buildings_output, diff_ori, results_file_gdb_path) + + # If the user asks for trees, then the process is different if buildings are also found + if "TREES" in featureextraction.upper(): + isnull_setnu1 = os.path.join(out_subfolder_path, "IsNullSetN" + oid) + if buildingsfound: + # Is Null + isnull_isocl1 = os.path.join(out_subfolder_path, "IsNullisoc" + oid) + arcpy.gp.IsNull_sa(isocluster2, isnull_isocl1) + # Con + ras_isnull_isocl1 = Raster(isnull_isocl1) + int_raster_one = ras_isnull_isocl1 / ras_isnull_isocl1 + con_isnull_i1 = os.path.join(out_subfolder_path, "ConIsNulli" + oid) + arcpy.gp.Con_sa(isnull_isocl1, int_raster_one, con_isnull_i1, isocluster2, "VALUE=1") + arcpy.Delete_management(ras_isnull_isocl1) + arcpy.Delete_management(int_raster_one) + # Focal Statistics + focalst_isoc1 = os.path.join(out_subfolder_path, "FocalStiso" + oid) + arcpy.gp.FocalStatistics_sa(con_isnull_i1, focalst_isoc1, "Rectangle 3 3 CELL", "MAJORITY", "DATA") + # Set Null + setnull_isot = os.path.join(out_subfolder_path, "SetNulliso" + oid) + arcpy.gp.SetNull_sa(focalst_isoc1, 0, setnull_isot, "VALUE = 1") + + # Is Null + arcpy.gp.IsNull_sa(setnull_isot, isnull_setnu1) + else: + arcpy.gp.IsNull_sa(isocluster2, isnull_setnu1) + + # Make Raster Layer + treenull = "treeNULL" + oid + arcpy.MakeRasterLayer_management(isnull_setnu1, treenull, "", "", "") + + # Select Layer By Attribute + arcpy.SelectLayerByAttribute_management(treenull, "NEW_SELECTION", "VALUE=1") + + # Euclidean Distance + eucdist_make1 = os.path.join(out_subfolder_path, "EucDistMak" + oid) + arcpy.gp.EucDistance_sa(treenull, eucdist_make1, "", treenull, "") + arcpy.Delete_management(treenull) + + # Set Null + setnull_eucd1 = os.path.join(out_subfolder_path, "SetNulEucD" + oid) + if "FOOT" in linear_unit: + arcpy.gp.SetNull_sa(eucdist_make1, eucdist_make1, setnull_eucd1, "VALUE<2.95") + else: + arcpy.gp.SetNull_sa(eucdist_make1, eucdist_make1, setnull_eucd1, "VALUE<0.9") + + setnulldistest = arcpy.Raster(setnull_eucd1) + if setnulldistest.maximum <= -1: + arcpy.AddMessage("No Trees were identified in region {0}".format(oid)) + # print "No Trees were identified in region: " + oid + if delete_intermediate_files == "true": + remove_rasters(out_subfolder_path) + remove_shapefiles(out_subfolder_path) + arcpy.env.workspace = "" # so we can successfully delete scratch_file_gdb_path + remove_filegdb(scratch_file_gdb_path) + if arcpy.Exists(raster_iso): + arcpy.Delete_management(raster_iso) + del desc, sr + return + + # Focal Statistics + focalst_setn1 = os.path.join(out_subfolder_path, "FocalStStN" + oid) + arcpy.gp.FocalStatistics_sa(setnull_eucd1, focalst_setn1, "Circle 3 CELL", "MAXIMUM", "DATA") + + # Minus + 
minus_focals1 = os.path.join(out_subfolder_path, "MinusFoclS" + oid)
+            arcpy.gp.Minus_sa(focalst_setn1, setnull_eucd1, minus_focals1)
+            arcpy.Delete_management(setnulldistest)  # deleted after done w/ setnull_eucd1 & setnulldistest
+
+            # Equal To
+            equalto_minu1 = os.path.join(out_subfolder_path, "EqualToMin" + oid)
+            arcpy.gp.EqualTo_sa(minus_focals1, "0", equalto_minu1)
+
+            # Set Null
+            setnull_equa1 = os.path.join(out_subfolder_path, "SetNulEqua" + oid)
+            arcpy.gp.SetNull_sa(equalto_minu1, eucdist_make1, setnull_equa1, "VALUE=0")
+
+            setnulleqtst = arcpy.Raster(setnull_equa1)
+            if setnulleqtst.maximum <= -1:
+                arcpy.AddMessage("No Trees were identified in region {0}".format(oid))
+                # print "No Trees were identified in region: " + oid
+                if delete_intermediate_files == "true":
+                    remove_rasters(out_subfolder_path)
+                    remove_shapefiles(out_subfolder_path)
+                    arcpy.env.workspace = ""  # so we can successfully delete scratch_file_gdb_path
+                    remove_filegdb(scratch_file_gdb_path)
+                if arcpy.Exists(raster_iso):
+                    arcpy.Delete_management(raster_iso)
+                del desc, sr
+                return
+
+            # Plus
+            # plus_int_set1 = os.path.join(out_subfolder_path, "Plus_Int_Set1" + oid)
+            flt_meter_elev_unit = float(elevation_meter_scalefactor)
+            plus_int_set1 = Raster(setnull_equa1) + flt_meter_elev_unit
+            # (flt_meter_elev_unit is a plain Python float, so no arcpy Delete is needed for it)
+
+            # Int
+            int_setnull_1 = os.path.join(out_subfolder_path, "IntSetNull" + oid)
+            arcpy.gp.Int_sa(plus_int_set1, int_setnull_1)
+            arcpy.Delete_management(plus_int_set1)
+            arcpy.Delete_management(setnulleqtst)  # deleted after done with setnull_equa1 & setnulleqtst
+
+            # Clean Tree Raster
+            cleantreeras1 = os.path.join(out_subfolder_path, "CleanTreeR" + oid)
+            cleanfast(int_setnull_1, cleantreeras1)
+
+            # Raster to Polygon
+            rastert_setnull1 = os.path.join(scratch_file_gdb_path, "RastrTSetN" + oid)
+            arcpy.RasterToPolygon_conversion(cleantreeras1, rastert_setnull1, "NO_SIMPLIFY", "Value")
+            arcpy.Delete_management(cleantreeras1)
+
+            # Feature To Point
+            rastert_setnull1_featuretopo = os.path.join(scratch_file_gdb_path,
+                                                        "RasterT_SetNull1_FeatureToPo" + oid)
+            arcpy.FeatureToPoint_management(rastert_setnull1, rastert_setnull1_featuretopo, "INSIDE")
+
+            if buildingsfound:
+                # Make Feature Layer
+                rastert_setnull1_featuretopolyr = "RasterT_SetNull1_FeatureToPoLyr" + oid
+                arcpy.MakeFeatureLayer_management(rastert_setnull1_featuretopo, rastert_setnull1_featuretopolyr,
+                                                  "", "", "")
+
+                # Copy Features
+                bdagg_copyfeatures = os.path.join(scratch_file_gdb_path, "bdAgg_CopyFeatures" + oid)
+                arcpy.CopyFeatures_management(buildings_output, bdagg_copyfeatures, "", "0", "0", "0")
+
+                # Select Layer By Location
+                arcpy.SelectLayerByLocation_management(rastert_setnull1_featuretopolyr, "WITHIN_A_DISTANCE",
+                                                       bdagg_copyfeatures, "3.3 Feet", "NEW_SELECTION",
+                                                       "NOT_INVERT")
+
+                # Select Layer By Attribute
+                arcpy.SelectLayerByAttribute_management(rastert_setnull1_featuretopolyr, "SWITCH_SELECTION", "")
+
+                # Buffer
+                arcpy.Buffer_analysis(rastert_setnull1_featuretopolyr, trees_output,
+                                      "GRIDCODE", "FULL", "ROUND", "NONE", "", "PLANAR")
+                # arcpy.AddMessage("Trees exported to: {0}".format(trees_output))
+                arcpy.Delete_management(rastert_setnull1_featuretopolyr)
+            else:
+                # Buffer
+                arcpy.Buffer_analysis(rastert_setnull1_featuretopo, trees_output, "GRIDCODE", "FULL", "ROUND",
+                                      "NONE", "", "PLANAR")
+                # arcpy.AddMessage("Trees exported to: {0}".format(trees_output))
+
+        if arcpy.Exists(raster_iso):
+            arcpy.Delete_management(raster_iso)
+        arcpy.Delete_management("in_memory")
+        # Clean up the 
rasters in out_subfolder_path + if delete_intermediate_files == "true": + remove_rasters(out_subfolder_path) + remove_shapefiles(out_subfolder_path) + arcpy.env.workspace = "" # so we can successfully delete scratch_file_gdb_path + remove_filegdb(scratch_file_gdb_path) + # arcpy.env.workspace = out_subfolder_path + del desc, sr + end = time.time() + delta = end - start + arcpy.AddMessage("Elapsed time for OID {0} is {1} seconds".format(oid, delta)) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + arcpy.CheckInExtension('3D') + arcpy.CheckInExtension('3D') + raise + + +def remove_shapefiles(directory): + # Delete all of the shapefiles in directory + arcpy.env.workspace = directory + featureclasses = arcpy.ListFeatureClasses() + for featureclass in featureclasses: + arcpy.Delete_management(featureclass) + return + + +def remove_rasters(directory): + # Delete all of the rasters in directory + arcpy.env.workspace = directory + rasters = arcpy.ListRasters("*", "ALL") + for raster in rasters: + arcpy.Delete_management(raster) + return + + +def remove_filegdb(filegdb): + # Delete all of the feature classes in filegdb, then delete filegdb + arcpy.env.workspace = filegdb + featureclasses = arcpy.ListFeatureClasses() + for featureclass in featureclasses: + arcpy.Delete_management(featureclass) + arcpy.env.workspace = "" + try: + arcpy.Delete_management(filegdb) + except: + arcpy.AddMessage("Unable to delete file GDB: {0}".format(filegdb)) + # This extra code is needed because of a bug which results in the deleted file gdb being + # changed into a folder (i.e. 
loses it's designation as a file gdb, but the folder still exists) + if arcpy.Exists(filegdb): + arcpy.Delete_management(filegdb) + return + + +def remove_tables(filegdb): + # Remove all of the tables in filegdb + arcpy.env.workspace = filegdb + tables = arcpy.ListTables() + for table in tables: + arcpy.Delete_management(table) + return + + +def delete(array, i, j, cellsize): + try: + # This cleans overlapping trees + ext = array[i][j] + for x in range(i - ext, i + ext): + for y in range(j - ext, j + ext): + if x > 0 and x < (array.shape[0] - 1) and y > 0 and y < (array.shape[1] - 1) and (x != i or y != j): + r = array[x][y] + if r > 0 and ext >= r: + if (float(r) / float(ext)) > 0.5: + distance = math.sqrt(math.pow(x - i, 2) + math.pow(y - j, 2)) * cellsize + if distance < (1.0 * float(ext)): # change threshold here + array[x][y] = 0 + return + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + raise + + +def cleanfast(inputraster, outputraster): + try: + # get raster information + desc = arcpy.Describe(inputraster) + sr = desc.spatialReference + point = arcpy.Point(desc.Extent.XMin, desc.Extent.YMin) + + # iterate through raster array + dist = arcpy.RasterToNumPyArray(in_raster=inputraster, nodata_to_value=0) + locs = dist.nonzero() + # part = int(len(locs[0]) / 10) + + for x in range(0, len(locs[0])): + # if (int(x) % part) == 0: + # arcpy.AddMessage(str(float(x) / float(part) * 10) + "% completed") + locx = locs[0][x] + locy = locs[1][x] + delete(dist, locx, locy, desc.meanCellWidth) + + # output + distraster = arcpy.NumPyArrayToRaster(in_array=dist, lower_left_corner=point, + x_cell_size=desc.meanCellWidth, + y_cell_size=desc.meanCellWidth, + value_to_nodata=0) + arcpy.DefineProjection_management(distraster, sr) + distraster.save(outputraster) + del desc, sr, point + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + raise + + +def zonalstats(vectorfc, rasterheights, resultstablegdb): + # Gather zonal statistics for the features in vectorfc + try: + start = time.time() + table_prefix = os.path.splitext(os.path.basename(vectorfc))[0] + zonaltable = os.path.join(resultstablegdb, table_prefix + r"_zonalTbl") + oid_fieldname = arcpy.Describe(vectorfc).OIDFieldName + arcpy.gp.ZonalStatisticsAsTable_sa(vectorfc, oid_fieldname, rasterheights, zonaltable, "DATA", "ALL") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from ZonalStatisticsAsTable_sa are: \n{0}\n".format(messages)) + arcpy.MakeFeatureLayer_management(vectorfc, "vectorfc_layer") + arcpy.JoinField_management("vectorfc_layer", oid_fieldname, zonaltable, join_field="OBJECTID", + fields="COUNT;AREA;MIN;MAX;RANGE;MEAN;STD;SUM") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from JoinField are: \n{0}\n".format(messages)) + arcpy.Delete_management("vectorfc_layer") + end = time.time() + delta = end - start + 
arcpy.AddMessage("Elapsed time for ZonalStats and JoinField is {0} seconds".format(delta)) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def create_md_from_raster(raster_folder, file_gdb, md_name, md_boundary, build_footprints, export_boundary): + arcpy.env.overwriteOutput = True + # Create and populate a mosaic dataset with elevation rasters (32-bit float) + try: + arcpy.env.workspace = raster_folder + if not arcpy.Exists(file_gdb): + arcpy.AddMessage("\n*** Exiting create_md_from_raster...File GDB Does not exist: {0} ***".format(file_gdb)) + return + + if not os.path.exists(raster_folder): + arcpy.AddMessage("\n*** Exiting create_md_from_raster..." + "Raster Folder Does not exist: {0} ***".format(raster_folder)) + return + + full_md_path = os.path.join(file_gdb, md_name) + arcpy.AddMessage("\nMD to be created: {0}".format(full_md_path)) + + # Don't re-create the Mosaic Dataset if it already exists + if not arcpy.Exists(full_md_path): + # Get the spatial reference string of the first raster (to use in creation of MD) + rasters = arcpy.ListRasters("*", "All") + # Make sure there's at least one raster in raster_folder + # If not, then exit the script + # If so, get the raster's Spatial Reference + if len(rasters) > 0: + desc_firstraster = arcpy.Describe(rasters[0]) + spatref_firstraster = desc_firstraster.SpatialReference.exportToString() + arcpy.AddMessage("Spatial ref of 1st raster in {0} is: \n{1}\n".format(raster_folder, + spatref_firstraster)) + arcpy.AddMessage("Number of rasters in {0}: {1}".format(raster_folder, len(rasters))) + else: + arcpy.AddMessage("\n*** Exiting create_md_from_raster..." 
+ "No rasters found in {0} ***".format(raster_folder)) + return + # Create a Mosaic Dataset + arcpy.CreateMosaicDataset_management(file_gdb, md_name, + coordinate_system=spatref_firstraster, + num_bands="1", pixel_type="32_BIT_FLOAT", product_definition="NONE", + product_band_definitions="#") + del desc_firstraster, spatref_firstraster, rasters + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from CreateMosaicDataset are: \n{0}\n".format(messages)) + + # set the data_type to ELEVATION + arcpy.SetRasterProperties_management(full_md_path, data_type="ELEVATION", statistics="", + stats_file="#", nodata="") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from SetRasterProperties are: \n{0}\n".format(messages)) + + # Add rasters from Raster folder to MD + arcpy.AddRastersToMosaicDataset_management(full_md_path, raster_type="Raster Dataset", + input_path=raster_folder, + update_cellsize_ranges="UPDATE_CELL_SIZES", + update_boundary="UPDATE_BOUNDARY", + update_overviews="NO_OVERVIEWS", maximum_pyramid_levels="", + maximum_cell_size="0", + minimum_dimension="1500", spatial_reference="", filter="", + sub_folder="SUBFOLDERS", + duplicate_items_action="ALLOW_DUPLICATES", + build_pyramids="NO_PYRAMIDS", + calculate_statistics="NO_STATISTICS", + build_thumbnails="NO_THUMBNAILS", + operation_description="#", + force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from AddRastersToMosaicDataset are: \n{0}\n".format(messages)) + + # re-calculate cell size ranges so export will work at various scales + arcpy.CalculateCellSizeRanges_management(full_md_path, where_clause="", do_compute_min="MIN_CELL_SIZES", + do_compute_max="MAX_CELL_SIZES", max_range_factor="100", + cell_size_tolerance_factor="0.8", update_missing_only="UPDATE_ALL") + + if build_footprints == "true": + arcpy.BuildFootprints_management(full_md_path, "", "RADIOMETRY", "-10", "4294967295", "80", "0", + "NO_MAINTAIN_EDGES", "SKIP_DERIVED_IMAGES", "UPDATE_BOUNDARY", + "2000", "100", "NONE", "", "20", "0.05") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from BuildFootprints are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("\n*** Mosaic Dataset already exists: {0} ***".format(full_md_path)) + + # default_compression_type="LERC" + # clip_to_footprints="NOT_CLIP" + # data_source_type="ELEVATION" + # rows_maximum_imagesize="15000" + arcpy.SetMosaicDatasetProperties_management(full_md_path, "15000", "15000", "None;JPEG;LZ77;LERC", "LERC", + "75", "0.01", "BILINEAR", "NOT_CLIP", + "FOOTPRINTS_MAY_CONTAIN_NODATA", + "NOT_CLIP", "NOT_APPLY", "#", "NONE", + "NorthWest;Center;LockRaster;ByAttribute;Nadir;Viewpoint;" + "Seamline;None", + "NorthWest", "", "", "ASCENDING", "FIRST", "10", "600", "300", "20", + "0.8", "", "BASIC", + "Name;MinPS;MaxPS;LowPS;HighPS;Tag;GroupName;ProductName;" + "CenterX;CenterY;ZOrder;Shape_Length;Shape_Area", "DISABLED", "", + "", "", "", "20", "1000", "ELEVATION", "1", "None", "None") + + # Get a record count just to be sure we found raster products to ingest + result = arcpy.GetCount_management(full_md_path) + count_rasters = int(result.getOutput(0)) + + if count_rasters == 0: + arcpy.AddMessage("\n*** Exiting: {0} Mosaic Dataset has no raster products ***".format(full_md_path)) + sys.exit() + else: + arcpy.AddMessage("{0} has {1} raster product(s).".format(full_md_path, count_rasters)) + + # boundary = os.path.join(file_gdb, md_boundary) + if export_boundary == "true": + if not 
arcpy.Exists(md_boundary): + # Export Boundary to the file GDB which holds the final results + arcpy.ExportMosaicDatasetGeometry_management(full_md_path, md_boundary, "", "BOUNDARY") + messages = arcpy.GetMessages() + arcpy.AddMessage("Results output from ExportMosaicDatasetGeometry are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("Exported boundary already exists: {}".format(md_boundary)) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def create_md_from_las(lasd, raster_type_file, file_gdb, md_name, md_boundary, build_footprints, + export_boundary): + + arcpy.env.overwriteOutput = True + # Create and populate a mosaic dataset with a LAS dataset (32-bit float) + try: + # arcpy.env.workspace = raster_folder + if not arcpy.Exists(file_gdb): + arcpy.AddMessage("\n*** Exiting create_md_from_las...File GDB Does not exist: {0} ***".format(file_gdb)) + return + if not os.path.exists(lasd): + arcpy.AddMessage("\n*** Exiting create_md_from_las...LAS dataset Does not exist: {0} ***".format(lasd)) + return + + full_md_path = os.path.join(file_gdb, md_name) + arcpy.AddMessage("\nMD to be created: {0}".format(full_md_path)) + # md_boundary = full_md_path + boundary_append + + # Don't re-create the Mosaic Dataset if it already exists + if not arcpy.Exists(full_md_path): + # Get the spatial reference string of the LAS Dataset (to use in creation of MD) + desc_lasd = arcpy.Describe(lasd) + spat_ref_lasd = desc_lasd.SpatialReference + # Create a Mosaic Dataset + arcpy.CreateMosaicDataset_management(file_gdb, md_name, + coordinate_system=spat_ref_lasd, + num_bands="1", pixel_type="32_BIT_FLOAT", product_definition="NONE", + product_band_definitions="#") + del desc_lasd, spat_ref_lasd + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from CreateMosaicDataset are: \n{0}\n".format(messages)) + + # set the NoData value to -3.40282346639e+038 + arcpy.SetRasterProperties_management(full_md_path, data_type="ELEVATION", statistics="", + stats_file="#", nodata="") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from SetRasterProperties are: \n{0}\n".format(messages)) + + # Add rasters from Raster folder to MD + arcpy.AddRastersToMosaicDataset_management(full_md_path, raster_type=raster_type_file, + input_path=lasd, + update_cellsize_ranges="UPDATE_CELL_SIZES", + update_boundary="UPDATE_BOUNDARY", + update_overviews="NO_OVERVIEWS", maximum_pyramid_levels="", + maximum_cell_size="0", + minimum_dimension="1500", spatial_reference="", filter="#", + sub_folder="SUBFOLDERS", + duplicate_items_action="ALLOW_DUPLICATES", + build_pyramids="NO_PYRAMIDS", + calculate_statistics="NO_STATISTICS", + build_thumbnails="NO_THUMBNAILS", + operation_description="#", + force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from AddRastersToMosaicDataset are: \n{0}\n".format(messages)) + + if build_footprints == "true": + arcpy.BuildFootprints_management(full_md_path, "", "RADIOMETRY", "-100", "4294967295", "300", "0", + "MAINTAIN_EDGES", "SKIP_DERIVED_IMAGES", "UPDATE_BOUNDARY", + "2000", "20", 
"NONE", "", "20", "0.05") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from BuildFootprints are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("\n*** Mosaic Dataset already exists: {0} ***".format(full_md_path)) + + # Set the desired MD properties (non-default parameters are listed below): + # default_compression_type="LERC" + # clip_to_boundary="CLIP" + # data_source_type="ELEVATION" + # rows_maximum_imagesize="25000" + # columns_maximum_imagesize="25000" + arcpy.SetMosaicDatasetProperties_management(full_md_path, rows_maximum_imagesize="25000", + columns_maximum_imagesize="25000", + allowed_compressions="None;JPEG;LZ77;LERC", + default_compression_type="LERC", JPEG_quality="75", + LERC_Tolerance="0.01", resampling_type="BILINEAR", + clip_to_footprints="NOT_CLIP", + footprints_may_contain_nodata="FOOTPRINTS_MAY_CONTAIN_NODATA", + clip_to_boundary="CLIP", + color_correction="NOT_APPLY", + allowed_mensuration_capabilities="Basic", + default_mensuration_capabilities="Basic", + allowed_mosaic_methods="NorthWest;Center;LockRaster;ByAttribute;" + "Nadir;Viewpoint;Seamline;None", + default_mosaic_method="NorthWest", order_field="", order_base="#", + sorting_order="ASCENDING", mosaic_operator="FIRST", blend_width="0", + view_point_x="600", view_point_y="300", max_num_per_mosaic="20", + cell_size_tolerance="0.8", cell_size="#", metadata_level="BASIC", + transmission_fields="", + use_time="DISABLED", start_time_field="", end_time_field="#", + time_format="#", geographic_transform="#", + max_num_of_download_items="20", max_num_of_records_returned="1000", + data_source_type="ELEVATION", minimum_pixel_contribution="1") + + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from SetMosaicDatasetProperties are: \n{0}\n".format(messages)) + + # Get a record count just to be sure we found raster products to ingest + result = arcpy.GetCount_management(full_md_path) + count_lasd = int(result.getOutput(0)) + + if count_lasd == 0: + arcpy.AddMessage("\n*** Exiting: {0} Mosaic Dataset has no LASD contents ***".format(full_md_path)) + sys.exit() + else: + arcpy.AddMessage("{0} has {1} LASD(s).".format(full_md_path, count_lasd)) + + # boundary = os.path.join(file_gdb, md_boundary) + if export_boundary == "true": + if not arcpy.Exists(md_boundary): + # Export Boundary to the file GDB which holds the final results + arcpy.ExportMosaicDatasetGeometry_management(full_md_path, md_boundary, "", "BOUNDARY") + messages = arcpy.GetMessages() + arcpy.AddMessage("Results output from ExportMosaicDatasetGeometry are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("Exported boundary already exists: {}".format(md_boundary)) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def regularize_some_buildings(building, bd_to_reg_shp, rastert_mask_mb1_symdiff_shp, expression, cellsize, scratch_ws): + # This routine will regularize smaller buildings that are oriented North-South + # The remainder of the buildings will be placed in bd_to_reg_shp for regularization by the 'Regularize Building + # Footprint' gp tool in ArcGIS Pro + try: + loc = 
building.rfind(".gdb") + bdfilename = building[loc + 5:] + + buildings_lyr = "buildings" + # ratio_vr = Buildings_output__2_ + # ratio_occ = ratio_vr + # ori_vex = ratio_occ + bdmbrv_shp = scratch_ws + "\\" + "bdMBRv.shp" + # MBR_vex = bdMBRv_shp + # all = ori_vex + bdmbrr_shp = scratch_ws + "\\" + "bdMBRr.shp" + # MBR = bdMBRr_shp + # building1 = all + # building2 = building1 + bd_mbr_output_shp = scratch_ws + "\\" + "bd_MBR_output.shp" + bd_mbr_final_shp = scratch_ws + "\\" + "bd_MBR_final.shp" + bd_mbr_final_polygontoraster_tif = scratch_ws + "\\" + "bd_MBR_final_PolygonToRaster.tif" + # Input_raster_or_constant_value_2__2_ = "-1" + lesstha_buil1 = scratch_ws + "\\" + "LessTha_buil1" + # Input_raster_or_constant_value_2__3_ = "1" + mask_mbr = scratch_ws + "\\" + "mask_mbr" + rastert_mask_mb1_shp = scratch_ws + "\\" + "RasterT_mask_mb1.shp" + buildings_polygontoraster_tif = scratch_ws + "\\" + "buildings_PolygonToRaster.tif" + # Input_raster_or_constant_value_2 = "-1" + greater_bd_m1 = scratch_ws + "\\" + "Greater_bd_M1" + isnull_great1 = scratch_ws + "\\" + "IsNull_Great1" + plus_isnull_1 = scratch_ws + "\\" + "Plus_IsNull_1" + # Input_true_raster_or_constant_value = "1" + # Input_false_raster_or_constant_value__2_ = "0" + con_plus_isn1 = scratch_ws + "\\" + "Con_Plus_IsN1" + isnull_con_p1 = scratch_ws + "\\" + "IsNull_Con_P1" + # Input_false_raster_or_constant_value__3_ = "0" + con_plus_isn2 = scratch_ws + "\\" + "Con_Plus_IsN2" + shrink_plus_1 = scratch_ws + "\\" + "Shrink_Plus_1" + times_shrink1 = scratch_ws + "\\" + "Times_Shrink1" + # Input_false_raster_or_constant_value = "0" + setnull_time1 = scratch_ws + "\\" + "SetNull_Time1" + rastert_setnull1_shp = scratch_ws + "\\" + "RasterT_SetNull1.shp" + rastert_setnull1_minimumboun_shp = scratch_ws + "\\" + "RasterT_SetNull1_MinimumBoun.shp" + mbr_sel_shp = scratch_ws + "\\" + "mbr_sel.shp" + # RasterT_SetNull1_MinimumBoun = "mbr_sel_Layer" + mbr_sel_layer = "mbr_sel_Layer" + # RasterT_SetNull1_MinimumBoun__2_ = RasterT_SetNull1_MinimumBoun + # RasterT_SetNull1_MinimumBoun__3_ = RasterT_SetNull1_MinimumBoun__2_ + + arcpy.MakeFeatureLayer_management(building, buildings_lyr) + # Process: Add Geometry Attributes + arcpy.AddGeometryAttributes_management(building, "AREA", "", "SQUARE_METERS", "") + + # Process: Add Field + if len(arcpy.ListFields(buildings_lyr, "ratio_vr")) < 1: + arcpy.AddField_management(buildings_lyr, "ratio_vr", "DOUBLE", "", "", "", "", + "NULLABLE", "NON_REQUIRED", "") + + # Process: Add Field + if len(arcpy.ListFields(buildings_lyr, "ratio_occ")) < 1: + arcpy.AddField_management(buildings_lyr, "ratio_occ", "DOUBLE", "", "", "", "", + "NULLABLE", "NON_REQUIRED", "") + + # Process: Minimum Bounding Geometry + arcpy.MinimumBoundingGeometry_management(building, bdmbrv_shp, "CONVEX_HULL", "NONE", "", "NO_MBG_FIELDS") + + # Process: Add Geometry Attributes + arcpy.AddGeometryAttributes_management(bdmbrv_shp, "AREA", "", "SQUARE_METERS", "") + + # Process: Add Join + arcpy.AddJoin_management(buildings_lyr, "OBJECTID", bdmbrv_shp, "ORIG_FID", "KEEP_ALL") + + # Process: Minimum Bounding Geometry + arcpy.MinimumBoundingGeometry_management(building, bdmbrr_shp, "ENVELOPE", "NONE", "", "NO_MBG_FIELDS") + + # Process: Add Geometry Attributes + arcpy.AddGeometryAttributes_management(bdmbrr_shp, "AREA", "", "SQUARE_METERS", "") + + # Process: Add Join + arcpy.AddJoin_management(buildings_lyr, "bdMBRv.ORIG_FID", bdmbrr_shp, "ORIG_FID", "KEEP_ALL") + + # Process: Calculate Field + arcpy.CalculateField_management(buildings_lyr, 
"ratio_vr", "!bdMBRv.POLY_AREA! / !bdMBRr.POLY_AREA!", + "PYTHON_9.3", "") + + # Process: Calculate Field + arcpy.CalculateField_management(buildings_lyr, "ratio_occ", + "!" + bdfilename + ".POLY_AREA! / !bdMBRr.POLY_AREA!", "PYTHON_9.3", "") + + # Process: Select + arcpy.Select_analysis(buildings_lyr, bd_to_reg_shp, + "\"" + bdfilename + ".POLY_AREA\" >= 500 OR \"" + bdfilename + ".ratio_vr\"<0.70") + + # Process: Select + arcpy.Select_analysis(buildings_lyr, bd_mbr_output_shp, + "\"" + bdfilename + ".POLY_AREA\" <= 500 AND \"" + bdfilename + ".ratio_vr\">=0.70") + + # Process: Minimum Bounding Geometry + arcpy.MinimumBoundingGeometry_management(bd_mbr_output_shp, bd_mbr_final_shp, "ENVELOPE", "NONE", "", + "NO_MBG_FIELDS") + + # Process: Polygon to Raster + arcpy.PolygonToRaster_conversion(bd_mbr_final_shp, "bdMBRv_rat", bd_mbr_final_polygontoraster_tif, + "CELL_CENTER", "NONE", cellsize) + + # Process: Less Than + arcpy.gp.LessThan_sa(bd_mbr_final_polygontoraster_tif, "-1", lesstha_buil1) + + # Process: Plus + # arcpy.gp.Plus_sa(LessTha_buil1, "1", mask_mbr) + lesstha_buil1_ras = Raster(lesstha_buil1) + int_raster_one = CreateConstantRaster(1, "INTEGER", lesstha_buil1_ras.meanCellWidth, lesstha_buil1_ras.extent) + arcpy.gp.Plus_sa(lesstha_buil1, int_raster_one, mask_mbr) + + # Process: Raster to Polygon + arcpy.RasterToPolygon_conversion(mask_mbr, rastert_mask_mb1_shp, "NO_SIMPLIFY", "VALUE") + + # Process: Extra Add Field (since building is now a feature class instead of a shapefile) + if len(arcpy.ListFields(building, "Id")) < 1: + arcpy.AddField_management(building, "Id", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") + arcpy.CalculateField_management(building, "Id", "0", "PYTHON_9.3", "") + + # Process: Polygon to Raster + arcpy.PolygonToRaster_conversion(building, "Id", buildings_polygontoraster_tif, "CELL_CENTER", "NONE", + cellsize) + + # Process: Greater Than + arcpy.gp.GreaterThan_sa(buildings_polygontoraster_tif, "-1", greater_bd_m1) + + # Process: Is Null + arcpy.gp.IsNull_sa(greater_bd_m1, isnull_great1) + + # Process: Plus + arcpy.gp.Plus_sa(isnull_great1, lesstha_buil1, plus_isnull_1) + arcpy.Delete_management(lesstha_buil1_ras) # Delete after we're done with lesstha_buil1_ras and lesstha_buil1 + + # Process: Con + arcpy.gp.Con_sa(plus_isnull_1, int_raster_one, con_plus_isn1, "0", "VALUE=0") + arcpy.Delete_management(int_raster_one) + + # Process: Is Null + arcpy.gp.IsNull_sa(con_plus_isn1, isnull_con_p1) + + # Process: Con + arcpy.gp.Con_sa(isnull_con_p1, con_plus_isn1, con_plus_isn2, "0", "VALUE=0") + + # Process: Shrink + arcpy.gp.Shrink_sa(con_plus_isn2, shrink_plus_1, "3", "0") + + # Process: Times + arcpy.gp.Times_sa(shrink_plus_1, mask_mbr, times_shrink1) + + # Process: Set Null + arcpy.gp.SetNull_sa(times_shrink1, "0", setnull_time1, "VALUE=1") + + # Process: Raster to Polygon + arcpy.RasterToPolygon_conversion(setnull_time1, rastert_setnull1_shp, "NO_SIMPLIFY", "VALUE") + + # Process: Minimum Bounding Geometry + arcpy.MinimumBoundingGeometry_management(rastert_setnull1_shp, rastert_setnull1_minimumboun_shp, "ENVELOPE", + "NONE", "", "MBG_FIELDS") + + # Process: Select + arcpy.Select_analysis(rastert_setnull1_minimumboun_shp, mbr_sel_shp, expression) + + # Process: Make Feature Layer + arcpy.MakeFeatureLayer_management(mbr_sel_shp, mbr_sel_layer, "", "", "") + + # Process: Select Layer By Location + arcpy.SelectLayerByLocation_management(mbr_sel_layer, "COMPLETELY_WITHIN", rastert_mask_mb1_shp, "", + "NEW_SELECTION", "NOT_INVERT") + + # Process: Select 
Layer By Attribute + arcpy.SelectLayerByAttribute_management(mbr_sel_layer, "SWITCH_SELECTION", "") + + # Process: Symmetrical Difference + arcpy.SymDiff_analysis(rastert_mask_mb1_shp, mbr_sel_layer, rastert_mask_mb1_symdiff_shp, "ALL", "") + + arcpy.Delete_management(buildings_lyr) + arcpy.Delete_management(mbr_sel_layer) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def set_pixel_size(raster_type_file, las_point_spacing): + # The template raster type file has PUT_PIXEL_SIZE_HERE in place of + # the pixel size. This is because each set of LAS has a unique point + # spacing. This module will set pixel size in the art.xml so the + # resulting Mosaic Dataset will have an appropriate pixel size. + # Note: the Mosaic Dataset pixel size can't be too small or footprints aren't + # generated properly. + try: + # Get a good number for pixel size of the MD. It should be a minimum of 3. + md_pixel_size = max(3.0, round(2 * las_point_spacing + 0.5)) + arcpy.AddMessage("Mosaic Dataset pixel size will be: {0}".format(md_pixel_size)) + search_text = r"PUT_PIXEL_SIZE_HERE" + # Read in the file + filedata = None + with open(raster_type_file, 'r') as file: + filedata = file.read() + # Replace the target string + filedata = filedata.replace(search_text, str(md_pixel_size)) + # Write the file out again + with open(raster_type_file, 'w') as file: + file.write(filedata) + return + + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def create_lasd(las_path, lasd): + try: + arcpy.env.workspace = las_path + lasfiles = arcpy.ListFiles("*.las") + #arcpy.AddMessage("Entire LAS list in {0} is: \n{1}\n".format(las_path, lasfiles)) + if len(lasfiles) > 0: + arcpy.AddMessage("Creating LAS Dataset: {0}".format(lasd)) + # Create a LAS Dataset and add the LAS files in las_path + # Compute stats (lasx) if they don't already exist + arcpy.CreateLasDataset_management(lasfiles, lasd, folder_recursion="NO_RECURSION", + in_surface_constraints="#", spatial_reference="#", + compute_stats="COMPUTE_STATS", relative_paths="ABSOLUTE_PATHS") + # messages = arcpy.GetMessages() + # arcpy.AddMessage("\nResults output from CreateLasDataset are: \n{0}\n".format(messages)) + del lasfiles + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def get_las_point_spacing(las_path, lasd, output_path, output_gdb_name): + # This module determines the point spacing of the 
LAS files, and can
+    # be used to determine a reasonable raster product pixel size.
+    try:
+        if arcpy.Exists(lasd):
+            arcpy.AddMessage("Calculating point spacing of LASD: {0}".format(lasd))
+            lasdatasetstatstext = os.path.join(output_path, "lasDatasetStatsText.txt")
+            if not arcpy.Exists(lasdatasetstatstext):
+                # (nothing to delete first; the stats file doesn't exist yet)
+                arcpy.LasDatasetStatistics_management(lasd, "true", lasdatasetstatstext, "LAS_FILES", "COMMA",
+                                                      "DECIMAL_POINT")
+            else:
+                arcpy.AddMessage("lasDatasetStatsText already exists: {0}".format(lasdatasetstatstext))
+
+            ptFileInfoFC = os.path.join(output_gdb_name, 'ptFileInfoFC')
+            if not arcpy.Exists(ptFileInfoFC):
+                # Note: This step is optional, so if it takes too long it's safe to remove it
+                # get lasd sr
+                descLASD = arcpy.Describe(lasd)
+                SpatRefLASD = descLASD.SpatialReference
+                # SpatRefStringLASD = SpatRefLASD.SpatialReference.exportToString()
+                arcpy.CheckOutExtension("3D")
+                arcpy.PointFileInformation_3d(las_path, ptFileInfoFC, "LAS", "las", "", "NO_RECURSION", "NO_EXTRUSION",
+                                              "DECIMAL_POINT", "NO_SUMMARIZE", "LAS_SPACING")
+                arcpy.CheckInExtension("3D")
+                messages = arcpy.GetMessages()
+                arcpy.AddMessage("\nResults output from PointFileInformation_3d are: \n{0}\n".format(messages))
+                del descLASD, SpatRefLASD
+            else:
+                arcpy.AddMessage("ptFileInfoFC already exists: {0}".format(ptFileInfoFC))
+
+            rows = arcpy.SearchCursor(ptFileInfoFC,
+                                      fields="FileName; Pt_Spacing; Z_Min; Z_Max",
+                                      sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max")
+            # Iterate through the rows in the cursor and store the
+            # "FileName; Pt_Spacing; Z_Min; Z_Max"
+            ptFileInfoList = []
+            PtSpacing = []
+            # Z Min & Z Max added for auto-detecting LiDAR tiles with potential artifacts in the future.
+            for row in rows:
+                formattedfields = ("{0}, {1}, {2}, {3}".format(
+                    row.getValue("FileName"),
+                    row.getValue("Pt_Spacing"),
+                    row.getValue("Z_Min"),
+                    row.getValue("Z_Max")))
+                ptFileInfoList.append(formattedfields)
+                ptspacinglist = float("{0}".format(row.getValue("Pt_Spacing")))
+                PtSpacing.append(ptspacinglist)
+            del row
+            arcpy.AddMessage("ptFileInfoList: {0}".format(str(ptFileInfoList)))
+            arcpy.AddMessage("ptSpacing: {0}".format(str(PtSpacing)))
+            avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing))
+            arcpy.AddMessage("returning avgPtSpacing of: {0}".format(str(avgPtSpacing)))
+        else:
+            arcpy.AddMessage("\nExiting get_las_point_spacing, since no LASD found: \n{0}\n".format(lasd))
+            return ""
+        del rows
+        return avgPtSpacing
+
+    except arcpy.ExecuteError:
+        print(arcpy.GetMessages())
+    except Exception:
+        # Return any Python specific errors and any error returned by the geoprocessor
+        tb = sys.exc_info()[2]
+        tbinfo = traceback.format_tb(tb)[0]
+        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
+                str(sys.exc_type) + ": " + str(sys.exc_value) + "\n"
+        arcpy.AddError(pymsg)
+
+        msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n"
+        arcpy.AddError(msgs)
+
+
+def get_las_boundary(las_path, lasd, md_name, las_raster_type, file_gdb, surface_constraint_fc):
+    try:
+        # Ensure the LAS Raster type file exists
+        if not os.path.exists(las_raster_type):
+            arcpy.AddError("\nExiting: LAS Raster type file doesn't exist {0}".format(las_raster_type))
+            return
+        # Ensure the file gdb exists
+        if not os.path.exists(file_gdb):
+            arcpy.AddError("\nExiting: Geodatabase (in which to place boundary) doesn't exist {0}".format(file_gdb))
+            return
+        # Ensure the lasd exists
+        if not arcpy.Exists(lasd):
+            arcpy.AddError("\nExiting: LAS Dataset (from which 
+            return
+        desc_lasd = arcpy.Describe(lasd)
+        spat_ref_lasd = desc_lasd.SpatialReference
+        spat_ref_lasd_str = desc_lasd.SpatialReference.exportToString()
+        if spat_ref_lasd.PCSCode == 0:
+            arcpy.AddWarning("\n*** NOTE: One or more LAS files has a PCSCode of 0. ***")
+            arcpy.AddWarning("\n*** PCSCode = 0 indicates a non-standard datum or unit of measure. ***")
+
+        arcpy.AddMessage("\nSpatial reference of LASD is: \n\n{0}\n".format(spat_ref_lasd_str))
+        # arcpy.AddMessage("Length of SR string is {0}:".format(len(SpatRefStringFirstLAS)))
+        arcpy.AddMessage("Spatial Reference name of LAS Dataset: {0}".format(spat_ref_lasd.name))
+        arcpy.AddMessage("Spatial Reference XY Units of LAS Dataset: {0}".format(spat_ref_lasd.linearUnitName))
+
+        loc = md_name.rfind(".gdb")
+        # arcpy.AddMessage("loc = {0}".format(loc))
+        MD_ShortName = md_name[loc+5:]
+        arcpy.AddMessage("Temp MD Short Name: {0}".format(MD_ShortName))
+
+        # Create a MD in same SR as LAS Dataset
+        arcpy.CreateMosaicDataset_management(file_gdb, MD_ShortName,
+                                             coordinate_system=spat_ref_lasd, num_bands="1", pixel_type="32_BIT_FLOAT",
+                                             product_definition="NONE", product_band_definitions="#")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from CreateMosaicDataset are: \n{0}\n".format(messages))
+
+        # Add the LAS files to the Mosaic Dataset and don't update the boundary yet.
+        # The cell size of the Mosaic Dataset is determined by the art.xml file chosen by the user.
+        arcpy.AddRastersToMosaicDataset_management(md_name, las_raster_type, las_path,
+                                                   update_cellsize_ranges="UPDATE_CELL_SIZES",
+                                                   update_boundary="NO_BOUNDARY", update_overviews="NO_OVERVIEWS",
+                                                   maximum_pyramid_levels="#", maximum_cell_size="0",
+                                                   minimum_dimension="1500", spatial_reference=spat_ref_lasd_str,
+                                                   filter="*.las", sub_folder="NO_SUBFOLDERS",
+                                                   duplicate_items_action="ALLOW_DUPLICATES",
+                                                   build_pyramids="NO_PYRAMIDS", calculate_statistics="NO_STATISTICS",
+                                                   build_thumbnails="NO_THUMBNAILS", operation_description="#",
+                                                   force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE")
+
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from AddRastersToMosaicDataset are: \n{0}\n".format(messages))
+
+        # Get a count of the number of LAS ingested
+        result = arcpy.GetCount_management(md_name)
+        countRowsWithLAS = int(result.getOutput(0))
+        if countRowsWithLAS == 0:
+            arcpy.AddMessage("\nNo LAS rows were ingested into {0}".format(md_name))
+            return
+        else:
+            arcpy.AddMessage("{0} has {1} LAS row(s).".format(md_name, countRowsWithLAS))
+
+        # Build Footprints with these non-standard parameters:
+        #   min_region_size="20"
+        #   approx_num_vertices="2000"
+        # Update the Boundary
+        arcpy.BuildFootprints_management(md_name, where_clause="#", reset_footprint="RADIOMETRY", min_data_value="-100",
+                                         max_data_value="4294967295", approx_num_vertices="2000", shrink_distance="0",
+                                         maintain_edges="MAINTAIN_EDGES", skip_derived_images="SKIP_DERIVED_IMAGES",
+                                         update_boundary="UPDATE_BOUNDARY", request_size="2000", min_region_size="20",
+                                         simplification_method="NONE", edge_tolerance="#", max_sliver_size="20",
+                                         min_thinness_ratio="0.05")
+
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("Results output from BuildFootprints are: \n{0}\n".format(messages))
+
+        # The boundary will potentially have lots of vertices, so simplify the feature after exporting.
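+        # POINT_REMOVE simplification (used below with a 5 Meters tolerance) keeps
+        # only the vertices needed to stay within the tolerance of the original
+        # outline, so the radiometry-derived boundary shrinks from a very dense
+        # vertex set to a compact polygon while keeping its overall shape.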
+ boundary_detailed = surface_constraint_fc + r"_detail" + arcpy.ExportMosaicDatasetGeometry_management(md_name, boundary_detailed, where_clause="#", + geometry_type="BOUNDARY") + messages = arcpy.GetMessages() + arcpy.AddMessage("Results output from ExportMosaicDatasetGeometry are: \n{0}\n".format(messages)) + + arcpy.SimplifyPolygon_cartography(boundary_detailed, surface_constraint_fc, + algorithm="POINT_REMOVE", tolerance="5 Meters", minimum_area="0 SquareMeters", + error_option="RESOLVE_ERRORS", collapsed_point_option="KEEP_COLLAPSED_POINTS") + messages = arcpy.GetMessages() + arcpy.AddMessage("Results output from SimplifyPolygon are: \n{0}\n".format(messages)) + del desc_lasd, spat_ref_lasd + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def main(first_parameter, second_parameter, z_unit, featureextraction, out_folder_path, processing_unit_length, + use_pos_terrain_method, delete_intermediate_files, regularize_buildings): + try: + start = time.time() + # resourceLogger.log() + executed_from = sys.executable.upper() + # Check out Spatial Analyst license + arcpy.CheckOutExtension("Spatial") + + # This python code can be invoked by two different gp script tools, depending upon + # whether the input is LAS or raster. Therefore the first two parameters can either be + # LAS folder and DSM Creation Method or DSM (raster) Folder and DTM (raster) Folder. + # + # If the second parameter is DSM Creation Method ("ALL Returns" or "Last Returns"), then + # the first parameter is assumed to be the LAS folder. Otherwise the first and second + # parameters are assumed to be DSM folder and DTM folder. 
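+        # Illustrative mapping of the two call signatures onto the branch below
+        # (paths are hypothetical, for illustration only):
+        #   main(r"D:\proj\las", "ALL RETURNS", ...)   -> LAS workflow
+        #   main(r"D:\proj\dsm", r"D:\proj\dtm", ...)  -> DSM/DTM raster workflow
+        # The dispatch keys solely on whether second_parameter equals one of the
+        # two DSM creation keywords.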
+ + if second_parameter == "ALL RETURNS" or second_parameter == "LAST RETURNS": + # Input is LAS + las_path = first_parameter + if not os.path.exists(las_path): + arcpy.AddMessage("*** Exiting...the LAS Path does not exist: {0} ***".format(las_path)) + sys.exit() + else: + # Input is DSM & DTM Rasters + dsm_path = first_parameter + dtm_path = second_parameter + if not os.path.exists(dsm_path): + arcpy.AddMessage("*** Exiting...the DSM Path does not exist: {0} ***".format(dsm_path)) + sys.exit() + if not os.path.exists(dtm_path): + arcpy.AddMessage("*** Exiting...the DTM Path does not exist: {0} ***".format(dtm_path)) + sys.exit() + + if z_unit == "METER": + elevation_meter_scalefactor = "1.0" + elif z_unit == "FOOT": + elevation_meter_scalefactor = "3.2808" # Feet per Meter + else: + elevation_meter_scalefactor = "1.0" + elevation_meter_scalefactor_str = str(elevation_meter_scalefactor) + + if os.path.exists(out_folder_path): + arcpy.AddMessage("Results Folder exists: {0}".format(out_folder_path)) + else: + arcpy.AddMessage("Creating Results Folder: {0}".format(out_folder_path)) + os.makedirs(out_folder_path) + + height_path = os.path.join(out_folder_path, r"HeightRasters") + # If the path doesn't exist, then create the folder + if not os.path.isdir(height_path): + arcpy.AddMessage("Creating folder for Height Rasters: {0}".format(height_path)) + os.makedirs(height_path) + + # Create a results file gdb to store trees and buildings results + results_gdb_name = r"Results.gdb" + # Entire path of the file gdb + results_file_gdb_path = os.path.join(out_folder_path, results_gdb_name) + # If the file gdb doesn't exist, then create it + if not os.path.exists(results_file_gdb_path): + arcpy.AddMessage("Creating Results File GDB: {0}".format(results_file_gdb_path)) + arcpy.CreateFileGDB_management(out_folder_path, results_gdb_name, out_version="CURRENT") + else: + arcpy.AddMessage("\nResults File GDB already exists: {0}".format(results_file_gdb_path)) + + # feature classes to be created + fishnet = os.path.join(results_file_gdb_path, r"aFishnet") + tmpfishnet = os.path.join(results_file_gdb_path, r"tmpFishnet") + all_trees_final = os.path.join(results_file_gdb_path, r"all_trees_final") + all_buildings_final = os.path.join(results_file_gdb_path, r"all_buildings_final") + + # Exit process if all_buildings_final (and all_trees_final, if requested) already exist + if arcpy.Exists(all_buildings_final): + if "TREES" in featureextraction.upper(): + if arcpy.Exists(all_trees_final): + arcpy.AddMessage("\nExiting process...Buildings and Trees output products already " + "exist: \n {0} \n {1}".format(all_buildings_final, all_trees_final)) + sys.exit() + else: + arcpy.AddMessage("\nExiting process...Buildings output product already " + "exists: \n {0}".format(all_buildings_final)) + sys.exit() + + # the following two fc's are only created if the user checks on + # "Regularize north-south oriented buildings" + buildings_to_reg = os.path.join(results_file_gdb_path, r"partial_buildings_to_regularize") + buildings_reg = os.path.join(results_file_gdb_path, r"partial_buildings_regularized") + + # the scratch gdb name + # scratch_gdb_name = r"Scratch.gdb" + scratch_gdb_name = r"TempWorkArea.gdb" + # Create a gdb to store the intermediate products + mp_gdb_name = r"MiscProducts.gdb" + # Entire path of the Miscellaneous Intermediate Products file gdb + mp_file_gdb_path = os.path.join(out_folder_path, mp_gdb_name) + # If the Miscellanous Intermediate Products file gdb doesn't exist, then create it + if not 
os.path.exists(mp_file_gdb_path):
+            arcpy.AddMessage("Creating Miscellaneous Intermediate Products File GDB: {0}".format(mp_file_gdb_path))
+            arcpy.CreateFileGDB_management(out_folder_path, mp_gdb_name, out_version="CURRENT")
+        else:
+            arcpy.AddMessage("\nMiscellaneous Intermediate Products File GDB "
+                             "already exists: {0}".format(mp_file_gdb_path))
+
+        # more Mosaic Datasets and Feature classes to be created...
+        dsm_md_name = r"DSM"
+        dsm_md = os.path.join(mp_file_gdb_path, dsm_md_name)
+        dsm_boundary = dsm_md + r"_boundary"
+        dtm_md_name = r"DTM"
+        dtm_md = os.path.join(mp_file_gdb_path, dtm_md_name)
+        dtm_boundary = dtm_md + r"_boundary"
+        las_md_name = r"LAS"
+        las_md = os.path.join(mp_file_gdb_path, las_md_name)
+        # las_boundary = las_md + r"_boundary"
+        height_md_name = r"HeightAboveGround"
+        height_md = os.path.join(mp_file_gdb_path, height_md_name)
+        buildings_merged = os.path.join(results_file_gdb_path, r"buildings_merged")
+        las_point_spacing = 0.0
+        lasd_name = r"LasDataset.lasd"
+        lasd = os.path.join(out_folder_path, lasd_name)
+
+        processing_area = os.path.join(mp_file_gdb_path, r"ProcessingArea")
+        arcpy.AddMessage("General processing area FC: {0}".format(processing_area))
+
+        if second_parameter == "ALL RETURNS" or second_parameter == "LAST RETURNS":
+            # Input is LAS
+            if not arcpy.Exists(lasd):
+                create_lasd(las_path, lasd)
+            if not arcpy.Exists(lasd):
+                arcpy.AddMessage("\nExiting...LAS Dataset not created: {0}".format(lasd))
+                sys.exit()
+            las_point_spacing = get_las_point_spacing(las_path, lasd, out_folder_path, mp_file_gdb_path)
+            arcpy.AddMessage("\n las_point_spacing: {0}".format(str(las_point_spacing)))
+            # # Set point spacing to 0.5 if get_las_point_spacing is unable to determine point spacing
+            # if las_point_spacing_str == "":
+            #     las_point_spacing_str = "0.5"
+
+            las_raster_type_template = os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())),
+                                                    r"LAS_Template_Raster_Type.art.xml")
+            las_raster_type_file = os.path.join(out_folder_path, r"LAS_Raster_Type.art.xml")
+
+            # Create a copy of the Template LAS Raster type for this execution, since the art.xml file
+            # needs to be edited (in set_pixel_size) to modify the desired pixel size of the mosaic dataset.
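+            # Worked example of the pixel-size rule in set_pixel_size() (the
+            # spacings are hypothetical, for illustration only):
+            #   las_point_spacing = 0.7  ->  max(3.0, round(2 * 0.7 + 0.5)) = max(3.0, 2.0) = 3.0
+            #   las_point_spacing = 2.6  ->  max(3.0, round(2 * 2.6 + 0.5)) = max(3.0, 6.0) = 6.0
+            # so the Mosaic Dataset cell size never drops below 3 (in the
+            # dataset's linear units), which keeps footprint generation stable.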
+ if not arcpy.Exists(las_raster_type_file): + shutil.copyfile(las_raster_type_template, las_raster_type_file) + set_pixel_size(las_raster_type_file, las_point_spacing) + else: + arcpy.AddMessage("LAS Raster type file already exists: {0}".format(las_raster_type_file)) + # Load the LAS into a Mosaic dataset and return the boundary in processing_area + if not arcpy.Exists(processing_area): + get_las_boundary(las_path, lasd, las_md, las_raster_type_file, mp_file_gdb_path, processing_area) + else: + arcpy.AddMessage("LAS Boundary file already exists: {0}".format(processing_area)) + if not arcpy.Exists(processing_area): + arcpy.AddMessage("\nExiting...Surface constraint FC not created: {0}".format(processing_area)) + sys.exit() + + # assign the boundary as a hard clip constraint + surface_constraints = "'" + processing_area + "'" + r" Hard_Clip" + arcpy.AddFilesToLasDataset_management(lasd, "", "NO_RECURSION", surface_constraints) + else: + # Input is DSM & DTM Rasters + if not arcpy.Exists(dsm_md): + create_md_from_raster(dsm_path, mp_file_gdb_path, dsm_md_name, dsm_boundary, "true", "true") + arcpy.AddMessage("Created MD: {0}\n".format(dsm_md)) + else: + arcpy.AddMessage("\nMD already exists: {0}".format(dsm_md)) + if not arcpy.Exists(dtm_md): + create_md_from_raster(dtm_path, mp_file_gdb_path, dtm_md_name, dtm_boundary, "true", "true") + arcpy.AddMessage("Created MD: {0}\n".format(dtm_md)) + else: + arcpy.AddMessage("\nMD already exists: {0}".format(dtm_md)) + + # Find the intersection of the DSM and DTM datasets to determine the general processing area + # Subsequent processing will further eliminate areas that don't need to be processed + intersect_input_list = [dsm_boundary, dtm_boundary] + arcpy.AddMessage("\nIntersection of: {0}".format(intersect_input_list)) + if not arcpy.Exists(processing_area): + arcpy.Intersect_analysis(intersect_input_list, processing_area, + join_attributes="ONLY_FID", cluster_tolerance="", output_type="INPUT") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from Intersect_analysis are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("\nDSM & DTM intersection fc already exists: {0}".format(processing_area)) + # desc_bound = arcpy.Describe(processing_area) + + # Get the bounds of the processing area to create a fishnet + # If input is DSM and DTM folders, then processing area is the intersection of the two MD's (DSM & DTM) + # If input is a LAS folder, then processing area is the boundary of the MD that contains the LAS + desc_bound = arcpy.Describe(processing_area) + xmin = desc_bound.Extent.XMin + ymin = desc_bound.Extent.YMin + # xmax = desc_bound.Extent.XMax + ymax = desc_bound.Extent.YMax + origin_coord = str(xmin) + " " + str(ymin) + y_axis_coord = str(xmin) + " " + str(ymax) + + if not arcpy.Exists(fishnet): + arcpy.CreateFishnet_management(tmpfishnet, origin_coord, y_axis_coord, processing_unit_length, + processing_unit_length, number_rows="", number_columns="", corner_coord="", + labels="NO_LABELS", + template=processing_area, geometry_type="POLYGON") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from CreateFishnet are: \n{0}\n".format(messages)) + + # Now keep only fishnet features that intersect the processing area + fishnet_lyr = r"fishnetLyr" + arcpy.MakeFeatureLayer_management(tmpfishnet, fishnet_lyr, "", "", "") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from MakeFeatureLayer are: \n{0}\n".format(messages)) + arcpy.SelectLayerByLocation_management(fishnet_lyr, 
"INTERSECT", processing_area, + "", "NEW_SELECTION", "NOT_INVERT") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from SelectLayerByLocation are: \n{0}\n".format(messages)) + arcpy.CopyFeatures_management(fishnet_lyr, fishnet, config_keyword="", spatial_grid_1="0", + spatial_grid_2="0", spatial_grid_3="0") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from CopyFeatures are: \n{0}\n".format(messages)) + arcpy.Delete_management(fishnet_lyr) + arcpy.Delete_management(tmpfishnet) + else: + arcpy.AddMessage("\nFishnet fc already exists: {0}".format(fishnet)) + + # Intentionally set overwriteOutput here (as opposed to earlier in the script) because this might be a re-start + # of the script, in which case we don't want to re-do the pre-processing steps. Subsequent logic + # manually checks for the existence of each folder (one per fishnet oid) before overwriting folder contents, + # so if the tool is being re-started the newest folder(s) may need to be deleted before restarting, especially + # if the tool was forced to stop. + arcpy.env.overwriteOutput = True + + # Initialize + buildings_merged_list = [] + trees_merged_list = [] + # inititalize Lists used for passing arguments to extract_buildings_trees + out_folder_path_list = [] + oid_list = [] + dsm_md_list = [] # only used if DSM & DTM Raster input + dtm_md_list = [] # only used if DSM & DTM Raster input + lasd_list = [] # only used if LAS input (i.e. if second_parameter == "ALL RETURNS" or "LAST RETURNS") + dsm_type_list = [] # only used if LAS input (i.e. if second_parameter == "ALL RETURNS" or "LAST RETURNS") + xmin_list = [] + xmax_list = [] + ymin_list = [] + ymax_list = [] + featureextraction_list = [] + elevation_meter_scalefactor_str_list = [] + use_pos_terrain_method_list = [] + delete_intermediate_files_list = [] + height_path_list = [] + point_spacing_list = [] # only used if LAS input (second_parameter == "ALL RETURNS" or "LAST RETURNS") + + fields = ["OID@", "SHAPE@"] + with arcpy.da.SearchCursor(fishnet, fields) as sc: + # iterate through the fishnet features to populate the arguments for extract_buildings_trees + for row in sc: + oid = str(row[0]) + geom = row[1] + xmin = str(geom.extent.XMin) + ymin = str(geom.extent.YMin) + xmax = str(geom.extent.XMax) + ymax = str(geom.extent.YMax) + # populate lists for each parameter + out_folder_path_list.append(out_folder_path) + oid_list.append(oid) + if second_parameter == "ALL RETURNS" or second_parameter == "LAST RETURNS": + lasd_list.append(lasd) + dsm_type_list.append(second_parameter) + else: + dsm_md_list.append(dsm_md) + dtm_md_list.append(dtm_md) + xmin_list.append(xmin) + xmax_list.append(xmax) + ymin_list.append(ymin) + ymax_list.append(ymax) + featureextraction_list.append(featureextraction) + elevation_meter_scalefactor_str_list.append(elevation_meter_scalefactor_str) + use_pos_terrain_method_list.append(use_pos_terrain_method) + delete_intermediate_files_list.append(delete_intermediate_files) + height_path_list.append(height_path) + point_spacing_list.append(str(las_point_spacing)) # only applicable if LAS input + del row, geom + del sc + + num_iterations_str = str(len(out_folder_path_list)) + arcpy.AddMessage("\n** Number of iterations (fishnet features) is: {0} **\n".format(num_iterations_str)) + + if second_parameter == "ALL RETURNS" or second_parameter == "LAST RETURNS": + pp_params = [[out_folder_path_list[i], oid_list[i], lasd_list[i], dsm_type_list[i], xmin_list[i], + xmax_list[i], ymin_list[i], ymax_list[i], 
featureextraction_list[i], + elevation_meter_scalefactor_str_list[i], use_pos_terrain_method_list[i], + delete_intermediate_files_list[i], height_path_list[i], point_spacing_list[i]] + for i in range(len(out_folder_path_list))] + else: + pp_params = [[out_folder_path_list[i], oid_list[i], dsm_md_list[i], dtm_md_list[i], xmin_list[i], + xmax_list[i], ymin_list[i], ymax_list[i], featureextraction_list[i], + elevation_meter_scalefactor_str_list[i], use_pos_terrain_method_list[i], + delete_intermediate_files_list[i], height_path_list[i], point_spacing_list[i]] + for i in range(len(out_folder_path_list))] + # arcpy.AddMessage("\n pp_params: {0}".format(pp_params)) + + # If executing from the gp User Interface, then extract_buildings_trees will be run serially. + # If executing from the command line, then extract_buildings_trees will be run in parallel. + arcpy.AddMessage(executed_from) + if "ARCMAP" in executed_from or "ARCCATALOG" in executed_from or \ + "RUNTIME" in executed_from: + list(map(extract_buildings_trees, pp_params)) + elif "PYTHON" in executed_from: + # Number of cores to use (max will be 3 for now, otherwise we're I/O bound) + cpu_num = min(multiprocessing.cpu_count(), 3) + # Create the pool object + pool = multiprocessing.Pool(processes=cpu_num, maxtasksperchild=1) + arcpy.AddMessage("\nCPUs utilized: {0}".format(cpu_num)) + # Start Multiprocessing + arcpy.AddMessage("Start Multiprocessing") + pool.map(extract_buildings_trees, pp_params, chunksize=1) + + # Close the pool + pool.close() + pool.join() + + # clear extent for the remainder of processing - important step (or mosaic dataset functionality doesn't work) + arcpy.env.extent = None + + # Create a mosaic dataset of all of the height rasters and use it later to get + # zonal stats on buildings and/or trees + if not arcpy.Exists(height_md): + # Don't need to build footprints, since this takes a while and isn't necessary + create_md_from_raster(height_path, mp_file_gdb_path, height_md, "", "false", "false") + arcpy.AddMessage("Created MD: {0}".format(height_md)) + else: + arcpy.AddMessage("\nMD already exists: {0}".format(height_md)) + + # If the user wanted trees output, then merge all of the tree feature classes + if "TREES" in featureextraction.upper() and not arcpy.Exists(all_trees_final): + for i in range(1, len(out_folder_path_list) + 1): + sub_path = os.path.join(out_folder_path, str(i)) + # arcpy.AddMessage("sub Path: {0}".format(sub_path)) + if os.path.exists(sub_path): + sub_results_gdb = os.path.join(sub_path, results_gdb_name) + # arcpy.AddMessage("sub Results gdb: {0}".format(sub_results_gdb)) + if arcpy.Exists(sub_results_gdb): + sub_fc = os.path.join(sub_results_gdb, r"trees_to_merge" + str(i)) + # arcpy.AddMessage("sub fc: {0}".format(sub_fc)) + if arcpy.Exists(sub_fc): + # Construct a semicolon delimited list of tree feature classes, for subsequent merging + # trees_merged_list = trees_merged_list + sub_fc + ";" + trees_merged_list.append(sub_fc) + # arcpy.AddMessage("trees_merged_list: {0}".format(trees_merged_list)) + + if len(trees_merged_list) > 0: + # Merge all of the tree feature classes into one. 
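+            # For illustration only (the folder layout is hypothetical),
+            # trees_merged_list at this point might look like:
+            #   [r"D:\results\1\Results.gdb\trees_to_merge1",
+            #    r"D:\results\2\Results.gdb\trees_to_merge2"]
+            # Merge_management accepts such a Python list directly as its inputs.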
+ arcpy.Merge_management(inputs=trees_merged_list, output=all_trees_final) + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from Merge are: \n{0}\n".format(messages)) + # Gather elevation statistics for trees + zonalstats(all_trees_final, height_md, results_file_gdb_path) + + # merge all of the building feature classes + if not arcpy.Exists(all_buildings_final): + # Merge all of the building feature classes into one. + for i in range(1, len(out_folder_path_list) + 1): + sub_path = os.path.join(out_folder_path, str(i)) + # arcpy.AddMessage("sub Path: {0}".format(sub_path)) + if os.path.exists(sub_path): + sub_results_gdb = os.path.join(sub_path, results_gdb_name) + # arcpy.AddMessage("sub Results gdb: {0}".format(sub_results_gdb)) + if arcpy.Exists(sub_results_gdb): + sub_fc = os.path.join(sub_results_gdb, r"buildings_to_merge" + str(i)) + # arcpy.AddMessage("sub fc: {0}".format(sub_fc)) + if arcpy.Exists(sub_fc): + # Construct a semicolon delimited list of building feature classes, for subsequent merging + # buildings_merged_list = buildings_merged_list + sub_fc + ";" + buildings_merged_list.append(sub_fc) + # arcpy.AddMessage("buildings_merged_list: {0}".format(buildings_merged_list)) + + if len(buildings_merged_list) > 0: + arcpy.Merge_management(inputs=buildings_merged_list, output=buildings_merged) + messages = arcpy.GetMessages() + # arcpy.AddMessage("\nResults output from Merge are: \n{0}\n".format(messages)) + + # Dissolve buildings into one feature class so that buildings at the borders of + # each building feature class can be dissolved into one building feature. + arcpy.Dissolve_management(buildings_merged, all_buildings_final, dissolve_field="", + statistics_fields="", + multi_part="SINGLE_PART", unsplit_lines="DISSOLVE_LINES") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from Dissolve are: \n{0}\n".format(messages)) + # Gather elevation statistics for area under buildings + zonalstats(all_buildings_final, height_md, results_file_gdb_path) + + # If user wants to regularize, then regularize those buildings that can best be regularized + # (these are the buildings that are oriented North/South) + # Instead of using this option, try using the Building Regularization gp tool in ArcGIS Pro or ArcMap 10.4 + if regularize_buildings == "true": + # Don't bother if both feature classes already exist + if not arcpy.Exists(buildings_to_reg) or not arcpy.Exists(buildings_reg): + expression = "\"MBG_Width\"* \"MBG_Length\">1" + # cellsize = Raster(diff_ori).meanCellHeight + # Determine the cell size of the DSM Mosaic Dataset + cellsize_result = arcpy.GetRasterProperties_management(dsm_md, property_type="CELLSIZEX", band_index="") + cellsize = float(cellsize_result.getOutput(0)) + # arcpy.AddMessage("Cell size of MD: {0}".format(cellsize)) + regularize_some_buildings(all_buildings_final, buildings_to_reg, buildings_reg, expression, + cellsize, out_folder_path) + else: + arcpy.AddMessage("\npartial_buildings_regularized and partial_buildings_to_regularize already exist") + + # Delete all intermediate files if user checked on "Delete all intermediate files" + if delete_intermediate_files == "true": + if arcpy.Exists(buildings_merged): + arcpy.Delete_management(buildings_merged) + # Clean up the rasters in height_path and delete the height_path directory + try: + remove_rasters(height_path) + arcpy.Delete_management(height_path) + except: + arcpy.AddMessage("Unable to clean up directory: {0}".format(height_path)) + + # Delete files created during 
building regularization + if regularize_buildings == "true": + remove_rasters(out_folder_path) + remove_shapefiles(out_folder_path) + + # Delete tables created during Zonal Statistics creation (in def zonalstats) + remove_tables(results_file_gdb_path) + + # In each oid sub-folder, delete the individual Results.gdb & Scratch.gdb and all of their feature classes + for i in range(1, len(out_folder_path_list) + 1): + sub_path = os.path.join(out_folder_path, str(i)) + # arcpy.AddMessage("sub Path: {0}".format(sub_path)) + if os.path.exists(sub_path): + sub_results_gdb = os.path.join(sub_path, results_gdb_name) + if arcpy.Exists(sub_results_gdb): + try: + remove_filegdb(sub_results_gdb) + except: + arcpy.AddMessage("Unable to delete file GDB: {0}".format(sub_results_gdb)) + sub_scratch_gdb = os.path.join(sub_path, scratch_gdb_name) + if arcpy.Exists(sub_scratch_gdb): + try: + remove_filegdb(sub_scratch_gdb) + except: + arcpy.AddMessage("Unable to delete file GDB: {0}".format(sub_scratch_gdb)) + + # Delete Mosaic Datasets in MosaicDatasets.gdb, then delete remaining fc's and then MosaicDatasets.gdb + if arcpy.Exists(dsm_md): + arcpy.Delete_management(dsm_md) + if arcpy.Exists(dtm_md): + arcpy.Delete_management(dtm_md) + if arcpy.Exists(height_md): + arcpy.Delete_management(height_md) + # if arcpy.Exists(las_md): + # arcpy.Delete_management(las_md) + + # remove_filegdb(mp_file_gdb_path) + + end = time.time() + delta = end - start + # This is useful if the tool is run at the command line + arcpy.AddMessage("***** Total elapsed time is {0} hours *****".format(delta/3600)) + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +if __name__ == '__main__': + arcpy.AddMessage(inspect.getfile(inspect.currentframe())) + arcpy.AddMessage(os.path.dirname(inspect.getfile(inspect.currentframe()))) + arcpy.AddMessage(sys.version) + arcpy.AddMessage(sys.executable) + executed_from = sys.executable.upper() + + PYTHON_EXE = os.path.join(sys.exec_prefix, 'pythonw.exe') + # use pythonw for multiprocessing + multiprocessing.set_executable(PYTHON_EXE) + + # This python code can be invoked by two different script tools, depending upon + # whether the input is LAS or raster. Therefore the first two parameters can either be + # LAS folder and DSM Creation Method or DSM Folder and DTM Folder. + # + # If the second parameter is either "ALL Returns" or "Last Returns" then + # the first parameter is assumed to be the LAS folder. 
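+    # Illustrative command-line call (every path and value is hypothetical, and
+    # "thisScript.py" stands in for this file; the nine positional arguments
+    # match the sys.argv parsing below):
+    #
+    #   python thisScript.py D:\data\las "ALL RETURNS" METER ^
+    #       "Buildings and Trees" D:\results 1000 false true false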
+ + if "ARCGISPRO" in executed_from: + arcpy.AddMessage("Exiting...this tool does not yet run from ArcGIS Pro") + sys.exit(0) + + if not ("ARCMAP" in executed_from or "ARCCATALOG" in executed_from or + "RUNTIME" in executed_from): + arcpy.AddMessage("Getting parameters from command line...") + + second_parameter = sys.argv[2] + second_parameter = second_parameter.strip() + if second_parameter.upper() == "ALL RETURNS" or second_parameter.upper() == "LAST RETURNS": + # Input is LAS + first_parameter = sys.argv[1] + first_parameter = first_parameter.strip() + arcpy.AddMessage("LAS Path: {0}".format(first_parameter)) + second_parameter = second_parameter.upper() + arcpy.AddMessage("DSM Creation Method: {0}".format(second_parameter)) + else: + # Input is DSM & DTM Rasters + first_parameter = sys.argv[1] + first_parameter = first_parameter.strip() + arcpy.AddMessage("DSM Path: {0}".format(first_parameter)) + arcpy.AddMessage("DTM Path: {0}".format(second_parameter)) + + # dsm_path = sys.argv[1] + # dsm_path = dsm_path.strip() + # arcpy.AddMessage("DSM Path: {0}".format(dsm_path)) + # dtm_path = sys.argv[2] + # dtm_path = dtm_path.strip() + # arcpy.AddMessage("DTM Path: {0}".format(dtm_path)) + + z_unit = sys.argv[3] + z_unit = z_unit.upper() + arcpy.AddMessage("Z Unit: {0}".format(z_unit)) + + featureextraction = sys.argv[4] + arcpy.AddMessage("Desired Features extracted: {0}".format(featureextraction)) + + out_folder_path = sys.argv[5] + out_folder_path = out_folder_path.strip() + arcpy.AddMessage("Output Folder Path: {0}".format(out_folder_path)) + + processing_unit_length = sys.argv[6] + arcpy.AddMessage("Processing Unit Distance: {0}".format(processing_unit_length)) + + use_pos_terrain_method = sys.argv[7] + arcpy.AddMessage("Use alternative positive terrain method for buildings: {0}".format(use_pos_terrain_method)) + + delete_intermediate_files = sys.argv[8] + arcpy.AddMessage("Delete all intermediate files: {0}".format(delete_intermediate_files)) + + regularize_buildings = sys.argv[9] + arcpy.AddMessage("Regularize some buildings: {0}".format(regularize_buildings)) + + else: + arcpy.AddMessage("Getting parameters from GetParameterAsText...") + + second_parameter = arcpy.GetParameterAsText(1) + second_parameter = second_parameter.strip() + if second_parameter.upper() == "ALL RETURNS" or second_parameter.upper() == "LAST RETURNS": + # Input is LAS + first_parameter = arcpy.GetParameterAsText(0) + first_parameter = first_parameter.strip() + arcpy.AddMessage("LAS Path: {0}".format(first_parameter)) + second_parameter = second_parameter.upper() + arcpy.AddMessage("DSM Creation Method: {0}".format(second_parameter)) + else: + # Input is DSM & DTM Rasters + first_parameter = arcpy.GetParameterAsText(0) + first_parameter = first_parameter.strip() + arcpy.AddMessage("DSM Path: {0}".format(first_parameter)) + arcpy.AddMessage("DTM Path: {0}".format(second_parameter)) + + z_unit = arcpy.GetParameterAsText(2) + z_unit = z_unit.upper() + arcpy.AddMessage("Z Unit: {0}".format(z_unit)) + + featureextraction = arcpy.GetParameterAsText(3) + arcpy.AddMessage("Desired Features extracted: {0}".format(featureextraction)) + + out_folder_path = arcpy.GetParameterAsText(4) + out_folder_path = out_folder_path.strip() + arcpy.AddMessage("Output Folder Path: {0}".format(out_folder_path)) + + processing_unit_length = arcpy.GetParameterAsText(5) + arcpy.AddMessage("Processing Unit Distance: {0}".format(processing_unit_length)) + + use_pos_terrain_method = arcpy.GetParameterAsText(6) + arcpy.AddMessage("Use alternative 
positive terrain method for buildings: {0}".format(use_pos_terrain_method))
+
+        delete_intermediate_files = arcpy.GetParameterAsText(7)
+        arcpy.AddMessage("Delete all intermediate files: {0}".format(delete_intermediate_files))
+
+        regularize_buildings = arcpy.GetParameterAsText(8)
+        arcpy.AddMessage("Regularize some buildings: {0}".format(regularize_buildings))
+
+    main(first_parameter, second_parameter, z_unit, featureextraction, out_folder_path, processing_unit_length,
+         use_pos_terrain_method, delete_intermediate_files, regularize_buildings)
diff --git a/V3_delivery/V3_Delivery/LAS_Template_Raster_Type.art.xml b/V3_delivery/V3_Delivery/LAS_Template_Raster_Type.art.xml
new file mode 100644
index 0000000..0d807d0
--- /dev/null
+++ b/V3_delivery/V3_Delivery/LAS_Template_Raster_Type.art.xml
@@ -0,0 +1,190 @@
+<!-- [XML markup lost in extraction. Recoverable details: an Esri RasterBuilder
+     raster type template for LAS ("LAS_Template_Raster_Type") defining auxiliary
+     fields (Version, PointCount, PointSpacing, ZMin, ZMax), an Elevation data
+     type with one 32-bit float band, LAS-to-raster arguments, and a CellWidth
+     placeholder PUT_PIXEL_SIZE_HERE that set_pixel_size() replaces at run time.] -->
\ No newline at end of file
diff --git a/V3_delivery/V3_Delivery/SimplifyBuildings_V1.py b/V3_delivery/V3_Delivery/SimplifyBuildings_V1.py
new file mode 100644
index 0000000..151214d
--- /dev/null
+++ b/V3_delivery/V3_Delivery/SimplifyBuildings_V1.py
@@ -0,0 +1,189 @@
+# ---------------------------------------------------------------------------
+# Name:         SimplifyBuildings.py
+# Purpose:      To Simplify building footprints
+# Usage:
+# Description:
+# Author:       Roslyn Dunn
+# Organization: Esri Inc.
+# +# Created: 03/11/2016 Roslyn Dunn +# --------------------------------------------------------------------------- + +import sys +import os +import inspect +import traceback +import time +import shutil +import arcpy + + +def main(input_fc, output_fgdb, output_fc_name, delete_intermediate_results): + try: + start = time.time() + + if not arcpy.Exists(input_fc): + arcpy.AddMessage("\nExiting process...Input Feature Class does not exist: \n {0}".format(input_fc)) + sys.exit() + + if not arcpy.Exists(output_fgdb): + arcpy.AddMessage("\nExiting process...Input File GDB does not exist: \n {0}".format(input_fc)) + sys.exit() + + output_fc = os.path.join(output_fgdb, output_fc_name) + if arcpy.Exists(output_fc): + arcpy.AddMessage("\nExiting process...Output Feature Class already exists: \n {0}".format(output_fc)) + sys.exit() + + # Send intermediate outputs to the same GDB as the final output + scratch_gdb_name = output_fgdb + arcpy.AddMessage("\nScratch GDB for intermediate products: \n {0}".format(scratch_gdb_name)) + + buff1_output = os.path.join(scratch_gdb_name, r"BuffInward") + arcpy.Buffer_analysis(in_features=input_fc, out_feature_class=buff1_output, + buffer_distance_or_field="-2 Meters", line_side="FULL", + line_end_type="ROUND", dissolve_option="NONE", dissolve_field="", method="GEODESIC") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from Buffer_analysis are: \n{0}\n".format(messages)) + + single_part_features = os.path.join(scratch_gdb_name, r"SinglePart") + arcpy.MultipartToSinglepart_management(in_features=buff1_output, out_feature_class=single_part_features) + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from MultipartToSinglepart_management are: \n{0}\n".format(messages)) + + buff2_output = os.path.join(scratch_gdb_name, r"BuffOutward") + arcpy.Buffer_analysis(in_features=single_part_features, out_feature_class=buff2_output, + buffer_distance_or_field="2 Meters", line_side="FULL", + line_end_type="ROUND", dissolve_option="NONE", dissolve_field="", method="GEODESIC") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from Buffer_analysis are: \n{0}\n".format(messages)) + + large_buildings = r"LargeBuildings" + arcpy.MakeFeatureLayer_management(in_features=buff2_output, out_layer=large_buildings, + where_clause="Shape_Area > 500", workspace="", field_info="#") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from MakeFeatureLayer_management are: \n{0}\n".format(messages)) + + small_buildings = r"SmallBuildings" + arcpy.MakeFeatureLayer_management(in_features=buff2_output, out_layer=small_buildings, + where_clause="Shape_Area <= 500 AND Shape_Area >= 10", + workspace="", field_info="#") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from MakeFeatureLayer_management are: \n{0}\n".format(messages)) + + large_reg = os.path.join(scratch_gdb_name, r"LargeReg") + arcpy.RegularizeBuildingFootprint_3d(in_features=large_buildings, out_feature_class=large_reg, + method="RIGHT_ANGLES_AND_DIAGONALS", tolerance="0.75", + densification="0.75", precision="0.25", diagonal_penalty="1.5", + min_radius="0.1", max_radius="1000000") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from RegularizeBuildingFootprint_3d are: \n{0}\n".format(messages)) + + small_reg = os.path.join(scratch_gdb_name, r"SmallReg") + arcpy.RegularizeBuildingFootprint_3d(in_features=small_buildings, out_feature_class=small_reg, + method="RIGHT_ANGLES", tolerance="0.75", + densification="0.75", 
+                                             precision="0.25", diagonal_penalty="1.5",
+                                             min_radius="0.1", max_radius="1000000")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from RegularizeBuildingFootprint_3d are: \n{0}\n".format(messages))
+
+        merge_list = []
+        merge_list.append(large_reg)
+        merge_list.append(small_reg)
+        merge_reg = os.path.join(scratch_gdb_name, r"MergeReg")
+        arcpy.Merge_management(inputs=merge_list, output=merge_reg,
+                               field_mappings="#")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from Merge_management are: \n{0}\n".format(messages))
+
+        simplify_once = os.path.join(scratch_gdb_name, r"SimplifyOnce")
+        arcpy.SimplifyBuilding_cartography(in_features=merge_reg, out_feature_class=simplify_once,
+                                           simplification_tolerance="2 Meters", minimum_area="0 SquareFeet",
+                                           conflict_option="NO_CHECK")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from SimplifyBuilding_cartography are: \n{0}\n".format(messages))
+
+        arcpy.SimplifyBuilding_cartography(in_features=simplify_once, out_feature_class=output_fc,
+                                           simplification_tolerance="4 Meters", minimum_area="0 SquareFeet",
+                                           conflict_option="NO_CHECK")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from SimplifyBuilding_cartography are: \n{0}\n".format(messages))
+
+        if delete_intermediate_results == "true":
+            # Delete the intermediate feature classes
+            if arcpy.Exists(buff1_output):
+                arcpy.Delete_management(buff1_output)
+            if arcpy.Exists(single_part_features):
+                arcpy.Delete_management(single_part_features)
+            if arcpy.Exists(buff2_output):
+                arcpy.Delete_management(buff2_output)
+            if arcpy.Exists(large_reg):
+                arcpy.Delete_management(large_reg)
+            if arcpy.Exists(small_reg):
+                arcpy.Delete_management(small_reg)
+            if arcpy.Exists(merge_reg):
+                arcpy.Delete_management(merge_reg)
+            if arcpy.Exists(simplify_once):
+                arcpy.Delete_management(simplify_once)
+
+        # Remove the temporary feature layers regardless of the flag
+        arcpy.Delete_management(large_buildings)
+        arcpy.Delete_management(small_buildings)
+
+        end = time.time()
+        delta = end - start
+        # This is useful if the tool is run at the command line
+        arcpy.AddMessage("***** Total elapsed time is {0} hours *****".format(delta/3600))
+
+    except arcpy.ExecuteError:
+        print(arcpy.GetMessages())
+    except Exception:
+        # Return any Python specific errors and any error returned by the geoprocessor
+        tb = sys.exc_info()[2]
+        tbinfo = traceback.format_tb(tb)[0]
+        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
+                str(sys.exc_type) + ": " + str(sys.exc_value) + "\n"
+        arcpy.AddError(pymsg)
+        msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n"
+        arcpy.AddError(msgs)
+
+if __name__ == '__main__':
+    arcpy.AddMessage(inspect.getfile(inspect.currentframe()))
+    arcpy.AddMessage(os.path.dirname(inspect.getfile(inspect.currentframe())))
+    arcpy.AddMessage(sys.version)
+    arcpy.AddMessage(sys.executable)
+    executed_from = sys.executable.upper()
+
+    arcpy.CheckOutExtension('3D')
+    # if "ARCGISPRO" in executed_from:
+    #     arcpy.AddMessage("Exiting...this tool does not yet run from ArcGIS Pro")
+    #     sys.exit(0)
+
+    if not ("ARCMAP" in executed_from or "ARCCATALOG" in executed_from or
+            "RUNTIME" in executed_from):
+        arcpy.AddMessage("Getting parameters from command line...")
+
+        input_fc = sys.argv[1]
+        output_fgdb = sys.argv[2]
+        output_fc_name = sys.argv[3]
+        delete_intermediate_results = sys.argv[4]
+    else:
+        arcpy.AddMessage("Getting parameters from GetParameterAsText...")
+        input_fc = arcpy.GetParameterAsText(0)
+        output_fgdb = arcpy.GetParameterAsText(1)
+        output_fc_name = arcpy.GetParameterAsText(2)
+        delete_intermediate_results = arcpy.GetParameterAsText(3)
+
+    input_fc = input_fc.strip()
+    output_fgdb = output_fgdb.strip()
+    output_fc_name = output_fc_name.strip()
+    arcpy.AddMessage("Input Feature Class: {0}".format(input_fc))
+    arcpy.AddMessage("Output File GDB: {0}".format(output_fgdb))
+    arcpy.AddMessage("Output Feature Class name: {0}".format(output_fc_name))
+    arcpy.AddMessage("Delete Intermediate Results: {0}".format(delete_intermediate_results))
+
+    main(input_fc, output_fgdb, output_fc_name, delete_intermediate_results)
diff --git a/V3_delivery/V4_Delivery/.idea/V3_Delivery.iml b/V3_delivery/V4_Delivery/.idea/V3_Delivery.iml
new file mode 100644
index 0000000..63cfb66
diff --git a/V3_delivery/V4_Delivery/.idea/misc.xml b/V3_delivery/V4_Delivery/.idea/misc.xml
new file mode 100644
index 0000000..e18a1c7
diff --git a/V3_delivery/V4_Delivery/.idea/modules.xml b/V3_delivery/V4_Delivery/.idea/modules.xml
new file mode 100644
index 0000000..558a5ef
diff --git a/V3_delivery/V4_Delivery/.idea/workspace.xml b/V3_delivery/V4_Delivery/.idea/workspace.xml
new file mode 100644
index 0000000..7cdf676
[.idea project-file bodies omitted: PyCharm metadata whose XML did not survive extraction.]
diff --git a/V3_delivery/V4_Delivery/BuildingTreeIdentification.tbx b/V3_delivery/V4_Delivery/BuildingTreeIdentification.tbx
new file mode 100644
index 0000000..c4480c2
Binary files /dev/null and b/V3_delivery/V4_Delivery/BuildingTreeIdentification.tbx differ
diff --git a/V3_delivery/V4_Delivery/ExtractBuildingsTreesAutomation_V4.py b/V3_delivery/V4_Delivery/ExtractBuildingsTreesAutomation_V4.py
new file mode 100644
index 0000000..76b4184
--- /dev/null
+++ b/V3_delivery/V4_Delivery/ExtractBuildingsTreesAutomation_V4.py
@@ -0,0 +1,2033 @@
+# ---------------------------------------------------------------------------
+# Name:         ExtractBuildingsTreesAutomation.py
+# Purpose:
+# Usage:
+# Description:
+# Author:       Yiqun Xie, Joseph McGlinchy
+# Organization: Esri Inc.
+#
+# Created:      06/25/2015  Yiqun Xie, Joseph McGlinchy
+# Modified:     12/22/2015  Roslyn Dunn    Version 1
+#               Expand the algorithm to run over large areas in production
+# Modified:     01/05/2016  Roslyn Dunn    Version 2
+#               Remove describe of file gdb, since gdb's are corrupted during
+#               file deletion.
+#               Output .tif file from Zonal Stats to avoid having SRS modified
+#               on output.
+#               Require output folder to exist (changed to input parameter instead
+#               of output.
+# Fix issues with deletion of scratch workspaces (bug in core) +# Modified: 02/12/2016 Roslyn Dunn Version 3 +# Added functionality to construct surfaces directly from LAS +# Modified: 04/05/2016 Joe McGlinchy Version 4.0 +# Ensured correct execution from Desktop 10.4 and Pro 1.2 environments +# --------------------------------------------------------------------------- + +import sys +import os +import inspect +import traceback +import math +import multiprocessing +import time +import shutil +# import resourceLogger +import arcpy +from arcpy.sa import * + + +def extract_buildings_trees(pp_params): + # In order to invoke this method as a separate process using python multiprocessing, + # import arcpy and other site packages + import arcpy + import sys + import os + import traceback + import time + try: + start = time.time() + # check out Spatial Analyst license + if arcpy.CheckExtension("Spatial") == "Available": + arcpy.CheckOutExtension("Spatial") + else: + arcpy.AddMessage("\n*** No Spatial Analyst License Available." + "Exiting extract_buildings_trees for OID: {0} ***".format(pp_params[1])) + sys.exit() + arcpy.env.overwriteOutput = True + # Unpack pp_params + out_folder_path = pp_params[0] + # arcpy.AddMessage("\n out_folder_path: {0}".format(out_folder_path)) + oid = pp_params[1] + arcpy.AddMessage("\n** Processing oid: {0} **\n".format(oid)) + # set a flag to indicate if input data type is LAS or DSM/DTM rasters + las_input = False + if pp_params[3] == "ALL RETURNS" or pp_params[3] == "LAST RETURNS": + las_input = True + lasd = pp_params[2] + # arcpy.AddMessage("\n lasd: {0}".format(lasd)) + las_return_type = pp_params[3] + # arcpy.AddMessage("\n las_return_type: {0}".format(las_return_type)) + arcpy.CheckOutExtension('3D') + else: + dsm_md = pp_params[2] + # arcpy.AddMessage("\n dsm_md: {0}".format(dsm_md)) + dtm_md = pp_params[3] + # arcpy.AddMessage("\n dtm_md: {0}".format(dtm_md)) + xmin_str = pp_params[4] + # arcpy.AddMessage("\n xmin_str: {0}".format(xmin_str)) + xmax_str = pp_params[5] + # arcpy.AddMessage("\n xmax_str: {0}".format(xmax_str)) + ymin_str = pp_params[6] + # arcpy.AddMessage("\n ymin_str: {0}".format(ymin_str)) + ymax_str = pp_params[7] + # arcpy.AddMessage("\n ymax_str: {0}".format(ymax_str)) + featureextraction = pp_params[8] + # arcpy.AddMessage("\n featureextraction: {0}".format(featureextraction)) + elevation_meter_scalefactor_str = pp_params[9] + # arcpy.AddMessage("\n elevation_meter_scalefactor_str: {0}".format(elevation_meter_scalefactor_str)) + use_pos_terrain_method = pp_params[10] + # arcpy.AddMessage("\n use_pos_terrain_method: {0}".format(use_pos_terrain_method)) + delete_intermediate_files = pp_params[11] + # arcpy.AddMessage("\n delete_intermediate_files: {0}".format(delete_intermediate_files)) + height_path = pp_params[12] + # arcpy.AddMessage("\n height_path: {0}".format(height_path)) + las_point_spacing = pp_params[13] # only utilized when las_input = True + # arcpy.AddMessage("\n las_point_spacing: {0}".format(las_point_spacing)) + + # Convert some passed parameters to float (all parameters were passed as strings) + elevation_meter_scalefactor = float(elevation_meter_scalefactor_str) + xmin = float(xmin_str) + xmax = float(xmax_str) + ymin = float(ymin_str) + ymax = float(ymax_str) + + # Each fishnet section gets a separate directory (named after the OID) to contain results + out_subfolder_path = os.path.join(out_folder_path, oid) + + # The results file gdb will store trees and buildings + results_gdb_name = r"Results.gdb" + # Entire path of 
the file gdb
+        results_file_gdb_path = os.path.join(out_subfolder_path, results_gdb_name)
+
+        trees_output = ""
+        if "TREES" in featureextraction.upper():
+            trees_output = os.path.join(results_file_gdb_path, r"trees_to_merge" + oid)
+        buildings_output = os.path.join(results_file_gdb_path, r"buildings_to_merge" + oid)
+
+        # arcpy.env.workspace = out_subfolder_path
+        # arcpy.env.workspace = "in_memory"
+        if not os.path.exists(out_subfolder_path):
+            arcpy.AddMessage("Creating Results sub-Folder: " + out_subfolder_path)
+            os.makedirs(out_subfolder_path)
+        elif arcpy.Exists(buildings_output):
+            # if desired outputs already exist (buildings_to_merge* and possibly trees_to_merge*),
+            # then don't continue, since this run has been re-started
+            if "TREES" in featureextraction.upper():
+                # If trees_to_merge* feature class exists then this oid has already been processed
+                if arcpy.Exists(trees_output):
+                    arcpy.AddMessage("OID {0} already processed...skipping".format(oid))
+                    return
+            else:
+                arcpy.AddMessage("OID {0} already processed...skipping".format(oid))
+                return
+
+        # If the results file gdb doesn't exist, then create it
+        if not os.path.exists(results_file_gdb_path):
+            # arcpy.AddMessage("Creating Results File GDB: {0}".format(results_file_gdb_path))
+            arcpy.CreateFileGDB_management(out_subfolder_path, results_gdb_name, out_version="CURRENT")
+
+        # Create a scratch file gdb for intermediate file output
+        scratch_gdb_name = r"TempWorkArea.gdb"
+        # Entire path of the file gdb
+        scratch_file_gdb_path = os.path.join(out_subfolder_path, scratch_gdb_name)
+        # If the file gdb doesn't exist, then create it
+        if not os.path.exists(scratch_file_gdb_path):
+            # arcpy.AddMessage("Creating Scratch File GDB: {0}\n".format(scratch_file_gdb_path))
+            arcpy.CreateFileGDB_management(out_subfolder_path, scratch_gdb_name, out_version="CURRENT")
+
+        # send intermediate files to the scratch file gdb
+        arcpy.env.workspace = scratch_file_gdb_path
+
+        # clip dsm raster from either LAS Dataset or DSM Mosaic Dataset
+        clip_dsm_raster = os.path.join(out_subfolder_path, "dsm{}.tif".format(oid))
+        # arcpy.AddMessage("\nDSM clip_raster: {0}".format(clip_dsm_raster))
+        # Extend the clip footprint by 1 unit in each direction so resulting features overlap
+        fishnet_rectangle = "{} {} {} {}".format(xmin - 1, ymin - 1, xmax + 1, ymax + 1)
+        if not arcpy.Exists(clip_dsm_raster):
+            if las_input:
+                if las_return_type == "ALL RETURNS":
+                    return_val = ""
+                else:
+                    # Last Return is better for defining buildings
+                    return_val = "'Last Return'"
+                # use LAS Class codes 0-6 to create the DSM
+                # 0 - Never Classified, 1 - Unassigned, 2 - Ground, 3 - Low Vegetation,
+                # 4 - Medium Vegetation, 5 - High Vegetation, 6 - Building
+                arcpy.MakeLasDatasetLayer_management(lasd, out_layer="DSM_LASD_Layer",
+                                                     class_code="0;1;2;3;4;5;6",
+                                                     return_values=return_val, no_flag="true",
+                                                     synthetic="true", keypoint="true",
+                                                     withheld="false", surface_constraints="")
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from MakeLasDatasetLayer of DSM is: \n{0}".format(messages))
+                arcpy.env.extent = fishnet_rectangle
+                arcpy.LasDatasetToRaster_conversion("DSM_LASD_Layer", clip_dsm_raster, value_field="ELEVATION",
+                                                    interpolation_type="BINNING MAXIMUM NATURAL_NEIGHBOR",
+                                                    data_type="FLOAT", sampling_type="CELLSIZE",
+                                                    sampling_value=las_point_spacing, z_factor="1")
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from LasDatasetToRaster of DSM is: \n{0}".format(messages))
+            else:
+                # Input is DSM and DTM Mosaic datasets
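+                # The "-3.40282346639e+038" passed to Clip_management below is the
+                # most negative 32-bit float, used as the NoData sentinel for the
+                # clipped raster.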
+                try:
+                    arcpy.Clip_management(dsm_md, fishnet_rectangle, clip_dsm_raster, "#", "-3.40282346639e+038")
+                except:
+                    arcpy.AddMessage("No data in DSM clip of area with oid: {0}".format(oid))
+                    return
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from Clip of DSM is: \n{0}".format(messages))
+
+        # clip dtm raster from either LAS Dataset or DTM Mosaic Dataset
+        clip_dtm_raster = os.path.join(out_subfolder_path, "dtm{}.tif".format(oid))
+        # arcpy.AddMessage("\nDTM clip_raster: {0}".format(clip_dtm_raster))
+        if not arcpy.Exists(clip_dtm_raster):
+            if las_input:
+                # use LAS Class codes 2 and 8 to create the DTM
+                # 2 - Ground, 8 - Model Key
+                arcpy.MakeLasDatasetLayer_management(lasd, out_layer="DTM_LASD_Layer", class_code="2;8",
+                                                     return_values="", no_flag="true", synthetic="true",
+                                                     keypoint="true", withheld="false", surface_constraints="")
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from MakeLasDatasetLayer of DTM is: \n{0}".format(messages))
+                arcpy.env.extent = fishnet_rectangle
+                arcpy.LasDatasetToRaster_conversion("DTM_LASD_Layer", clip_dtm_raster, value_field="ELEVATION",
+                                                    interpolation_type="BINNING AVERAGE NATURAL_NEIGHBOR",
+                                                    data_type="FLOAT", sampling_type="CELLSIZE",
+                                                    sampling_value=las_point_spacing, z_factor="1")
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from LasDatasetToRaster of DTM is: \n{0}".format(messages))
+            else:
+                # Input is DSM and DTM Mosaic datasets
+                try:
+                    arcpy.Clip_management(dtm_md, fishnet_rectangle, clip_dtm_raster, "#", "-3.40282346639e+038")
+                except:
+                    arcpy.AddMessage("No data in DTM clip of area with oid: {0}".format(oid))
+                    if delete_intermediate_files == "true":
+                        remove_rasters(out_subfolder_path)
+                        remove_shapefiles(out_subfolder_path)
+                        arcpy.env.workspace = ""  # so we can successfully delete scratch_file_gdb_path
+                        remove_filegdb(scratch_file_gdb_path)
+                    return
+                # messages = arcpy.GetMessages()
+                # arcpy.AddMessage("\nResults output from Clip of DTM is: \n{0}".format(messages))
+
+        # Minus - compute difference to get heights relative to the ground
+        # Note the Height raster is put into a separate folder (height_path)
+        # (Used to create a Mosaic Dataset later in zonal stats step)
+        diff_ori = os.path.join(height_path, "Minus_img" + oid + r".tif")
+        arcpy.gp.Minus_sa(clip_dsm_raster, clip_dtm_raster, diff_ori)
+
+        # Divide by a scale factor to convert all heights to meters
+        # divide_minus1 = os.path.join(out_subfolder_path, "Divide_Minus1" + oid)
+        flt_meter_elev_unit = float(elevation_meter_scalefactor)
+        divide_minus1 = Raster(diff_ori) / flt_meter_elev_unit
+
+        # create a raster with all floating point values of 1.0
+        # Note: Done because SetNull & Con won't take a constant 1.0 value for the false raster parameter
+        flt_raster_one = divide_minus1 / divide_minus1
+
+        # setnull_divi1 = os.path.join(out_subfolder_path, "SetNull_Divi1" + oid)
+        setnull_divi1 = SetNull(divide_minus1, flt_raster_one, "VALUE<2")
+
+        arcpy.Delete_management(flt_raster_one)
+
+        # check to see if any heights are > 2 meters, and if not, then we're done
+        if setnull_divi1.maximum <= 0:
+            arcpy.AddMessage("No Buildings or Trees were
identified in region: " + oid + if delete_intermediate_files == "true": + remove_rasters(out_subfolder_path) + remove_shapefiles(out_subfolder_path) + arcpy.env.workspace = "" # so we can successfully delete scratch_file_gdb_path + remove_filegdb(scratch_file_gdb_path) + return + # Is Null - create a mask to indicate the areas where heights are < 2 (1: heights < 2, 0: heights >= 2) + isnull_newma1 = os.path.join(out_subfolder_path, "IsNullnewm" + oid) + arcpy.gp.IsNull_sa(setnull_divi1, isnull_newma1) + + # Make Raster Layer from the previously created mask + mask_null = "mask_null" + oid + arcpy.MakeRasterLayer_management(isnull_newma1, mask_null, "", "", "") + + # Select Layer By Attribute - Select those pixels in the layer where heights are < 2 + arcpy.SelectLayerByAttribute_management(mask_null, "NEW_SELECTION", "VALUE=1") + + # Euclidean Distance - disBD represents the distance of each 'tall' pixel to the closest 'short' pixel + # If a tree exists, the center (trunk) of the tree will have a larger value (it's toward the center) + # If a building, the center of the building will have a larger value + disbd = os.path.join(out_subfolder_path, "disBD" + oid) + arcpy.gp.EucDistance_sa(mask_null, disbd, "", mask_null, "") + arcpy.Delete_management(mask_null) + + # Set Null - set all ZERO (and very small distance) values to NoData + disbdnull = os.path.join(out_subfolder_path, "disBDnull" + oid) + arcpy.gp.SetNull_sa(disbd, disbd, disbdnull, "VALUE<0.0001") + + # Negate the Distances to create wells (as opposed to peaks) + # Now the peaks we have in the middle of the objects become basins and the boundary pixels become ridges + rdsm = os.path.join(out_subfolder_path, "Negatediff" + oid) + arcpy.gp.Negate_sa(disbdnull, rdsm) + + # Flow Direction + flowdir_filt1 = os.path.join(out_subfolder_path, "FlowDirFlt" + oid) + arcpy.gp.FlowDirection_sa(rdsm, flowdir_filt1, "NORMAL", "") + + # Basin + basin_flowdi5 = os.path.join(out_subfolder_path, "BasinFlowD" + oid + r".tif") + arcpy.gp.Basin_sa(flowdir_filt1, basin_flowdi5) + + # Times + diff = os.path.join(out_subfolder_path, "diff" + oid) + arcpy.gp.Times_sa(divide_minus1, setnull_divi1, diff) + arcpy.Delete_management(divide_minus1) + + # Focal Statistics + focalst_diff1 = os.path.join(out_subfolder_path, "FocalStdif" + oid) + arcpy.gp.FocalStatistics_sa(diff, focalst_diff1, "Rectangle 3 3 CELL", "MEAN", "DATA") + + # Times + mean_diff = os.path.join(out_subfolder_path, "mean_diff" + oid) + arcpy.gp.Times_sa(focalst_diff1, setnull_divi1, mean_diff) + arcpy.Delete_management(setnull_divi1) + + # Minus + diff_minus_avg = os.path.join(out_subfolder_path, "diffMinusA" + oid) + arcpy.gp.Minus_sa(diff, mean_diff, diff_minus_avg) + + # Send the output to a different file name so results can be compared for the 2 methods + if not use_pos_terrain_method == "true": + # use the 'slope' method by default + positive = os.path.join(out_subfolder_path, "Slope" + oid) + arcpy.gp.Slope_sa(diff_minus_avg, positive, "DEGREE", "1") + else: + # the 'Positive Terrains' method + input_raster_or_constant_value_2 = "0.3" + positive = os.path.join(out_subfolder_path, "GreaterMin" + oid) + arcpy.gp.GreaterThan_sa(diff_minus_avg, input_raster_or_constant_value_2, positive) + + # Set output coordinate system (need this to keep it from changing in Zonal Stats step) + desc = arcpy.Describe(clip_dtm_raster) + sr = desc.spatialReference + arcpy.env.outputCoordinateSystem = sr + linear_unit = sr.linearUnitName.upper() + # arcpy.AddMessage("Spatial ref of DSM clipped raster is: 
\n{0}\n".format(sr.exportToString())) + + # Zonal Statistics + # first run build raster attribute table on basin_flowdi5 + arcpy.BuildRasterAttributeTable_management(basin_flowdi5, "Overwrite") + + # Output a .tif file to avoid modification of SRS (bug which applies only to GRID) + zonalst_basi4 = os.path.join(out_subfolder_path, "ZonalStBas" + oid + r".tif") + arcpy.gp.ZonalStatistics_sa(basin_flowdi5, "VALUE", positive, zonalst_basi4, "MEAN", "DATA") + + # Iso Cluster Unsupervised Classification + # arcpy.AddMessage("Classifying...") + isocluster2 = os.path.join(out_subfolder_path, "isocluster" + oid) + # Write out a permanent signature file to avoid conflicts with other concurrent processes + iso_sig_file = os.path.join(out_subfolder_path, r"iso_sig" + oid + r".gsg") + # wrap this in a try block in case zero (0) classes are found (this happens in desert areas) + try: + arcpy.gp.IsoClusterUnsupervisedClassification_sa(zonalst_basi4, "2", isocluster2, "20", "10", iso_sig_file) + # messages = arcpy.GetMessages() + # arcpy.AddMessage("\nResults from IsoClusterUnsupervisedClassification_sa of oid {0} are:" + # " \n{1}\n".format(oid, messages)) + + except: + arcpy.AddMessage("No Buildings or Trees were identified in region {0}".format(oid)) + if delete_intermediate_files == "true": + remove_rasters(out_subfolder_path) + remove_shapefiles(out_subfolder_path) + arcpy.env.workspace = "" # so we can successfully delete scratch_file_gdb_path + remove_filegdb(scratch_file_gdb_path) + del desc, sr + return + # check actual number of classes obtained + raster_iso = arcpy.Raster(isocluster2) + buildingsfound = True + if int(raster_iso.maximum) == int(raster_iso.minimum): + # Only one class found = assumed to be trees for now, but this might change + arcpy.AddMessage("No buildings were identified in region {0}".format(oid)) + # arcpy.AddMessage("No records will be output to: {0}".format(buildings_output)) + buildingsfound = False + + # Always look for buildings regardless if asked for, since we look for trees where no buildings exist + if buildingsfound: + # Set Null + setnullbd = os.path.join(out_subfolder_path, "SetNullbd" + oid) + arcpy.gp.SetNull_sa(isocluster2, isocluster2, setnullbd, "VALUE>1") + + # Raster to Polygon + bdiso = os.path.join(scratch_file_gdb_path, "bdiso" + oid) + arcpy.RasterToPolygon_conversion(setnullbd, bdiso, "NO_SIMPLIFY", "VALUE") + + # Add Geometry Attributes + arcpy.AddGeometryAttributes_management(bdiso, + "AREA;PERIMETER_LENGTH", "METERS", "SQUARE_METERS", "") + # Select + buildings_sel = "in_memory\\building_area50" + oid + if "FOOT" in linear_unit: + # xy linear units are foot or foot_us + arcpy.Select_analysis(bdiso, buildings_sel, "\"POLY_AREA\" >= 538.19") + else: + # xy linear units are meter + arcpy.Select_analysis(bdiso, buildings_sel, "\"POLY_AREA\" >= 50") + + # Add Field + if len(arcpy.ListFields(buildings_sel, "ratio")) < 1: + arcpy.AddField_management(buildings_sel, "ratio", "DOUBLE", "", "", "", "", + "NULLABLE", "NON_REQUIRED", "") + # Calculate Field + arcpy.CalculateField_management(buildings_sel, "ratio", "!POLY_AREA! 
/ !PERIMETER!", + "PYTHON_9.3", "") + # Select + # bd_075 = "in_memory\\bd_075" + bd_075 = os.path.join(scratch_file_gdb_path, "bd_075" + oid) + arcpy.Select_analysis(buildings_sel, bd_075, "\"ratio\" >=0.75") + arcpy.Delete_management(buildings_sel) + + # Aggregate Polygons + # bdagg_tbl = "in_memory\\tbl" + bdagg_tbl = os.path.join(scratch_file_gdb_path, "bdagg_tbl" + oid) + arcpy.AggregatePolygons_cartography(bd_075, buildings_output, "1.5 Meters", "50 SquareMeters", + "250 SquareMeters", "ORTHOGONAL", "", bdagg_tbl) + + # Repair the building geometries in case they have self intersecting geometries + arcpy.RepairGeometry_management(buildings_output, "DELETE_NULL") + # Note: can do zonal stats here but buildings along borders are dissolved later, + # which invalidates the gathered stats for those particular buildings + # zonalstats(buildings_output, diff_ori, results_file_gdb_path) + + # If the user asks for trees, then the process is different if buildings are also found + if "TREES" in featureextraction.upper(): + isnull_setnu1 = os.path.join(out_subfolder_path, "IsNullSetN" + oid) + if buildingsfound: + # Is Null + isnull_isocl1 = os.path.join(out_subfolder_path, "IsNullisoc" + oid) + arcpy.gp.IsNull_sa(isocluster2, isnull_isocl1) + # Con + ras_isnull_isocl1 = Raster(isnull_isocl1) + int_raster_one = ras_isnull_isocl1 / ras_isnull_isocl1 + con_isnull_i1 = os.path.join(out_subfolder_path, "ConIsNulli" + oid) + arcpy.gp.Con_sa(isnull_isocl1, int_raster_one, con_isnull_i1, isocluster2, "VALUE=1") + arcpy.Delete_management(ras_isnull_isocl1) + arcpy.Delete_management(int_raster_one) + # Focal Statistics + focalst_isoc1 = os.path.join(out_subfolder_path, "FocalStiso" + oid) + arcpy.gp.FocalStatistics_sa(Int(con_isnull_i1), focalst_isoc1, "Rectangle 3 3 CELL", "MAJORITY", "DATA") + # Set Null + setnull_isot = os.path.join(out_subfolder_path, "SetNulliso" + oid) + arcpy.gp.SetNull_sa(focalst_isoc1, 0, setnull_isot, "VALUE = 1") + + # Is Null + arcpy.gp.IsNull_sa(setnull_isot, isnull_setnu1) + else: + arcpy.gp.IsNull_sa(isocluster2, isnull_setnu1) + + # Make Raster Layer + treenull = "treeNULL" + oid + arcpy.MakeRasterLayer_management(isnull_setnu1, treenull, "", "", "") + + # Select Layer By Attribute + arcpy.SelectLayerByAttribute_management(treenull, "NEW_SELECTION", "VALUE=1") + + # Euclidean Distance + eucdist_make1 = os.path.join(out_subfolder_path, "EucDistMak" + oid) + arcpy.gp.EucDistance_sa(treenull, eucdist_make1, "", treenull, "") + arcpy.Delete_management(treenull) + + # Set Null + setnull_eucd1 = os.path.join(out_subfolder_path, "SetNulEucD" + oid) + if "FOOT" in linear_unit: + arcpy.gp.SetNull_sa(eucdist_make1, eucdist_make1, setnull_eucd1, "VALUE<2.95") + else: + arcpy.gp.SetNull_sa(eucdist_make1, eucdist_make1, setnull_eucd1, "VALUE<0.9") + + setnulldistest = arcpy.Raster(setnull_eucd1) + if setnulldistest.maximum <= -1: + arcpy.AddMessage("No Trees were identified in region {0}".format(oid)) + # print "No Trees were identified in region: " + oid + if delete_intermediate_files == "true": + remove_rasters(out_subfolder_path) + remove_shapefiles(out_subfolder_path) + arcpy.env.workspace = "" # so we can successfully delete scratch_file_gdb_path + remove_filegdb(scratch_file_gdb_path) + if arcpy.Exists(raster_iso): + arcpy.Delete_management(raster_iso) + del desc, sr + return + + # Focal Statistics + focalst_setn1 = os.path.join(out_subfolder_path, "FocalStStN" + oid) + arcpy.gp.FocalStatistics_sa(setnull_eucd1, focalst_setn1, "Circle 3 CELL", "MAXIMUM", "DATA") + + # Minus + 
minus_focals1 = os.path.join(out_subfolder_path, "MinusFoclS" + oid)
+            arcpy.gp.Minus_sa(focalst_setn1, setnull_eucd1, minus_focals1)
+            arcpy.Delete_management(setnulldistest)  # deleted after done w/ setnull_eucd1 & setnulldistest
+
+            # Equal To
+            equalto_minu1 = os.path.join(out_subfolder_path, "EqualToMin" + oid)
+            arcpy.gp.EqualTo_sa(minus_focals1, "0", equalto_minu1)
+
+            # Set Null
+            setnull_equa1 = os.path.join(out_subfolder_path, "SetNulEqua" + oid)
+            arcpy.gp.SetNull_sa(equalto_minu1, eucdist_make1, setnull_equa1, "VALUE=0")
+
+            setnulleqtst = arcpy.Raster(setnull_equa1)
+            if setnulleqtst.maximum <= -1:
+                arcpy.AddMessage("No Trees were identified in region {0}".format(oid))
+                # print("No Trees were identified in region: " + oid)
+                if delete_intermediate_files == "true":
+                    remove_rasters(out_subfolder_path)
+                    remove_shapefiles(out_subfolder_path)
+                    arcpy.env.workspace = ""  # so we can successfully delete scratch_file_gdb_path
+                    remove_filegdb(scratch_file_gdb_path)
+                if arcpy.Exists(raster_iso):
+                    arcpy.Delete_management(raster_iso)
+                del desc, sr
+                return
+
+            # Plus
+            # plus_int_set1 = os.path.join(out_subfolder_path, "Plus_Int_Set1" + oid)
+            flt_meter_elev_unit = float(elevation_meter_scalefactor)
+            plus_int_set1 = Raster(setnull_equa1) + flt_meter_elev_unit
+
+            # Int
+            int_setnull_1 = os.path.join(out_subfolder_path, "IntSetNull" + oid)
+            arcpy.gp.Int_sa(plus_int_set1, int_setnull_1)
+            arcpy.Delete_management(plus_int_set1)
+            arcpy.Delete_management(setnulleqtst)  # deleted after done with setnull_equa1 & setnulleqtst
+
+            # Clean Tree Raster
+            cleantreeras1 = os.path.join(out_subfolder_path, "CleanTreeR" + oid)
+            cleanfast(int_setnull_1, cleantreeras1)
+
+            # Raster to Polygon
+            rastert_setnull1 = os.path.join(scratch_file_gdb_path, "RastrTSetN" + oid)
+            arcpy.RasterToPolygon_conversion(cleantreeras1, rastert_setnull1, "NO_SIMPLIFY", "Value")
+            arcpy.Delete_management(cleantreeras1)
+
+            # Feature To Point
+            rastert_setnull1_featuretopo = os.path.join(scratch_file_gdb_path,
+                                                        "RasterT_SetNull1_FeatureToPo" + oid)
+            arcpy.FeatureToPoint_management(rastert_setnull1, rastert_setnull1_featuretopo, "INSIDE")
+
+            if buildingsfound:
+                # Make Feature Layer
+                rastert_setnull1_featuretopolyr = "RasterT_SetNull1_FeatureToPoLyr" + oid
+                arcpy.MakeFeatureLayer_management(rastert_setnull1_featuretopo, rastert_setnull1_featuretopolyr,
+                                                  "", "", "")
+
+                # Copy Features
+                bdagg_copyfeatures = os.path.join(scratch_file_gdb_path, "bdAgg_CopyFeatures" + oid)
+                arcpy.CopyFeatures_management(buildings_output, bdagg_copyfeatures, "", "0", "0", "0")
+
+                # Select Layer By Location
+                arcpy.SelectLayerByLocation_management(rastert_setnull1_featuretopolyr, "WITHIN_A_DISTANCE",
+                                                       bdagg_copyfeatures, "3.3 Feet", "NEW_SELECTION",
+                                                       "NOT_INVERT")
+
+                # Select Layer By Attribute
+                arcpy.SelectLayerByAttribute_management(rastert_setnull1_featuretopolyr, "SWITCH_SELECTION", "")
+
+                # Buffer
+                arcpy.Buffer_analysis(rastert_setnull1_featuretopolyr, trees_output,
+                                      "GRIDCODE", "FULL", "ROUND", "NONE", "", "PLANAR")
+                # arcpy.AddMessage("Trees exported to: {0}".format(trees_output))
+                arcpy.Delete_management(rastert_setnull1_featuretopolyr)
+            else:
+                # Buffer
+                arcpy.Buffer_analysis(rastert_setnull1_featuretopo, trees_output, "GRIDCODE", "FULL", "ROUND",
+                                      "NONE", "", "PLANAR")
+                # arcpy.AddMessage("Trees exported to: {0}".format(trees_output))
+
+        if arcpy.Exists(raster_iso):
+            arcpy.Delete_management(raster_iso)
+        arcpy.Delete_management("in_memory")
+        # Clean up the rasters in out_subfolder_path
+        if delete_intermediate_files == "true":
+            remove_rasters(out_subfolder_path)
+            remove_shapefiles(out_subfolder_path)
+            arcpy.env.workspace = ""  # so we can successfully delete scratch_file_gdb_path
+            remove_filegdb(scratch_file_gdb_path)
+        # arcpy.env.workspace = out_subfolder_path
+        del desc, sr
+        end = time.time()
+        delta = end - start
+        arcpy.AddMessage("Elapsed time for OID {0} is {1} seconds".format(oid, delta))
+        return
+
+    except arcpy.ExecuteError:
+        print(arcpy.GetMessages())
+    except Exception:
+        # Return any Python specific errors and any error returned by the geoprocessor
+        tb = sys.exc_info()[2]
+        tbinfo = traceback.format_tb(tb)[0]
+        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
+                str(sys.exc_info()[0]) + ": " + str(sys.exc_info()[1]) + "\n"
+        arcpy.AddError(pymsg)
+
+        msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n"
+        arcpy.AddError(msgs)
+        arcpy.CheckInExtension('3D')
+        raise
+
+
+def remove_shapefiles(directory):
+    # Delete all of the shapefiles in directory
+    arcpy.env.workspace = directory
+    featureclasses = arcpy.ListFeatureClasses()
+    for featureclass in featureclasses:
+        arcpy.Delete_management(featureclass)
+    return
+
+
+def remove_rasters(directory):
+    # Delete all of the rasters in directory
+    arcpy.env.workspace = directory
+    rasters = arcpy.ListRasters("*", "ALL")
+    for raster in rasters:
+        arcpy.Delete_management(raster)
+    return
+
+
+def remove_filegdb(filegdb):
+    # Delete all of the feature classes in filegdb, then delete filegdb
+    arcpy.env.workspace = filegdb
+    featureclasses = arcpy.ListFeatureClasses()
+    for featureclass in featureclasses:
+        arcpy.Delete_management(featureclass)
+    arcpy.env.workspace = ""
+    try:
+        arcpy.Delete_management(filegdb)
+    except arcpy.ExecuteError:
+        arcpy.AddMessage("Unable to delete file GDB: {0}".format(filegdb))
+    # This extra code is needed because of a bug which results in the deleted file gdb being
+    # changed into a folder (i.e. loses its designation as a file gdb, but the folder still exists)
+    if arcpy.Exists(filegdb):
+        arcpy.Delete_management(filegdb)
+    return
+
+
+def remove_tables(filegdb):
+    # Remove all of the tables in filegdb
+    arcpy.env.workspace = filegdb
+    tables = arcpy.ListTables()
+    for table in tables:
+        arcpy.Delete_management(table)
+    return
+
+
+def delete(array, i, j, cellsize):
+    try:
+        # This cleans overlapping trees: treat array[i][j] as the radius (in cells) of
+        # the crown centered at (i, j), and suppress any nearby smaller crown that
+        # overlaps it
+        ext = array[i][j]
+        for x in range(i - ext, i + ext):
+            for y in range(j - ext, j + ext):
+                # stay inside the array bounds and skip the center cell itself
+                if x > 0 and x < (array.shape[0] - 1) and y > 0 and y < (array.shape[1] - 1) and (x != i or y != j):
+                    r = array[x][y]
+                    # only consider smaller crowns whose radius is more than half of ext
+                    if r > 0 and ext >= r:
+                        if (float(r) / float(ext)) > 0.5:
+                            distance = math.sqrt(math.pow(x - i, 2) + math.pow(y - j, 2)) * cellsize
+                            if distance < (1.0 * float(ext)):  # change threshold here
+                                array[x][y] = 0
+        return
+    except Exception:
+        # Return any Python specific errors and any error returned by the geoprocessor
+        tb = sys.exc_info()[2]
+        tbinfo = traceback.format_tb(tb)[0]
+        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
+                str(sys.exc_info()[0]) + ": " + str(sys.exc_info()[1]) + "\n"
+        arcpy.AddError(pymsg)
+        raise
+
+
+def cleanfast(inputraster, outputraster):
+    try:
+        # get raster information
+        desc = arcpy.Describe(inputraster)
+        sr = desc.spatialReference
+        point = arcpy.Point(desc.Extent.XMin, desc.Extent.YMin)
+
+        # iterate through raster array
+        dist = arcpy.RasterToNumPyArray(in_raster=inputraster, nodata_to_value=0)
+        locs = dist.nonzero()
+        # part = int(len(locs[0]) / 10)
+
+        for x in range(0, len(locs[0])):
+            # if (int(x) % part) == 0:
+            #     arcpy.AddMessage(str(float(x) / float(part) * 10) + "% completed")
+            locx = locs[0][x]
+            locy = locs[1][x]
+            delete(dist, locx, locy, desc.meanCellWidth)
+
+        # output
+        distraster = arcpy.NumPyArrayToRaster(in_array=dist, lower_left_corner=point,
+                                              x_cell_size=desc.meanCellWidth,
+                                              y_cell_size=desc.meanCellWidth,
+                                              value_to_nodata=0)
+        arcpy.DefineProjection_management(distraster, sr)
+        distraster.save(outputraster)
+        del desc, sr, point
+        return
+
+    except arcpy.ExecuteError:
+        print(arcpy.GetMessages())
+    except Exception:
+        # Return any Python specific errors and any error returned by the geoprocessor
+        tb = sys.exc_info()[2]
+        tbinfo = traceback.format_tb(tb)[0]
+        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
+                str(sys.exc_info()[0]) + ": " + str(sys.exc_info()[1]) + "\n"
+        arcpy.AddError(pymsg)
+
+        msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n"
+        arcpy.AddError(msgs)
+        raise
+
+
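+# A minimal illustration (hypothetical values, not executed) of the crown cleaning that
+# delete() and cleanfast() perform on the integer crown-radius array, assuming a cell
+# size of 1 map unit: where two nearby maxima overlap and the smaller radius is more
+# than half the larger, the smaller crown is zeroed out.
+#
+#   before cleanfast():          after cleanfast():
+#   [[0, 0, 0, 0, 0],            [[0, 0, 0, 0, 0],
+#    [0, 3, 0, 2, 0],             [0, 3, 0, 0, 0],
+#    [0, 0, 0, 0, 0]]             [0, 0, 0, 0, 0]]
+#
+#   The radius-2 crown lies 2 cells from the radius-3 crown: 2/3 > 0.5 and 2 < 3,
+#   so only one tree point (and one buffered crown) is generated for that cluster.
+
+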
+def zonalstats(vectorfc, rasterheights, resultstablegdb):
+    # Gather zonal statistics for the features in vectorfc
+    try:
+        start = time.time()
+        table_prefix = os.path.splitext(os.path.basename(vectorfc))[0]
+        zonaltable = os.path.join(resultstablegdb, table_prefix + r"_zonalTbl")
+        oid_fieldname = arcpy.Describe(vectorfc).OIDFieldName
+        arcpy.gp.ZonalStatisticsAsTable_sa(vectorfc, oid_fieldname, rasterheights, zonaltable, "DATA", "ALL")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from ZonalStatisticsAsTable_sa are: \n{0}\n".format(messages))
+        arcpy.MakeFeatureLayer_management(vectorfc, "vectorfc_layer")
+        arcpy.JoinField_management("vectorfc_layer", oid_fieldname, zonaltable, join_field="OBJECTID",
+                                   fields="COUNT;AREA;MIN;MAX;RANGE;MEAN;STD;SUM")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from JoinField are: \n{0}\n".format(messages))
+        arcpy.Delete_management("vectorfc_layer")
+        end = time.time()
+        delta = end - start
+ 
arcpy.AddMessage("Elapsed time for ZonalStats and JoinField is {0} seconds".format(delta)) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def create_md_from_raster(raster_folder, file_gdb, md_name, md_boundary, build_footprints, export_boundary): + arcpy.env.overwriteOutput = True + # Create and populate a mosaic dataset with elevation rasters (32-bit float) + try: + arcpy.env.workspace = raster_folder + if not arcpy.Exists(file_gdb): + arcpy.AddMessage("\n*** Exiting create_md_from_raster...File GDB Does not exist: {0} ***".format(file_gdb)) + return + + if not os.path.exists(raster_folder): + arcpy.AddMessage("\n*** Exiting create_md_from_raster..." + "Raster Folder Does not exist: {0} ***".format(raster_folder)) + return + + full_md_path = os.path.join(file_gdb, md_name) + arcpy.AddMessage("\nMD to be created: {0}".format(full_md_path)) + + # Don't re-create the Mosaic Dataset if it already exists + if not arcpy.Exists(full_md_path): + # Get the spatial reference string of the first raster (to use in creation of MD) + rasters = arcpy.ListRasters("*", "All") + # Make sure there's at least one raster in raster_folder + # If not, then exit the script + # If so, get the raster's Spatial Reference + if len(rasters) > 0: + desc_firstraster = arcpy.Describe(rasters[0]) + spatref_firstraster = desc_firstraster.SpatialReference.exportToString() + arcpy.AddMessage("Spatial ref of 1st raster in {0} is: \n{1}\n".format(raster_folder, + spatref_firstraster)) + arcpy.AddMessage("Number of rasters in {0}: {1}".format(raster_folder, len(rasters))) + else: + arcpy.AddMessage("\n*** Exiting create_md_from_raster..." 
+ "No rasters found in {0} ***".format(raster_folder)) + return + # Create a Mosaic Dataset + arcpy.CreateMosaicDataset_management(file_gdb, md_name, + coordinate_system=spatref_firstraster, + num_bands="1", pixel_type="32_BIT_FLOAT", product_definition="NONE", + product_band_definitions="#") + del desc_firstraster, spatref_firstraster, rasters + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from CreateMosaicDataset are: \n{0}\n".format(messages)) + + # set the data_type to ELEVATION + arcpy.SetRasterProperties_management(full_md_path, data_type="ELEVATION", statistics="", + stats_file="#", nodata="") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from SetRasterProperties are: \n{0}\n".format(messages)) + + # Add rasters from Raster folder to MD + arcpy.AddRastersToMosaicDataset_management(full_md_path, raster_type="Raster Dataset", + input_path=raster_folder, + update_cellsize_ranges="UPDATE_CELL_SIZES", + update_boundary="UPDATE_BOUNDARY", + update_overviews="NO_OVERVIEWS", maximum_pyramid_levels="", + maximum_cell_size="0", + minimum_dimension="1500", spatial_reference="", filter="", + sub_folder="SUBFOLDERS", + duplicate_items_action="ALLOW_DUPLICATES", + build_pyramids="NO_PYRAMIDS", + calculate_statistics="NO_STATISTICS", + build_thumbnails="NO_THUMBNAILS", + operation_description="#", + force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from AddRastersToMosaicDataset are: \n{0}\n".format(messages)) + + # re-calculate cell size ranges so export will work at various scales + arcpy.CalculateCellSizeRanges_management(full_md_path, where_clause="", do_compute_min="MIN_CELL_SIZES", + do_compute_max="MAX_CELL_SIZES", max_range_factor="100", + cell_size_tolerance_factor="0.8", update_missing_only="UPDATE_ALL") + + if build_footprints == "true": + arcpy.BuildFootprints_management(full_md_path, "", "RADIOMETRY", "-10", "4294967295", "80", "0", + "NO_MAINTAIN_EDGES", "SKIP_DERIVED_IMAGES", "UPDATE_BOUNDARY", + "2000", "100", "NONE", "", "20", "0.05") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from BuildFootprints are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("\n*** Mosaic Dataset already exists: {0} ***".format(full_md_path)) + + # default_compression_type="LERC" + # clip_to_footprints="NOT_CLIP" + # data_source_type="ELEVATION" + # rows_maximum_imagesize="15000" + arcpy.SetMosaicDatasetProperties_management(full_md_path, "15000", "15000", "None;JPEG;LZ77;LERC", "LERC", + "75", "0.01", "BILINEAR", "NOT_CLIP", + "FOOTPRINTS_MAY_CONTAIN_NODATA", + "NOT_CLIP", "NOT_APPLY", "#", "NONE", + "NorthWest;Center;LockRaster;ByAttribute;Nadir;Viewpoint;" + "Seamline;None", + "NorthWest", "", "", "ASCENDING", "FIRST", "10", "600", "300", "20", + "0.8", "", "BASIC", + "Name;MinPS;MaxPS;LowPS;HighPS;Tag;GroupName;ProductName;" + "CenterX;CenterY;ZOrder;Shape_Length;Shape_Area", "DISABLED", "", + "", "", "", "20", "1000", "ELEVATION", "1", "None", "None") + + # Get a record count just to be sure we found raster products to ingest + result = arcpy.GetCount_management(full_md_path) + count_rasters = int(result.getOutput(0)) + + if count_rasters == 0: + arcpy.AddMessage("\n*** Exiting: {0} Mosaic Dataset has no raster products ***".format(full_md_path)) + sys.exit() + else: + arcpy.AddMessage("{0} has {1} raster product(s).".format(full_md_path, count_rasters)) + + # boundary = os.path.join(file_gdb, md_boundary) + if export_boundary == "true": + if not 
arcpy.Exists(md_boundary): + # Export Boundary to the file GDB which holds the final results + arcpy.ExportMosaicDatasetGeometry_management(full_md_path, md_boundary, "", "BOUNDARY") + messages = arcpy.GetMessages() + arcpy.AddMessage("Results output from ExportMosaicDatasetGeometry are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("Exported boundary already exists: {}".format(md_boundary)) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def create_md_from_las(lasd, raster_type_file, file_gdb, md_name, md_boundary, build_footprints, + export_boundary): + + arcpy.env.overwriteOutput = True + # Create and populate a mosaic dataset with a LAS dataset (32-bit float) + try: + # arcpy.env.workspace = raster_folder + if not arcpy.Exists(file_gdb): + arcpy.AddMessage("\n*** Exiting create_md_from_las...File GDB Does not exist: {0} ***".format(file_gdb)) + return + if not os.path.exists(lasd): + arcpy.AddMessage("\n*** Exiting create_md_from_las...LAS dataset Does not exist: {0} ***".format(lasd)) + return + + full_md_path = os.path.join(file_gdb, md_name) + arcpy.AddMessage("\nMD to be created: {0}".format(full_md_path)) + # md_boundary = full_md_path + boundary_append + + # Don't re-create the Mosaic Dataset if it already exists + if not arcpy.Exists(full_md_path): + # Get the spatial reference string of the LAS Dataset (to use in creation of MD) + desc_lasd = arcpy.Describe(lasd) + spat_ref_lasd = desc_lasd.SpatialReference + # Create a Mosaic Dataset + arcpy.CreateMosaicDataset_management(file_gdb, md_name, + coordinate_system=spat_ref_lasd, + num_bands="1", pixel_type="32_BIT_FLOAT", product_definition="NONE", + product_band_definitions="#") + del desc_lasd, spat_ref_lasd + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from CreateMosaicDataset are: \n{0}\n".format(messages)) + + # set the NoData value to -3.40282346639e+038 + arcpy.SetRasterProperties_management(full_md_path, data_type="ELEVATION", statistics="", + stats_file="#", nodata="") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from SetRasterProperties are: \n{0}\n".format(messages)) + + # Add rasters from Raster folder to MD + arcpy.AddRastersToMosaicDataset_management(full_md_path, raster_type=raster_type_file, + input_path=lasd, + update_cellsize_ranges="UPDATE_CELL_SIZES", + update_boundary="UPDATE_BOUNDARY", + update_overviews="NO_OVERVIEWS", maximum_pyramid_levels="", + maximum_cell_size="0", + minimum_dimension="1500", spatial_reference="", filter="#", + sub_folder="SUBFOLDERS", + duplicate_items_action="ALLOW_DUPLICATES", + build_pyramids="NO_PYRAMIDS", + calculate_statistics="NO_STATISTICS", + build_thumbnails="NO_THUMBNAILS", + operation_description="#", + force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from AddRastersToMosaicDataset are: \n{0}\n".format(messages)) + + if build_footprints == "true": + arcpy.BuildFootprints_management(full_md_path, "", "RADIOMETRY", "-100", "4294967295", "300", "0", + "MAINTAIN_EDGES", "SKIP_DERIVED_IMAGES", "UPDATE_BOUNDARY", + "2000", "20", 
"NONE", "", "20", "0.05") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from BuildFootprints are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("\n*** Mosaic Dataset already exists: {0} ***".format(full_md_path)) + + # Set the desired MD properties (non-default parameters are listed below): + # default_compression_type="LERC" + # clip_to_boundary="CLIP" + # data_source_type="ELEVATION" + # rows_maximum_imagesize="25000" + # columns_maximum_imagesize="25000" + arcpy.SetMosaicDatasetProperties_management(full_md_path, rows_maximum_imagesize="25000", + columns_maximum_imagesize="25000", + allowed_compressions="None;JPEG;LZ77;LERC", + default_compression_type="LERC", JPEG_quality="75", + LERC_Tolerance="0.01", resampling_type="BILINEAR", + clip_to_footprints="NOT_CLIP", + footprints_may_contain_nodata="FOOTPRINTS_MAY_CONTAIN_NODATA", + clip_to_boundary="CLIP", + color_correction="NOT_APPLY", + allowed_mensuration_capabilities="Basic", + default_mensuration_capabilities="Basic", + allowed_mosaic_methods="NorthWest;Center;LockRaster;ByAttribute;" + "Nadir;Viewpoint;Seamline;None", + default_mosaic_method="NorthWest", order_field="", order_base="#", + sorting_order="ASCENDING", mosaic_operator="FIRST", blend_width="0", + view_point_x="600", view_point_y="300", max_num_per_mosaic="20", + cell_size_tolerance="0.8", cell_size="#", metadata_level="BASIC", + transmission_fields="", + use_time="DISABLED", start_time_field="", end_time_field="#", + time_format="#", geographic_transform="#", + max_num_of_download_items="20", max_num_of_records_returned="1000", + data_source_type="ELEVATION", minimum_pixel_contribution="1") + + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from SetMosaicDatasetProperties are: \n{0}\n".format(messages)) + + # Get a record count just to be sure we found raster products to ingest + result = arcpy.GetCount_management(full_md_path) + count_lasd = int(result.getOutput(0)) + + if count_lasd == 0: + arcpy.AddMessage("\n*** Exiting: {0} Mosaic Dataset has no LASD contents ***".format(full_md_path)) + sys.exit() + else: + arcpy.AddMessage("{0} has {1} LASD(s).".format(full_md_path, count_lasd)) + + # boundary = os.path.join(file_gdb, md_boundary) + if export_boundary == "true": + if not arcpy.Exists(md_boundary): + # Export Boundary to the file GDB which holds the final results + arcpy.ExportMosaicDatasetGeometry_management(full_md_path, md_boundary, "", "BOUNDARY") + messages = arcpy.GetMessages() + arcpy.AddMessage("Results output from ExportMosaicDatasetGeometry are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("Exported boundary already exists: {}".format(md_boundary)) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def regularize_some_buildings(building, bd_to_reg_shp, rastert_mask_mb1_symdiff_shp, expression, cellsize, scratch_ws): + # This routine will regularize smaller buildings that are oriented North-South + # The remainder of the buildings will be placed in bd_to_reg_shp for regularization by the 'Regularize Building + # Footprint' gp tool in ArcGIS Pro + try: + loc = 
building.rfind(".gdb") + bdfilename = building[loc + 5:] + + buildings_lyr = "buildings" + # ratio_vr = Buildings_output__2_ + # ratio_occ = ratio_vr + # ori_vex = ratio_occ + bdmbrv_shp = scratch_ws + "\\" + "bdMBRv.shp" + # MBR_vex = bdMBRv_shp + # all = ori_vex + bdmbrr_shp = scratch_ws + "\\" + "bdMBRr.shp" + # MBR = bdMBRr_shp + # building1 = all + # building2 = building1 + bd_mbr_output_shp = scratch_ws + "\\" + "bd_MBR_output.shp" + bd_mbr_final_shp = scratch_ws + "\\" + "bd_MBR_final.shp" + bd_mbr_final_polygontoraster_tif = scratch_ws + "\\" + "bd_MBR_final_PolygonToRaster.tif" + # Input_raster_or_constant_value_2__2_ = "-1" + lesstha_buil1 = scratch_ws + "\\" + "LessTha_buil1" + # Input_raster_or_constant_value_2__3_ = "1" + mask_mbr = scratch_ws + "\\" + "mask_mbr" + rastert_mask_mb1_shp = scratch_ws + "\\" + "RasterT_mask_mb1.shp" + buildings_polygontoraster_tif = scratch_ws + "\\" + "buildings_PolygonToRaster.tif" + # Input_raster_or_constant_value_2 = "-1" + greater_bd_m1 = scratch_ws + "\\" + "Greater_bd_M1" + isnull_great1 = scratch_ws + "\\" + "IsNull_Great1" + plus_isnull_1 = scratch_ws + "\\" + "Plus_IsNull_1" + # Input_true_raster_or_constant_value = "1" + # Input_false_raster_or_constant_value__2_ = "0" + con_plus_isn1 = scratch_ws + "\\" + "Con_Plus_IsN1" + isnull_con_p1 = scratch_ws + "\\" + "IsNull_Con_P1" + # Input_false_raster_or_constant_value__3_ = "0" + con_plus_isn2 = scratch_ws + "\\" + "Con_Plus_IsN2" + shrink_plus_1 = scratch_ws + "\\" + "Shrink_Plus_1" + times_shrink1 = scratch_ws + "\\" + "Times_Shrink1" + # Input_false_raster_or_constant_value = "0" + setnull_time1 = scratch_ws + "\\" + "SetNull_Time1" + rastert_setnull1_shp = scratch_ws + "\\" + "RasterT_SetNull1.shp" + rastert_setnull1_minimumboun_shp = scratch_ws + "\\" + "RasterT_SetNull1_MinimumBoun.shp" + mbr_sel_shp = scratch_ws + "\\" + "mbr_sel.shp" + # RasterT_SetNull1_MinimumBoun = "mbr_sel_Layer" + mbr_sel_layer = "mbr_sel_Layer" + # RasterT_SetNull1_MinimumBoun__2_ = RasterT_SetNull1_MinimumBoun + # RasterT_SetNull1_MinimumBoun__3_ = RasterT_SetNull1_MinimumBoun__2_ + + arcpy.MakeFeatureLayer_management(building, buildings_lyr) + # Process: Add Geometry Attributes + arcpy.AddGeometryAttributes_management(building, "AREA", "", "SQUARE_METERS", "") + + # Process: Add Field + if len(arcpy.ListFields(buildings_lyr, "ratio_vr")) < 1: + arcpy.AddField_management(buildings_lyr, "ratio_vr", "DOUBLE", "", "", "", "", + "NULLABLE", "NON_REQUIRED", "") + + # Process: Add Field + if len(arcpy.ListFields(buildings_lyr, "ratio_occ")) < 1: + arcpy.AddField_management(buildings_lyr, "ratio_occ", "DOUBLE", "", "", "", "", + "NULLABLE", "NON_REQUIRED", "") + + # Process: Minimum Bounding Geometry + arcpy.MinimumBoundingGeometry_management(building, bdmbrv_shp, "CONVEX_HULL", "NONE", "", "NO_MBG_FIELDS") + + # Process: Add Geometry Attributes + arcpy.AddGeometryAttributes_management(bdmbrv_shp, "AREA", "", "SQUARE_METERS", "") + + # Process: Add Join + arcpy.AddJoin_management(buildings_lyr, "OBJECTID", bdmbrv_shp, "ORIG_FID", "KEEP_ALL") + + # Process: Minimum Bounding Geometry + arcpy.MinimumBoundingGeometry_management(building, bdmbrr_shp, "ENVELOPE", "NONE", "", "NO_MBG_FIELDS") + + # Process: Add Geometry Attributes + arcpy.AddGeometryAttributes_management(bdmbrr_shp, "AREA", "", "SQUARE_METERS", "") + + # Process: Add Join + arcpy.AddJoin_management(buildings_lyr, "bdMBRv.ORIG_FID", bdmbrr_shp, "ORIG_FID", "KEEP_ALL") + + # Process: Calculate Field + arcpy.CalculateField_management(buildings_lyr, 
"ratio_vr", "!bdMBRv.POLY_AREA! / !bdMBRr.POLY_AREA!", + "PYTHON_9.3", "") + + # Process: Calculate Field + arcpy.CalculateField_management(buildings_lyr, "ratio_occ", + "!" + bdfilename + ".POLY_AREA! / !bdMBRr.POLY_AREA!", "PYTHON_9.3", "") + + # Process: Select + arcpy.Select_analysis(buildings_lyr, bd_to_reg_shp, + "\"" + bdfilename + ".POLY_AREA\" >= 500 OR \"" + bdfilename + ".ratio_vr\"<0.70") + + # Process: Select + arcpy.Select_analysis(buildings_lyr, bd_mbr_output_shp, + "\"" + bdfilename + ".POLY_AREA\" <= 500 AND \"" + bdfilename + ".ratio_vr\">=0.70") + + # Process: Minimum Bounding Geometry + arcpy.MinimumBoundingGeometry_management(bd_mbr_output_shp, bd_mbr_final_shp, "ENVELOPE", "NONE", "", + "NO_MBG_FIELDS") + + # Process: Polygon to Raster + arcpy.PolygonToRaster_conversion(bd_mbr_final_shp, "bdMBRv_rat", bd_mbr_final_polygontoraster_tif, + "CELL_CENTER", "NONE", cellsize) + + # Process: Less Than + arcpy.gp.LessThan_sa(bd_mbr_final_polygontoraster_tif, "-1", lesstha_buil1) + + # Process: Plus + # arcpy.gp.Plus_sa(LessTha_buil1, "1", mask_mbr) + lesstha_buil1_ras = Raster(lesstha_buil1) + int_raster_one = CreateConstantRaster(1, "INTEGER", lesstha_buil1_ras.meanCellWidth, lesstha_buil1_ras.extent) + arcpy.gp.Plus_sa(lesstha_buil1, int_raster_one, mask_mbr) + + # Process: Raster to Polygon + arcpy.RasterToPolygon_conversion(mask_mbr, rastert_mask_mb1_shp, "NO_SIMPLIFY", "VALUE") + + # Process: Extra Add Field (since building is now a feature class instead of a shapefile) + if len(arcpy.ListFields(building, "Id")) < 1: + arcpy.AddField_management(building, "Id", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", "") + arcpy.CalculateField_management(building, "Id", "0", "PYTHON_9.3", "") + + # Process: Polygon to Raster + arcpy.PolygonToRaster_conversion(building, "Id", buildings_polygontoraster_tif, "CELL_CENTER", "NONE", + cellsize) + + # Process: Greater Than + arcpy.gp.GreaterThan_sa(buildings_polygontoraster_tif, "-1", greater_bd_m1) + + # Process: Is Null + arcpy.gp.IsNull_sa(greater_bd_m1, isnull_great1) + + # Process: Plus + arcpy.gp.Plus_sa(isnull_great1, lesstha_buil1, plus_isnull_1) + arcpy.Delete_management(lesstha_buil1_ras) # Delete after we're done with lesstha_buil1_ras and lesstha_buil1 + + # Process: Con + arcpy.gp.Con_sa(plus_isnull_1, int_raster_one, con_plus_isn1, "0", "VALUE=0") + arcpy.Delete_management(int_raster_one) + + # Process: Is Null + arcpy.gp.IsNull_sa(con_plus_isn1, isnull_con_p1) + + # Process: Con + arcpy.gp.Con_sa(isnull_con_p1, con_plus_isn1, con_plus_isn2, "0", "VALUE=0") + + # Process: Shrink + arcpy.gp.Shrink_sa(con_plus_isn2, shrink_plus_1, "3", "0") + + # Process: Times + arcpy.gp.Times_sa(shrink_plus_1, mask_mbr, times_shrink1) + + # Process: Set Null + arcpy.gp.SetNull_sa(times_shrink1, "0", setnull_time1, "VALUE=1") + + # Process: Raster to Polygon + arcpy.RasterToPolygon_conversion(setnull_time1, rastert_setnull1_shp, "NO_SIMPLIFY", "VALUE") + + # Process: Minimum Bounding Geometry + arcpy.MinimumBoundingGeometry_management(rastert_setnull1_shp, rastert_setnull1_minimumboun_shp, "ENVELOPE", + "NONE", "", "MBG_FIELDS") + + # Process: Select + arcpy.Select_analysis(rastert_setnull1_minimumboun_shp, mbr_sel_shp, expression) + + # Process: Make Feature Layer + arcpy.MakeFeatureLayer_management(mbr_sel_shp, mbr_sel_layer, "", "", "") + + # Process: Select Layer By Location + arcpy.SelectLayerByLocation_management(mbr_sel_layer, "COMPLETELY_WITHIN", rastert_mask_mb1_shp, "", + "NEW_SELECTION", "NOT_INVERT") + + # Process: Select 
Layer By Attribute + arcpy.SelectLayerByAttribute_management(mbr_sel_layer, "SWITCH_SELECTION", "") + + # Process: Symmetrical Difference + arcpy.SymDiff_analysis(rastert_mask_mb1_shp, mbr_sel_layer, rastert_mask_mb1_symdiff_shp, "ALL", "") + + arcpy.Delete_management(buildings_lyr) + arcpy.Delete_management(mbr_sel_layer) + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def set_pixel_size(raster_type_file, las_point_spacing): + # The template raster type file has PUT_PIXEL_SIZE_HERE in place of + # the pixel size. This is because each set of LAS has a unique point + # spacing. This module will set pixel size in the art.xml so the + # resulting Mosaic Dataset will have an appropriate pixel size. + # Note: the Mosaic Dataset pixel size can't be too small or footprints aren't + # generated properly. + try: + # Get a good number for pixel size of the MD. It should be a minimum of 3. + md_pixel_size = max(3.0, round(2 * las_point_spacing + 0.5)) + arcpy.AddMessage("Mosaic Dataset pixel size will be: {0}".format(md_pixel_size)) + search_text = r"PUT_PIXEL_SIZE_HERE" + # Read in the file + filedata = None + with open(raster_type_file, 'r') as file: + filedata = file.read() + # Replace the target string + filedata = filedata.replace(search_text, str(md_pixel_size)) + # Write the file out again + with open(raster_type_file, 'w') as file: + file.write(filedata) + return + + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def create_lasd(las_path, lasd): + try: + arcpy.env.workspace = las_path + lasfiles = arcpy.ListFiles("*.las") + #arcpy.AddMessage("Entire LAS list in {0} is: \n{1}\n".format(las_path, lasfiles)) + if len(lasfiles) > 0: + arcpy.AddMessage("Creating LAS Dataset: {0}".format(lasd)) + # Create a LAS Dataset and add the LAS files in las_path + # Compute stats (lasx) if they don't already exist + arcpy.CreateLasDataset_management(lasfiles, lasd, folder_recursion="NO_RECURSION", + in_surface_constraints="#", spatial_reference="#", + compute_stats="COMPUTE_STATS", relative_paths="ABSOLUTE_PATHS") + # messages = arcpy.GetMessages() + # arcpy.AddMessage("\nResults output from CreateLasDataset are: \n{0}\n".format(messages)) + del lasfiles + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def get_las_point_spacing(las_path, lasd, output_path, output_gdb_name): + # This module determines the point spacing of the 
LAS files, and can
+    # be used to determine a reasonable raster product pixel size.
+    try:
+        if arcpy.Exists(lasd):
+            arcpy.AddMessage("Calculating point spacing of LASD: {0}".format(lasd))
+            lasdatasetstatstext = os.path.join(output_path, "lasDatasetStatsText.txt")
+            if not arcpy.Exists(lasdatasetstatstext):
+                arcpy.LasDatasetStatistics_management(lasd, "true", lasdatasetstatstext, "LAS_FILES", "COMMA",
+                                                      "DECIMAL_POINT")
+            else:
+                arcpy.AddMessage("lasDatasetStatsText already exists: {0}".format(lasdatasetstatstext))
+
+            ptFileInfoFC = os.path.join(output_gdb_name, 'ptFileInfoFC')
+            if not arcpy.Exists(ptFileInfoFC):
+                # Note: This step is optional, so if it takes too long it's safe to remove it
+                # get lasd sr
+                descLASD = arcpy.Describe(lasd)
+                SpatRefLASD = descLASD.SpatialReference
+                # SpatRefStringLASD = SpatRefLASD.SpatialReference.exportToString()
+                arcpy.CheckOutExtension("3D")
+                arcpy.PointFileInformation_3d(las_path, ptFileInfoFC, "LAS", "las", "", "NO_RECURSION", "NO_EXTRUSION",
+                                              "DECIMAL_POINT", "NO_SUMMARIZE", "LAS_SPACING")
+                arcpy.CheckInExtension("3D")
+                messages = arcpy.GetMessages()
+                arcpy.AddMessage("\nResults output from PointFileInformation_3d are: \n{0}\n".format(messages))
+                del descLASD, SpatRefLASD
+            else:
+                arcpy.AddMessage("ptFileInfoFC already exists: {0}".format(ptFileInfoFC))
+
+            rows = arcpy.SearchCursor(ptFileInfoFC,
+                                      fields="FileName; Pt_Spacing; Z_Min; Z_Max",
+                                      sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max")
+            # Iterate through the rows in the cursor and store the
+            # "FileName; Pt_Spacing; Z_Min; Z_Max"
+            ptFileInfoList = []
+            PtSpacing = []
+            # Z Min & Z Max added for auto-detecting LiDAR tiles with potential artifacts in LiDAR in the future.
+            for row in rows:
+                formattedfields = ("{0}, {1}, {2}, {3}".format(
+                    row.getValue("FileName"),
+                    row.getValue("Pt_Spacing"),
+                    row.getValue("Z_Min"),
+                    row.getValue("Z_Max")))
+                ptFileInfoList.append(formattedfields)
+                ptspacinglist = float("{0}".format(row.getValue("Pt_Spacing")))
+                PtSpacing.append(ptspacinglist)
+            del row
+            arcpy.AddMessage("ptFileInfoList: {0}".format(str(ptFileInfoList)))
+            arcpy.AddMessage("ptSpacing: {0}".format(str(PtSpacing)))
+            avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing))
+            arcpy.AddMessage("returning avgPtSpacing of: {0}".format(str(avgPtSpacing)))
+        else:
+            arcpy.AddMessage("\nExiting get_las_point_spacing, since no LASD found: \n{0}\n".format(lasd))
+            return ""
+        del rows
+        return avgPtSpacing
+
+    except arcpy.ExecuteError:
+        print(arcpy.GetMessages())
+    except Exception:
+        # Return any Python specific errors and any error returned by the geoprocessor
+        tb = sys.exc_info()[2]
+        tbinfo = traceback.format_tb(tb)[0]
+        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
+                str(sys.exc_info()[0]) + ": " + str(sys.exc_info()[1]) + "\n"
+        arcpy.AddError(pymsg)
+
+        msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n"
+        arcpy.AddError(msgs)
+
+
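+# For reference, the average point spacing returned above drives the pixel-size rule in
+# set_pixel_size() (md_pixel_size = max(3.0, round(2 * las_point_spacing + 0.5))).
+# Two worked values with hypothetical spacings:
+#   spacing 0.5 -> max(3.0, round(1.5)) -> 3.0  (the floor of 3 keeps footprint generation reliable)
+#   spacing 2.3 -> max(3.0, round(5.1)) -> 5
+
+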
+def get_las_boundary(las_path, lasd, md_name, las_raster_type, file_gdb, surface_constraint_fc):
+    try:
+        # Ensure the LAS Raster type file exists
+        if not os.path.exists(las_raster_type):
+            arcpy.AddError("\nExiting: LAS Raster type file doesn't exist {0}".format(las_raster_type))
+            return
+        # Ensure the file gdb exists
+        if not os.path.exists(file_gdb):
+            arcpy.AddError("\nExiting: Geodatabase (in which to place boundary) doesn't exist {0}".format(file_gdb))
+            return
+        # Ensure the lasd exists
+        if not arcpy.Exists(lasd):
+            arcpy.AddError("\nExiting: LAS Dataset (from which to get boundary) doesn't exist {0}".format(lasd))
+            return
+        desc_lasd = arcpy.Describe(lasd)
+        spat_ref_lasd = desc_lasd.SpatialReference
+        spat_ref_lasd_str = desc_lasd.SpatialReference.exportToString()
+        if spat_ref_lasd.PCSCode == 0:
+            arcpy.AddWarning("\n*** NOTE: One or more LAS files has a PCSCode of 0. ***")
+            arcpy.AddWarning("\n*** PCSCode = 0 indicates a non-standard datum or unit of measure. ***")
+
+        arcpy.AddMessage("\nSpatial reference of LASD is: \n\n{0}\n".format(spat_ref_lasd_str))
+        # arcpy.AddMessage("Length of SR string is {0}:".format(len(spat_ref_lasd_str)))
+        arcpy.AddMessage("Spatial Reference name of LAS Dataset: {0}".format(spat_ref_lasd.name))
+        arcpy.AddMessage("Spatial Reference XY Units of LAS Dataset: {0}".format(spat_ref_lasd.linearUnitName))
+
+        loc = md_name.rfind(".gdb")
+        # arcpy.AddMessage("loc = {0}".format(loc))
+        MD_ShortName = md_name[loc+5:]
+        arcpy.AddMessage("Temp MD Short Name: {0}".format(MD_ShortName))
+
+        # Create a MD in same SR as LAS Dataset
+        arcpy.CreateMosaicDataset_management(file_gdb, MD_ShortName,
+                                             coordinate_system=spat_ref_lasd, num_bands="1",
+                                             pixel_type="32_BIT_FLOAT",
+                                             product_definition="NONE", product_band_definitions="#")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from CreateMosaicDataset are: \n{0}\n".format(messages))
+
+        # Add the LAS files to the Mosaic Dataset and don't update the boundary yet.
+        # The cell size of the Mosaic Dataset is determined by the art.xml file chosen by the user.
+        arcpy.AddRastersToMosaicDataset_management(md_name, las_raster_type, las_path,
+                                                   update_cellsize_ranges="UPDATE_CELL_SIZES",
+                                                   update_boundary="NO_BOUNDARY", update_overviews="NO_OVERVIEWS",
+                                                   maximum_pyramid_levels="#", maximum_cell_size="0",
+                                                   minimum_dimension="1500", spatial_reference=spat_ref_lasd_str,
+                                                   filter="*.las", sub_folder="NO_SUBFOLDERS",
+                                                   duplicate_items_action="ALLOW_DUPLICATES",
+                                                   build_pyramids="NO_PYRAMIDS", calculate_statistics="NO_STATISTICS",
+                                                   build_thumbnails="NO_THUMBNAILS", operation_description="#",
+                                                   force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE")
+
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from AddRastersToMosaicDataset are: \n{0}\n".format(messages))
+
+        # Get a count of the number of LAS ingested
+        result = arcpy.GetCount_management(md_name)
+        countRowsWithLAS = int(result.getOutput(0))
+        if countRowsWithLAS == 0:
+            arcpy.AddMessage("\nNo LAS rows were ingested into {0}".format(md_name))
+            return
+        else:
+            arcpy.AddMessage("{0} has {1} LAS row(s).".format(md_name, countRowsWithLAS))
+
+        # Build Footprints with these non-standard parameters:
+        #   min_region_size="20"
+        #   approx_num_vertices="2000"
+        # Update the Boundary
+        arcpy.BuildFootprints_management(md_name, where_clause="#", reset_footprint="RADIOMETRY",
+                                         min_data_value="-100",
+                                         max_data_value="4294967295", approx_num_vertices="2000", shrink_distance="0",
+                                         maintain_edges="MAINTAIN_EDGES", skip_derived_images="SKIP_DERIVED_IMAGES",
+                                         update_boundary="UPDATE_BOUNDARY", request_size="2000", min_region_size="20",
+                                         simplification_method="NONE", edge_tolerance="#", max_sliver_size="20",
+                                         min_thinness_ratio="0.05")
+
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("Results output from BuildFootprints are: \n{0}\n".format(messages))
+
+        # The boundary will potentially have lots of vertices, so simplify the feature after exporting.
+ boundary_detailed = surface_constraint_fc + r"_detail" + arcpy.ExportMosaicDatasetGeometry_management(md_name, boundary_detailed, where_clause="#", + geometry_type="BOUNDARY") + messages = arcpy.GetMessages() + arcpy.AddMessage("Results output from ExportMosaicDatasetGeometry are: \n{0}\n".format(messages)) + + arcpy.SimplifyPolygon_cartography(boundary_detailed, surface_constraint_fc, + algorithm="POINT_REMOVE", tolerance="5 Meters", minimum_area="0 SquareMeters", + error_option="RESOLVE_ERRORS", collapsed_point_option="KEEP_COLLAPSED_POINTS") + messages = arcpy.GetMessages() + arcpy.AddMessage("Results output from SimplifyPolygon are: \n{0}\n".format(messages)) + del desc_lasd, spat_ref_lasd + return + + except arcpy.ExecuteError: + print(arcpy.GetMessages()) + except Exception: + # Return any Python specific errors and any error returned by the geoprocessor + tb = sys.exc_info()[2] + tbinfo = traceback.format_tb(tb)[0] + pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \ + str(sys.exc_type) + ": " + str(sys.exc_value) + "\n" + arcpy.AddError(pymsg) + + msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n" + arcpy.AddError(msgs) + + +def main(first_parameter, second_parameter, z_unit, featureextraction, out_folder_path, processing_unit_length, + use_pos_terrain_method, delete_intermediate_files, regularize_buildings): + try: + start = time.time() + # resourceLogger.log() + executed_from = sys.executable.upper() + # Check out Spatial Analyst license + arcpy.CheckOutExtension("Spatial") + + # This python code can be invoked by two different gp script tools, depending upon + # whether the input is LAS or raster. Therefore the first two parameters can either be + # LAS folder and DSM Creation Method or DSM (raster) Folder and DTM (raster) Folder. + # + # If the second parameter is DSM Creation Method ("ALL Returns" or "Last Returns"), then + # the first parameter is assumed to be the LAS folder. Otherwise the first and second + # parameters are assumed to be DSM folder and DTM folder. 
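+        # Hypothetical invocations of the two modes (placeholder paths, and only the
+        # first four of the nine arguments shown; not part of the tool itself):
+        #   LAS input:    main(r"C:\data\las_tiles", "ALL RETURNS", "METER", "Buildings and Trees", ...)
+        #   Raster input: main(r"C:\data\dsm", r"C:\data\dtm", "FOOT", "Buildings", ...)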
+ + if second_parameter == "ALL RETURNS" or second_parameter == "LAST RETURNS": + # Input is LAS + las_path = first_parameter + if not os.path.exists(las_path): + arcpy.AddMessage("*** Exiting...the LAS Path does not exist: {0} ***".format(las_path)) + sys.exit() + else: + # Input is DSM & DTM Rasters + dsm_path = first_parameter + dtm_path = second_parameter + if not os.path.exists(dsm_path): + arcpy.AddMessage("*** Exiting...the DSM Path does not exist: {0} ***".format(dsm_path)) + sys.exit() + if not os.path.exists(dtm_path): + arcpy.AddMessage("*** Exiting...the DTM Path does not exist: {0} ***".format(dtm_path)) + sys.exit() + + if z_unit == "METER": + elevation_meter_scalefactor = "1.0" + elif z_unit == "FOOT": + elevation_meter_scalefactor = "3.2808" # Feet per Meter + else: + elevation_meter_scalefactor = "1.0" + elevation_meter_scalefactor_str = str(elevation_meter_scalefactor) + + if os.path.exists(out_folder_path): + arcpy.AddMessage("Results Folder exists: {0}".format(out_folder_path)) + else: + arcpy.AddMessage("Creating Results Folder: {0}".format(out_folder_path)) + os.makedirs(out_folder_path) + + height_path = os.path.join(out_folder_path, r"HeightRasters") + # If the path doesn't exist, then create the folder + if not os.path.isdir(height_path): + arcpy.AddMessage("Creating folder for Height Rasters: {0}".format(height_path)) + os.makedirs(height_path) + + # Create a results file gdb to store trees and buildings results + results_gdb_name = r"Results.gdb" + # Entire path of the file gdb + results_file_gdb_path = os.path.join(out_folder_path, results_gdb_name) + # If the file gdb doesn't exist, then create it + if not os.path.exists(results_file_gdb_path): + arcpy.AddMessage("Creating Results File GDB: {0}".format(results_file_gdb_path)) + arcpy.CreateFileGDB_management(out_folder_path, results_gdb_name, out_version="CURRENT") + else: + arcpy.AddMessage("\nResults File GDB already exists: {0}".format(results_file_gdb_path)) + + # feature classes to be created + fishnet = os.path.join(results_file_gdb_path, r"aFishnet") + tmpfishnet = os.path.join(results_file_gdb_path, r"tmpFishnet") + all_trees_final = os.path.join(results_file_gdb_path, r"all_trees_final") + all_buildings_final = os.path.join(results_file_gdb_path, r"all_buildings_final") + + # Exit process if all_buildings_final (and all_trees_final, if requested) already exist + if arcpy.Exists(all_buildings_final): + if "TREES" in featureextraction.upper(): + if arcpy.Exists(all_trees_final): + arcpy.AddMessage("\nExiting process...Buildings and Trees output products already " + "exist: \n {0} \n {1}".format(all_buildings_final, all_trees_final)) + sys.exit() + else: + arcpy.AddMessage("\nExiting process...Buildings output product already " + "exists: \n {0}".format(all_buildings_final)) + sys.exit() + + # the following two fc's are only created if the user checks on + # "Regularize north-south oriented buildings" + buildings_to_reg = os.path.join(results_file_gdb_path, r"partial_buildings_to_regularize") + buildings_reg = os.path.join(results_file_gdb_path, r"partial_buildings_regularized") + + # the scratch gdb name + # scratch_gdb_name = r"Scratch.gdb" + scratch_gdb_name = r"TempWorkArea.gdb" + # Create a gdb to store the intermediate products + mp_gdb_name = r"MiscProducts.gdb" + # Entire path of the Miscellaneous Intermediate Products file gdb + mp_file_gdb_path = os.path.join(out_folder_path, mp_gdb_name) + # If the Miscellanous Intermediate Products file gdb doesn't exist, then create it + if not 
os.path.exists(mp_file_gdb_path):
+            arcpy.AddMessage("Creating Miscellaneous Intermediate Products File GDB: {0}".format(mp_file_gdb_path))
+            arcpy.CreateFileGDB_management(out_folder_path, mp_gdb_name, out_version="CURRENT")
+        else:
+            arcpy.AddMessage("\nMiscellaneous Intermediate Products File GDB "
+                             "already exists: {0}".format(mp_file_gdb_path))
+
+        # more Mosaic Datasets and Feature classes to be created...
+        dsm_md_name = r"DSM"
+        dsm_md = os.path.join(mp_file_gdb_path, dsm_md_name)
+        dsm_boundary = dsm_md + r"_boundary"
+        dtm_md_name = r"DTM"
+        dtm_md = os.path.join(mp_file_gdb_path, dtm_md_name)
+        dtm_boundary = dtm_md + r"_boundary"
+        las_md_name = r"LAS"
+        las_md = os.path.join(mp_file_gdb_path, las_md_name)
+        # las_boundary = las_md + r"_boundary"
+        height_md_name = r"HeightAboveGround"
+        height_md = os.path.join(mp_file_gdb_path, height_md_name)
+        buildings_merged = os.path.join(results_file_gdb_path, r"buildings_merged")
+        las_point_spacing = 0.0
+        lasd_name = r"LasDataset.lasd"
+        lasd = os.path.join(out_folder_path, lasd_name)
+
+        processing_area = os.path.join(mp_file_gdb_path, r"ProcessingArea")
+        arcpy.AddMessage("General processing area FC: {0}".format(processing_area))
+
+        if second_parameter == "ALL RETURNS" or second_parameter == "LAST RETURNS":
+            # Input is LAS
+            if not arcpy.Exists(lasd):
+                create_lasd(las_path, lasd)
+            if not arcpy.Exists(lasd):
+                arcpy.AddMessage("\nExiting...LAS Dataset not created: {0}".format(lasd))
+                sys.exit()
+            las_point_spacing = get_las_point_spacing(las_path, lasd, out_folder_path, mp_file_gdb_path)
+            arcpy.AddMessage("\n las_point_spacing: {0}".format(str(las_point_spacing)))
+            # # Set point spacing to 0.5 if get_las_point_spacing is unable to determine point spacing
+            # if las_point_spacing_str == "":
+            #     las_point_spacing_str = "0.5"
+
+            las_raster_type_template = os.path.join(os.path.dirname(inspect.getfile(inspect.currentframe())),
+                                                    r"LAS_Template_Raster_Type.art.xml")
+            las_raster_type_file = os.path.join(out_folder_path, r"LAS_Raster_Type.art.xml")
+
+            # Create a copy of the Template LAS Raster type for this execution, since the art.xml file
+            # needs to be edited (in set_pixel_size) to modify the desired pixel size of the mosaic dataset.
+ if not arcpy.Exists(las_raster_type_file): + shutil.copyfile(las_raster_type_template, las_raster_type_file) + set_pixel_size(las_raster_type_file, las_point_spacing) + else: + arcpy.AddMessage("LAS Raster type file already exists: {0}".format(las_raster_type_file)) + # Load the LAS into a Mosaic dataset and return the boundary in processing_area + if not arcpy.Exists(processing_area): + get_las_boundary(las_path, lasd, las_md, las_raster_type_file, mp_file_gdb_path, processing_area) + else: + arcpy.AddMessage("LAS Boundary file already exists: {0}".format(processing_area)) + if not arcpy.Exists(processing_area): + arcpy.AddMessage("\nExiting...Surface constraint FC not created: {0}".format(processing_area)) + sys.exit() + + # assign the boundary as a hard clip constraint + surface_constraints = "'" + processing_area + "'" + r" Hard_Clip" + arcpy.AddFilesToLasDataset_management(lasd, "", "NO_RECURSION", surface_constraints) + else: + # Input is DSM & DTM Rasters + if not arcpy.Exists(dsm_md): + create_md_from_raster(dsm_path, mp_file_gdb_path, dsm_md_name, dsm_boundary, "true", "true") + arcpy.AddMessage("Created MD: {0}\n".format(dsm_md)) + else: + arcpy.AddMessage("\nMD already exists: {0}".format(dsm_md)) + if not arcpy.Exists(dtm_md): + create_md_from_raster(dtm_path, mp_file_gdb_path, dtm_md_name, dtm_boundary, "true", "true") + arcpy.AddMessage("Created MD: {0}\n".format(dtm_md)) + else: + arcpy.AddMessage("\nMD already exists: {0}".format(dtm_md)) + + # Find the intersection of the DSM and DTM datasets to determine the general processing area + # Subsequent processing will further eliminate areas that don't need to be processed + intersect_input_list = [dsm_boundary, dtm_boundary] + arcpy.AddMessage("\nIntersection of: {0}".format(intersect_input_list)) + if not arcpy.Exists(processing_area): + arcpy.Intersect_analysis(intersect_input_list, processing_area, + join_attributes="ONLY_FID", cluster_tolerance="", output_type="INPUT") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from Intersect_analysis are: \n{0}\n".format(messages)) + else: + arcpy.AddMessage("\nDSM & DTM intersection fc already exists: {0}".format(processing_area)) + # desc_bound = arcpy.Describe(processing_area) + + # Get the bounds of the processing area to create a fishnet + # If input is DSM and DTM folders, then processing area is the intersection of the two MD's (DSM & DTM) + # If input is a LAS folder, then processing area is the boundary of the MD that contains the LAS + desc_bound = arcpy.Describe(processing_area) + xmin = desc_bound.Extent.XMin + ymin = desc_bound.Extent.YMin + # xmax = desc_bound.Extent.XMax + ymax = desc_bound.Extent.YMax + origin_coord = str(xmin) + " " + str(ymin) + y_axis_coord = str(xmin) + " " + str(ymax) + + if not arcpy.Exists(fishnet): + arcpy.CreateFishnet_management(tmpfishnet, origin_coord, y_axis_coord, processing_unit_length, + processing_unit_length, number_rows="", number_columns="", corner_coord="", + labels="NO_LABELS", + template=processing_area, geometry_type="POLYGON") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from CreateFishnet are: \n{0}\n".format(messages)) + + # Now keep only fishnet features that intersect the processing area + fishnet_lyr = r"fishnetLyr" + arcpy.MakeFeatureLayer_management(tmpfishnet, fishnet_lyr, "", "", "") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from MakeFeatureLayer are: \n{0}\n".format(messages)) + arcpy.SelectLayerByLocation_management(fishnet_lyr, 
"INTERSECT", processing_area, + "", "NEW_SELECTION", "NOT_INVERT") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from SelectLayerByLocation are: \n{0}\n".format(messages)) + arcpy.CopyFeatures_management(fishnet_lyr, fishnet, config_keyword="", spatial_grid_1="0", + spatial_grid_2="0", spatial_grid_3="0") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from CopyFeatures are: \n{0}\n".format(messages)) + arcpy.Delete_management(fishnet_lyr) + arcpy.Delete_management(tmpfishnet) + else: + arcpy.AddMessage("\nFishnet fc already exists: {0}".format(fishnet)) + + # Intentionally set overwriteOutput here (as opposed to earlier in the script) because this might be a re-start + # of the script, in which case we don't want to re-do the pre-processing steps. Subsequent logic + # manually checks for the existence of each folder (one per fishnet oid) before overwriting folder contents, + # so if the tool is being re-started the newest folder(s) may need to be deleted before restarting, especially + # if the tool was forced to stop. + arcpy.env.overwriteOutput = True + + # Initialize + buildings_merged_list = [] + trees_merged_list = [] + # inititalize Lists used for passing arguments to extract_buildings_trees + out_folder_path_list = [] + oid_list = [] + dsm_md_list = [] # only used if DSM & DTM Raster input + dtm_md_list = [] # only used if DSM & DTM Raster input + lasd_list = [] # only used if LAS input (i.e. if second_parameter == "ALL RETURNS" or "LAST RETURNS") + dsm_type_list = [] # only used if LAS input (i.e. if second_parameter == "ALL RETURNS" or "LAST RETURNS") + xmin_list = [] + xmax_list = [] + ymin_list = [] + ymax_list = [] + featureextraction_list = [] + elevation_meter_scalefactor_str_list = [] + use_pos_terrain_method_list = [] + delete_intermediate_files_list = [] + height_path_list = [] + point_spacing_list = [] # only used if LAS input (second_parameter == "ALL RETURNS" or "LAST RETURNS") + + fields = ["OID@", "SHAPE@"] + with arcpy.da.SearchCursor(fishnet, fields) as sc: + # iterate through the fishnet features to populate the arguments for extract_buildings_trees + for row in sc: + oid = str(row[0]) + geom = row[1] + xmin = str(geom.extent.XMin) + ymin = str(geom.extent.YMin) + xmax = str(geom.extent.XMax) + ymax = str(geom.extent.YMax) + # populate lists for each parameter + out_folder_path_list.append(out_folder_path) + oid_list.append(oid) + if second_parameter == "ALL RETURNS" or second_parameter == "LAST RETURNS": + lasd_list.append(lasd) + dsm_type_list.append(second_parameter) + else: + dsm_md_list.append(dsm_md) + dtm_md_list.append(dtm_md) + xmin_list.append(xmin) + xmax_list.append(xmax) + ymin_list.append(ymin) + ymax_list.append(ymax) + featureextraction_list.append(featureextraction) + elevation_meter_scalefactor_str_list.append(elevation_meter_scalefactor_str) + use_pos_terrain_method_list.append(use_pos_terrain_method) + delete_intermediate_files_list.append(delete_intermediate_files) + height_path_list.append(height_path) + point_spacing_list.append(str(las_point_spacing)) # only applicable if LAS input + del row, geom + del sc + + num_iterations_str = str(len(out_folder_path_list)) + arcpy.AddMessage("\n** Number of iterations (fishnet features) is: {0} **\n".format(num_iterations_str)) + + if second_parameter == "ALL RETURNS" or second_parameter == "LAST RETURNS": + pp_params = [[out_folder_path_list[i], oid_list[i], lasd_list[i], dsm_type_list[i], xmin_list[i], + xmax_list[i], ymin_list[i], ymax_list[i], 
featureextraction_list[i], + elevation_meter_scalefactor_str_list[i], use_pos_terrain_method_list[i], + delete_intermediate_files_list[i], height_path_list[i], point_spacing_list[i]] + for i in range(len(out_folder_path_list))] + else: + pp_params = [[out_folder_path_list[i], oid_list[i], dsm_md_list[i], dtm_md_list[i], xmin_list[i], + xmax_list[i], ymin_list[i], ymax_list[i], featureextraction_list[i], + elevation_meter_scalefactor_str_list[i], use_pos_terrain_method_list[i], + delete_intermediate_files_list[i], height_path_list[i], point_spacing_list[i]] + for i in range(len(out_folder_path_list))] + # arcpy.AddMessage("\n pp_params: {0}".format(pp_params)) + + # If executing from the gp User Interface, then extract_buildings_trees will be run serially. + # If executing from the command line, then extract_buildings_trees will be run in parallel. + arcpy.AddMessage(executed_from) + if "ARCMAP" in executed_from or "ARCCATALOG" in executed_from or \ + "RUNTIME" in executed_from or "ARCGISPRO" in executed_from: + list(map(extract_buildings_trees, pp_params)) + elif "PYTHON" in executed_from: + # Number of cores to use (max will be 3 for now, otherwise we're I/O bound) + cpu_num = min(multiprocessing.cpu_count(), 3) + # Create the pool object + pool = multiprocessing.Pool(processes=cpu_num, maxtasksperchild=1) + arcpy.AddMessage("\nCPUs utilized: {0}".format(cpu_num)) + # Start Multiprocessing + arcpy.AddMessage("Start Multiprocessing") + pool.map(extract_buildings_trees, pp_params, chunksize=1) + + # Close the pool + pool.close() + pool.join() + + # clear extent for the remainder of processing - important step (or mosaic dataset functionality doesn't work) + arcpy.env.extent = None + + # Create a mosaic dataset of all of the height rasters and use it later to get + # zonal stats on buildings and/or trees + if not arcpy.Exists(height_md): + # Don't need to build footprints, since this takes a while and isn't necessary + create_md_from_raster(height_path, mp_file_gdb_path, height_md, "", "false", "false") + arcpy.AddMessage("Created MD: {0}".format(height_md)) + else: + arcpy.AddMessage("\nMD already exists: {0}".format(height_md)) + + # If the user wanted trees output, then merge all of the tree feature classes + if "TREES" in featureextraction.upper() and not arcpy.Exists(all_trees_final): + for i in range(1, len(out_folder_path_list) + 1): + sub_path = os.path.join(out_folder_path, str(i)) + # arcpy.AddMessage("sub Path: {0}".format(sub_path)) + if os.path.exists(sub_path): + sub_results_gdb = os.path.join(sub_path, results_gdb_name) + # arcpy.AddMessage("sub Results gdb: {0}".format(sub_results_gdb)) + if arcpy.Exists(sub_results_gdb): + sub_fc = os.path.join(sub_results_gdb, r"trees_to_merge" + str(i)) + # arcpy.AddMessage("sub fc: {0}".format(sub_fc)) + if arcpy.Exists(sub_fc): + # Construct a semicolon delimited list of tree feature classes, for subsequent merging + # trees_merged_list = trees_merged_list + sub_fc + ";" + trees_merged_list.append(sub_fc) + # arcpy.AddMessage("trees_merged_list: {0}".format(trees_merged_list)) + + if len(trees_merged_list) > 0: + # Merge all of the tree feature classes into one. 
+ arcpy.Merge_management(inputs=trees_merged_list, output=all_trees_final) + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from Merge are: \n{0}\n".format(messages)) + # Gather elevation statistics for trees + zonalstats(all_trees_final, height_md, results_file_gdb_path) + + # merge all of the building feature classes + if not arcpy.Exists(all_buildings_final): + # Merge all of the building feature classes into one. + for i in range(1, len(out_folder_path_list) + 1): + sub_path = os.path.join(out_folder_path, str(i)) + # arcpy.AddMessage("sub Path: {0}".format(sub_path)) + if os.path.exists(sub_path): + sub_results_gdb = os.path.join(sub_path, results_gdb_name) + # arcpy.AddMessage("sub Results gdb: {0}".format(sub_results_gdb)) + if arcpy.Exists(sub_results_gdb): + sub_fc = os.path.join(sub_results_gdb, r"buildings_to_merge" + str(i)) + # arcpy.AddMessage("sub fc: {0}".format(sub_fc)) + if arcpy.Exists(sub_fc): + # Construct a semicolon delimited list of building feature classes, for subsequent merging + # buildings_merged_list = buildings_merged_list + sub_fc + ";" + buildings_merged_list.append(sub_fc) + # arcpy.AddMessage("buildings_merged_list: {0}".format(buildings_merged_list)) + + if len(buildings_merged_list) > 0: + arcpy.Merge_management(inputs=buildings_merged_list, output=buildings_merged) + messages = arcpy.GetMessages() + # arcpy.AddMessage("\nResults output from Merge are: \n{0}\n".format(messages)) + + # Dissolve buildings into one feature class so that buildings at the borders of + # each building feature class can be dissolved into one building feature. + arcpy.Dissolve_management(buildings_merged, all_buildings_final, dissolve_field="", + statistics_fields="", + multi_part="SINGLE_PART", unsplit_lines="DISSOLVE_LINES") + messages = arcpy.GetMessages() + arcpy.AddMessage("\nResults output from Dissolve are: \n{0}\n".format(messages)) + # Gather elevation statistics for area under buildings + zonalstats(all_buildings_final, height_md, results_file_gdb_path) + + # If user wants to regularize, then regularize those buildings that can best be regularized + # (these are the buildings that are oriented North/South) + # Instead of using this option, try using the Building Regularization gp tool in ArcGIS Pro or ArcMap 10.4 + if regularize_buildings == "true": + # Don't bother if both feature classes already exist + if not arcpy.Exists(buildings_to_reg) or not arcpy.Exists(buildings_reg): + expression = "\"MBG_Width\"* \"MBG_Length\">1" + # cellsize = Raster(diff_ori).meanCellHeight + # Determine the cell size of the DSM Mosaic Dataset + cellsize_result = arcpy.GetRasterProperties_management(dsm_md, property_type="CELLSIZEX", band_index="") + cellsize = float(cellsize_result.getOutput(0)) + # arcpy.AddMessage("Cell size of MD: {0}".format(cellsize)) + regularize_some_buildings(all_buildings_final, buildings_to_reg, buildings_reg, expression, + cellsize, out_folder_path) + else: + arcpy.AddMessage("\npartial_buildings_regularized and partial_buildings_to_regularize already exist") + + # Delete all intermediate files if user checked on "Delete all intermediate files" + if delete_intermediate_files == "true": + if arcpy.Exists(buildings_merged): + arcpy.Delete_management(buildings_merged) + # Clean up the rasters in height_path and delete the height_path directory + try: + remove_rasters(height_path) + arcpy.Delete_management(height_path) + except: + arcpy.AddMessage("Unable to clean up directory: {0}".format(height_path)) + + # Delete files created during 
building regularization
+            if regularize_buildings == "true":
+                remove_rasters(out_folder_path)
+                remove_shapefiles(out_folder_path)
+
+            # Delete tables created during Zonal Statistics creation (in def zonalstats)
+            remove_tables(results_file_gdb_path)
+
+            # In each oid sub-folder, delete the individual Results.gdb & Scratch.gdb and all of their feature classes
+            for i in range(1, len(out_folder_path_list) + 1):
+                sub_path = os.path.join(out_folder_path, str(i))
+                # arcpy.AddMessage("sub Path: {0}".format(sub_path))
+                if os.path.exists(sub_path):
+                    sub_results_gdb = os.path.join(sub_path, results_gdb_name)
+                    if arcpy.Exists(sub_results_gdb):
+                        try:
+                            remove_filegdb(sub_results_gdb)
+                        except:
+                            arcpy.AddMessage("Unable to delete file GDB: {0}".format(sub_results_gdb))
+                    sub_scratch_gdb = os.path.join(sub_path, scratch_gdb_name)
+                    if arcpy.Exists(sub_scratch_gdb):
+                        try:
+                            remove_filegdb(sub_scratch_gdb)
+                        except:
+                            arcpy.AddMessage("Unable to delete file GDB: {0}".format(sub_scratch_gdb))
+
+            # Delete Mosaic Datasets in MosaicDatasets.gdb, then delete remaining fc's and then MosaicDatasets.gdb
+            if arcpy.Exists(dsm_md):
+                arcpy.Delete_management(dsm_md)
+            if arcpy.Exists(dtm_md):
+                arcpy.Delete_management(dtm_md)
+            if arcpy.Exists(height_md):
+                arcpy.Delete_management(height_md)
+            # if arcpy.Exists(las_md):
+            #     arcpy.Delete_management(las_md)
+
+            # remove_filegdb(mp_file_gdb_path)
+
+        end = time.time()
+        delta = end - start
+        # This is useful if the tool is run at the command line
+        arcpy.AddMessage("***** Total elapsed time is {0} hours *****".format(delta/3600))
+
+    except arcpy.ExecuteError:
+        print(arcpy.GetMessages())
+    except Exception:
+        # Return any Python specific errors and any error returned by the geoprocessor
+        exc_type, exc_value, tb = sys.exc_info()
+        tbinfo = traceback.format_tb(tb)[0]
+        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
+                str(exc_type) + ": " + str(exc_value) + "\n"
+        arcpy.AddError(pymsg)
+        msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n"
+        arcpy.AddError(msgs)
+
+
+if __name__ == '__main__':
+    arcpy.AddMessage(inspect.getfile(inspect.currentframe()))
+    arcpy.AddMessage(os.path.dirname(inspect.getfile(inspect.currentframe())))
+    arcpy.AddMessage(sys.version)
+    arcpy.AddMessage(sys.executable)
+    executed_from = sys.executable.upper()
+
+    PYTHON_EXE = os.path.join(sys.exec_prefix, 'pythonw.exe')
+    # use pythonw for multiprocessing
+    multiprocessing.set_executable(PYTHON_EXE)
+
+    # This Python code can be invoked by two different script tools, depending upon
+    # whether the input is LAS or raster. Therefore the first two parameters can either be
+    # LAS folder and DSM Creation Method or DSM Folder and DTM Folder.
+    #
+    # If the second parameter is either "ALL Returns" or "Last Returns" then
+    # the first parameter is assumed to be the LAS folder.
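+    # For illustration only (paths are hypothetical), the two command-line
+    # invocation styles therefore look like this, mirroring run_feature_id.bat:
+    #   LAS input:
+    #     python ExtractBuildingsTreesAutomation_V3.py <LAS folder> "ALL RETURNS" Meter
+    #       "Buildings and Trees" <output folder> 5000 true false false
+    #   Raster input:
+    #     python ExtractBuildingsTreesAutomation_V3.py <DSM folder> <DTM folder> Meter
+    #       "Buildings and Trees" <output folder> 5000 true false false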
+ + if "ARCGISPRO" in executed_from: + arcpy.AddMessage("Exiting...this tool does not yet run from ArcGIS Pro") + # sys.exit(0) + + if not ("ARCMAP" in executed_from or "ARCCATALOG" in executed_from or + "RUNTIME" in executed_from): + arcpy.AddMessage("Getting parameters from command line...") + + second_parameter = sys.argv[2] + second_parameter = second_parameter.strip() + if second_parameter.upper() == "ALL RETURNS" or second_parameter.upper() == "LAST RETURNS": + # Input is LAS + first_parameter = sys.argv[1] + first_parameter = first_parameter.strip() + arcpy.AddMessage("LAS Path: {0}".format(first_parameter)) + second_parameter = second_parameter.upper() + arcpy.AddMessage("DSM Creation Method: {0}".format(second_parameter)) + else: + # Input is DSM & DTM Rasters + first_parameter = sys.argv[1] + first_parameter = first_parameter.strip() + arcpy.AddMessage("DSM Path: {0}".format(first_parameter)) + arcpy.AddMessage("DTM Path: {0}".format(second_parameter)) + + # dsm_path = sys.argv[1] + # dsm_path = dsm_path.strip() + # arcpy.AddMessage("DSM Path: {0}".format(dsm_path)) + # dtm_path = sys.argv[2] + # dtm_path = dtm_path.strip() + # arcpy.AddMessage("DTM Path: {0}".format(dtm_path)) + + z_unit = sys.argv[3] + z_unit = z_unit.upper() + arcpy.AddMessage("Z Unit: {0}".format(z_unit)) + + featureextraction = sys.argv[4] + arcpy.AddMessage("Desired Features extracted: {0}".format(featureextraction)) + + out_folder_path = sys.argv[5] + out_folder_path = out_folder_path.strip() + arcpy.AddMessage("Output Folder Path: {0}".format(out_folder_path)) + + processing_unit_length = sys.argv[6] + arcpy.AddMessage("Processing Unit Distance: {0}".format(processing_unit_length)) + + use_pos_terrain_method = sys.argv[7] + arcpy.AddMessage("Use alternative positive terrain method for buildings: {0}".format(use_pos_terrain_method)) + + delete_intermediate_files = sys.argv[8] + arcpy.AddMessage("Delete all intermediate files: {0}".format(delete_intermediate_files)) + + regularize_buildings = sys.argv[9] + arcpy.AddMessage("Regularize some buildings: {0}".format(regularize_buildings)) + + else: + arcpy.AddMessage("Getting parameters from GetParameterAsText...") + + second_parameter = arcpy.GetParameterAsText(1) + second_parameter = second_parameter.strip() + if second_parameter.upper() == "ALL RETURNS" or second_parameter.upper() == "LAST RETURNS": + # Input is LAS + first_parameter = arcpy.GetParameterAsText(0) + first_parameter = first_parameter.strip() + arcpy.AddMessage("LAS Path: {0}".format(first_parameter)) + second_parameter = second_parameter.upper() + arcpy.AddMessage("DSM Creation Method: {0}".format(second_parameter)) + else: + # Input is DSM & DTM Rasters + first_parameter = arcpy.GetParameterAsText(0) + first_parameter = first_parameter.strip() + arcpy.AddMessage("DSM Path: {0}".format(first_parameter)) + arcpy.AddMessage("DTM Path: {0}".format(second_parameter)) + + z_unit = arcpy.GetParameterAsText(2) + z_unit = z_unit.upper() + arcpy.AddMessage("Z Unit: {0}".format(z_unit)) + + featureextraction = arcpy.GetParameterAsText(3) + arcpy.AddMessage("Desired Features extracted: {0}".format(featureextraction)) + + out_folder_path = arcpy.GetParameterAsText(4) + out_folder_path = out_folder_path.strip() + arcpy.AddMessage("Output Folder Path: {0}".format(out_folder_path)) + + processing_unit_length = arcpy.GetParameterAsText(5) + arcpy.AddMessage("Processing Unit Distance: {0}".format(processing_unit_length)) + + use_pos_terrain_method = arcpy.GetParameterAsText(6) + arcpy.AddMessage("Use 
alternative positive terrain method for buildings: {0}".format(use_pos_terrain_method)) + + delete_intermediate_files = arcpy.GetParameterAsText(7) + arcpy.AddMessage("Delete all intermediate files: {0}".format(delete_intermediate_files)) + + regularize_buildings = arcpy.GetParameterAsText(8) + arcpy.AddMessage("Regularize some buildings: {0}".format(regularize_buildings)) + + main(first_parameter, second_parameter, z_unit, featureextraction, out_folder_path, processing_unit_length, + use_pos_terrain_method, delete_intermediate_files, regularize_buildings) diff --git a/V3_delivery/V4_Delivery/LAS_Template_Raster_Type.art.xml b/V3_delivery/V4_Delivery/LAS_Template_Raster_Type.art.xml new file mode 100644 index 0000000..0d807d0 --- /dev/null +++ b/V3_delivery/V4_Delivery/LAS_Template_Raster_Type.art.xml @@ -0,0 +1,190 @@ + + + RasterBuilder + Name + Aliases + Version + Description + InputDataSourceTypes + DataSourceFilter + SupportsOrthorectification + SupportsStereo + SupportsSeamline + EnableClipToFootprint + AllowSimplification + IsSensorRasterType + SupportsColorCorrection + FactoryCLSID + SupportedURIFilters + FullName + AddRastersParameters + SynchronizeParameters + + + + + AuxiliaryFields + AuxiliaryFieldAlias + ParentRasterTypeName + Properties + MergeItems + LasToRasterArguments + FolderAsDataset + ZFactor + + + + + + Version + esriFieldTypeString + true + 20 + 0 + 0 + Version + + + PointCount + esriFieldTypeDouble + true + 0 + 20 + 9 + Point Count + + + PointSpacing + esriFieldTypeDouble + true + 0 + 20 + 9 + Point Spacing + + + ZMin + esriFieldTypeDouble + true + 0 + 20 + 9 + ZMin + + + ZMax + esriFieldTypeDouble + true + 0 + 20 + 9 + ZMax + + + + + + + Version + Version + + + PointCount + PointCount + + + PointSpacing + PointSpacing + + + ZMin + ZMin + + + ZMax + ZMax + + + + LAS + + + + BandProperties + + + + DataType + Elevation + + + DefaultBandCount + 1 + + + + false + + + CacheLocation + CacheMaxNumSurfaces + CellAggregationType + ClassTypesSelected + ReturnTypesSelected + DataType + UseTinning + MaxHoleWidthFilled + VoidFillMethod + CellWidth + + + + + esriDataSourcesRaster.RasterWorkspaceFactory.1 + esriFileSystemWorkspace + + 0 + 3 + + -1 + + + -1 + + 0 + false + 5 + 2 + PUT_PIXEL_SIZE_HERE + + + false + 1 + + + LAS_Template_Raster_Type + + 1 + Supports LIDAR (Light Detection and Ranging) data. + 3 + + false + false + false + false + false + false + false + + {8F2800F4-5842-47DF-AD1D-2077A7966BBF} + + + + D:\YiqunXieInternshipSummary\tools_and_documentation\Feature Identification\fromBox\LAS_3_ALL_Binning_Mean_IDW.art.xml + -1 + + + + + + \ No newline at end of file diff --git a/V3_delivery/V4_Delivery/SimplifyBuildings_V1.py b/V3_delivery/V4_Delivery/SimplifyBuildings_V1.py new file mode 100644 index 0000000..151214d --- /dev/null +++ b/V3_delivery/V4_Delivery/SimplifyBuildings_V1.py @@ -0,0 +1,189 @@ +# --------------------------------------------------------------------------- +# Name: SimplifyBuildings.py +# Purpose: To Simplify building footprints +# Usage: +# Description: +# Author: Roslyn Dunn +# Organization: Esri Inc. 
+#
+# Created: 03/11/2016  Roslyn Dunn
+# ---------------------------------------------------------------------------
+
+import sys
+import os
+import inspect
+import traceback
+import time
+import shutil
+import arcpy
+
+
+def main(input_fc, output_fgdb, output_fc_name, delete_intermediate_results):
+    try:
+        start = time.time()
+
+        if not arcpy.Exists(input_fc):
+            arcpy.AddMessage("\nExiting process...Input Feature Class does not exist: \n {0}".format(input_fc))
+            sys.exit()
+
+        if not arcpy.Exists(output_fgdb):
+            arcpy.AddMessage("\nExiting process...Input File GDB does not exist: \n {0}".format(output_fgdb))
+            sys.exit()
+
+        output_fc = os.path.join(output_fgdb, output_fc_name)
+        if arcpy.Exists(output_fc):
+            arcpy.AddMessage("\nExiting process...Output Feature Class already exists: \n {0}".format(output_fc))
+            sys.exit()
+
+        # Send intermediate outputs to the same GDB as the final output
+        scratch_gdb_name = output_fgdb
+        arcpy.AddMessage("\nScratch GDB for intermediate products: \n {0}".format(scratch_gdb_name))
+
+        buff1_output = os.path.join(scratch_gdb_name, r"BuffInward")
+        arcpy.Buffer_analysis(in_features=input_fc, out_feature_class=buff1_output,
+                              buffer_distance_or_field="-2 Meters", line_side="FULL",
+                              line_end_type="ROUND", dissolve_option="NONE", dissolve_field="", method="GEODESIC")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from Buffer_analysis are: \n{0}\n".format(messages))
+
+        single_part_features = os.path.join(scratch_gdb_name, r"SinglePart")
+        arcpy.MultipartToSinglepart_management(in_features=buff1_output, out_feature_class=single_part_features)
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from MultipartToSinglepart_management are: \n{0}\n".format(messages))
+
+        buff2_output = os.path.join(scratch_gdb_name, r"BuffOutward")
+        arcpy.Buffer_analysis(in_features=single_part_features, out_feature_class=buff2_output,
+                              buffer_distance_or_field="2 Meters", line_side="FULL",
+                              line_end_type="ROUND", dissolve_option="NONE", dissolve_field="", method="GEODESIC")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from Buffer_analysis are: \n{0}\n".format(messages))
+
+        large_buildings = r"LargeBuildings"
+        arcpy.MakeFeatureLayer_management(in_features=buff2_output, out_layer=large_buildings,
+                                          where_clause="Shape_Area > 500", workspace="", field_info="#")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from MakeFeatureLayer_management are: \n{0}\n".format(messages))
+
+        small_buildings = r"SmallBuildings"
+        arcpy.MakeFeatureLayer_management(in_features=buff2_output, out_layer=small_buildings,
+                                          where_clause="Shape_Area <= 500 AND Shape_Area >= 10",
+                                          workspace="", field_info="#")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from MakeFeatureLayer_management are: \n{0}\n".format(messages))
+
+        large_reg = os.path.join(scratch_gdb_name, r"LargeReg")
+        arcpy.RegularizeBuildingFootprint_3d(in_features=large_buildings, out_feature_class=large_reg,
+                                             method="RIGHT_ANGLES_AND_DIAGONALS", tolerance="0.75",
+                                             densification="0.75", precision="0.25", diagonal_penalty="1.5",
+                                             min_radius="0.1", max_radius="1000000")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from RegularizeBuildingFootprint_3d are: \n{0}\n".format(messages))
+
+        small_reg = os.path.join(scratch_gdb_name, r"SmallReg")
+        arcpy.RegularizeBuildingFootprint_3d(in_features=small_buildings, out_feature_class=small_reg,
+                                             method="RIGHT_ANGLES", tolerance="0.75",
+                                             densification="0.75",
precision="0.25", diagonal_penalty="1.5",
+                                             min_radius="0.1", max_radius="1000000")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from RegularizeBuildingFootprint_3d are: \n{0}\n".format(messages))
+
+        merge_list = []
+        merge_list.append(large_reg)
+        merge_list.append(small_reg)
+        merge_reg = os.path.join(scratch_gdb_name, r"MergeReg")
+        arcpy.Merge_management(inputs=merge_list, output=merge_reg,
+                               field_mappings="#")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from Merge_management are: \n{0}\n".format(messages))
+
+        simplify_once = os.path.join(scratch_gdb_name, r"SimplifyOnce")
+        arcpy.SimplifyBuilding_cartography(in_features=merge_reg, out_feature_class=simplify_once,
+                                           simplification_tolerance="2 Meters", minimum_area="0 SquareFeet",
+                                           conflict_option="NO_CHECK")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from SimplifyBuilding_cartography are: \n{0}\n".format(messages))
+
+        arcpy.SimplifyBuilding_cartography(in_features=simplify_once, out_feature_class=output_fc,
+                                           simplification_tolerance="4 Meters", minimum_area="0 SquareFeet",
+                                           conflict_option="NO_CHECK")
+        messages = arcpy.GetMessages()
+        arcpy.AddMessage("\nResults output from SimplifyBuilding_cartography are: \n{0}\n".format(messages))
+
+        if delete_intermediate_results == "true":
+            # Delete the intermediate feature classes and layers created above
+            if arcpy.Exists(buff1_output):
+                arcpy.Delete_management(buff1_output)
+            if arcpy.Exists(single_part_features):
+                arcpy.Delete_management(single_part_features)
+            if arcpy.Exists(buff2_output):
+                arcpy.Delete_management(buff2_output)
+            if arcpy.Exists(large_buildings):
+                arcpy.Delete_management(large_buildings)
+            if arcpy.Exists(small_buildings):
+                arcpy.Delete_management(small_buildings)
+            if arcpy.Exists(large_reg):
+                arcpy.Delete_management(large_reg)
+            if arcpy.Exists(small_reg):
+                arcpy.Delete_management(small_reg)
+            if arcpy.Exists(merge_reg):
+                arcpy.Delete_management(merge_reg)
+            if arcpy.Exists(simplify_once):
+                arcpy.Delete_management(simplify_once)
+
+        end = time.time()
+        delta = end - start
+        # This is useful if the tool is run at the command line
+        arcpy.AddMessage("***** Total elapsed time is {0} hours *****".format(delta/3600))
+
+    except arcpy.ExecuteError:
+        print(arcpy.GetMessages())
+    except Exception:
+        # Return any Python specific errors and any error returned by the geoprocessor
+        exc_type, exc_value, tb = sys.exc_info()
+        tbinfo = traceback.format_tb(tb)[0]
+        pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
+                str(exc_type) + ": " + str(exc_value) + "\n"
+        arcpy.AddError(pymsg)
+        msgs = "GP ERRORS:\n" + arcpy.GetMessages() + "\n"
+        arcpy.AddError(msgs)
+
+if __name__ == '__main__':
+    arcpy.AddMessage(inspect.getfile(inspect.currentframe()))
+    arcpy.AddMessage(os.path.dirname(inspect.getfile(inspect.currentframe())))
+    arcpy.AddMessage(sys.version)
+    arcpy.AddMessage(sys.executable)
+    executed_from = sys.executable.upper()
+
+    arcpy.CheckOutExtension('3D')
+    # if "ARCGISPRO" in executed_from:
+    #     arcpy.AddMessage("Exiting...this tool does not yet run from ArcGIS Pro")
+    #     sys.exit(0)
+
+    if not ("ARCMAP" in executed_from or "ARCCATALOG" in executed_from or
+            "RUNTIME" in executed_from):
+        arcpy.AddMessage("Getting parameters from command line...")
+
+        input_fc = sys.argv[1]
+        output_fgdb = sys.argv[2]
+        output_fc_name = sys.argv[3]
delete_intermediate_results = sys.argv[4] + else: + arcpy.AddMessage("Getting parameters from GetParameterAsText...") + input_fc = arcpy.GetParameterAsText(0) + output_fgdb = arcpy.GetParameterAsText(1) + output_fc_name = arcpy.GetParameterAsText(2) + delete_intermediate_results = arcpy.GetParameterAsText(3) + + input_fc = input_fc.strip() + output_fgdb = output_fgdb.strip() + output_fc_name = output_fc_name.strip() + arcpy.AddMessage("Input Feature Class: {0}".format(input_fc)) + arcpy.AddMessage("Output File GDB: {0}".format(output_fgdb)) + arcpy.AddMessage("Output Feature Class name: {0}".format(output_fc_name)) + arcpy.AddMessage("Delete Intermediate Results: {0}".format(delete_intermediate_results)) + + main(input_fc, output_fgdb, output_fc_name, delete_intermediate_results) diff --git a/V3_delivery/V4_Delivery/run_feature_id.bat b/V3_delivery/V4_Delivery/run_feature_id.bat new file mode 100644 index 0000000..35c2434 --- /dev/null +++ b/V3_delivery/V4_Delivery/run_feature_id.bat @@ -0,0 +1,17 @@ +SET DSMPATH="V:\Demo\DSM_subset" +SET DTMPATH="V:\Demo\DTM_subset" +SET ZUNITS="Meter" +SET FEATS2EXTRACT="Buildings and Trees" +SET RESULTS="V:\Demo\utah_demo_pro_debug1" +SET PROCLEN=5000 +SET POST=true +SET DEL=false +SET REG=false + +SET pathtoProPy="C:\Python34\python.exe" +SET pyscript="V:\Demo\V3_Delivery\ExtractBuildingsTreesAutomation_V3.py" + +%pathToProPy% %pyscript% %DSMPATH% %DTMPATH% %ZUNITS% %FEATS2EXTRACT% %RESULTS% %PROCLEN% %POST% %DEL% %REG% + + + diff --git a/clipRasterToPolyExtrudeMultipointApproach.py b/clipRasterToPolyExtrudeMultipointApproach.py new file mode 100644 index 0000000..d82f095 --- /dev/null +++ b/clipRasterToPolyExtrudeMultipointApproach.py @@ -0,0 +1,564 @@ +__author__ = 'geof7015' + +import arcpy +import os +import os.path +import tempfile +import glob +from arcpy.sa import * +from datetime import datetime +import gc + +arcpy.env.overwriteOutput = True +arcpy.CheckOutExtension('spatial') +arcpy.CheckOutExtension('3d') + +#inLAS = r'E:\3D_City_Data\United States\Georgia\Athens\LiDAR' +#inLASD = r'' #r'E:\3D_City_Data\United States\Georgia\Athens\New LasDataset.lasd' +#buildingFootprints = r'E:\3D_City_Data\United States\Georgia\Athens\Data.gdb\BuildingFootprints_1' +#sr = "PROJCS['NAD_1983_StatePlane_Georgia_West_FIPS_1002_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',2296583.333333333],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-84.16666666666667],PARAMETER['Scale_Factor',0.9999],PARAMETER['Latitude_Of_Origin',30.0],UNIT['Foot_US',0.3048006096012192]]" +#outputWS = r'E:\3D_City_Data\United States\Georgia\Athens\multipatch.gdb' +#scratchGDB = arcpy.env.scratchGDB +#tempFolder = tempfile.mkdtemp() + +inLASD = r'' +inLAS = r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR' +DTMRaster = r'' # C:\workspace\data\testdata\bh12TVK1800084000.img +DSMRaster = r'' # 'C:\workspace\data\testdata\hh12TVK1800084000.img' +buildingFootprints = r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\Data.gdb\BldgFootprints' +sr = 
"PROJCS['NAD_1983_StatePlane_North_Carolina_FIPS_3200_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',2000000.002616666],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-79.0],PARAMETER['Standard_Parallel_1',34.33333333333334],PARAMETER['Standard_Parallel_2',36.16666666666666],PARAMETER['Latitude_Of_Origin',33.75],UNIT['Foot_US',0.3048006096012192]]" +outputWS = r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\Workspace2.gdb' +scratchGDB = arcpy.env.scratchGDB +tempFolder = tempfile.mkdtemp() + +beginOnFeatureNumber = 0 +pointSpacingCorrectionFactor = 0.5 +interpolateBetweenPoints = False # Currently Bugged... +reduceTesselations = True +rasterExtractionApproach = True + +############### +# Definitions # +############### + + +def createlasdataset(inLAS, sr): + global inLASD + inLASD = os.path.join(tempFolder, "LASDataSet.lasd") + if arcpy.Exists(inLASD): + arcpy.Delete_management(inLASD) + arcpy.CreateLasDataset_management(inLAS, inLASD, False, "", sr, "COMPUTE_STATS") + if arcpy.Exists(inLASD): + arcpy.AddMessage("LASD File Created @ Location: " + inLASD) + return inLASD + # for multiples: return inLASD,output2,output3,etc... + else: + arcpy.AddMessage("Could Not Create LASD DataSet. Check LAS inputs for errors") + + +def findLasDatasetStatisticsfilePaths(file): + file_object = open(file, 'r') + lines = file_object.readlines() + file_object.close() + cleanLines = [] + for line in lines: + if len(line) > 1: + path = line.split(",")[0] + if os.path.isabs(path) is True and path not in cleanLines: + cleanLines.append(path) + return cleanLines + + +# Create Lists with LiDAR Statistical Information. Pt Spacing etc... Process only used in other modules. +def obtainLiDARInfo(inLASD,lasList): + if arcpy.Exists(inLASD): + arcpy.AddMessage("Calculating Necessary Statistics for Feature Extraction Process") + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + if arcpy.Exists(lasDatasetStatsText): + arcpy.Delete_management(lasDatasetStatsText) + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", + "DECIMAL_POINT") + + # TODO DJARRARD obtain a LiDAR file from list and parse the point_spacing to building footprints. 
+ # TODO DJARRARD if multiple LiDAR tiles overlap building footprints then point_spacing = pt_spacing_average + #if recursivelyCreateAndClipRastersFromLasd: + #pass + + # run arcpy.PointFileInfo_3d on the single tile (no recursion) + ptFileInfoFile = os.path.join(outputWS, 'ptFileInfoFile') + if arcpy.Exists(ptFileInfoFile): + arcpy.Delete_management(ptFileInfoFile) + arcpy.PointFileInformation_3d(lasList, ptFileInfoFile, "LAS", None, sr, "false", "false", "DECIMAL_POINT", + "false", "false") + + rows = arcpy.SearchCursor(ptFileInfoFile, + fields="FileName; Pt_Spacing; Z_Min; Z_Max", + sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max") + # Iterate through the rows in the cursor and store the + # "FileName; Pt_Spacing; Z_Min; Z_Max" + ptFileInfoList = [] + PtSpacing = [] + for row in rows: + formattedfields = ("{0}, {1}, {2}, {3}".format( + row.getValue("FileName"), + row.getValue("Pt_Spacing"), + row.getValue("Z_Min"), + row.getValue("Z_Max"))) + ptFileInfoList.append(formattedfields) + ptspacinglist = float("{0}".format(row.getValue("Pt_Spacing"))) + PtSpacing.append(ptspacinglist) + print(ptFileInfoList) + print(PtSpacing) + avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing)) + print(avgPtSpacing) + return ptFileInfoFile, ptFileInfoList, PtSpacing, avgPtSpacing + + +def interpolateBetweenLasPts(LrDSM): + # Run raster interpolation algorithm on LiDAR derived rasters if interpolateAdditionalPoints is True and not Recursive + TimesRaster = os.path.join(tempFolder, "TimesRaster.tif") + if arcpy.Exists(TimesRaster): + arcpy.Delete_management(TimesRaster) + arcpy.Times_3d(LrDSM, 100, TimesRaster) + arcpy.AddMessage("Times Raster Complete") + + IntegerRaster = os.path.join(tempFolder, "IntRaster.tif") + if arcpy.Exists(IntegerRaster): + arcpy.Delete_management(IntegerRaster) + arcpy.Int_3d(TimesRaster, IntegerRaster) + arcpy.AddMessage("Integer Raster Complete") + + BoundaryCleanRaster = os.path.join(tempFolder, "BoundaryClean.tif") + if arcpy.Exists(BoundaryCleanRaster): + arcpy.Delete_management(BoundaryCleanRaster) + BC = arcpy.sa.BoundaryClean(IntegerRaster, "NO_SORT", "true") + BC.save(BoundaryCleanRaster) + arcpy.AddMessage("BoundaryClean Raster Complete") + + FloatRaster = os.path.join(tempFolder, "FloatRaster.tif") + if arcpy.Exists(FloatRaster): + arcpy.Delete_management(FloatRaster) + arcpy.Float_3d(BoundaryCleanRaster, FloatRaster) + arcpy.AddMessage("Float Raster Complete") + + if arcpy.Exists(LrDSM): + arcpy.Delete_management(LrDSM) + arcpy.Divide_3d(FloatRaster, 100, LrDSM) + arcpy.AddMessage("Divide Raster Complete") + return LrDSM + + +def slopedAreaRasters(SlopeRaster, slopedAreasNullRaster): + # TODO Fix Memory Leak 1 + slopedAreasRaster = os.path.join(tempFolder, "slopedAreasRaster.tif") + if arcpy.Exists(slopedAreasRaster): + arcpy.Delete_management(slopedAreasRaster) + slopedAreasRasterProcess = arcpy.sa.Con(SlopeRaster, 1, 0, "VALUE >= 20") + slopedAreasRasterProcess.save(slopedAreasRaster) + # TODO Fix Memory Leak 2 + if arcpy.Exists(slopedAreasNullRaster): + arcpy.Delete_management(slopedAreasNullRaster) + slopedAreasNullRasterProcess = arcpy.sa.SetNull(slopedAreasRaster, 1, "Value = 0") + slopedAreasNullRasterProcess.save(slopedAreasNullRaster) + + arcpy.Delete_management(slopedAreasRaster) + + return slopedAreasNullRaster + + +def reduceTesselationProcess(LrDSM, SlopedAreasPolygonBuffered): + SlopeRaster = os.path.join(tempFolder, "SlopeRaster.tif") + if arcpy.Exists(SlopeRaster): + arcpy.Delete_management(SlopeRaster) + arcpy.Slope_3d(LrDSM, SlopeRaster, "DEGREE", 1) + 
+ slopedAreasNullRaster = os.path.join(tempFolder, "slopedAreasNullRaster.tif") + slopedAreaRasters(SlopeRaster=SlopeRaster, slopedAreasNullRaster=slopedAreasNullRaster) + + SlopedAreasPolygon = os.path.join(tempFolder, "SlopedAreasPolygon.shp") + if arcpy.Exists(SlopedAreasPolygon): + arcpy.Delete_management(SlopedAreasPolygon) + arcpy.RasterToPolygon_conversion(slopedAreasNullRaster, SlopedAreasPolygon, "false", "Value") + + if arcpy.Exists(SlopedAreasPolygonBuffered): + arcpy.Delete_management(SlopedAreasPolygonBuffered) + arcpy.Buffer_analysis(SlopedAreasPolygon, SlopedAreasPolygonBuffered, "2 Feet", "FULL", "ROUND", "ALL", None, "PLANAR") + + arcpy.Delete_management(slopedAreasNullRaster) + + return SlopedAreasPolygonBuffered + + +def cleanupArtifacts(single_bldg_pts, single_bldg_pts_cleaned): + + arcpy.Near3D_3d(single_bldg_pts, single_bldg_pts, str(1.4 * pointSpace), "LOCATION", "ANGLE", "DELTA") + + bldgpts = "bldgPoints" + arcpy.MakeFeatureLayer_management(single_bldg_pts, bldgpts) + + arcpy.SelectLayerByAttribute_management(bldgpts, "NEW_SELECTION", "NEAR_DELTZ < -1 Or NEAR_ANG_V = 0", "true") + + arcpy.CopyFeatures_management(bldgpts, single_bldg_pts_cleaned) + print("Artifacts Removed") + return single_bldg_pts_cleaned + +def interpolatePointsToBoundary(input_bldg_points, input_bldg_fp, output_bldg_points_with_border): + + # explode input multipoint FC to single part + # it is understood the input point FC will be multipoint. Need to convert to single part features + single_bldg_pts = os.path.join(outputWS, "singlepts") + arcpy.MultipartToSinglepart_management(input_bldg_points, single_bldg_pts) + + # Cleanup Artifacts + single_bldg_pts_cleaned = os.path.join(outputWS, "single_bldg_pts_cleaned") + cleanupArtifacts(single_bldg_pts=single_bldg_pts, single_bldg_pts_cleaned=single_bldg_pts_cleaned) + + # add geometry attributes + arcpy.AddGeometryAttributes_management(Input_Features=single_bldg_pts_cleaned, Geometry_Properties="POINT_X_Y_Z_M", + Length_Unit="", Area_Unit="", Coordinate_System="") + + # process the building footprint + # convert to line + bldg_line = os.path.join(outputWS, "bldgline") + + arcpy.FeatureToLine_management(in_features=input_bldg_fp, out_feature_class=bldg_line, cluster_tolerance=None, + attributes="NO_ATTRIBUTES") + + # Densify + arcpy.Densify_edit(in_features=bldg_line, densification_method="DISTANCE", distance="1 Feet", + max_deviation="0.33 Feet", max_angle="10") + + # convert to points + bldg_ln_pts = os.path.join(outputWS, "bldglinepts") + arcpy.FeatureVerticesToPoints_management(in_features=bldg_line, out_feature_class=bldg_ln_pts, point_location="ALL") + + # use Near tool to identify point FID from building points to the boundary points + arcpy.Near_analysis(in_features=bldg_ln_pts, near_features=single_bldg_pts_cleaned, search_radius="5 Feet", + location="NO_LOCATION", angle="NO_ANGLE", method="PLANAR") + + # now, grab the NEARI_FID field and assign that feature's z-value to the building footprint point z value + arcpy.AddField_management(bldg_ln_pts, "z_val", "DOUBLE") + tbl_fp = arcpy.da.FeatureClassToNumPyArray(bldg_ln_pts, ["NEAR_FID"]) + tbl_pts = arcpy.da.FeatureClassToNumPyArray(single_bldg_pts_cleaned, ["POINT_Z"]) + + # update the z_val attribute + with arcpy.da.UpdateCursor(bldg_ln_pts, ["z_val"]) as Pointsc: + for i, row in enumerate(Pointsc): + fid = tbl_fp[i][0] + row[0] = tbl_pts[fid-1][0] + #print(row[0]) + Pointsc.updateRow(row) + + # convert to 3D and copy + bldg_ln_pts_z = os.path.join(outputWS, "bldg_ln_pts_Z") + 
arcpy.FeatureTo3DByAttribute_3d(bldg_ln_pts, bldg_ln_pts_z, "z_val") + + # pointsMerged = os.path.join("in_memory", "pointsMerged") + arcpy.Merge_management([bldg_ln_pts_z, single_bldg_pts_cleaned], output_bldg_points_with_border) + + # Remove Intermediate Data + #if arcpy.Exists(single_bldg_pts): + # arcpy.Delete_management(single_bldg_pts) + #if arcpy.Exists(bldg_line): + # arcpy.Delete_management(bldg_line) + #if arcpy.Exists(bldg_ln_pts): + # arcpy.Delete_management(bldg_ln_pts) + #if arcpy.Exists(bldg_ln_pts_z): + # arcpy.Delete_management(bldg_ln_pts_z) + + return output_bldg_points_with_border + +def extractMultipatch(fullextent, row): + #try: + arcpy.env.extent = fullextent + # get raster extent + geom = row[0] + print("geom = ", geom) + #ext = "{0} {1} {2} {3}".format(geom.extent.XMin, geom.extent.YMin, geom.extent.XMax, geom.extent.YMax) + + # copy the feature temporarily + tp = os.path.join("in_memory", "tp{0}".format(i + beginOnFeatureNumber)) + tempGeom = arcpy.CopyFeatures_management(geom, tp) + + #extentgeom = arcpy.Describe(tp) + #extent = "{0} {1} {2} {3}".format(extentgeom.extent.XMin, extentgeom.extent.YMin, extentgeom.extent.XMax, extentgeom.extent.YMax) + #print("Building Footprint Extent = ", extent) + extentgeom = arcpy.Describe(tp) + arcpy.env.mask = tp + print("extentgeom = ", extentgeom) + extent = "{0} {1} {2} {3}".format(extentgeom.extent.XMin, extentgeom.extent.YMin, extentgeom.extent.XMax, extentgeom.extent.YMax) + print("extent = ", extent) + arcpy.env.extent = extent + + print("Begin Raster Creation Process") + #LrDSM = os.path.join(tempFolder, "LrDSM.tif") + DTM = os.path.join("in_memory", "DTM") + # Delete terrain rasters if existing. + if arcpy.Exists(DTM): + arcpy.Delete_management(DTM) + #if arcpy.Exists(LrDSM): + #arcpy.Delete_management(LrDSM) + arcpy.LasDatasetToRaster_conversion("DTMLASD", DTM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created DTM Raster at location: " + DTM) + #arcpy.LasDatasetToRaster_conversion("LRDSMLASD", LrDSM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + # valueField, "CELLSIZE", pointSpacing, heightValue) + #print("Created Last Return DSM Raster at location: " + LrDSM) + + #if interpolateBetweenPoints: + # interpolateBetweenLasPts(LrDSM=LrDSM) + + #if reduceTesselations: + # SlopedAreasPolygonBuffered = os.path.join(tempFolder, "SlopedAreasPolygonBuff.shp") + # reduceTesselationProcess(LrDSM, SlopedAreasPolygonBuffered) + # arcpy.env.mask = SlopedAreasPolygonBuffered + + # smooth the DSM + #print('smoothing DSM') + # nbr = NbrRectangle(3, 3, "CELL") + + #sm_clip = FilterRasterProcess = Filter(LrDSM, "LOW", "DATA") + + # clean up clipped raster + #arcpy.Delete_management(DSMClipRast) + + # convert raster to points + #print('converting raster to points') + + arcpy.env.mask = tp + + #arcpy.RasterToPoint_conversion(sm_clip, out_points, "Value") + + arcpy.env.extent = extent + multipoint = os.path.join("in_memory", "multipoint") + if arcpy.Exists(multipoint): + arcpy.Delete_management(multipoint) + arcpy.LASToMultipoint_3d(lasList, multipoint, pointSpacing, 1, "LAST_RETURNS", None, sr, "las", 1, "false") + print("Las to Multipoint complete") + + roofPoints = os.path.join("in_memory", "roofPoints") + if arcpy.Exists(roofPoints): + arcpy.Delete_management(roofPoints) + arcpy.Clip_analysis(multipoint, tp, roofPoints, None) + + # Delete Mulipoint.shp + if arcpy.Exists(multipoint): + arcpy.Delete_management(multipoint) + + # Interpolate Points to Boundary + 
buildingInsideAndBorderPoints = os.path.join(outputWS, "buildingBorderPoints")
+    interpolatePointsToBoundary(input_bldg_points=roofPoints, input_bldg_fp=tp,
+                                output_bldg_points_with_border=buildingInsideAndBorderPoints)
+
+    if arcpy.Exists(roofPoints):
+        arcpy.Delete_management(roofPoints)
+
+    roofTin = os.path.join(tempFolder, "roofTin")
+    roofPtsFormula = "{0} Shape.Z Mass_Points ;{1} Soft_Clip ".format(buildingInsideAndBorderPoints, tp)
+    print(roofPtsFormula)
+    arcpy.CreateTin_3d(roofTin, sr, roofPtsFormula, "false")
+    print("roof Tin Created")
+
+    # Create TIN with points
+    # print('making surface TIN')
+    # feats_tin = "{} Shape.Z Mass_Points ;".format(out_points3d)
+    #feats_tin = "{0} grid_code Mass_Points ;".format(out_points)
+    #out_surf_tin = os.path.join(tempFolder, "surfTin")
+    #if arcpy.Exists(out_surf_tin):
+    #    arcpy.Delete_management(out_surf_tin)
+    #arcpy.CreateTin_3d(out_surf_tin, sr, feats_tin, 'DELAUNAY')
+
+    # clip the DTM
+    print('clipping DTM')
+    dtmClipRast = os.path.join("in_memory", 'tempDEMclip{0}'.format(i + beginOnFeatureNumber))
+    arcpy.Clip_management(DTM, extent, dtmClipRast, tp, "true", "false")
+    # convert DEM to Int
+    #dtmClipRastInt = Int(dtmClipRast)
+
+    # add Min Height to Building Footprints
+    print('determining Minimum Building Elevation')
+    arcpy.AddField_management(tp, "ID", "SHORT", None, None, None, "ID", "true", "true", None)
+    arcpy.CalculateField_management(tp, "ID", 1, "PYTHON_9.3", None)
+    minMaxElevTable = os.path.join("in_memory", "minMaxElevTable")
+    arcpy.sa.ZonalStatisticsAsTable(tp, "ID", DTM, minMaxElevTable, "true", "MIN_MAX_MEAN")
+    arcpy.JoinField_management(tp, "ID", minMaxElevTable, "ID", "MIN;MAX")
+
+    # then, move building footprints to MIN Z Height
+    out_poly3d = os.path.join("in_memory", "out_poly3d")
+    arcpy.FeatureTo3DByAttribute_3d(tp, out_poly3d, "MIN", "")
+
+    # make ground TIN
+    gnd_feats_tin = "{} Shape.Z Hard_Clip ;".format(out_poly3d)
+    out_gnd_tin = os.path.join(tempFolder, "gndTin")
+    arcpy.CreateTin_3d(out_gnd_tin, sr, gnd_feats_tin, "DELAUNAY")
+
+    # extrude polygon between TINs
+    print('creating Multipatch')
+    this_MP = os.path.join(outputWS, "bldgMP_{0}".format(i + beginOnFeatureNumber))
+    arcpy.ExtrudeBetween_3d(roofTin, out_gnd_tin, out_poly3d, this_MP)
+
+    # add feature name to list
+    mp_list.append(this_MP)
+
+    # Delete Unnecessary files
+    arcpy.Delete_management(tp)
+    arcpy.Delete_management(minMaxElevTable)
+    arcpy.Delete_management(dtmClipRast)
+    arcpy.Delete_management(out_gnd_tin)
+    #arcpy.Delete_management(out_surf_tin)
+    arcpy.Delete_management(roofTin)
+    arcpy.Delete_management(out_poly3d)
+    arcpy.Delete_management(buildingInsideAndBorderPoints)
+    #arcpy.Delete_management(FilterRasterProcess)
+    #del nbr
+    #del sm_clip
+    del row, tp
+
+    print("Multipatch {0} Process complete @ ".format(i + beginOnFeatureNumber), str(datetime.now()))
+
+    # TODO Geoff7015 Incorporate Cleanup Building CGA from Geof7015 rule into tool here:
+    ''' every multipatch building must have LiDAR point spacing as an attribute and "Units: feet/meters
+    will need to update CGA cleanup rules settings with a conditional calculator operation where
+    it leverages these attributes and changes the cleanupGeometry operations optimally based on input features
+    final output will be two file geodatabases.
one with original buildings and the other with cleaned.'''
+    ''' Other Cleanup Utility tools/processes may be required to optimize building faces and roof geometries'''
+    #except:
+    #    print("Unable to process feature {0}".format(i + beginOnFeatureNumber))
+    #    print("Multipatch {0} Process failed @ ".format(i + beginOnFeatureNumber), str(datetime.now()))
+
+##############
+# Begin Code #
+##############
+print("Starting Process at ", str(datetime.now()))
+# If LiDAR Input is a LASD DataSet then count number of LAS files in LAS Dataset and List LAS files as input 4 GP tools
+if arcpy.Exists(inLASD):
+    arcpy.AddMessage("detected LASD Dataset as input: " + inLASD)
+    lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt")
+    arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", "DECIMAL_POINT")
+    filenames = findLasDatasetStatisticsfilePaths(lasDatasetStatsText)
+
+    if len(filenames) == 0:
+        arcpy.AddMessage("1 LAS file detected in LASD DATASET")
+    else:
+        arcpy.AddMessage("{0} LAS files detected in LASD DATASET".format(len(filenames)))
+
+    # Process lasList into Esri GP tool friendly input format
+    newstr = str(filenames)[1:-1].replace("', ", ";")
+    lasList = '"' + newstr.replace("'", "") + '"'
+
+# If the LiDAR Input is a single LAS then return 1 LAS file and format the file path as a string for GP tool input.
+if inLAS.lower().endswith('.las') and ";" not in inLAS:
+    arcpy.AddMessage("1 LAS file detected")
+    lasList = '"' + inLAS + '"'
+
+# If the LiDAR Input is a string of LAS files then count the LAS files and create a List of LAS files as GP tool input.
+if inLAS.lower().endswith('.las') and ";" in inLAS:
+    numberLASFiles = (inLAS.count(';')+1)
+    arcpy.AddMessage(str(numberLASFiles) + " LAS files detected")
+    lasList = '"' + inLAS + '"'
+
+# If the LiDAR Input is a LAS Directory then count number of LAS files and create List of LAS files as input 4 GP tools.
+if os.path.isdir(inLAS):
+    lasSearchPathDirectory = inLAS + "/*.las"
+    for name in glob.glob(lasSearchPathDirectory):
+        filename = name
+        file_extension = ".las"
+        filename, file_extension = os.path.splitext(name)
+        if file_extension == ".las":
+            # Find all LAS files in input folder. Optionally search recursively
+            recursive = True
+            lasList = []
+            if recursive:
+                for root, dirs, files in os.walk(inLAS):
+                    for file in files:
+                        if file.endswith(".las") and file not in lasList:
+                            lasList.append((os.path.join(root, file)))
+            else:
+                for file in os.listdir(inLAS):
+                    if file.endswith(".las") and file not in lasList:
+                        lasList.append((os.path.join(inLAS, file)))
+
+    # Print Number of Las files
+    if len(lasList) == 0:
+        arcpy.AddMessage("1 LAS file detected in Directory")
+    else:
+        arcpy.AddMessage("{0} LAS files detected in Directory".format(len(lasList)))
+
+    # Process lasList into Esri GP tool friendly input format
+    newstr = str(lasList)[1:-1].replace("', ", ";")
+    lasList = '"' + newstr.replace("'", "") + '"'
+
+# Convert Las file List as String and format for GP tool input
+# Create LASDataset from LAS files.
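+# Worked example of the list-to-GP-parameter conversion used above, assuming two
+# hypothetical files a.las and b.las:
+#   lasList = ['a.las', 'b.las']
+#   str(lasList)[1:-1]        -> "'a.las', 'b.las'"
+#   .replace("', ", ";")      -> "'a.las;'b.las'"
+#   .replace("'", "")         -> "a.las;b.las"
+#   wrapped in double quotes  -> "a.las;b.las" (a semicolon-delimited string the GP tools accept)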
+if inLAS.lower().endswith('.las') or os.path.isdir(inLAS):
+    createlasdataset(inLAS=inLAS, sr=sr)
+
+
+DTMLASD = "DTMLASD"
+LRDSMLASD = "LRDSMLASD"
+
+if arcpy.Exists(DTMLASD):
+    arcpy.Delete_management(DTMLASD)
+if arcpy.Exists(LRDSMLASD):
+    arcpy.Delete_management(LRDSMLASD)
+arcpy.MakeLasDatasetLayer_management(inLASD, DTMLASD, "2", "", "", "", "", "", "", "")
+arcpy.MakeLasDatasetLayer_management(inLASD, LRDSMLASD, "1;2", "Last Return", "", "", "", "", "", "")
+if arcpy.Exists(DTMLASD):
+    if arcpy.Exists(LRDSMLASD):
+        arcpy.AddMessage("LASD Layers Created")
+else:
+    arcpy.AddMessage("Could Not Create LASD Layers")
+# selector determining whether or not to interpolate additional points for TIN creation. Helps with terrible LiDAR.
+''' if interpolateAdditionalPoints is enabled then input correct heightValue & valueField for raster processing '''
+''' Determine the correct point spacing settings based on raster processing algorithm requirements '''
+
+# TODO make point spacing support recursions. obtainLiDARInfo(inLASD, lasList)[3] is currently placeholder
+''' calculate average pt spacing of LiDAR tiles building footprints intersect'''
+pointSpace = obtainLiDARInfo(inLASD, lasList)[3]
+pointSpacing = pointSpace * pointSpacingCorrectionFactor
+heightValue = 1
+valueField = "FLOAT"
+
+result = arcpy.GetCount_management(buildingFootprints)
+FootprintCount = int(result.getOutput(0))
+print("number of building Footprints to process = " + str(FootprintCount))
+
+fullextent = arcpy.Describe(buildingFootprints).extent
+
+# create list for multiPatch features
+mp_list = []
+
+# make search cursor for footprint polygons
+fields = ["SHAPE@"]
+with arcpy.da.SearchCursor(buildingFootprints, fields) as sc:
+    for i, row in enumerate(sc):
+        if (i + beginOnFeatureNumber) < FootprintCount:
+            # if i is a multiple of 50 compact the gdb
+            print("on BuildingFootprint {0}".format(i + beginOnFeatureNumber) + " of " + str(FootprintCount))
+            if not i % 50:
+                print("Began Compacting GDB @ ", str(datetime.now()))
+                arcpy.Compact_management(outputWS)
+                arcpy.Compact_management(scratchGDB)
+                print("Complete Compacting GDB @ ", str(datetime.now()))
+            extractMultipatch(fullextent=fullextent, row=row)
+
+
+# merge the MultiPatches into a single FC
+outputMerge = os.path.join(outputWS, 'outputMergeMP')
+arcpy.Merge_management(mp_list, outputMerge)
+
+#TODO DJARRARD: delete all buildingMP* files that exist in the output workspace
+# Delete Individual Multipatch Buildings
+'''if arcpy.Exists(os.path.join(outputWS, "bldgMP_0")):
+    for fc in arcpy.ListFeatureClasses("bldgMP*", "MULTIPATCH", outputWS):
+        arcpy.Delete_management(fc)'''
+
+if arcpy.Exists(DTMLASD):
+    arcpy.Delete_management(DTMLASD)
+if arcpy.Exists(LRDSMLASD):
+    arcpy.Delete_management(LRDSMLASD)
+
+print("Finished Process at ", str(datetime.now()))
diff --git a/clipRasterToPolyExtrudeTin.py b/clipRasterToPolyExtrudeTin.py
new file mode 100644
index 0000000..a8a4d9d
--- /dev/null
+++ b/clipRasterToPolyExtrudeTin.py
@@ -0,0 +1,466 @@
+# this script will create MultiPatch feature classes from a polygon and DSM/DTM Raster.
+# These rasters can also be automatically derived from LAS or LASD in this process.
+# Original Process & concept by Joseph McGlinchy & (intern name) +# LAS & LASD Integration, interpolateAdditionalPoints algorithm, & optimization by Geoff Taylor +# Enjoy :) + +import arcpy +import os +import os.path +import tempfile +import glob +from arcpy.sa import * + +arcpy.env.overwriteOutput = True +arcpy.CheckOutExtension('spatial') +arcpy.CheckOutExtension('3d') + +################# +# Define Inputs # +################# + +ProductionMode = False # Set to True for ArcGIS Pro GP Tool Use +if ProductionMode: + ''' For ArcGIS PRO GP Tool ''' + inLASD = arcpy.GetParameterAsText(0) + inLAS = arcpy.GetParameterAsText(1) + DTMRaster = arcpy.GetParameterAsText(2) + DSMRaster = arcpy.GetParameterAsText(3) + buildingFootprints = arcpy.GetParameterAsText(4) + sr = arcpy.GetParameterAsText(5) # Spatial Reference + outputWS = arcpy.GetParameterAsText(6) + scratchGDB = arcpy.env.scratchGDB + tempFolder = tempfile.mkdtemp() + interpolateAdditionalPoints = arcpy.GetParameterAsText(7) + recursivelyCreateAndClipRastersFromLasd = arcpy.GetParameterAsText(8) + +else: + ''' For Testing Purposes''' # Comment out inputs + inLASD = r'' #C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR\backup\LASD\test.lasd' #C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR\backup\LASD\test.lasd' # C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR\LiDARAOI.lasd' + inLAS = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\LAS\329.las' #C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR' #C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR' # C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR13810E4750N.las # C:\Users\geof7015\PycharmProjects\LiDARTestData\testData # C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR13810E4750N.las # r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR' # C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR + DTMRaster = r'' # C:\workspace\data\testdata\bh12TVK1800084000.img + DSMRaster = r'' # 'C:\workspace\data\testdata\hh12TVK1800084000.img' + buildingFootprints = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\Data.gdb\Buildings329' # C:\Users\geof7015\PycharmProjects\testData\Charlotte\Data.gdb\BldgFootprints' + sr = "PROJCS['NAD_1983_HARN_StatePlane_Colorado_North_FIPS_0501_Feet',GEOGCS['GCS_North_American_1983_HARN',DATUM['D_North_American_1983_HARN',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',3000000.000316083],PARAMETER['False_Northing',999999.999996],PARAMETER['Central_Meridian',-105.5],PARAMETER['Standard_Parallel_1',39.71666666666667],PARAMETER['Standard_Parallel_2',40.78333333333333],PARAMETER['Latitude_Of_Origin',39.33333333333334],UNIT['Foot_US',0.3048006096012192]]" + # "PROJCS['NAD_1983_StatePlane_North_Carolina_FIPS_3200_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',2000000.002616666],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-79.0],PARAMETER['Standard_Parallel_1',34.33333333333334],PARAMETER['Standard_Parallel_2',36.16666666666666],PARAMETER['Latitude_Of_Origin',33.75],UNIT['Foot_US',0.3048006096012192]]" + outputWS = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\Data.gdb' + scratchGDB = arcpy.env.scratchGDB + tempFolder = 
tempfile.mkdtemp() + # TODO Geof7015 resolve bug with Boundary Clean Raster Int to Float issue! + # interpolateAdditionalPoints currently broken + interpolateAdditionalPoints = True + recursivelyCreateAndClipRastersFromLasd = False + +######################## +# Set Global Variables # +######################## + +global lasList +global DTM +global DSM +global LrDSM +global ptFileInfoFile +global ptFileInfoList +global ptSpacing +global avgPtSpacing + +################## +# Define Modules # +################## +''' place all code modules here and notate each well...''' + +def createlasdataset(inLAS, sr): + global inLASD + inLASD = os.path.join(tempFolder, "LASDataSet.lasd") + if arcpy.Exists(inLASD): + arcpy.Delete_management(inLASD) + arcpy.CreateLasDataset_management(inLAS, inLASD, False, "", sr, "COMPUTE_STATS") + if arcpy.Exists(inLASD): + arcpy.AddMessage("LASD File Created @ Location: " + inLASD) + return inLASD + # for multiples: return inLASD,output2,output3,etc... + else: + arcpy.AddMessage("Could Not Create LASD DataSet. Check LAS inputs for errors") + +def findLasDatasetStatisticsfilePaths(file): + file_object = open(file, 'r') + lines = file_object.readlines() + file_object.close() + cleanLines = [] + for line in lines: + if len(line) > 1: + path = line.split(",")[0] + if os.path.isabs(path) is True and path not in cleanLines: + cleanLines.append(path) + return cleanLines + +# Create Lists with LiDAR Statistical Information. Pt Spacing etc... Process only used in other modules. +def obtainLiDARInfo(inLASD,lasList): + if arcpy.Exists(inLASD): + arcpy.AddMessage("Calculating Necessary Statistics for Feature Extraction Process") + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + if arcpy.Exists(lasDatasetStatsText): + arcpy.Delete_management(lasDatasetStatsText) + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", + "DECIMAL_POINT") + + # TODO DJARRARD obtain a LiDAR file from list and parse the point_spacing to building footprints. 
+ # TODO DJARRARD if multiple LiDAR tiles overlap building footprints then point_spacing = pt_spacing_average + if recursivelyCreateAndClipRastersFromLasd: + pass + + if not recursivelyCreateAndClipRastersFromLasd: + # run arcpy.PointFileInfo_3d on the single tile (no recursion) + ptFileInfoFile = os.path.join(outputWS, 'ptFileInfoFile') + if arcpy.Exists(ptFileInfoFile): + arcpy.Delete_management(ptFileInfoFile) + arcpy.PointFileInformation_3d(lasList, ptFileInfoFile, "LAS", None, sr, "false", "false", "DECIMAL_POINT", + "false", "false") + + rows = arcpy.SearchCursor(ptFileInfoFile, + fields="FileName; Pt_Spacing; Z_Min; Z_Max", + sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max") + # Iterate through the rows in the cursor and store the + # "FileName; Pt_Spacing; Z_Min; Z_Max" + ptFileInfoList = [] + PtSpacing = [] + for row in rows: + formattedfields = ("{0}, {1}, {2}, {3}".format( + row.getValue("FileName"), + row.getValue("Pt_Spacing"), + row.getValue("Z_Min"), + row.getValue("Z_Max"))) + ptFileInfoList.append(formattedfields) + ptspacinglist = float("{0}".format( + row.getValue("Pt_Spacing"))) + PtSpacing.append(ptspacinglist) + print(ptFileInfoList) + print(PtSpacing) + avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing)) + print(avgPtSpacing) + return ptFileInfoFile, ptFileInfoList, PtSpacing, avgPtSpacing + +# TODO Geof7015 & DJARRARD Integrate automated class code detection for feature extraction process for .las or .lasd use + +def createlaslayers(inLASD): + # Something like def createLasDatasetLayer(inLASD, #DTMLASDclasses, #DTMLASDreturns, #LrDSMLASDclasses, #LrDSMLASDreturns, interpolateAdditionalPoints): + if arcpy.Exists("DTMLASD"): + arcpy.Delete_management("DTMLASD") + if arcpy.Exists("LRDSMLASD"): + arcpy.Delete_management("LRDSMLASD") + arcpy.MakeLasDatasetLayer_management(inLASD, "DTMLASD", "2", "", "", "", "", "", "", "") + arcpy.MakeLasDatasetLayer_management(inLASD, "LRDSMLASD", "1;2;4", "Last Return", "", "", "", "", "", "") + if arcpy.Exists("DTMLASD"): + if arcpy.Exists("LRDSMLASD"): + arcpy.AddMessage("LASD Layers Created") + else: + arcpy.AddMessage("Could Not Create LASD Layers") + +def createSurfaceRasters(lasdLayerGround, lasdLayerSurface, outputDTM, outputDSM): + # selector determining whether or not to interpolateAdditional points for tin creation. Helps with Terribe LiDAR. + ''' if interpolateAdditionalPoints is enabled then input correct heightValue & valueField for raster processing ''' + ''' Determine the correct point spacing settings based on raster processing algorithm requirements ''' + if interpolateAdditionalPoints: + if not recursivelyCreateAndClipRastersFromLasd: + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] # return 3 is Average LiDAR point Spacing + else: + # TODO make point spacing support recursions. obtainLiDARInfo(inLASD, lasList)[3] is currently placeholder + ''' calculate average pt spacing of LiDAR tiles building footprints intersect''' + pointSpacing = 2.2 / 0.5 + heightValue = 100 + valueField = "INT" + else: + if not recursivelyCreateAndClipRastersFromLasd: + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] + else: + # TODO make point spacing support recursions. obtainLiDARInfo(inLASD, lasList)[3] is currently placeholder + ''' calculate average pt spacing of LiDAR tiles building footprints intersect''' + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] + heightValue = 1 + valueField = "FLOAT" + + # Delete terrain rasters if existing. 
+    if arcpy.Exists(outputDTM):
+        arcpy.Delete_management(outputDTM)
+    if arcpy.Exists(outputDSM):
+        arcpy.Delete_management(outputDSM)
+
+    arcpy.LasDatasetToRaster_conversion(lasdLayerGround, outputDTM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR",
+                                        valueField, "CELLSIZE", pointSpacing, heightValue)
+    print("Created DTM Raster at location: " + outputDTM)
+    arcpy.LasDatasetToRaster_conversion(lasdLayerSurface, outputDSM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR",
+                                        valueField, "CELLSIZE", pointSpacing, heightValue)
+    print("Created Last Return DSM Raster at location: " + outputDSM)
+    return outputDTM, outputDSM
+
+def interpolateBetweenLasPts(DTM, LrDSM):
+    # Raster cleanup via the Boundary Clean operation (fills & smooths voids based on the nearest pixel value)
+    DTMBC = os.path.join(scratchGDB, "DTMBC")
+    DTMBoundaryClean = arcpy.sa.BoundaryClean(DTM, "ASCEND", "TWO_WAY")
+    DTMBoundaryClean.save(DTMBC)
+    LrDSMBC = os.path.join(scratchGDB, "LrDSMBC")
+    LrDSMBoundaryClean = arcpy.sa.BoundaryClean(LrDSM, "ASCEND", "TWO_WAY")
+    LrDSMBoundaryClean.save(LrDSMBC)
+
+    # Divide the rasters by 100 to undo the earlier x100 scaling & override the original
+    # DTM & DSM raster inputs with the LiDAR-derived ones
+    DTMFinal = arcpy.Raster(DTMBC) / 100  # output raster may still need to be cast to a true Float
+    DTM = os.path.join(scratchGDB, "DTM")
+    DTMFinal.save(DTM)
+    LrDSMFinal = arcpy.Raster(LrDSMBC) / 100  # output raster may still need to be cast to a true Float
+    DSM = os.path.join(scratchGDB, "DSM")
+    LrDSMFinal.save(DSM)
+    print("Complete Creation of Interpolation Data")
+    return DTM, DSM
+
+def createNDSM(DSM, DTM, nDSM):
+    temp = arcpy.Raster(DSM) - arcpy.Raster(DTM)
+    # TODO Geof7015 obtain feet or meters from SR as input for automated unit selector function
+    ''' then pass conversion equation: if units == meters: minBldgHeight * 0.3048 else: minBldgHeight '''
+    minBldgHeight = 6  # value is in feet
+    nDSMRaster = SetNull(temp < minBldgHeight, temp)
+    nDSMRaster.save(nDSM)
+    del temp
+    return nDSM
+
+def maskDSM(DSM, maskRaster, DSMMasked):
+    # Mask the DSM to the nDSM height-limited extent
+    DSMMaskOperation = arcpy.sa.ExtractByMask(DSM, maskRaster)
+    DSMMaskOperation.save(DSMMasked)
+    print("DSMMasked File Location: " + DSMMasked)
+    print("DTM File Location: " + DTM)
+    return DSMMasked
+
+##############
+# Begin Code #
+##############
+''' commence Code/Script operation here and notate well...'''
+
+# Conditional logic that accepts a .lasd file, a .las file or folder, or rasters as input
+''' Detects which dataset exists and runs the correct operation for it '''
+
+# If the LiDAR input is a LASD dataset, count the LAS files it contains and list them as input for the GP tools
+if arcpy.Exists(inLASD):
+    arcpy.AddMessage("Detected LASD Dataset as input: " + inLASD)
+    lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt")
+    arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", "DECIMAL_POINT")
+    filenames = findLasDatasetStatisticsfilePaths(lasDatasetStatsText)
+
+    if len(filenames) == 0:
+        arcpy.AddMessage("1 LAS file detected in LASD DATASET")
+    else:
+        arcpy.AddMessage("{0} LAS files detected in LASD DATASET".format(len(filenames)))
+
+    # Process lasList into an Esri GP-tool-friendly input format
+    newstr = str(filenames)[1:-1].replace("', ", ";")
+    lasList = '"' + newstr.replace("'", "") + '"'
+
+# If the LiDAR input is a single LAS file, format it as a string for GP tool input. 
+if inLAS.lower().endswith('.las') and ";" not in inLAS: + arcpy.AddMessage("1 LAS file detected") + lasList = '"' + inLAS + '"' + +# If the LiDAR Input is a string of LAS files then count number of LAS files and create List LAS files input 4 GP tools. +if inLAS.lower().endswith('.las') and ";" in inLAS: + numberLASFiles = (inLAS.count(';')+1) + arcpy.AddMessage(str(numberLASFiles) + " LAS file detected") + lasList = '"' + inLAS + '"' + +# If the LiDAR Input is a LAS Directory then count number of LAS files and create List of LAS files as input 4 GP tools. +if os.path.isdir(inLAS): + lasSearchPathDirectory = inLAS + "/*.las" + for name in glob.glob(lasSearchPathDirectory): + filename = name + file_extension = ".las" + filename, file_extension = os.path.splitext(name) + if file_extension == ".las": + # Find all LAS files in input folder. Optionally search recursively + recursive = True + lasList = [] + if recursive: + for root, dirs, files in os.walk(inLAS): + for file in files: + if file.endswith(".las") and file not in lasList: + lasList.append((os.path.join(root, file))) + else: + for file in os.listdir(inLAS): + if file.endswith(".las") and file not in lasList: + lasList.append((os.path.join(inLAS, file))) + + # Print Number of Las files + if len(lasList) == 0: + arcpy.AddMessage("1 LAS file detected in Directory") + else: + arcpy.AddMessage("{0} LAS files detected in Directory".format(len(lasList))) + + # Process lasList into Esri GP tool friendly input format + newstr = str(lasList)[1:-1].replace("', ", ";") + lasList = '"' + newstr.replace("'", "") + '"' + +# Convert Las file List as String and format for GP tool input +# Create LASDataset from LAS files. +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS): + createlasdataset(inLAS=inLAS, sr=sr) + +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + createlaslayers(inLASD=inLASD) + if not recursivelyCreateAndClipRastersFromLasd: + LrDSM = os.path.join(scratchGDB, "LrDSM") + DTM = os.path.join(scratchGDB, "DTM") + createSurfaceRasters(lasdLayerGround="DTMLASD", lasdLayerSurface="LRDSMLASD", outputDTM=DTM, outputDSM=LrDSM) + arcpy.Delete_management("DTMLASD") + arcpy.Delete_management("LRDSMLASD") + else: + pass + +# Run raster interpolation algorithm on LiDAR derived rasters if interpolateAdditionalPoints is True and not Recursive +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + if interpolateAdditionalPoints and not recursivelyCreateAndClipRastersFromLasd: + interpolateBetweenLasPts(DTM, LrDSM) + else: + DSM = os.path.join(outputWS, "LrDSM") + +# Process Rasters prior to recursion process if Raster as user Input or recursivelyCreateAndClipRastersFromLasd deselect +if (arcpy.Exists(DTMRaster) and arcpy.Exists(DSMRaster)) or not recursivelyCreateAndClipRastersFromLasd: + + # Change names of initial input rasters to DTM and DSM to align with LiDAR derived rasters for less coding :) + if arcpy.Exists(DTMRaster) and arcpy.Exists(DSMRaster): + DTM = DTMRaster + DSM = DSMRaster + + # Begin process of deriving rasters from created LAS DataSets + if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + pass + # get spatial reference + sr = arcpy.Describe(DTM).spatialReference + + DSM = LrDSM + nDSM = os.path.join(scratchGDB, "nDSM") + createNDSM(DSM=DSM, DTM=DTM, nDSM=nDSM) + + DSMMaskedRaster = os.path.join(scratchGDB, "DSMMaskedRaster") + maskDSM(DSM=DSM, maskRaster=nDSM, DSMMasked=DSMMaskedRaster) + + DSM = DSMMaskedRaster + +# create 
list for multiPatch features
+mp_list = []
+
+# make search cursor for footprint polygons
+fields = ["SHAPE@"]
+with arcpy.da.SearchCursor(buildingFootprints, fields) as sc:
+    for i, row in enumerate(sc):
+        try:
+            print("on feature {0}".format(i))
+            # get raster extent
+            geom = row[0]
+            ext = "{0} {1} {2} {3}".format(geom.extent.XMin, geom.extent.YMin, geom.extent.XMax, geom.extent.YMax)
+
+            # copy the feature temporarily
+            tp = os.path.join(outputWS, "tp")
+            tempGeom = arcpy.CopyFeatures_management(geom, tp)
+
+            # clip the DSM
+            print('clipping DSM')
+            DSMClipRast = os.path.join(arcpy.env.scratchFolder, 'tempclip{0}.tif'.format(i))
+            arcpy.Clip_management(DSM, ext, DSMClipRast, tp, "true", "false")
+
+            if interpolateAdditionalPoints:
+                # convert the clipped DSM to an integer raster
+                DSMClipRast = Int(DSMClipRast)
+
+            # smooth the DSM with a low-pass filter
+            print('smoothing DSM')
+            nbr = NbrRectangle(3, 3, "CELL")
+
+            # sm_clip = FocalStatistics(out_raster, nbr, "MEAN", "DATA")
+            sm_clip = Filter(DSMClipRast, "LOW", "DATA")
+
+            # clean up clipped raster
+            arcpy.Delete_management(DSMClipRast)
+
+            # convert raster to points
+            print('converting raster to points')
+            out_points = "in_memory/clipPoints"
+            arcpy.RasterToPoint_conversion(sm_clip, out_points, "Value")
+
+            # convert to 3d by values
+            # out_points3d = "in_memory/clipPoints3d"
+            # arcpy.FeatureTo3DByAttribute_3d(in_features=out_points, out_feature_class=out_points3d,
+            #                                 height_field="grid_code", to_height_field="")
+
+            # Create TIN with points
+            print('making surface TIN')
+            # feats_tin = "{} Shape.Z Mass_Points ;".format(out_points3d)
+            feats_tin = "{0} grid_code Mass_Points ;".format(out_points)
+            out_surf_tin = os.path.join(arcpy.env.scratchFolder, "surfTin")
+            arcpy.CreateTin_3d(out_surf_tin, sr, feats_tin, 'DELAUNAY')
+
+            # clip the DTM
+            print('clipping DTM')
+            dtmClipRast = os.path.join(arcpy.env.scratchFolder, 'tempDEMclip{0}.tif'.format(i))
+            arcpy.Clip_management(DTM, ext, dtmClipRast, tp, "true", "false")
+            # convert the clipped DTM to an integer raster
+            dtmClipRastInt = Int(dtmClipRast)
+
+            # add Min Height to Building Footprints
+            print('determining Minimum Building Elevation')
+            arcpy.AddField_management(tp, "ID", "SHORT", None, None, None, "ID", "true", "true", None)
+            arcpy.CalculateField_management(tp, "ID", 1, "PYTHON_9.3", None)
+            minMaxElevTable = "in_memory/minMaxElev"
+            arcpy.sa.ZonalStatisticsAsTable(tp, "ID", dtmClipRastInt, minMaxElevTable, "true", "MIN_MAX_MEAN")
+            arcpy.JoinField_management(tp, "ID", minMaxElevTable, "ID", "MIN;MAX")
+
+            # then, move building footprints to MIN Z Height
+            out_poly3d = "in_memory/out_poly3d"
+            arcpy.FeatureTo3DByAttribute_3d(tp, out_poly3d, "MIN", "")
+
+            # make ground TIN
+            gnd_feats_tin = "{} Shape.Z Hard_Clip ;".format(out_poly3d)
+            out_gnd_tin = os.path.join(arcpy.env.scratchFolder, "gndTin")
+            arcpy.CreateTin_3d(out_gnd_tin, sr, gnd_feats_tin, "DELAUNAY")
+
+            # extrude polygon between TINs
+            print('creating Multipatch')
+            this_MP = os.path.join(outputWS, "bldgMP_{0}".format(i))
+            arcpy.ExtrudeBetween_3d(out_surf_tin, out_gnd_tin, out_poly3d, this_MP)
+
+            # add feature name to list
+            mp_list.append(this_MP)
+
+            # Delete Unnecessary files
+            arcpy.Delete_management(tp)
+            arcpy.Delete_management(out_points)
+            arcpy.Delete_management(minMaxElevTable)
+            arcpy.Delete_management(out_poly3d)
+
+            # TODO Geoff7015 Incorporate Cleanup Building 
CGA from Geof7015 rule into tool here: + ''' every multipatch building must have LiDAR point spacing as a attribute and "Units: feet/meters + will need to update CGA cleanup rules settings with a conditional calculator operation where + it leverages these attributes and changes the cleanupGeometry operations optimally based on input features + final output will be two file geodatabases. one with original buildings and other with cleaned.''' + ''' Other Cleanup Utility tools/processes may be required to optimize building faces and roof geometries''' + except: + print("Unable to process feature {0}".format(i)) + +# merge the MultiPatches into a single FC +outputMerge = os.path.join(outputWS, 'outputMergeMP') +arcpy.Merge_management(mp_list, outputMerge) + +#TODO DJARRARD: delete all buildingMP* files that exist in the output workspace +# Delete Individual Multipatch Buildings +'''if arcpy.Exists(os.path.join(outputWS, "bldgMP_0")): + for fc in arcpy.ListFeatureClasses("bldgMP*", "MULTIPATCH", outputWS): + arcpy.Delete_management(fc)''' + +if arcpy.Exists("DTMLASD"): + arcpy.Delete_management("DTMLASD") +if arcpy.Exists("LRDSMLASD"): + arcpy.Delete_management("LRDSMLASD") diff --git a/clipRasterToPolyExtrudeTinBoulder.py b/clipRasterToPolyExtrudeTinBoulder.py new file mode 100644 index 0000000..7add5e1 --- /dev/null +++ b/clipRasterToPolyExtrudeTinBoulder.py @@ -0,0 +1,468 @@ +# this script will create MultiPatch feature classes from a polygon and DSM/DTM Raster. +# These rasters acn also be automatically derived from LAS or LASD in this process. +# Original Process & concept by Joseph McGlinchy & (intern name) +# LAS & LASD Integration, interpolateAdditionalPoints algorithm, & optimization by Geoff Taylor +# Enjoy :) + +import arcpy +import os +import os.path +import tempfile +import glob +from arcpy.sa import * + +arcpy.env.overwriteOutput = True +arcpy.CheckOutExtension('spatial') +arcpy.CheckOutExtension('3d') + +################# +# Define Inputs # +################# + +ProductionMode = False # Set to True for ArcGIS Pro GP Tool Use +if ProductionMode: + ''' For ArcGIS PRO GP Tool ''' + inLASD = arcpy.GetParameterAsText(0) + inLAS = arcpy.GetParameterAsText(1) + DTMRaster = arcpy.GetParameterAsText(2) + DSMRaster = arcpy.GetParameterAsText(3) + buildingFootprints = arcpy.GetParameterAsText(4) + sr = arcpy.GetParameterAsText(5) # Spatial Reference + outputWS = arcpy.GetParameterAsText(6) + interpolateAdditionalPoints = arcpy.GetParameterAsText(7) + recursivelyCreateAndClipRastersFromLasd = arcpy.GetParameterAsText(8) + scratchGDB = arcpy.env.scratchGDB + tempFolder = tempfile.mkdtemp() + +else: + ''' For Testing Purposes''' # Comment out inputs + inLASD = r'' #C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR\backup\LASD\test.lasd' #C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR\backup\LASD\test.lasd' # C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR\LiDARAOI.lasd' + inLAS = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\LAS\329.las' #C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR' #C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR' # C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR13810E4750N.las # C:\Users\geof7015\PycharmProjects\LiDARTestData\testData # C:\Users\geof7015\PycharmProjects\LiDARTestData\testData\LiDAR13810E4750N.las # r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR' # C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR + DTMRaster = r'' # 
C:\workspace\data\testdata\bh12TVK1800084000.img + DSMRaster = r'' # 'C:\workspace\data\testdata\hh12TVK1800084000.img' + buildingFootprints = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\Data.gdb\Building329' # C:\Users\geof7015\PycharmProjects\testData\Charlotte\Data.gdb\BldgFootprints' + sr = "PROJCS['NAD_1983_HARN_StatePlane_Colorado_North_FIPS_0501_Feet',GEOGCS['GCS_North_American_1983_HARN',DATUM['D_North_American_1983_HARN',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',3000000.000316083],PARAMETER['False_Northing',999999.999996],PARAMETER['Central_Meridian',-105.5],PARAMETER['Standard_Parallel_1',39.71666666666667],PARAMETER['Standard_Parallel_2',40.78333333333333],PARAMETER['Latitude_Of_Origin',39.33333333333334],UNIT['Foot_US',0.3048006096012192]]" + # "PROJCS['NAD_1983_StatePlane_North_Carolina_FIPS_3200_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',2000000.002616666],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-79.0],PARAMETER['Standard_Parallel_1',34.33333333333334],PARAMETER['Standard_Parallel_2',36.16666666666666],PARAMETER['Latitude_Of_Origin',33.75],UNIT['Foot_US',0.3048006096012192]]" + outputWS = r'C:\Users\geof7015\PycharmProjects\testData\Boulder\Workspace.gdb' + scratchGDB = arcpy.env.scratchGDB + tempFolder = tempfile.mkdtemp() + # TODO Geof7015 resolve bug with Boundary Clean Raster Int to Float issue! + # interpolateAdditionalPoints currently broken + interpolateAdditionalPoints = False + recursivelyCreateAndClipRastersFromLasd = False + +######################## +# Set Global Variables # +######################## + +global lasList +global DTM +global DSM +global LrDSM +global ptFileInfoFile +global ptFileInfoList +global ptSpacing +global avgPtSpacing + +################## +# Define Modules # +################## +''' place all code modules here and notate each well...''' + +def createlasdataset(inLAS, sr): + global inLASD + inLASD = os.path.join(tempFolder, "LASDataSet.lasd") + if arcpy.Exists(inLASD): + arcpy.Delete_management(inLASD) + arcpy.CreateLasDataset_management(inLAS, inLASD, False, "", sr, "COMPUTE_STATS") + if arcpy.Exists(inLASD): + arcpy.AddMessage("LASD File Created @ Location: " + inLASD) + return inLASD + # for multiples: return inLASD,output2,output3,etc... + else: + arcpy.AddMessage("Could Not Create LASD DataSet. Check LAS inputs for errors") + +def findLasDatasetStatisticsfilePaths(file): + file_object = open(file, 'r') + lines = file_object.readlines() + file_object.close() + cleanLines = [] + for line in lines: + if len(line) > 1: + path = line.split(",")[0] + if os.path.isabs(path) is True and path not in cleanLines: + cleanLines.append(path) + return cleanLines + +# Create Lists with LiDAR Statistical Information. Pt Spacing etc... Process only used in other modules. 
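+# Usage note: callers in this script rely on the positional return value
+# (ptFileInfoFile, ptFileInfoList, PtSpacing, avgPtSpacing); for example,
+# createSurfaceRasters() reads the average point spacing with
+#   obtainLiDARInfo(inLASD, lasList)[3]
+# and passes it to LasDatasetToRaster_conversion as the CELLSIZE value.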
+def obtainLiDARInfo(inLASD,lasList): + if arcpy.Exists(inLASD): + arcpy.AddMessage("Calculating Necessary Statistics for Feature Extraction Process") + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + if arcpy.Exists(lasDatasetStatsText): + arcpy.Delete_management(lasDatasetStatsText) + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", + "DECIMAL_POINT") + + # TODO DJARRARD obtain a LiDAR file from list and parse the point_spacing to building footprints. + # TODO DJARRARD if multiple LiDAR tiles overlap building footprints then point_spacing = pt_spacing_average + if recursivelyCreateAndClipRastersFromLasd: + pass + + if not recursivelyCreateAndClipRastersFromLasd: + # run arcpy.PointFileInfo_3d on the single tile (no recursion) + ptFileInfoFile = os.path.join(outputWS, 'ptFileInfoFile') + if arcpy.Exists(ptFileInfoFile): + arcpy.Delete_management(ptFileInfoFile) + arcpy.PointFileInformation_3d(lasList, ptFileInfoFile, "LAS", None, sr, "false", "false", "DECIMAL_POINT", + "false", "false") + + rows = arcpy.SearchCursor(ptFileInfoFile, + fields="FileName; Pt_Spacing; Z_Min; Z_Max", + sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max") + # Iterate through the rows in the cursor and store the + # "FileName; Pt_Spacing; Z_Min; Z_Max" + ptFileInfoList = [] + PtSpacing = [] + for row in rows: + formattedfields = ("{0}, {1}, {2}, {3}".format( + row.getValue("FileName"), + row.getValue("Pt_Spacing"), + row.getValue("Z_Min"), + row.getValue("Z_Max"))) + ptFileInfoList.append(formattedfields) + ptspacinglist = float("{0}".format( + row.getValue("Pt_Spacing"))) + PtSpacing.append(ptspacinglist) + print(ptFileInfoList) + print(PtSpacing) + avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing)) + print(avgPtSpacing) + return ptFileInfoFile, ptFileInfoList, PtSpacing, avgPtSpacing + +# TODO Geof7015 & DJARRARD Integrate automated class code detection for feature extraction process for .las or .lasd use + +def createlaslayers(inLASD): + # Something like def createLasDatasetLayer(inLASD, #DTMLASDclasses, #DTMLASDreturns, #LrDSMLASDclasses, #LrDSMLASDreturns, interpolateAdditionalPoints): + if arcpy.Exists("DTMLASD"): + arcpy.Delete_management("DTMLASD") + if arcpy.Exists("LRDSMLASD"): + arcpy.Delete_management("LRDSMLASD") + arcpy.MakeLasDatasetLayer_management(inLASD, "DTMLASD", "2", "", "", "", "", "", "", "") + arcpy.MakeLasDatasetLayer_management(inLASD, "LRDSMLASD", "2;6", "", "", "", "", "", "", "") + if arcpy.Exists("DTMLASD"): + if arcpy.Exists("LRDSMLASD"): + arcpy.AddMessage("LASD Layers Created") + else: + arcpy.AddMessage("Could Not Create LASD Layers") + +def createSurfaceRasters(lasdLayerGround, lasdLayerSurface, outputDTM, outputDSM): + # selector determining whether or not to interpolateAdditional points for tin creation. Helps with Terribe LiDAR. + ''' if interpolateAdditionalPoints is enabled then input correct heightValue & valueField for raster processing ''' + ''' Determine the correct point spacing settings based on raster processing algorithm requirements ''' + if interpolateAdditionalPoints: + if not recursivelyCreateAndClipRastersFromLasd: + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] # return 3 is Average LiDAR point Spacing + else: + # TODO make point spacing support recursions. 
obtainLiDARInfo(inLASD, lasList)[3] is currently placeholder + ''' calculate average pt spacing of LiDAR tiles building footprints intersect''' + pointSpace = obtainLiDARInfo(inLASD, lasList)[3] + pointSpacing = pointSpace / 0.5 + heightValue = 100 + valueField = "INT" + else: + if not recursivelyCreateAndClipRastersFromLasd: + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] + else: + # TODO make point spacing support recursions. obtainLiDARInfo(inLASD, lasList)[3] is currently placeholder + ''' calculate average pt spacing of LiDAR tiles building footprints intersect''' + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] + heightValue = 1 + valueField = "FLOAT" + + # Delete terrain rasters if existing. + if arcpy.Exists(outputDTM): + arcpy.Delete_management(outputDTM) + if arcpy.Exists(outputDSM): + arcpy.Delete_management(outputDSM) + + arcpy.LasDatasetToRaster_conversion(lasdLayerGround, outputDTM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created DTM Raster at location: " + outputDTM) + arcpy.LasDatasetToRaster_conversion(lasdLayerSurface, outputDSM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created Last Return DSM Raster at location: " + outputDSM) + return outputDTM, outputDSM + + +# TODO INT. make sure that integer raster is being produced +def interpolateBetweenLasPts(DTM,LrDSM): + # Raster Cleanup Process via Boundary Clean Operation (Fills & Smooths Voids based on nearest pixel value) + DTMBC = os.path.join(scratchGDB, "DTMBC") + DTMBoundaryClean = arcpy.sa.BoundaryClean(DTM, "ASCEND", "TWO_WAY") + DTMBoundaryClean.save(DTMBC) + LrDSMBC = os.path.join(scratchGDB, "LrDSMBC") + LrDSMBoundaryClean = arcpy.sa.BoundaryClean(LrDSM, "ASCEND", "TWO_WAY") + LrDSMBoundaryClean.save(LrDSMBC) + + # TODO INT. 
check and see id produced raster is created correctly + # Divide rasters by 1000 for accurate Height & Override the Original DEM & DSM Raster Inputs with the LiDAR Ext ones + DTMFinal = arcpy.Raster(DTMBC) / 100 # May need to resolve output rasters format to true Float + DTM = os.path.join(scratchGDB, "DTM") + DTMFinal.save(DTM) + LrDSMFinal = arcpy.Raster(LrDSMBC) / 100 # May need to resolve output rasters format to true Float + DSM = os.path.join(scratchGDB, "DSM") + LrDSMFinal.save(DSM) + print("Complete Creation of Interpolation Data") + return DTM, DSM + +def createNDSM(DSM, DTM, nDSM): + temp = arcpy.Raster(DSM) - arcpy.Raster(DTM) + # TODO Geof7015 obtain feet or meters from SR as input for automated unit selector function + ''' then pass conversion equation case units == meters: minBldgHeight * 0.3048 else: minBldgHeight ''' + minBldgHeight = 6 # Value is in Feet + nDSMRaster = SetNull(temp < minBldgHeight, temp) + nDSMRaster.save(nDSM) + del temp + return nDSM + +def maskDSM(DSM,maskRaster,DSMMasked): + # Mask DSM to nDSM height limiting value + DSMMaskOperation = arcpy.sa.ExtractByMask(DSM, maskRaster) + DSMMaskOperation.save(DSMMasked) + print("DSMMasked File Location" + DSMMasked) + print("DTM File Location" + DTM) + return DSMMasked + +############## +# Begin Code # +############## +''' commence Code/Script operation here and notate well...''' + +# Conditional operation to allow for input of .lasd file, .las file folder or Rasters +''' Detects if dataSet exists and runs the correct operation based on dataSet''' + +# If LiDAR Input is a LASD DataSet then count number of LAS files in LAS Dataset and List LAS files as input 4 GP tools +if arcpy.Exists(inLASD): + arcpy.AddMessage("detected LASD Dataset as input: " + inLASD) + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", "DECIMAL_POINT") + filenames = findLasDatasetStatisticsfilePaths(lasDatasetStatsText) + + if len(filenames) == 0: + arcpy.AddMessage("1 LAS file detected in LASD DATASET") + else: + arcpy.AddMessage("{0} LAS files detected in LASD DATASET".format(len(filenames))) + + # Process lasList into Esri GP tool friendly input format + newstr = str(filenames)[1:-1].replace("', ", ";") + lasList = '"' + newstr.replace("'", "") + '"' + +# If the LiDAR Input is a a single LAS then return 1 of LAS files and format file to string for GP tools input. +if inLAS.lower().endswith('.las') and ";" not in inLAS: + arcpy.AddMessage("1 LAS file detected") + lasList = '"' + inLAS + '"' + +# If the LiDAR Input is a string of LAS files then count number of LAS files and create List LAS files input 4 GP tools. +if inLAS.lower().endswith('.las') and ";" in inLAS: + numberLASFiles = (inLAS.count(';')+1) + arcpy.AddMessage(str(numberLASFiles) + " LAS file detected") + lasList = '"' + inLAS + '"' + +# If the LiDAR Input is a LAS Directory then count number of LAS files and create List of LAS files as input 4 GP tools. +if os.path.isdir(inLAS): + lasSearchPathDirectory = inLAS + "/*.las" + for name in glob.glob(lasSearchPathDirectory): + filename = name + file_extension = ".las" + filename, file_extension = os.path.splitext(name) + if file_extension == ".las": + # Find all LAS files in input folder. 
Optionally search recursively + recursive = True + lasList = [] + if recursive: + for root, dirs, files in os.walk(inLAS): + for file in files: + if file.endswith(".las") and file not in lasList: + lasList.append((os.path.join(root, file))) + else: + for file in os.listdir(inLAS): + if file.endswith(".las") and file not in lasList: + lasList.append((os.path.join(inLAS, file))) + + # Print Number of Las files + if len(lasList) == 0: + arcpy.AddMessage("1 LAS file detected in Directory") + else: + arcpy.AddMessage("{0} LAS files detected in Directory".format(len(lasList))) + + # Process lasList into Esri GP tool friendly input format + newstr = str(lasList)[1:-1].replace("', ", ";") + lasList = '"' + newstr.replace("'", "") + '"' + +# Convert Las file List as String and format for GP tool input +# Create LASDataset from LAS files. +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS): + createlasdataset(inLAS=inLAS, sr=sr) + +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + createlaslayers(inLASD=inLASD) + if not recursivelyCreateAndClipRastersFromLasd: + LrDSM = os.path.join(scratchGDB, "LrDSM") + DTM = os.path.join(scratchGDB, "DTM") + createSurfaceRasters(lasdLayerGround="DTMLASD", lasdLayerSurface="LRDSMLASD", outputDTM=DTM, outputDSM=LrDSM) + arcpy.Delete_management("DTMLASD") + arcpy.Delete_management("LRDSMLASD") + else: + pass + +# TODO INT Make sure that INT raster is produced +# Run raster interpolation algorithm on LiDAR derived rasters if interpolateAdditionalPoints is True and not Recursive +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + if interpolateAdditionalPoints and not recursivelyCreateAndClipRastersFromLasd: + interpolateBetweenLasPts(DTM, LrDSM) + else: + DSM = os.path.join(outputWS, "LrDSM") + +# Process Rasters prior to recursion process if Raster as user Input or recursivelyCreateAndClipRastersFromLasd deselect +if (arcpy.Exists(DTMRaster) and arcpy.Exists(DSMRaster)) or not recursivelyCreateAndClipRastersFromLasd: + + # Change names of initial input rasters to DTM and DSM to align with LiDAR derived rasters for less coding :) + if arcpy.Exists(DTMRaster) and arcpy.Exists(DSMRaster): + DTM = DTMRaster + DSM = DSMRaster + + # Begin process of deriving rasters from created LAS DataSets + if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + pass + # get spatial reference + sr = arcpy.Describe(DTM).spatialReference + + DSM = LrDSM + nDSM = os.path.join(scratchGDB, "nDSM") + createNDSM(DSM=DSM, DTM=DTM, nDSM=nDSM) + + DSMMaskedRaster = os.path.join(scratchGDB, "DSMMaskedRaster") + maskDSM(DSM=DSM, maskRaster=nDSM, DSMMasked=DSMMaskedRaster) + + DSM = DSMMaskedRaster + +# create list for multiPatch features +mp_list = [] + +# make search cursor for footprint polygons +fields = ["SHAPE@"] +with arcpy.da.SearchCursor(buildingFootprints, fields) as sc: + for i, row in enumerate(sc): + try: + print("on feature {0}".format(i)) + # get raster extent + geom = row[0] + ext = "{0} {1} {2} {3}".format(geom.extent.XMin, geom.extent.YMin, geom.extent.XMax, geom.extent.YMax) + + # copy the feature temporarily + tp = os.path.join(outputWS, "tp") + tempGeom = arcpy.CopyFeatures_management(geom, tp) + + # clip the DSM + print('clipping DSM') + DSMClipRast = os.path.join(arcpy.env.scratchFolder, 'tempclip{0}.tif'.format(i)) + arcpy.Clip_management(DSM, ext, DSMClipRast, tp, "true", "false") + + if interpolateAdditionalPoints: + # Int Raster + DSMClipRast = Int(DSMClipRast) + + # 
smooth the DSM + print('smoothing DSM') + nbr = NbrRectangle(3, 3, "CELL") + + # sm_clip = FocalStatistics(out_raster, nbr, "MEAN", "DATA") + sm_clip = Filter(DSMClipRast, "LOW", "DATA") + + # clean up clipped raster + arcpy.Delete_management(DSMClipRast) + + # TODO INT Make sure that Raster Points are produced correctly and at precise elevations + # convert raster to points + print('converting raster to points') + out_points = "in_memory/clipPoints" + arcpy.RasterToPoint_conversion(sm_clip, out_points, "Value") + + # convert to 3d by values + # out_points3d ="in_memory/clipPoints3d" + # arcpy.FeatureTo3DByAttribute_3d(in_features=out_points, out_feature_class=out_points3d, + # height_field="grid_code", to_height_field="") + + # TODO INT Make sure that TIN Surface is produced correctly and at precise elevation + # Create TIN with points + print('making surface TIN') + # feats_tin = "{} Shape.Z Mass_Points ;".format(out_points3d) + feats_tin = "{0} grid_code Mass_Points ;".format(out_points) + out_surf_tin = os.path.join(arcpy.env.scratchFolder, "surfTin") + arcpy.CreateTin_3d(out_surf_tin, sr, feats_tin, 'DELAUNAY') + + # clip the DTM + print('clipping DTM') + dtmClipRast = os.path.join(arcpy.env.scratchFolder, 'tempDEMclip{0}.tif'.format(i)) + arcpy.Clip_management(DTM, ext, dtmClipRast, tp, "true", "false") + + # TODO INT Check to ensure whether or not this is necessary + # convert DEM to Int + if interpolateAdditionalPoints: + dtmClipRast = Int(dtmClipRast) + + # add Min Height to Building Footprints + print('determining Minimum Building Elevation') + arcpy.AddField_management(tp, "ID", "SHORT", None, None, None, "ID", "true", "true", None) + arcpy.CalculateField_management(tp, "ID", 1, "PYTHON_9.3", None) + minMaxElevTable = "in_memory/minMaxElev" + arcpy.sa.ZonalStatisticsAsTable(tp, "ID", dtmClipRast, minMaxElevTable, "true", "MIN_MAX_MEAN") + arcpy.JoinField_management(tp, "ID", minMaxElevTable, "ID", "MIN;MAX") + + # then, move building footprints to MIN Z Height + out_poly3d = "in_memory/out_poly3d" + arcpy.FeatureTo3DByAttribute_3d(tp, out_poly3d, "MIN", "") + + # make ground TIN + gnd_feats_tin = "{} Shape.Z Hard_Clip ;".format(out_poly3d) + out_gnd_tin = os.path.join(arcpy.env.scratchFolder, "gndTin") + arcpy.CreateTin_3d(out_gnd_tin, sr, gnd_feats_tin, "DELAUNAY") + + # extrude polygon between TINs + print('creating Multipatch') + this_MP = os.path.join(outputWS, "bldgMP_{0}".format(i)) + arcpy.ExtrudeBetween_3d(out_surf_tin, out_gnd_tin, out_poly3d, this_MP) + + # add feature name to list + mp_list.append(this_MP) + + # Delete Unnecessary files + arcpy.Delete_management(tp) + arcpy.Delete_management(out_points) + arcpy.Delete_management(minMaxElevTable) + arcpy.Delete_management(out_poly3d) + + # TODO Geoff7015 Incorporate Cleanup Building CGA from Geof7015 rule into tool here: + ''' every multipatch building must have LiDAR point spacing as a attribute and "Units: feet/meters + will need to update CGA cleanup rules settings with a conditional calculator operation where + it leverages these attributes and changes the cleanupGeometry operations optimally based on input features + final output will be two file geodatabases. 
one with original buildings and other with cleaned.''' + ''' Other Cleanup Utility tools/processes may be required to optimize building faces and roof geometries''' + except: + print("Unable to process feature {0}".format(i)) + +# merge the MultiPatches into a single FC +outputMerge = os.path.join(outputWS, 'outputMergeMP') +arcpy.Merge_management(mp_list, outputMerge) + +#TODO DJARRARD: delete all buildingMP* files that exist in the output workspace +# Delete Individual Multipatch Buildings +'''if arcpy.Exists(os.path.join(outputWS, "bldgMP_0")): + for fc in arcpy.ListFeatureClasses("bldgMP*", "MULTIPATCH", outputWS): + arcpy.Delete_management(fc)''' + +if arcpy.Exists("DTMLASD"): + arcpy.Delete_management("DTMLASD") +if arcpy.Exists("LRDSMLASD"): + arcpy.Delete_management("LRDSMLASD") diff --git a/clipRasterToPolyExtrudeTinCharlotte.py b/clipRasterToPolyExtrudeTinCharlotte.py new file mode 100644 index 0000000..dac75cc --- /dev/null +++ b/clipRasterToPolyExtrudeTinCharlotte.py @@ -0,0 +1,470 @@ +# this script will create MultiPatch feature classes from a polygon and DSM/DTM Raster. +# These rasters acn also be automatically derived from LAS or LASD in this process. +# Original Process & concept by Joseph McGlinchy & (intern name) +# LAS & LASD Integration, interpolateAdditionalPoints algorithm, & optimization by Geoff Taylor +# Enjoy :) + +import arcpy +import os +import os.path +import tempfile +import glob +from arcpy.sa import * + +arcpy.env.overwriteOutput = True +arcpy.CheckOutExtension('spatial') +arcpy.CheckOutExtension('3d') + +################# +# Define Inputs # +################# + +ProductionMode = False # Set to True for ArcGIS Pro GP Tool Use +if ProductionMode: + ''' For ArcGIS PRO GP Tool ''' + inLASD = arcpy.GetParameterAsText(0) + inLAS = arcpy.GetParameterAsText(1) + DTMRaster = arcpy.GetParameterAsText(2) + DSMRaster = arcpy.GetParameterAsText(3) + buildingFootprints = arcpy.GetParameterAsText(4) + sr = arcpy.GetParameterAsText(5) # Spatial Reference + outputWS = arcpy.GetParameterAsText(6) + interpolateAdditionalPoints = arcpy.GetParameterAsText(7) + recursivelyCreateAndClipRastersFromLasd = arcpy.GetParameterAsText(8) + scratchGDB = arcpy.env.scratchGDB + tempFolder = tempfile.mkdtemp() + +else: + ''' For Testing Purposes''' # Comment out inputs + inLASD = r'' + inLAS = r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\LiDAR' + DTMRaster = r'' # C:\workspace\data\testdata\bh12TVK1800084000.img + DSMRaster = r'' # 'C:\workspace\data\testdata\hh12TVK1800084000.img' + buildingFootprints = r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\Data.gdb\BldgFootprints' + sr = "PROJCS['NAD_1983_StatePlane_North_Carolina_FIPS_3200_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',2000000.002616666],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-79.0],PARAMETER['Standard_Parallel_1',34.33333333333334],PARAMETER['Standard_Parallel_2',36.16666666666666],PARAMETER['Latitude_Of_Origin',33.75],UNIT['Foot_US',0.3048006096012192]]" + outputWS = r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\Workspace.gdb' + scratchGDB = arcpy.env.scratchGDB + tempFolder = tempfile.mkdtemp() + # TODO Geof7015 resolve bug with Boundary Clean Raster Int to Float issue! 
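+    # NOTE (reading of the code, not from documentation): BoundaryClean requires
+    # an integer raster, so interpolateBetweenLasPts() works on elevations scaled
+    # by 100 and cast to Int, then divides by 100 afterwards; the leftover
+    # Int-to-Float conversion issue is why the flag below is marked broken.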
+ # interpolateAdditionalPoints currently broken + interpolateAdditionalPoints = True + recursivelyCreateAndClipRastersFromLasd = False + +######################## +# Set Global Variables # +######################## + +global lasList +global DTM +global DSM +global LrDSM +global ptFileInfoFile +global ptFileInfoList +global ptSpacing +global avgPtSpacing + +################## +# Define Modules # +################## +''' place all code modules here and notate each well...''' + +def createlasdataset(inLAS, sr): + global inLASD + inLASD = os.path.join(tempFolder, "LASDataSet.lasd") + if arcpy.Exists(inLASD): + arcpy.Delete_management(inLASD) + arcpy.CreateLasDataset_management(inLAS, inLASD, False, "", sr, "COMPUTE_STATS") + if arcpy.Exists(inLASD): + arcpy.AddMessage("LASD File Created @ Location: " + inLASD) + return inLASD + # for multiples: return inLASD,output2,output3,etc... + else: + arcpy.AddMessage("Could Not Create LASD DataSet. Check LAS inputs for errors") + +def findLasDatasetStatisticsfilePaths(file): + file_object = open(file, 'r') + lines = file_object.readlines() + file_object.close() + cleanLines = [] + for line in lines: + if len(line) > 1: + path = line.split(",")[0] + if os.path.isabs(path) is True and path not in cleanLines: + cleanLines.append(path) + return cleanLines + +# Create Lists with LiDAR Statistical Information. Pt Spacing etc... Process only used in other modules. +def obtainLiDARInfo(inLASD,lasList): + if arcpy.Exists(inLASD): + arcpy.AddMessage("Calculating Necessary Statistics for Feature Extraction Process") + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + if arcpy.Exists(lasDatasetStatsText): + arcpy.Delete_management(lasDatasetStatsText) + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", + "DECIMAL_POINT") + + # TODO DJARRARD obtain a LiDAR file from list and parse the point_spacing to building footprints. 
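+    # NOTE (assumption): pt_spacing_average in the TODO below would be the mean
+    # of Pt_Spacing over only the tiles that intersect the footprint, not over
+    # every entry in PtSpacing.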
+ # TODO DJARRARD if multiple LiDAR tiles overlap building footprints then point_spacing = pt_spacing_average + if recursivelyCreateAndClipRastersFromLasd: + pass + + if not recursivelyCreateAndClipRastersFromLasd: + # run arcpy.PointFileInfo_3d on the single tile (no recursion) + ptFileInfoFile = os.path.join(outputWS, 'ptFileInfoFile') + if arcpy.Exists(ptFileInfoFile): + arcpy.Delete_management(ptFileInfoFile) + arcpy.PointFileInformation_3d(lasList, ptFileInfoFile, "LAS", None, sr, "false", "false", "DECIMAL_POINT", + "false", "false") + + rows = arcpy.SearchCursor(ptFileInfoFile, + fields="FileName; Pt_Spacing; Z_Min; Z_Max", + sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max") + # Iterate through the rows in the cursor and store the + # "FileName; Pt_Spacing; Z_Min; Z_Max" + ptFileInfoList = [] + PtSpacing = [] + for row in rows: + formattedfields = ("{0}, {1}, {2}, {3}".format( + row.getValue("FileName"), + row.getValue("Pt_Spacing"), + row.getValue("Z_Min"), + row.getValue("Z_Max"))) + ptFileInfoList.append(formattedfields) + ptspacinglist = float("{0}".format( + row.getValue("Pt_Spacing"))) + PtSpacing.append(ptspacinglist) + print(ptFileInfoList) + print(PtSpacing) + avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing)) + print(avgPtSpacing) + return ptFileInfoFile, ptFileInfoList, PtSpacing, avgPtSpacing + +# TODO Geof7015 & DJARRARD Integrate automated class code detection for feature extraction process for .las or .lasd use + +def createlaslayers(inLASD): + # Something like def createLasDatasetLayer(inLASD, #DTMLASDclasses, #DTMLASDreturns, #LrDSMLASDclasses, #LrDSMLASDreturns, interpolateAdditionalPoints): + if arcpy.Exists("DTMLASD"): + arcpy.Delete_management("DTMLASD") + if arcpy.Exists("LRDSMLASD"): + arcpy.Delete_management("LRDSMLASD") + arcpy.MakeLasDatasetLayer_management(inLASD, "DTMLASD", "2", "", "", "", "", "", "", "") + arcpy.MakeLasDatasetLayer_management(inLASD, "LRDSMLASD", "1;2;4", "Last Return", "", "", "", "", "", "") + if arcpy.Exists("DTMLASD"): + if arcpy.Exists("LRDSMLASD"): + arcpy.AddMessage("LASD Layers Created") + else: + arcpy.AddMessage("Could Not Create LASD Layers") + +def createSurfaceRasters(lasdLayerGround, lasdLayerSurface, outputDTM, outputDSM): + # selector determining whether or not to interpolateAdditional points for tin creation. Helps with Terribe LiDAR. + ''' if interpolateAdditionalPoints is enabled then input correct heightValue & valueField for raster processing ''' + ''' Determine the correct point spacing settings based on raster processing algorithm requirements ''' + if interpolateAdditionalPoints: + if not recursivelyCreateAndClipRastersFromLasd: + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] # return 3 is Average LiDAR point Spacing + else: + # TODO make point spacing support recursions. obtainLiDARInfo(inLASD, lasList)[3] is currently placeholder + ''' calculate average pt spacing of LiDAR tiles building footprints intersect''' + pointSpace = obtainLiDARInfo(inLASD, lasList)[3] + pointSpacing = pointSpace / 0.5 + heightValue = 100 + valueField = "INT" + else: + if not recursivelyCreateAndClipRastersFromLasd: + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] + else: + # TODO make point spacing support recursions. obtainLiDARInfo(inLASD, lasList)[3] is currently placeholder + ''' calculate average pt spacing of LiDAR tiles building footprints intersect''' + pointSpacing = obtainLiDARInfo(inLASD, lasList)[3] + heightValue = 1 + valueField = "FLOAT" + + # Delete terrain rasters if existing. 
+ if arcpy.Exists(outputDTM): + arcpy.Delete_management(outputDTM) + if arcpy.Exists(outputDSM): + arcpy.Delete_management(outputDSM) + + arcpy.LasDatasetToRaster_conversion(lasdLayerGround, outputDTM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created DTM Raster at location: " + outputDTM) + arcpy.LasDatasetToRaster_conversion(lasdLayerSurface, outputDSM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created Last Return DSM Raster at location: " + outputDSM) + return outputDTM, outputDSM + + +# TODO INT. make sure that integer raster is being produced +def interpolateBetweenLasPts(DTM,LrDSM): + # Raster Cleanup Process via Boundary Clean Operation (Fills & Smooths Voids based on nearest pixel value) + DTMBC = os.path.join(scratchGDB, "DTMBC") + DTMBoundaryClean = arcpy.sa.BoundaryClean(DTM, "ASCEND", "TWO_WAY") + DTMBoundaryClean.save(DTMBC) + LrDSMBC = os.path.join(scratchGDB, "LrDSMBC") + LrDSMBoundaryClean = arcpy.sa.BoundaryClean(LrDSM, "ASCEND", "TWO_WAY") + LrDSMBoundaryClean.save(LrDSMBC) + + # TODO INT. check and see id produced raster is created correctly + # Divide rasters by 1000 for accurate Height & Override the Original DEM & DSM Raster Inputs with the LiDAR Ext ones + DTMFinal = arcpy.Raster(DTMBC) / 100 # May need to resolve output rasters format to true Float + DTM = os.path.join(scratchGDB, "DTM") + DTMFinal.save(DTM) + LrDSMFinal = arcpy.Raster(LrDSMBC) / 100 # May need to resolve output rasters format to true Float + DSM = os.path.join(scratchGDB, "DSM") + LrDSMFinal.save(DSM) + print("Complete Creation of Interpolation Data") + return DTM, DSM + +def createNDSM(DSM, DTM, nDSM): + temp = arcpy.Raster(DSM) - arcpy.Raster(DTM) + # TODO Geof7015 obtain feet or meters from SR as input for automated unit selector function + ''' then pass conversion equation case units == meters: minBldgHeight * 0.3048 else: minBldgHeight ''' + minBldgHeight = 6 # Value is in Feet + nDSMRaster = SetNull(temp < minBldgHeight, temp) + nDSMRaster.save(nDSM) + del temp + return nDSM + +def maskDSM(DSM,maskRaster,DSMMasked): + # Mask DSM to nDSM height limiting value + DSMMaskOperation = arcpy.sa.ExtractByMask(DSM, maskRaster) + DSMMaskOperation.save(DSMMasked) + print("DSMMasked File Location" + DSMMasked) + print("DTM File Location" + DTM) + return DSMMasked + +############## +# Begin Code # +############## +''' commence Code/Script operation here and notate well...''' + +# Conditional operation to allow for input of .lasd file, .las file folder or Rasters +''' Detects if dataSet exists and runs the correct operation based on dataSet''' + +# If LiDAR Input is a LASD DataSet then count number of LAS files in LAS Dataset and List LAS files as input 4 GP tools +if arcpy.Exists(inLASD): + arcpy.AddMessage("detected LASD Dataset as input: " + inLASD) + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", "DECIMAL_POINT") + filenames = findLasDatasetStatisticsfilePaths(lasDatasetStatsText) + + if len(filenames) == 0: + arcpy.AddMessage("1 LAS file detected in LASD DATASET") + else: + arcpy.AddMessage("{0} LAS files detected in LASD DATASET".format(len(filenames))) + + # Process lasList into Esri GP tool friendly input format + newstr = str(filenames)[1:-1].replace("', ", ";") + lasList = '"' + newstr.replace("'", "") + '"' + +# If the LiDAR Input is a 
a single LAS then return 1 of LAS files and format file to string for GP tools input. +if inLAS.lower().endswith('.las') and ";" not in inLAS: + arcpy.AddMessage("1 LAS file detected") + lasList = '"' + inLAS + '"' + +# If the LiDAR Input is a string of LAS files then count number of LAS files and create List LAS files input 4 GP tools. +if inLAS.lower().endswith('.las') and ";" in inLAS: + numberLASFiles = (inLAS.count(';')+1) + arcpy.AddMessage(str(numberLASFiles) + " LAS file detected") + lasList = '"' + inLAS + '"' + +# If the LiDAR Input is a LAS Directory then count number of LAS files and create List of LAS files as input 4 GP tools. +if os.path.isdir(inLAS): + lasSearchPathDirectory = inLAS + "/*.las" + for name in glob.glob(lasSearchPathDirectory): + filename = name + file_extension = ".las" + filename, file_extension = os.path.splitext(name) + if file_extension == ".las": + # Find all LAS files in input folder. Optionally search recursively + recursive = True + lasList = [] + if recursive: + for root, dirs, files in os.walk(inLAS): + for file in files: + if file.endswith(".las") and file not in lasList: + lasList.append((os.path.join(root, file))) + else: + for file in os.listdir(inLAS): + if file.endswith(".las") and file not in lasList: + lasList.append((os.path.join(inLAS, file))) + + # Print Number of Las files + if len(lasList) == 0: + arcpy.AddMessage("1 LAS file detected in Directory") + else: + arcpy.AddMessage("{0} LAS files detected in Directory".format(len(lasList))) + + # Process lasList into Esri GP tool friendly input format + newstr = str(lasList)[1:-1].replace("', ", ";") + lasList = '"' + newstr.replace("'", "") + '"' + +# Convert Las file List as String and format for GP tool input +# Create LASDataset from LAS files. +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS): + createlasdataset(inLAS=inLAS, sr=sr) + +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + createlaslayers(inLASD=inLASD) + if not recursivelyCreateAndClipRastersFromLasd: + LrDSM = os.path.join(scratchGDB, "LrDSM") + DTM = os.path.join(scratchGDB, "DTM") + createSurfaceRasters(lasdLayerGround="DTMLASD", lasdLayerSurface="LRDSMLASD", outputDTM=DTM, outputDSM=LrDSM) + arcpy.Delete_management("DTMLASD") + arcpy.Delete_management("LRDSMLASD") + else: + pass + +# TODO INT Make sure that INT raster is produced +# Run raster interpolation algorithm on LiDAR derived rasters if interpolateAdditionalPoints is True and not Recursive +if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + if interpolateAdditionalPoints and not recursivelyCreateAndClipRastersFromLasd: + interpolateBetweenLasPts(DTM, LrDSM) + else: + DSM = os.path.join(outputWS, "LrDSM") + +# Process Rasters prior to recursion process if Raster as user Input or recursivelyCreateAndClipRastersFromLasd deselect +if (arcpy.Exists(DTMRaster) and arcpy.Exists(DSMRaster)) or not recursivelyCreateAndClipRastersFromLasd: + + # Change names of initial input rasters to DTM and DSM to align with LiDAR derived rasters for less coding :) + if arcpy.Exists(DTMRaster) and arcpy.Exists(DSMRaster): + DTM = DTMRaster + DSM = DSMRaster + + # Begin process of deriving rasters from created LAS DataSets + if inLAS.lower().endswith('.las') or os.path.isdir(inLAS) or arcpy.Exists(inLASD): + pass + # get spatial reference + sr = arcpy.Describe(DTM).spatialReference + + DSM = LrDSM + nDSM = os.path.join(scratchGDB, "nDSM") + createNDSM(DSM=DSM, DTM=DTM, nDSM=nDSM) + + DSMMaskedRaster = 
os.path.join(scratchGDB, "DSMMaskedRaster") + maskDSM(DSM=DSM, maskRaster=nDSM, DSMMasked=DSMMaskedRaster) + + DSM = DSMMaskedRaster + +# create list for multiPatch features +mp_list = [] + +# make search cursor for footprint polygons +fields = ["SHAPE@"] +with arcpy.da.SearchCursor(buildingFootprints, fields) as sc: + for i, row in enumerate(sc): + try: + print("on feature {0}".format(i)) + # get raster extent + geom = row[0] + ext = "{0} {1} {2} {3}".format(geom.extent.XMin, geom.extent.YMin, geom.extent.XMax, geom.extent.YMax) + + # copy the feature temporarily + tp = os.path.join(outputWS, "tp_{0}".format(i)) + tempGeom = arcpy.CopyFeatures_management(geom, tp) + + # clip the DSM + print('clipping DSM') + DSMClipRast = os.path.join(arcpy.env.scratchFolder, 'tempclip{0}.tif'.format(i)) + arcpy.Clip_management(DSM, ext, DSMClipRast, tp, "true", "false") + + if interpolateAdditionalPoints: + # Int Raster + DSMClipRast = Int(DSMClipRast) + + # smooth the DSM + print('smoothing DSM') + nbr = NbrRectangle(3, 3, "CELL") + + # sm_clip = FocalStatistics(out_raster, nbr, "MEAN", "DATA") + sm_clip = Filter(DSMClipRast, "LOW", "DATA") + + # clean up clipped raster + # arcpy.Delete_management(DSMClipRast) + + # TODO INT Make sure that Raster Points are produced correctly and at precise elevations + # convert raster to points + print('converting raster to points') + out_points = "in_memory/clipPoints_{0}".format(i) + arcpy.RasterToPoint_conversion(sm_clip, out_points, "Value") + + # convert to 3d by values + # out_points3d ="in_memory/clipPoints3d" + # arcpy.FeatureTo3DByAttribute_3d(in_features=out_points, out_feature_class=out_points3d, + # height_field="grid_code", to_height_field="") + + # TODO INT Make sure that TIN Surface is produced correctly and at precise elevation + # Create TIN with points + print('making surface TIN') + # feats_tin = "{} Shape.Z Mass_Points ;".format(out_points3d) + feats_tin = "{0} grid_code Mass_Points ;".format(out_points) + out_surf_tin = os.path.join(arcpy.env.scratchFolder, "surfTin_{0}".format(i)) + arcpy.CreateTin_3d(out_surf_tin, sr, feats_tin, 'DELAUNAY') + + # clip the DTM + print('clipping DTM') + dtmClipRast = os.path.join(arcpy.env.scratchFolder, 'tempDEMclip{0}.tif'.format(i)) + arcpy.Clip_management(DTM, ext, dtmClipRast, tp, "true", "false") + + # TODO INT Check to ensure whether or not this is necessary + # convert DEM to Int + if interpolateAdditionalPoints: + dtmClipRast = Int(dtmClipRast) + + # add Min Height to Building Footprints + print('determining Minimum Building Elevation') + arcpy.AddField_management(tp, "ID", "SHORT", None, None, None, "ID", "true", "true", None) + arcpy.CalculateField_management(tp, "ID", 1, "PYTHON_9.3", None) + minMaxElevTable = "in_memory/minMaxElev_{0}".format(i) + arcpy.sa.ZonalStatisticsAsTable(tp, "ID", dtmClipRast, minMaxElevTable, "true", "MIN_MAX_MEAN") + arcpy.JoinField_management(tp, "ID", minMaxElevTable, "ID", "MIN;MAX") + + # then, move building footprints to MIN Z Height + out_poly3d = "in_memory/out_poly3d_{0}".format(i) + arcpy.FeatureTo3DByAttribute_3d(tp, out_poly3d, "MIN", "") + + # TODO INT determine whether or not ground TIN can be removed from process and multipatch be produced + # make ground TIN + gnd_feats_tin = "{} Shape.Z Hard_Clip ;".format(out_poly3d) + out_gnd_tin = os.path.join(arcpy.env.scratchFolder, "gndTin_{0}".format(i)) + arcpy.CreateTin_3d(out_gnd_tin, sr, gnd_feats_tin, "DELAUNAY") + + # extrude polygon between TINs + print('creating Multipatch') + this_MP = os.path.join(outputWS, 
"bldgMP_{0}".format(i)) + arcpy.ExtrudeBetween_3d(out_surf_tin, out_gnd_tin, out_poly3d, this_MP) + + # add feature name to list + mp_list.append(this_MP) + + # Delete Unnecessary files + # arcpy.Delete_management(tp) + # arcpy.Delete_management(out_points) + # arcpy.Delete_management(minMaxElevTable) + # arcpy.Delete_management(out_poly3d) + # arcpy.Delete_management (out_gnd_tin) + # arcpy.Delete_management (out_poly3d) + + # TODO Geoff7015 Incorporate Cleanup Building CGA from Geof7015 rule into tool here: + ''' every multipatch building must have LiDAR point spacing as a attribute and "Units: feet/meters + will need to update CGA cleanup rules settings with a conditional calculator operation where + it leverages these attributes and changes the cleanupGeometry operations optimally based on input features + final output will be two file geodatabases. one with original buildings and other with cleaned.''' + ''' Other Cleanup Utility tools/processes may be required to optimize building faces and roof geometries''' + except: + print("Unable to process feature {0}".format(i)) + +# merge the MultiPatches into a single FC +outputMerge = os.path.join(outputWS, 'outputMergeMP') +arcpy.Merge_management(mp_list, outputMerge) + +#TODO Geof7015: delete all buildingMP* files that exist in the output workspace +# Delete Individual Multipatch Buildings +'''if arcpy.Exists(os.path.join(outputWS, "bldgMP_0")): + for fc in arcpy.ListFeatureClasses("bldgMP*", "MULTIPATCH", outputWS): + arcpy.Delete_management(fc)''' + +if arcpy.Exists("DTMLASD"): + arcpy.Delete_management("DTMLASD") +if arcpy.Exists("LRDSMLASD"): + arcpy.Delete_management("LRDSMLASD") diff --git a/clipRasterToPolyExtrudeTinFinal.py b/clipRasterToPolyExtrudeTinFinal.py new file mode 100644 index 0000000..8b89399 --- /dev/null +++ b/clipRasterToPolyExtrudeTinFinal.py @@ -0,0 +1,439 @@ +__author__ = 'geof7015' + +import arcpy +import os +import os.path +import tempfile +import glob +from arcpy.sa import * +from datetime import datetime +import gc + +arcpy.env.overwriteOutput = True +arcpy.CheckOutExtension('spatial') +arcpy.CheckOutExtension('3d') + +inLAS = r'E:\3D_City_Data\United States\Georgia\Athens\LiDAR' +inLASD = r'' #r'E:\3D_City_Data\United States\Georgia\Athens\New LasDataset.lasd' +buildingFootprints = r'E:\3D_City_Data\United States\Georgia\Athens\Data.gdb\BuildingFootprints_1' +sr = "PROJCS['NAD_1983_StatePlane_Georgia_West_FIPS_1002_Feet',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',2296583.333333333],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-84.16666666666667],PARAMETER['Scale_Factor',0.9999],PARAMETER['Latitude_Of_Origin',30.0],UNIT['Foot_US',0.3048006096012192]]" +outputWS = r'E:\3D_City_Data\United States\Georgia\Athens\multipatch.gdb' +scratchGDB = arcpy.env.scratchGDB +tempFolder = tempfile.mkdtemp() + +beginOnFeatureNumber = 0 +pointSpacingCorrectionFactor = 0.5 +interpolateBetweenPoints = False # Currently Bugged... 
+reduceTesselations = True +rasterExtractionApproach = True + +############### +# Definitions # +############### + + +def createlasdataset(inLAS, sr): + global inLASD + inLASD = os.path.join(tempFolder, "LASDataSet.lasd") + if arcpy.Exists(inLASD): + arcpy.Delete_management(inLASD) + arcpy.CreateLasDataset_management(inLAS, inLASD, False, "", sr, "COMPUTE_STATS") + if arcpy.Exists(inLASD): + arcpy.AddMessage("LASD File Created @ Location: " + inLASD) + return inLASD + # for multiples: return inLASD,output2,output3,etc... + else: + arcpy.AddMessage("Could Not Create LASD DataSet. Check LAS inputs for errors") + +def findLasDatasetStatisticsfilePaths(file): + file_object = open(file, 'r') + lines = file_object.readlines() + file_object.close() + cleanLines = [] + for line in lines: + if len(line) > 1: + path = line.split(",")[0] + if os.path.isabs(path) is True and path not in cleanLines: + cleanLines.append(path) + return cleanLines + +# Create Lists with LiDAR Statistical Information. Pt Spacing etc... Process only used in other modules. +def obtainLiDARInfo(inLASD,lasList): + if arcpy.Exists(inLASD): + arcpy.AddMessage("Calculating Necessary Statistics for Feature Extraction Process") + lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt") + if arcpy.Exists(lasDatasetStatsText): + arcpy.Delete_management(lasDatasetStatsText) + arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", + "DECIMAL_POINT") + + # TODO DJARRARD obtain a LiDAR file from list and parse the point_spacing to building footprints. + # TODO DJARRARD if multiple LiDAR tiles overlap building footprints then point_spacing = pt_spacing_average + #if recursivelyCreateAndClipRastersFromLasd: + #pass + + # run arcpy.PointFileInfo_3d on the single tile (no recursion) + ptFileInfoFile = os.path.join(outputWS, 'ptFileInfoFile') + if arcpy.Exists(ptFileInfoFile): + arcpy.Delete_management(ptFileInfoFile) + arcpy.PointFileInformation_3d(lasList, ptFileInfoFile, "LAS", None, sr, "false", "false", "DECIMAL_POINT", + "false", "false") + + rows = arcpy.SearchCursor(ptFileInfoFile, + fields="FileName; Pt_Spacing; Z_Min; Z_Max", + sort_fields="FileName; Pt_Spacing; Z_Min; Z_Max") + # Iterate through the rows in the cursor and store the + # "FileName; Pt_Spacing; Z_Min; Z_Max" + ptFileInfoList = [] + PtSpacing = [] + for row in rows: + formattedfields = ("{0}, {1}, {2}, {3}".format( + row.getValue("FileName"), + row.getValue("Pt_Spacing"), + row.getValue("Z_Min"), + row.getValue("Z_Max"))) + ptFileInfoList.append(formattedfields) + ptspacinglist = float("{0}".format(row.getValue("Pt_Spacing"))) + PtSpacing.append(ptspacinglist) + print(ptFileInfoList) + print(PtSpacing) + avgPtSpacing = sum(PtSpacing)/float(len(PtSpacing)) + print(avgPtSpacing) + return ptFileInfoFile, ptFileInfoList, PtSpacing, avgPtSpacing + + +def interpolateBetweenLasPts(LrDSM): + # Run raster interpolation algorithm on LiDAR derived rasters if interpolateAdditionalPoints is True and not Recursive + TimesRaster = os.path.join(tempFolder, "TimesRaster.tif") + if arcpy.Exists(TimesRaster): + arcpy.Delete_management(TimesRaster) + arcpy.Times_3d(LrDSM, 100, TimesRaster) + arcpy.AddMessage("Times Raster Complete") + + IntegerRaster = os.path.join(tempFolder, "IntRaster.tif") + if arcpy.Exists(IntegerRaster): + arcpy.Delete_management(IntegerRaster) + arcpy.Int_3d(TimesRaster, IntegerRaster) + arcpy.AddMessage("Integer Raster Complete") + + BoundaryCleanRaster = os.path.join(tempFolder, "BoundaryClean.tif") + if 
arcpy.Exists(BoundaryCleanRaster): + arcpy.Delete_management(BoundaryCleanRaster) + BC = arcpy.sa.BoundaryClean(IntegerRaster, "NO_SORT", "true") + BC.save(BoundaryCleanRaster) + arcpy.AddMessage("BoundaryClean Raster Complete") + + FloatRaster = os.path.join(tempFolder, "FloatRaster.tif") + if arcpy.Exists(FloatRaster): + arcpy.Delete_management(FloatRaster) + arcpy.Float_3d(BoundaryCleanRaster, FloatRaster) + arcpy.AddMessage("Float Raster Complete") + + if arcpy.Exists(LrDSM): + arcpy.Delete_management(LrDSM) + arcpy.Divide_3d(FloatRaster, 100, LrDSM) + arcpy.AddMessage("Divide Raster Complete") + return LrDSM + + +def slopedAreaRasters(SlopeRaster, slopedAreasNullRaster): + # TODO Fix Memory Leak 1 + slopedAreasRaster = os.path.join(tempFolder, "slopedAreasRaster.tif") + if arcpy.Exists(slopedAreasRaster): + arcpy.Delete_management(slopedAreasRaster) + slopedAreasRasterProcess = arcpy.sa.Con(SlopeRaster, 1, 0, "VALUE >= 20") + slopedAreasRasterProcess.save(slopedAreasRaster) + # TODO Fix Memory Leak 2 + if arcpy.Exists(slopedAreasNullRaster): + arcpy.Delete_management(slopedAreasNullRaster) + slopedAreasNullRasterProcess = arcpy.sa.SetNull(slopedAreasRaster, 1, "Value = 0") + slopedAreasNullRasterProcess.save(slopedAreasNullRaster) + + arcpy.Delete_management(slopedAreasRaster) + + return slopedAreasNullRaster + +def reduceTesselationProcess(LrDSM,SlopedAreasPolygonBuffered): + SlopeRaster = os.path.join(tempFolder, "SlopeRaster.tif") + if arcpy.Exists(SlopeRaster): + arcpy.Delete_management(SlopeRaster) + arcpy.Slope_3d(LrDSM, SlopeRaster, "DEGREE", 1) + + slopedAreasNullRaster = os.path.join(tempFolder, "slopedAreasNullRaster.tif") + slopedAreaRasters(SlopeRaster=SlopeRaster, slopedAreasNullRaster=slopedAreasNullRaster) + + SlopedAreasPolygon = os.path.join(tempFolder, "SlopedAreasPolygon.shp") + if arcpy.Exists(SlopedAreasPolygon): + arcpy.Delete_management(SlopedAreasPolygon) + arcpy.RasterToPolygon_conversion(slopedAreasNullRaster, SlopedAreasPolygon, "false", "Value") + + if arcpy.Exists(SlopedAreasPolygonBuffered): + arcpy.Delete_management(SlopedAreasPolygonBuffered) + arcpy.Buffer_analysis(SlopedAreasPolygon, SlopedAreasPolygonBuffered, "2 Feet", "FULL", "ROUND", "ALL", None, "PLANAR") + + arcpy.Delete_management(slopedAreasNullRaster) + + return SlopedAreasPolygonBuffered + + +def extractMultipatch(fullextent, row): + try: + arcpy.env.extent = fullextent + # get raster extent + geom = row[0] + #ext = "{0} {1} {2} {3}".format(geom.extent.XMin, geom.extent.YMin, geom.extent.XMax, geom.extent.YMax) + + # copy the feature temporarily + tp = os.path.join(scratchGDB, "tp{0}".format(i + beginOnFeatureNumber)) + tempGeom = arcpy.CopyFeatures_management(geom, tp) + + #extentgeom = arcpy.Describe(tp) + #extent = "{0} {1} {2} {3}".format(extentgeom.extent.XMin, extentgeom.extent.YMin, extentgeom.extent.XMax, extentgeom.extent.YMax) + #print("Building Footprint Extent = ", extent) + extentgeom = arcpy.Describe(tp) + arcpy.env.mask = tp + print("extentgeom = ", extentgeom) + extent = "{0} {1} {2} {3}".format(extentgeom.extent.XMin, extentgeom.extent.YMin, extentgeom.extent.XMax, extentgeom.extent.YMax) + print("extent = ", extent) + arcpy.env.extent = extent + + print("Begin Raster Creation Process") + LrDSM = os.path.join(tempFolder, "LrDSM.tif") + DTM = os.path.join(tempFolder, "DTM.tif") + # Delete terrain rasters if existing. 
+ if arcpy.Exists(DTM): + arcpy.Delete_management(DTM) + if arcpy.Exists(LrDSM): + arcpy.Delete_management(LrDSM) + arcpy.LasDatasetToRaster_conversion("DTMLASD", DTM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created DTM Raster at location: " + DTM) + arcpy.LasDatasetToRaster_conversion("LRDSMLASD", LrDSM, "ELEVATION", "BINNING MAXIMUM NATURAL_NEIGHBOR", + valueField, "CELLSIZE", pointSpacing, heightValue) + print("Created Last Return DSM Raster at location: " + LrDSM) + + if interpolateBetweenPoints: + interpolateBetweenLasPts(LrDSM=LrDSM) + + if reduceTesselations: + SlopedAreasPolygonBuffered = os.path.join(tempFolder, "SlopedAreasPolygonBuff.shp") + reduceTesselationProcess(LrDSM, SlopedAreasPolygonBuffered) + arcpy.env.mask = SlopedAreasPolygonBuffered + + # smooth the DSM + print('smoothing DSM') + # nbr = NbrRectangle(3, 3, "CELL") + + sm_clip = FilterRasterProcess = Filter(LrDSM, "LOW", "DATA") + + # clean up clipped raster + #arcpy.Delete_management(DSMClipRast) + + # convert raster to points + print('converting raster to points') + + out_points = os.path.join(tempFolder, "clipPoints.shp") + if arcpy.Exists(out_points): + arcpy.Delete_management(out_points) + arcpy.RasterToPoint_conversion(sm_clip, out_points, "Value") + + arcpy.env.mask = tp + + # Create TIN with points + print('making surface TIN') + # feats_tin = "{} Shape.Z Mass_Points ;".format(out_points3d) + feats_tin = "{0} grid_code Mass_Points ;".format(out_points) + out_surf_tin = os.path.join(tempFolder, "surfTin") + if arcpy.Exists(out_surf_tin): + arcpy.Delete_management(out_surf_tin) + arcpy.CreateTin_3d(out_surf_tin, sr, feats_tin, 'DELAUNAY') + + # clip the DTM + print('clipping DTM') + dtmClipRast = os.path.join(tempFolder, 'tempDEMclip{0}.tif'.format(i + beginOnFeatureNumber)) + arcpy.Clip_management(DTM, extent, dtmClipRast, tp, "true", "false") + # convert DEM to Int + #dtmClipRastInt = Int(dtmClipRast) + + # add Min Height to Building Footprints + print('determining Minimum Building Elevation') + arcpy.AddField_management(tp, "ID", "SHORT", None, None, None, "ID", "true", "true", None) + arcpy.CalculateField_management(tp, "ID", 1, "PYTHON_9.3", None) + minMaxElevTable = os.path.join(scratchGDB, "minMaxElevTable") + arcpy.sa.ZonalStatisticsAsTable(tp, "ID", LrDSM, minMaxElevTable, "true", "MIN_MAX_MEAN") + arcpy.JoinField_management(tp, "ID", minMaxElevTable, "ID", "MIN;MAX") + + + # then, move building footprints to MIN Z Height + out_poly3d = os.path.join(scratchGDB, "out_poly3d") + arcpy.FeatureTo3DByAttribute_3d(tp, out_poly3d, "MIN", "") + + # make ground TIN + gnd_feats_tin = "{} Shape.Z Hard_Clip ;".format(out_poly3d) + out_gnd_tin = os.path.join(tempFolder, "gndTin") + arcpy.CreateTin_3d(out_gnd_tin, sr, gnd_feats_tin, "DELAUNAY") + + # extrude polygon between TINs + print('creating Multipatch') + this_MP = os.path.join(outputWS, "bldgMP_{0}".format(i + beginOnFeatureNumber)) + arcpy.ExtrudeBetween_3d(out_surf_tin, out_gnd_tin, out_poly3d, this_MP) + + # add feature name to list + mp_list.append(this_MP) + + # Delete Unnecessary files + arcpy.Delete_management(tp) + arcpy.Delete_management(out_points) + arcpy.Delete_management(minMaxElevTable) + arcpy.Delete_management(out_poly3d) + arcpy.Delete_management(dtmClipRast) + arcpy.Delete_management(out_gnd_tin) + arcpy.Delete_management(out_surf_tin) + arcpy.Delete_management(out_poly3d) + arcpy.Delete_management(FilterRasterProcess) + #del nbr + del sm_clip + del row, tp + + + # TODO 
Geof7015: incorporate the Cleanup Building CGA rule from Geof7015 into the tool here:
+        ''' Every multipatch building must carry the LiDAR point spacing as an attribute, plus its
+        units (feet/meters). The CGA cleanup rule settings will need a conditional calculator
+        operation that reads these attributes and tunes the cleanupGeometry operations to the
+        input features. The final output will be two file geodatabases: one with the original
+        buildings, the other with the cleaned ones.'''
+        ''' Other cleanup utility tools/processes may be required to optimize building faces and roof geometries '''
+    except Exception:
+        print("Unable to process feature {0}".format(i + beginOnFeatureNumber))
+
+##############
+# Begin Code #
+##############
+print("Starting Process at ", str(datetime.now()))
+# If the LiDAR input is a LASD dataset then count the LAS files in the LAS dataset and list them as input for GP tools
+if arcpy.Exists(inLASD):
+    arcpy.AddMessage("detected LASD Dataset as input: " + inLASD)
+    lasDatasetStatsText = os.path.join(tempFolder, "lasDatasetStatsText.txt")
+    arcpy.LasDatasetStatistics_management(inLASD, "true", lasDatasetStatsText, "LAS_FILES", "COMMA", "DECIMAL_POINT")
+    filenames = findLasDatasetStatisticsfilePaths(lasDatasetStatsText)
+
+    if len(filenames) == 0:
+        arcpy.AddMessage("1 LAS file detected in LASD DATASET")
+    else:
+        arcpy.AddMessage("{0} LAS files detected in LASD DATASET".format(len(filenames)))
+
+    # Process lasList into Esri GP tool friendly input format
+    newstr = str(filenames)[1:-1].replace("', ", ";")
+    lasList = '"' + newstr.replace("'", "") + '"'
+
+# If the LiDAR input is a single LAS file then format the file path string for GP tool input.
+if inLAS.lower().endswith('.las') and ";" not in inLAS:
+    arcpy.AddMessage("1 LAS file detected")
+    lasList = '"' + inLAS + '"'
+
+# If the LiDAR input is a semicolon-delimited string of LAS files then count them and format the list for GP tools.
+if inLAS.lower().endswith('.las') and ";" in inLAS:
+    numberLASFiles = (inLAS.count(';') + 1)
+    arcpy.AddMessage(str(numberLASFiles) + " LAS files detected")
+    lasList = '"' + inLAS + '"'
+
+# If the LiDAR input is a LAS directory then count the LAS files and create a list of them as input for GP tools.
+if os.path.isdir(inLAS):
+    # Find all LAS files in the input folder. Optionally search recursively.
+    recursive = True
+    lasList = []
+    if recursive:
+        for root, dirs, files in os.walk(inLAS):
+            for file in files:
+                if file.endswith(".las") and file not in lasList:
+                    lasList.append(os.path.join(root, file))
+    else:
+        for file in os.listdir(inLAS):
+            if file.endswith(".las") and file not in lasList:
+                lasList.append(os.path.join(inLAS, file))
+
+    # Print the number of LAS files
+    if len(lasList) == 1:
+        arcpy.AddMessage("1 LAS file detected in Directory")
+    else:
+        arcpy.AddMessage("{0} LAS files detected in Directory".format(len(lasList)))
+
+    # Process lasList into Esri GP tool friendly input format
+    newstr = str(lasList)[1:-1].replace("', ", ";")
+    lasList = '"' + newstr.replace("'", "") + '"'
+
+# Convert the LAS file list to a string formatted for GP tool input
+# Create LASDataset from LAS files.
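+# (Illustrative note, not in the original script: createlasdataset() below wraps
+#  arcpy.CreateLasDataset_management; a minimal standalone call, with hypothetical paths, would be
+#      arcpy.CreateLasDataset_management(r'C:\las_tiles', r'C:\temp\tiles.lasd', False, "", sr, "COMPUTE_STATS")
+#  mirroring the arguments used in the function defined above.)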
+if inLAS.lower().endswith('.las') or os.path.isdir(inLAS):
+    createlasdataset(inLAS=inLAS, sr=sr)
+
+
+DTMLASD = "DTMLASD"
+LRDSMLASD = "LRDSMLASD"
+
+if arcpy.Exists(DTMLASD):
+    arcpy.Delete_management(DTMLASD)
+if arcpy.Exists(LRDSMLASD):
+    arcpy.Delete_management(LRDSMLASD)
+arcpy.MakeLasDatasetLayer_management(inLASD, DTMLASD, "2", "", "", "", "", "", "", "")
+arcpy.MakeLasDatasetLayer_management(inLASD, LRDSMLASD, "1;2", "Last Return", "", "", "", "", "", "")
+if arcpy.Exists(DTMLASD) and arcpy.Exists(LRDSMLASD):
+    arcpy.AddMessage("LASD Layers Created")
+else:
+    arcpy.AddMessage("Could Not Create LASD Layers")
+# Selector determining whether or not to interpolate additional points for TIN creation. Helps with terrible LiDAR.
+''' if interpolateAdditionalPoints is enabled then input the correct heightValue & valueField for raster processing '''
+''' Determine the correct point spacing settings based on raster processing algorithm requirements '''
+
+# TODO make point spacing support recursions. obtainLiDARInfo(inLASD, lasList)[3] is currently a placeholder
+''' calculate the average pt spacing of the LiDAR tiles the building footprints intersect '''
+pointSpace = obtainLiDARInfo(inLASD, lasList)[3]
+pointSpacing = pointSpace * pointSpacingCorrectionFactor
+heightValue = 1
+valueField = "FLOAT"
+
+result = arcpy.GetCount_management(buildingFootprints)
+FootprintCount = int(result.getOutput(0))
+print("number of building Footprints to process = " + str(FootprintCount))
+
+fullextent = arcpy.Describe(buildingFootprints).extent
+
+# create list for multiPatch features
+mp_list = []
+
+# make search cursor for footprint polygons
+fields = ["SHAPE@"]
+with arcpy.da.SearchCursor(buildingFootprints, fields) as sc:
+    for i, row in enumerate(sc):
+        if (i + beginOnFeatureNumber) < FootprintCount:
+            # if i is a multiple of 50, compact the gdb
+            print("on BuildingFootprint {0}".format(i + beginOnFeatureNumber) + " of " + str(FootprintCount))
+            if not i % 50:
+                print("Began Compacting GDB @ ", str(datetime.now()))
+                arcpy.Compact_management(outputWS)
+                arcpy.Compact_management(scratchGDB)
+                print("Complete Compacting GDB @ ", str(datetime.now()))
+            extractMultipatch(fullextent=fullextent, row=row)
+
+# merge the MultiPatches into a single FC
+outputMerge = os.path.join(outputWS, 'outputMergeMP')
+arcpy.Merge_management(mp_list, outputMerge)
+
+# TODO DJARRARD: delete all bldgMP* feature classes that exist in the output workspace
+# Delete Individual Multipatch Buildings
+'''if arcpy.Exists(os.path.join(outputWS, "bldgMP_0")):
+    for fc in arcpy.ListFeatureClasses("bldgMP*", "MULTIPATCH", outputWS):
+        arcpy.Delete_management(fc)'''
+
+if arcpy.Exists(DTMLASD):
+    arcpy.Delete_management(DTMLASD)
+if arcpy.Exists(LRDSMLASD):
+    arcpy.Delete_management(LRDSMLASD)
+
+print("Finished Process at ", str(datetime.now()))
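An illustrative sketch (not part of the repository diff): the per-building core of
clipRasterToPolyExtrudeTinFinal.py reduces to building a surface TIN from the smoothed DSM
points, building a ground TIN from the footprint dropped to its MIN elevation, and running
ExtrudeBetween_3d. A minimal standalone version, assuming hypothetical inputs dsmPoints (a
point FC with a grid_code elevation field) and footprint3d (a 3D footprint polygon FC), plus
the sr, tempFolder, and outputWS variables defined above:

    import os
    import arcpy

    # surface TIN from the smoothed, clipped DSM points (elevation carried in grid_code)
    surf_tin = os.path.join(tempFolder, "surfTinSketch")
    arcpy.CreateTin_3d(surf_tin, sr, "{0} grid_code Mass_Points ;".format(dsmPoints), "DELAUNAY")

    # ground TIN from the footprint moved to its minimum ground elevation
    gnd_tin = os.path.join(tempFolder, "gndTinSketch")
    arcpy.CreateTin_3d(gnd_tin, sr, "{0} Shape.Z Hard_Clip ;".format(footprint3d), "DELAUNAY")

    # extrude the footprint between the ground and roof surfaces into a multipatch
    bldg_mp = os.path.join(outputWS, "bldgMP_sketch")
    arcpy.ExtrudeBetween_3d(surf_tin, gnd_tin, footprint3d, bldg_mp)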
diff --git a/fromArt/BuildingCleaner_1_8_2016.tbx b/fromArt/BuildingCleaner_1_8_2016.tbx
new file mode 100644
index 0000000..2859d68
Binary files /dev/null and b/fromArt/BuildingCleaner_1_8_2016.tbx differ
diff --git a/fromArt/BuildingCleaner_1_8_2016_desktop.tbx b/fromArt/BuildingCleaner_1_8_2016_desktop.tbx
new file mode 100644
index 0000000..722dffc
Binary files /dev/null and b/fromArt/BuildingCleaner_1_8_2016_desktop.tbx differ
diff --git a/interpolate_points_to_boundary.py b/interpolate_points_to_boundary.py
new file mode 100644
index 0000000..b64b3e7
--- /dev/null
+++ b/interpolate_points_to_boundary.py
@@ -0,0 +1,76 @@
+#-------------------------------------------------------------------------------
+# Name:        interpolate_points_to_boundary.py
+# Purpose:     this script assigns the z-value of the nearest point (by planar
+#              distance) in an input point feature class to densified building
+#              footprint boundary points.
+#
+# Author:      Joe McGlinchy
+#
+# Created:     21/09/2015
+# Copyright:   (c) jose6641 2015
+# Licence:
+#-------------------------------------------------------------------------------
+
+import arcpy
+# import time
+import os
+
+
+arcpy.env.overwriteOutput = True
+arcpy.CheckOutExtension('3D')
+
+# inputs
+input_bldg_points = r"J:\Projects\ResearchGroup\3dcities\PointsToMultipatch\PointsToMultipatch.gdb\PointsClipped1"
+input_bldg_fp = r"J:\Projects\ResearchGroup\3dcities\PointsToMultipatch\PointsToMultipatch.gdb\Footprint1"
+output_bldg_points_with_border = r"J:\Projects\ResearchGroup\3dcities\PointsToMultipatch\PointsToMultipatch.gdb\border_incl1"
+
+# define in memory workspace
+mem = "in_memory"
+
+## it is understood the input point FC will be multipoint. Need to convert to
+## single part features
+# explode input multipoint FC to single part
+single_bldg_pts = os.path.join(mem, "singlepts")
+arcpy.MultipartToSinglepart_management(input_bldg_points, single_bldg_pts)
+
+# add geometry attributes
+arcpy.AddGeometryAttributes_management(Input_Features=single_bldg_pts, Geometry_Properties="POINT_X_Y_Z_M",
+                                       Length_Unit="", Area_Unit="", Coordinate_System="")
+
+
+## process the building footprint
+# convert to line
+bldg_line = os.path.join(mem, "bldgline")
+arcpy.FeatureToLine_management(in_features=input_bldg_fp, out_feature_class=bldg_line, cluster_tolerance="",
+                               attributes="NO_ATTRIBUTES")
+
+# densify
+arcpy.Densify_edit(in_features=bldg_line, densification_method="DISTANCE", distance="1 Feet",
+                   max_deviation="0.33 Feet", max_angle="10")
+
+# convert to points
+bldg_ln_pts = os.path.join(mem, "bldglinepts")
+arcpy.FeatureVerticesToPoints_management(in_features=bldg_line, out_feature_class=bldg_ln_pts, point_location="ALL")
+
+# use Near tool to identify the point FID from the building points nearest to each boundary point
+arcpy.Near_analysis(in_features=bldg_ln_pts, near_features=single_bldg_pts, search_radius="5 Feet",
+                    location="NO_LOCATION", angle="NO_ANGLE", method="PLANAR")
+
+
+# now, grab the NEAR_FID field and assign that feature's z-value to the building footprint point z-value
+arcpy.AddField_management(bldg_ln_pts, "z_val", "DOUBLE")
+tbl_fp = arcpy.da.FeatureClassToNumPyArray(bldg_ln_pts, ["NEAR_FID"])
+tbl_pts = arcpy.da.FeatureClassToNumPyArray(single_bldg_pts, ["POINT_Z"])
+
+# update the z_val attribute
+with arcpy.da.UpdateCursor(bldg_ln_pts, ["z_val"]) as sc:
+    for i, row in enumerate(sc):
+        # print(i)
+        fid = tbl_fp[i][0]
+        row[0] = tbl_pts[fid-1][0]
+        sc.updateRow(row)
+
+# convert to 3D and copy
+arcpy.FeatureTo3DByAttribute_3d(bldg_ln_pts, output_bldg_points_with_border, "z_val")
+
+# using time, this took about 0.07 seconds
\ No newline at end of file
diff --git a/point_processing_RD_DBS/toolboxes/Desktop_3D_Cities_Research_Development.tbx b/point_processing_RD_DBS/toolboxes/Desktop_3D_Cities_Research_Development.tbx
new file mode 100644
index 0000000..36178d1
Binary files /dev/null and b/point_processing_RD_DBS/toolboxes/Desktop_3D_Cities_Research_Development.tbx differ
diff --git a/point_processing_RD_DBS/toolboxes/Pro_3D_Cities_Research_DevelopmentCopy.tbx b/point_processing_RD_DBS/toolboxes/Pro_3D_Cities_Research_DevelopmentCopy.tbx
new file mode 100644
index 0000000..836a3d4
Binary files /dev/null and b/point_processing_RD_DBS/toolboxes/Pro_3D_Cities_Research_DevelopmentCopy.tbx differ
diff --git
a/point_processing_RD_DBS/toolboxes/gp_bldg_proc_cgal_pt_cleanup_desktop.py b/point_processing_RD_DBS/toolboxes/gp_bldg_proc_cgal_pt_cleanup_desktop.py new file mode 100644 index 0000000..f0fe995 --- /dev/null +++ b/point_processing_RD_DBS/toolboxes/gp_bldg_proc_cgal_pt_cleanup_desktop.py @@ -0,0 +1,204 @@ +#------------------------------------------------------------------------------- +# Name: bldg_proc_cgal_pt_cleanup.py +# Purpose: this is the final script for the first task in the point processing R&D project +# +# Author: jose6641 +# +# Created: 05/02/2016 +# Copyright: (c) jose6641 2016 +# Licence: +#------------------------------------------------------------------------------- + +#------------------------------------------------------------------------------- +# Name: bldg1pts_cgal.py +# Purpose: +# +# Author: jose6641 +# +# Created: 08/01/2016 +# Copyright: (c) jose6641 2016 +# Licence: +#------------------------------------------------------------------------------- + +import os, sys, time + +import numpy as np +import arcpy +import CGAL.CGAL_Point_set_processing_3 as cgal_pt_proc +from CGAL.CGAL_Kernel import Point_3 + +# plotting +from matplotlib import pyplot as plt +from mpl_toolkits.mplot3d import Axes3D + +arcpy.env.overwriteOutput = True + +def numpy_cgal_point_conversion(array): + ''' utility function for converting array of points to CGAL Point_3 ''' + + points = [] + for pt in array: + points.append( Point_3(pt[0], pt[1], pt[2]) ) + + return points + +def cgal_point_to_numpy_conversion(points, dt): + ''' utility function for converting array of points to CGAL Point_3 ''' + + npts = len(points) + array = np.zeros((npts,3)) + for i,pt in enumerate(points): + array[i,0] = pt.x() + array[i,1] = pt.y() + array[i,2] = pt.z() + + array.dtype = dt + return array + + +def calc_similarity_dot_product(a, b, nbins): + ''' this function will calculate the similarity of two equal sized arrays using the dot product ''' + + # calculate histograms + ha, abins = np.histogram(a['SHAPE@Z'], nbins) + hb, abins = np.histogram(b['SHAPE@Z'], nbins) + + # calculate cosine similarity using dot product of the pmf arrays (divided by sum) + sim = 1-np.dot(ha/float(ha.sum()), hb/float(hb.sum())) + + return sim + + +def iter_remove_outliers(cgal_pts, nbins, dt, nbrs=24, pct=1.5, sim=0.99, dif = 0.01): + ''' this applies CGAL's remove outliers iteratively similiarity measure is reached''' + + # calculate first outlier removal + first = cgal_pt_proc.remove_outliers(cgal_pts, nbrs, pct) + new_pts = cgal_pts[0:first] + first_dif = len(cgal_pts) - len(new_pts) + + if not first_dif: + return 1, new_pts + else: + ctr = 0 + flag = True + sim_list = [0.0] + while flag: + second = cgal_pt_proc.remove_outliers(new_pts, nbrs, pct) + #a = len(new_pts) + a = cgal_point_to_numpy_conversion(new_pts, dt) + new_pts = new_pts[0:second] + #b = len(new_pts) + b = cgal_point_to_numpy_conversion(new_pts, dt) + sim_it = calc_similarity_dot_product(a, b, nbins) + sim_list.append(sim_it) + ctr += 1 + + print ('{}, {}, {} points'.format(ctr, sim_it, len(b))) + arcpy.AddMessage('{}, {}, {} points'.format(ctr, sim_it, len(b))) + + # check for similarity threshold hit or no significant change between iterations + if (sim_it > sim) or (abs(sim_list[-1] - sim_list[-2]) < dif): + flag = False + + + return ctr+1, new_pts + + +def plot_points(array): + # show the points + fig = plt.figure() + ax = fig.add_subplot(111, projection='3d') + ax.scatter(array['SHAPE@X'], array['SHAPE@Y'], array['SHAPE@Z']) + plt.show() + plt.close() + +## 
main stuff +# inputs for testing +#sourceGDB = r"V:\ResearchGroup\3dcities\research_project\data\DevTesting.gdb" +#pt_fcs = ("points1", "points4", "points5_singlePart", "points6_singlePart") +#out_gdb = r"V:\ResearchGroup\3dcities\research_project\data\test_pro.gdb" + +# get inputs +fc = arcpy.GetParameterAsText(0) +out_gdb = arcpy.GetParameterAsText(1) + +# get feature class name +pfc = os.path.basename(fc) + + +if os.path.exists(out_gdb): + arcpy.Delete_management(out_gdb) + +# check for existence of out_gdb +if not os.path.exists(out_gdb): + arcpy.CreateFileGDB_management(os.path.dirname(out_gdb), os.path.basename(out_gdb)) + + +# process iteratively using z-histogram dot product techinique # +# iterate through point feature classes and process # +# convert to point FC to 3-tuple array and save dtype # + +# variables needed for conversion to and from numpy world +field_names = ["SHAPE@X", "SHAPE@Y", "SHAPE@Z"] +sr = arcpy.Describe(fc).SpatialReference +array = arcpy.da.FeatureClassToNumPyArray(fc, field_names) +array_dt = array.dtype + +# number of height bins (0.5 meter resolution) +nbins = np.floor((array['SHAPE@Z'].max() - array['SHAPE@Z'].min()) * 2) + +# convert to CGAL points +cgal_pts = numpy_cgal_point_conversion(array) + +# get point spacing using 24 nearest neighbors +avg_space = cgal_pt_proc.compute_average_spacing(cgal_pts, 24) +arcpy.AddMessage("{} points. average point spacing is: {}".format(len(cgal_pts), avg_space)) + +# run simplification +grid_simp = True +if grid_simp: + grid_size = avg_space/5 + new_pts = cgal_pt_proc.grid_simplify_point_set(cgal_pts, grid_size) + cgal_pts = cgal_pts[0:new_pts] + + print ('grid_simplification: grid size {}, points remaining {}'.format(grid_size, new_pts)) + arcpy.AddMessage('grid_simplification: grid size {}, points remaining {}'.format(grid_size, new_pts)) + + # convert back to numpy + filt_array = cgal_point_to_numpy_conversion(cgal_pts, array_dt) + + # plot the points + #plot_points(filt_array) + +rand_simp = True +if rand_simp: + new_pts = cgal_pt_proc.random_simplify_point_set(cgal_pts, 1) + cgal_pts = cgal_pts[0:new_pts] + + print ('random_simplification: points remaining {}'.format(new_pts)) + arcpy.AddMessage('random_simplification: points remaining {}'.format(new_pts)) + + # convert back to numpy + filt_array1 = cgal_point_to_numpy_conversion(cgal_pts, array_dt) + + # plot the points + #plot_points(filt_array1) + + +# iteratively remove outliers with large neighbor count and low percentage +iters,cgal_pts = iter_remove_outliers(cgal_pts, nbins, array_dt, 24, 1.5) +print ('remove_outliers: points remaining {} after {} iterations'.format(len(cgal_pts), iters)) +arcpy.AddMessage('remove_outliers: points remaining {} after {} iterations'.format(len(cgal_pts), iters)) + +# convert back to numpy +filt_array = cgal_point_to_numpy_conversion(cgal_pts, array_dt) + +# make out_fc name +ofc = "{}_iterative_outlier_removal".format(pfc) +out_fc = os.path.join(out_gdb, ofc) + +# save the processed points to a feature class +arcpy.da.NumPyArrayToFeatureClass(filt_array, out_fc, field_names, sr) + +print ('finished') diff --git a/point_processing_RD_DBS/toolboxes/gp_bldg_proc_cgal_pt_cleanup_pro.py b/point_processing_RD_DBS/toolboxes/gp_bldg_proc_cgal_pt_cleanup_pro.py new file mode 100644 index 0000000..a1c4eea --- /dev/null +++ b/point_processing_RD_DBS/toolboxes/gp_bldg_proc_cgal_pt_cleanup_pro.py @@ -0,0 +1,200 @@ +#------------------------------------------------------------------------------- +# Name: bldg_proc_cgal_pt_cleanup.py +# 
Purpose: this is the final script for the first task in the point processing R&D project +# +# Author: jose6641 +# +# Created: 05/02/2016 +# Copyright: (c) jose6641 2016 +# Licence: +#------------------------------------------------------------------------------- + +#------------------------------------------------------------------------------- +# Name: bldg1pts_cgal.py +# Purpose: +# +# Author: jose6641 +# +# Created: 08/01/2016 +# Copyright: (c) jose6641 2016 +# Licence: +#------------------------------------------------------------------------------- + +import os, sys, time + +import numpy as np +import arcpy +import CGAL.CGAL_Point_set_processing_3 as cgal_pt_proc +from CGAL.CGAL_Kernel import Point_3 + +# plotting +from matplotlib import pyplot as plt +from mpl_toolkits.mplot3d import Axes3D + +arcpy.env.overwriteOutput = True + +def numpy_cgal_point_conversion(array): + ''' utility function for converting array of points to CGAL Point_3 ''' + + points = [] + for pt in array: + points.append( Point_3(pt[0], pt[1], pt[2]) ) + + return points + +def cgal_point_to_numpy_conversion(points, dt): + ''' utility function for converting array of points to CGAL Point_3 ''' + + npts = len(points) + array = np.zeros((npts,3)) + for i,pt in enumerate(points): + array[i,0] = pt.x() + array[i,1] = pt.y() + array[i,2] = pt.z() + + array.dtype = dt + return array + + +def calc_similarity_dot_product(a, b, nbins): + ''' this function will calculate the similarity of two equal sized arrays using the dot product ''' + + # calculate histograms + ha, abins = np.histogram(a['SHAPE@Z'], nbins) + hb, abins = np.histogram(b['SHAPE@Z'], nbins) + + # calculate cosine similarity using dot product of the pmf arrays (divided by sum) + sim = 1-np.dot(ha/float(ha.sum()), hb/float(hb.sum())) + + return sim + + +def iter_remove_outliers(cgal_pts, nbins, dt, nbrs=24, pct=1.5, sim=0.99, dif = 0.01): + ''' this applies CGAL's remove outliers iteratively similiarity measure is reached''' + + # calculate first outlier removal + first = cgal_pt_proc.remove_outliers(cgal_pts, nbrs, pct) + new_pts = cgal_pts[0:first] + first_dif = len(cgal_pts) - len(new_pts) + + if not first_dif: + return 1, new_pts + else: + ctr = 0 + flag = True + sim_list = [0.0] + while flag: + second = cgal_pt_proc.remove_outliers(new_pts, nbrs, pct) + #a = len(new_pts) + a = cgal_point_to_numpy_conversion(new_pts, dt) + new_pts = new_pts[0:second] + #b = len(new_pts) + b = cgal_point_to_numpy_conversion(new_pts, dt) + sim_it = calc_similarity_dot_product(a, b, nbins) + sim_list.append(sim_it) + ctr += 1 + + print ('{}, {}, {} points'.format(ctr, sim_it, len(b))) + arcpy.AddMessage('{}, {}, {} points'.format(ctr, sim_it, len(b))) + + # check for similarity threshold hit or no significant change between iterations + if (sim_it > sim) or (abs(sim_list[-1] - sim_list[-2]) < dif): + flag = False + + + return ctr+1, new_pts + + +def plot_points(array): + # show the points + fig = plt.figure() + ax = fig.add_subplot(111, projection='3d') + ax.scatter(array['SHAPE@X'], array['SHAPE@Y'], array['SHAPE@Z']) + plt.show() + plt.close() + +## main stuff +# inputs for testing +#sourceGDB = r"V:\ResearchGroup\3dcities\research_project\data\DevTesting.gdb" +#pt_fcs = ("points1", "points4", "points5_singlePart", "points6_singlePart") +#out_gdb = r"V:\ResearchGroup\3dcities\research_project\data\test_pro.gdb" + +# get inputs +fc = arcpy.GetParameterAsText(0) +out_gdb = arcpy.GetParameterAsText(1) + +# get feature class name +pfc = os.path.basename(fc) + + +if 
os.path.exists(out_gdb): + arcpy.Delete_management(out_gdb) + +# check for existence of out_gdb +if not os.path.exists(out_gdb): + arcpy.CreateFileGDB_management(os.path.dirname(out_gdb), os.path.basename(out_gdb)) + + +# process iteratively using z-histogram dot product techinique # +# iterate through point feature classes and process # +# convert to point FC to 3-tuple array and save dtype # + +# variables needed for conversion to and from numpy world +field_names = ["SHAPE@X", "SHAPE@Y", "SHAPE@Z"] +sr = arcpy.Describe(fc).SpatialReference +array = arcpy.da.FeatureClassToNumPyArray(fc, field_names) +array_dt = array.dtype + +# number of height bins (0.5 meter resolution) +nbins = np.floor((array['SHAPE@Z'].max() - array['SHAPE@Z'].min()) * 2) + +# convert to CGAL points +cgal_pts = numpy_cgal_point_conversion(array) + +# get point spacing using 24 nearest neighbors +avg_space = cgal_pt_proc.compute_average_spacing(cgal_pts, 24) + +# run simplification +grid_simp = True +if grid_simp: + grid_size = avg_space/5 + new_pts = cgal_pt_proc.grid_simplify_point_set(cgal_pts, grid_size) + cgal_pts = cgal_pts[0:new_pts] + + print ('grid_simplification: grid size {}, points remaining {}'.format(grid_size, new_pts)) + + # convert back to numpy + filt_array = cgal_point_to_numpy_conversion(cgal_pts, array_dt) + + # plot the points + #plot_points(filt_array) + +rand_simp = True +if rand_simp: + new_pts = cgal_pt_proc.random_simplify_point_set(cgal_pts, 1) + cgal_pts = cgal_pts[0:new_pts] + + print ('random_simplification: points remaining {}'.format(new_pts)) + + # convert back to numpy + filt_array1 = cgal_point_to_numpy_conversion(cgal_pts, array_dt) + + # plot the points + #plot_points(filt_array1) + + +# iteratively remove outliers with large neighbor count and low percentage +iters,cgal_pts = iter_remove_outliers(cgal_pts, nbins, array_dt, 24, 1.5) +print ('remove_outliers: points remaining {} after {} iterations'.format(len(cgal_pts), iters)) + +# convert back to numpy +filt_array = cgal_point_to_numpy_conversion(cgal_pts, array_dt) + +# make out_fc name +ofc = "{}_iterative_outlier_removal".format(pfc) +out_fc = os.path.join(out_gdb, ofc) + +# save the processed points to a feature class +arcpy.da.NumPyArrayToFeatureClass(filt_array, out_fc, field_names, sr) + +print ('finished') diff --git a/removeArtifacts.py b/removeArtifacts.py new file mode 100644 index 0000000..485fbe0 --- /dev/null +++ b/removeArtifacts.py @@ -0,0 +1,133 @@ +__author__ = 'geof7015' + +import arcpy +import os +import os.path +import tempfile +from datetime import datetime + +arcpy.env.overwriteOutput = True +arcpy.CheckOutExtension('spatial') +arcpy.CheckOutExtension('3d') + +roofPoints = r"C:\Users\geof7015\PycharmProjects\testData\Charlotte\Workspace2.gdb\singlepts" # This is the clipped roof Points +tp = r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\Workspace2.gdb\tp' # This is the building footprint +sr = "PROJCS['NAD_1983_HARN_StatePlane_Colorado_North_FIPS_0501_Feet',GEOGCS['GCS_North_American_1983_HARN',DATUM['D_North_American_1983_HARN',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['False_Easting',3000000.000316083],PARAMETER['False_Northing',999999.999996],PARAMETER['Central_Meridian',-105.5],PARAMETER['Standard_Parallel_1',39.71666666666667],PARAMETER['Standard_Parallel_2',40.78333333333333],PARAMETER['Latitude_Of_Origin',39.33333333333334],UNIT['Foot_US',0.3048006096012192]]" +pointSpace = 1.9 # 
LiDAR PointSpacing **Auto-Determined in larger script +outputWS = r'C:\Users\geof7015\PycharmProjects\testData\Charlotte\Workspace2.gdb' # Output +scratchGDB = arcpy.env.scratchGDB +tempFolder = tempfile.mkdtemp() + +#################### +# Define Functions # +#################### + +# Automatically removes Artifacts from point-clouds. +# Resolves issues where building sides were triangulated and other geometric flaws +def cleanupArtifacts(single_bldg_pts, single_bldg_pts_cleaned): + + arcpy.Near3D_3d(single_bldg_pts, single_bldg_pts, str(1.4 * pointSpace), "LOCATION", "ANGLE", "DELTA") + + bldgpts = os.path.join(outputWS, "bldgPoints") + arcpy.MakeFeatureLayer_management(single_bldg_pts, bldgpts) + + arcpy.SelectLayerByAttribute_management(bldgpts, "NEW_SELECTION", "NEAR_DELTZ >= -1 Or NEAR_ANG_V <> 0") + + arcpy.CopyFeatures_management(bldgpts, single_bldg_pts_cleaned) + print("Artifacts Removed") + return single_bldg_pts_cleaned + + +# assigns nearest point value using planar distance to building footprint points from an input point feature class. +def interpolatePointsToBoundary(input_bldg_points, input_bldg_fp, output_bldg_points_with_border): + + # explode input multipoint FC to single part + # it is understood the input point FC will be multipoint. Need to convert to single part features + #single_bldg_pts = os.path.join(outputWS, "singlepts") + single_bldg_pts = os.path.join("in_memory", "singlepts") + arcpy.MultipartToSinglepart_management(input_bldg_points, single_bldg_pts) + print("createdSinglePts") + + # Cleanup Artifacts + # single_bldg_pts_cleaned = os.path.join(outputWS, "single_bldg_pts_cleaned") + single_bldg_pts_cleaned = os.path.join("in_memory", "single_bldg_pts_cleaned") + cleanupArtifacts(single_bldg_pts=single_bldg_pts, single_bldg_pts_cleaned=single_bldg_pts_cleaned) + + # add geometry attributes + arcpy.AddGeometryAttributes_management(Input_Features=single_bldg_pts_cleaned, Geometry_Properties="POINT_X_Y_Z_M", + Length_Unit="", Area_Unit="", Coordinate_System="") + + # process the building footprint + footprintBuffer = os.path.join("in_memory", "footprintBuffer") + arcpy.Buffer_analysis(input_bldg_fp, footprintBuffer, "0.5 Feet", "FULL", "FLAT", "NONE", None, "GEODESIC") + + # convert to line + # bldg_line = os.path.join(outputWS, "bldgline") + bldg_line = os.path.join("in_memory", "bldgline") + arcpy.FeatureToLine_management(in_features=footprintBuffer, out_feature_class=bldg_line, cluster_tolerance=None, + attributes="NO_ATTRIBUTES") + if arcpy.Exists(footprintBuffer): + arcpy.Delete_management(footprintBuffer) + + # Densify + arcpy.Densify_edit(in_features=bldg_line, densification_method="DISTANCE", distance="1 Feet", + max_deviation="0.33 Feet", max_angle="10") + + # convert to points + # bldg_ln_pts = os.path.join(outputWS, "bldglinepts") + bldg_ln_pts = os.path.join("in_memory", "bldglinepts") + arcpy.FeatureVerticesToPoints_management(in_features=bldg_line, out_feature_class=bldg_ln_pts, point_location="ALL") + + # use Near tool to identify point FID from building points to the boundary points + arcpy.Near_analysis(in_features=bldg_ln_pts, near_features=single_bldg_pts_cleaned, search_radius="5 Feet", + location="NO_LOCATION", angle="NO_ANGLE", method="PLANAR") + + # now, grab the NEARI_FID field and assign that feature's z-value to the building footprint point z value + arcpy.AddField_management(bldg_ln_pts, "z_val", "DOUBLE") + tbl_fp = arcpy.da.FeatureClassToNumPyArray(bldg_ln_pts, ["NEAR_FID"]) + tbl_pts = 
arcpy.da.FeatureClassToNumPyArray(single_bldg_pts_cleaned, ["POINT_Z"])
+
+    # update the z_val attribute
+    with arcpy.da.UpdateCursor(bldg_ln_pts, ["z_val"]) as Pointsc:
+        for i, row in enumerate(Pointsc):
+            fid = tbl_fp[i][0]
+            row[0] = tbl_pts[fid-1][0]
+            # print(row[0])
+            Pointsc.updateRow(row)
+
+    # convert to 3D and copy
+    # bldg_ln_pts_z = os.path.join(outputWS, "bldg_ln_pts_Z")
+    bldg_ln_pts_z = os.path.join("in_memory", "bldg_ln_pts_Z")
+    arcpy.FeatureTo3DByAttribute_3d(bldg_ln_pts, bldg_ln_pts_z, "z_val")
+
+    # pointsMerged = os.path.join("in_memory", "pointsMerged")
+    arcpy.Merge_management([bldg_ln_pts_z, single_bldg_pts_cleaned], output_bldg_points_with_border)
+
+    # Remove Intermediate Data
+    if arcpy.Exists(single_bldg_pts):
+        arcpy.Delete_management(single_bldg_pts)
+    if arcpy.Exists(bldg_line):
+        arcpy.Delete_management(bldg_line)
+    if arcpy.Exists(bldg_ln_pts):
+        arcpy.Delete_management(bldg_ln_pts)
+    if arcpy.Exists(bldg_ln_pts_z):
+        arcpy.Delete_management(bldg_ln_pts_z)
+
+    return output_bldg_points_with_border
+
+################
+# Begin Script #
+################
+
+buildingInsideAndBorderPoints = os.path.join(outputWS, "buildingBorderPoints")
+interpolatePointsToBoundary(input_bldg_points=roofPoints, input_bldg_fp=tp,
+                            output_bldg_points_with_border=buildingInsideAndBorderPoints)
+
+if arcpy.Exists(roofPoints):
+    arcpy.Delete_management(roofPoints)
+
+roofTin = os.path.join(tempFolder, "roofTin")
+roofPtsFormula = "{0} Shape.Z Mass_Points ;{1} Soft_Clip ".format(buildingInsideAndBorderPoints, tp)
+print(roofPtsFormula)
+arcpy.CreateTin_3d(roofTin, sr, roofPtsFormula, "DELAUNAY")
+print("roof Tin Created")
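An illustrative note (not part of the repository diff): the z-transfer above indexes tbl_pts
with NEAR_FID - 1, which assumes the cleaned points have consecutive, 1-based OBJECTIDs; after
deletions that assumption can break. A sketch of a safer variant of the same idea, reusing the
field and variable names from removeArtifacts.py and keying on the actual OIDs:

    # map each cleaned roof point's OID to its z-value
    oid_to_z = {oid: z for oid, z in arcpy.da.SearchCursor(single_bldg_pts_cleaned, ["OID@", "POINT_Z"])}

    with arcpy.da.UpdateCursor(bldg_ln_pts, ["NEAR_FID", "z_val"]) as cur:
        for row in cur:
            # NEAR_FID is -1 when no neighbor fell inside the search radius; .get() then yields None
            row[1] = oid_to_z.get(row[0])
            cur.updateRow(row)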