I have a few large datasets (about 10 million points) which I am buffering, then joining, downgrading and finally buffering and dissolving. This is all in a model which works fine with smaller sample datasets. Details are in Buffer point dataset based on compared value
Is there a way to split the point dataset, process each piece through to the dissolved buffer, and then re-join the files? The key question here is how to split a "square" dataset into blocks so that the re-join does not create jagged edges. Or do I just have to switch to other software, as in Improving upon ArcGIS Buffer/Dissolve performance?
The ModelBuilder-exported Python code is below, but first, the error message is:
Executing (Buffer): Buffer P:\2012\Job_044_DM_Radio_Propogation\Working\FinalPropogation\TRC_Longlands\TRC_Longlands.gdb\TRC_Longlands_pts P:\2012\Job_044_DM_Radio_Propogation\Working\FinalPropogation\TRC_Longlands\TRC_Longlands.gdb\TRC_Longlands_buf "30 Meters" FULL ROUND NONE # Start Time: Thu Mar 08 12:54:21 2012 Succeeded at Thu Mar 08 14:42:55 2012 (Elapsed Time: 1 hours 48 minutes 34 seconds) Executing (Spatial Join): SpatialJoin P:\2012\Job_044_DM_Radio_Propogation\Working\FinalPropogation\TRC_Longlands\TRC_Longlands.gdb\TRC_Longlands_pts P:\2012\Job_044_DM_Radio_Propogation\Working\FinalPropogation\TRC_Longlands\TRC_Longlands.gdb\TRC_Longlands_buf P:\2012\Job_044_DM_Radio_Propogation\Working\FinalPropogation\TRC_Longlands\TRC_Longlands.gdb\TRC_Longlands_join JOIN_ONE_TO_ONE KEEP_ALL # WITHIN "30 Meters" # Start Time: Thu Mar 08 14:42:56 2012 ERROR 000426: Out Of Memory Failed to execute (Spatial Join). Failed at Thu Mar 08 15:01:31 2012 (Elapsed Time: 18 minutes 35 seconds)
# ---------------------------------------------------------------------------
# xy_downgradedSS.py
# Created on: 2012-03-08 11:05:07.00000
# (generated by ArcGIS/ModelBuilder)
# Usage: xy_downgradedSS
# Description:
#   Imports an XY text file of tower points into a file geodatabase, scores
#   each point's signal strength (re_score on the Rx field), buffers the
#   points 30 m, spatially joins the points back onto the buffers, takes the
#   minimum ("downgraded") score of each joined pair, and finally re-buffers
#   with a dissolve on the downgraded score.
#   NOTE(review): per the error log pasted above, the Spatial Join step is
#   the one that fails with ERROR 000426 (Out Of Memory) on the full
#   ~10-million-point dataset.
# ---------------------------------------------------------------------------
# Import arcpy module
import arcpy
# Load required toolboxes
arcpy.ImportToolbox("Model Functions")
arcpy.ImportToolbox("P:/2011/Job_237_BushfireHazard_Datasets/Working/post_erratum/ProcessingModels.tbx")
# Script arguments
# NOTE(review): the "%Path%" / "%Name%" tokens below are ModelBuilder inline
# variables. ModelBuilder substitutes them at run time, but a standalone
# Python run passes them through as literal text -- confirm before running
# this script outside the model.
Imported_XY = arcpy.GetParameterAsText(0)
if Imported_XY == '#' or not Imported_XY:
    Imported_XY = "%Path%\\%Name%.gdb\\%Name%_layer" # provide a default value if unspecified
Buffered_tower_range = arcpy.GetParameterAsText(1)
if Buffered_tower_range == '#' or not Buffered_tower_range:
    Buffered_tower_range = "%Path%\\%Name%.gdb\\%Name%_buf" # provide a default value if unspecified
XY_Feature_Class = arcpy.GetParameterAsText(2)
if XY_Feature_Class == '#' or not XY_Feature_Class:
    XY_Feature_Class = "%Path%\\%Name%.gdb\\%Name%_pts" # provide a default value if unspecified
INPUT_txt_file = arcpy.GetParameterAsText(3)
if INPUT_txt_file == '#' or not INPUT_txt_file:
    INPUT_txt_file = "P:\\2012\\Job_044_DM_Radio_Propogation\\Working\\FinalPropogation\\TRC_Longlands\\trc_longlands.txt" # provide a default value if unspecified
# Local variables:
# These chained assignments are ModelBuilder's way of naming each tool's
# intermediate output; they all alias the same handful of dataset paths
# (the join output aliases the buffer path, the scored points alias the
# copied point feature class, and so on).
v_Name__join = Buffered_tower_range
v_Signal_Strength__2_ = v_Name__join
Signal_Strength_Added__2_ = v_Signal_Strength__2_
Buffered_tower_range__3_ = Signal_Strength_Added__2_
Source = XY_Feature_Class
Source_Added = Source
v_Signal_Strength = Source_Added
Signal_Strength_Added = v_Signal_Strength
Path = INPUT_txt_file
Name = Path
v_Name__gdb = Name
v_Path_ = "%Path%"
# Process: Parse Path
# Extracts the directory part of the input .txt path; the result feeds the
# %Path% inline variable used for the output geodatabase location.
arcpy.ParsePath_mb(INPUT_txt_file, "PATH")
# Process: ParseName
# Custom model tool from the imported toolbox; presumably derives %Name%
# (the dataset base name) from the input path -- TODO confirm against the
# ProcessingModels.tbx definition.
arcpy.gp.toolbox = "P:/2011/Job_237_BushfireHazard_Datasets/Working/post_erratum/ProcessingModels.tbx";
# Warning: the toolbox P:/2011/Job_237_BushfireHazard_Datasets/Working/post_erratum/ProcessingModels.tbx DOES NOT have an alias.
# Please assign this toolbox an alias to avoid tool name collisions
# And replace arcpy.gp.ParseName(...) with arcpy.ParseName_ALIAS(...)
arcpy.gp.ParseName(Path)
# Process: Create File GDB
# Creates %Path%\%Name%.gdb ("CURRENT" = current ArcGIS version format).
arcpy.CreateFileGDB_management(v_Path_, "%Name%", "CURRENT")
# Process: Make XY Event Layer
# Builds an in-memory point layer from longitude/latitude columns of the
# text file, in GCS WGS 1984.
arcpy.MakeXYEventLayer_management(INPUT_txt_file, "longitude", "latitude", Imported_XY, "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]];-400 -400 1000000000;-100000 10000;-100000 10000;8.98315284119521E-09;0.001;0.001;IsHighPrecision", "")
# Process: Copy Features
# Persists the event layer into the %Name%_pts feature class.
arcpy.CopyFeatures_management(Imported_XY, XY_Feature_Class, "", "0", "0", "0")
# Process: Add Field (4)
arcpy.AddField_management(XY_Feature_Class, "Source_Ch", "TEXT", "3", "2", "50", "", "NULLABLE", "NON_REQUIRED", "")
# Process: Calculate Field (4)
# Stamps each point with the source dataset name (%Name%).
arcpy.CalculateField_management(Source, "Source_Ch", Name, "PYTHON", "")
# Process: Add Field (2)
arcpy.AddField_management(Source_Added, "Signal_Strength", "FLOAT", "3", "2", "10", "", "NULLABLE", "NON_REQUIRED", "")
# Process: Calculate Field (3)
# Bins the Rx value into score classes: <14 -> '0', <24 -> '1', >=24 -> '2'
# (default '99' if no branch fires). NOTE(review): re_score returns a
# string while Signal_Strength is a FLOAT field -- ArcGIS coerces on
# assignment, but verify the intended class values survive the conversion.
# The "\n" sequences are literal in the string; ModelBuilder-exported code
# blocks rely on the tool expanding them into real newlines.
arcpy.CalculateField_management(v_Signal_Strength, "Signal_Strength", "re_score(!Rx!,f2)", "PYTHON_9.3", "f2='99'\\ndef re_score(my_field,f2):\\n\\n if (my_field*1) < 14:\\n f2 = '0'\\n elif (my_field*1) < 24:\\n f2 = '1'\\n elif (my_field*1) >= 24:\\n f2 = \"2\"\\n \\n return f2")
# Process: Buffer
# 30 m round buffer per point, no dissolve (one buffer per point).
arcpy.Buffer_analysis(Signal_Strength_Added, Buffered_tower_range, "30 Meters", "FULL", "ROUND", "NONE", "")
# Process: Spatial Join
# Joins each point to the buffers it falls WITHIN (30 m search distance).
# This is the out-of-memory step on the full dataset (see error log above).
arcpy.SpatialJoin_analysis(v_Signal_Strength, Buffered_tower_range, v_Name__join, "JOIN_ONE_TO_ONE", "KEEP_ALL", "", "WITHIN", "30 Meters", "")
# Process: Add Field (3)
arcpy.AddField_management(v_Name__join, "Downgraded_SS", "FLOAT", "3", "2", "10", "", "NULLABLE", "NON_REQUIRED", "")
# Process: Calculate Field (2)
# Downgrade: take the minimum of the point's own score and the joined
# buffer's score (Signal_Strength_1 is the join-duplicated field name).
arcpy.CalculateField_management(v_Name__join, "Downgraded_SS", "re_score(!Signal_Strength!, !Signal_Strength_1!)", "PYTHON", "def re_score(f1,f2):\\n if f1 < f2:\\n out1 = f1\\n else:\\n out1 = f2\\n return out1")
# Process: Buffer (3)
# Final 30 m buffer, dissolved by the downgraded score ("LIST" dissolves on
# the Downgraded_SS field), producing one multipart buffer per score class.
arcpy.Buffer_analysis(Signal_Strength_Added__2_, Buffered_tower_range__3_, "30 Meters", "FULL", "ROUND", "LIST", "Downgraded_SS")
No comments:
Post a Comment