# SDA_Valu1Table.py
#
# Steve Peaslee, National Soil Survey Center, August 2016
#
# Purpose: Queries the Soil Data Access Tabular service for National Valu1 table data and aggregates it to the map unit level.
# The Tabular service uses a MS SQL Server database.
# Attribute data only.
#
# If this table is to be joined to either a gSSURGO raster or map unit polygon layer, the user is responsible for
# making sure that both are of the same vintage. Over time, the mukey values will 'drift' and some records may no
# longer join. Find the date for the most recent survey by looking at the end of the Credits section in the
# metadata for both gSSURGO and the Valu1 table.
#
# gSSURGO metadata: Look for the date at the end of the Credits section.
#
# Valu1 metadata: Look for the date at the end of the Credits section.
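#
# Note: the functions below assume that arcpy and the standard library modules used in this
# script (sys, os, traceback, json, urllib2, time, math, locale) are imported elsewhere in
# the full file; the import statements are not shown in this excerpt.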
## ===================================================================================
class MyError(Exception):
pass
## ===================================================================================
def PrintMsg(msg, severity=0):
# prints message to screen if run as a python script
# Adds tool message to the geoprocessor
#
#Split the message on \n first, so that if it's multiple lines, a GPMessage will be added for each line
try:
for string in msg.split('\n'):
#Add a geoprocessing message (in case this is run as a tool)
if severity == 0:
arcpy.AddMessage(string)
elif severity == 1:
arcpy.AddWarning(string)
elif severity == 2:
arcpy.AddMessage(" ")
arcpy.AddError(string)
except:
pass
## ===================================================================================
def errorMsg():
try:
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
theMsg = tbinfo + "\n" + str(sys.exc_type)+ ": " + str(sys.exc_value)
PrintMsg(theMsg, 2)
except:
PrintMsg("Unhandled error in unHandledException method", 2)
pass
## ===================================================================================
def CreateNewTable(newTable, columnNames, columnInfo):
# Create new table. Start with in-memory and then export to geodatabase table
#
# ColumnNames and columnInfo come from the Attribute query JSON string
# MUKEY would normally be included in the list, but it should already exist in the output featureclass
#
try:
# Dictionary: SQL Server to FGDB
dType = dict()
dType["int"] = "long"
dType["smallint"] = "short"
dType["bit"] = "short"
dType["varbinary"] = "blob"
dType["nvarchar"] = "text"
dType["varchar"] = "text"
dType["char"] = "text"
dType["datetime"] = "date"
dType["datetime2"] = "date"
dType["smalldatetime"] = "date"
dType["decimal"] = "double"
dType["numeric"] = "double"
dType["float"] ="double"
# numeric type conversion depends upon the precision and scale
dType["numeric"] = "float" # 4 bytes
dType["real"] = "double" # 8 bytes
# Iterate through list of field names and add them to the output table
i = 0
# ColumnInfo contains:
# ColumnOrdinal, ColumnSize, NumericPrecision, NumericScale, ProviderType, IsLong, ProviderSpecificDataType, DataTypeName
#PrintMsg(" \nFieldName, Length, Precision, Scale, Type", 1)
joinFields = list()
outputTbl = os.path.join(os.path.dirname(newTable), os.path.basename(newTable))
arcpy.CreateTable_management(os.path.dirname(outputTbl), os.path.basename(outputTbl))
for i, fldName in enumerate(columnNames):
vals = columnInfo[i].split(",")
length = int(vals[1].split("=")[1])
precision = int(vals[2].split("=")[1])
scale = int(vals[3].split("=")[1])
dataType = dType[vals[4].lower().split("=")[1]]
if fldName.lower().endswith("key"):
# Per SSURGO standards, key fields should be string. They come from Soil Data Access as long integer.
dataType = 'text'
length = 30
arcpy.AddField_management(outputTbl, fldName, dataType, precision, scale, length)
return outputTbl
except:
errorMsg()
return False
## ===================================================================================
def AttributeRequest(sdaURL, outputTable, sQuery):
# Sends a POST REST request (urllib2 + JSON) to the SDA Tabular service.
#
# Uses an InsertCursor to populate the new outputTable.
#
# Sends the query to the SDM Tabular service, which returns data in JSON format; a new table
# is created in the geodatabase and the returned records are loaded into it.
# Returns a list of key values (mukey or cokey) for the records that were inserted.
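# The JSON response handled below is expected to have the general shape
#   {"Table": [[column names...], [column metadata...], [row 1 values...], [row 2 values...], ...]}
# so the first two list entries are popped off as columnNames and columnInfo before the rows are inserted.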
try:
keyList = list()
if sQuery == "":
raise MyError, "Missing query string"
# Tabular service to append to SDA URL
url = sdaURL + "/Tabular/SDMTabularService/post.rest"
#PrintMsg(" \nURL: " + url, 1)
#PrintMsg(" \n" + sQuery, 0)
#time.sleep(2)
dRequest = dict()
dRequest["format"] = "JSON+COLUMNNAME+METADATA"
dRequest["query"] = sQuery
#PrintMsg(" \nURL: " + url)
#PrintMsg("QUERY: " + sQuery)
# Create SDM connection to service using HTTP
jData = json.dumps(dRequest)
# Send request to SDA Tabular service
req = urllib2.Request(url, jData)
resp = urllib2.urlopen(req)
jsonString = resp.read()
data = json.loads(jsonString)
del jsonString, resp, req
if not "Table" in data:
return keyList
#raise MyError, "Query failed to select anything: \n " + sQuery
dataList = data["Table"] # Data as a list of lists. Service returns everything as string.
# Get column metadata from first two records
columnNames = dataList.pop(0)
columnInfo = dataList.pop(0)
#PrintMsg(" \n\tImporting attribute data to " + os.path.basename(outputTable) + "...", 0)
#PrintMsg(" \nColumn Names: " + str(columnNames), 1)
# Create new table to hold data
if not arcpy.Exists(outputTable):
outputTable = CreateNewTable(outputTable, columnNames, columnInfo)
if "mukey" in columnNames:
keyIndx = columnNames.index("mukey")
elif "cokey" in columnNames:
keyIndx = columnNames.index("cokey")
else:
keyIndx = 0
#PrintMsg(" \n" + outputTable + " uses key field: " + columnNames[keyIndx], 1)
with arcpy.da.InsertCursor(outputTable, columnNames) as cur:
for rec in dataList:
cur.insertRow(rec)
keyList.append(rec[keyIndx])
#PrintMsg("\tPopulated " + os.path.basename(outputTable) + " with " + Number_Format(len(keyList), 0, True) + " records", 1)
return keyList
except MyError, e:
# Example: raise MyError, "This is an error message"
PrintMsg(str(e), 2)
return []
except urllib2.HTTPError:
errorMsg()
PrintMsg(" \n" + sQuery, 1)
return []
except:
errorMsg()
return []
## ===================================================================================
def GetKeys(theInput, keyField):
# Create bracketed list of MUKEY values from spatial layer for use in query
#
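# Illustrative usage (muLayer is a hypothetical layer name): GetKeys(muLayer, "mukey") might return
# [398392, 398393, ...], which the calling code can then format into an IN (...) clause for the SDA query string.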
try:
# Tell user how many features are being processed
theDesc = arcpy.Describe(theInput)
theDataType = theDesc.dataType
PrintMsg("", 0)
#if theDataType.upper() == "FEATURELAYER":
# Get Featureclass and total count
if theDataType.lower() == "featurelayer":
theFC = theDesc.featureClass.catalogPath
theResult = arcpy.GetCount_management(theFC)
elif theDataType.lower() in ["featureclass", "shapefile"]:
theResult = arcpy.GetCount_management(theInput)
else:
raise MyError, "Unknown data type: " + theDataType.lower()
iTotal = int(theResult.getOutput(0))
if iTotal > 0:
sqlClause = ("DISTINCT " + keyField, "ORDER BY " + keyField)
keyList = list()
with arcpy.da.SearchCursor(theInput, [keyField], sql_clause=sqlClause) as cur:
for rec in cur:
keyList.append(int(rec[0]))
#PrintMsg("\tmukey list: " + str(keyList), 1)
return keyList
else:
return []
except MyError, e:
# Example: raise MyError, "This is an error message"
PrintMsg(str(e), 2)
return []
except:
errorMsg()
return []
## ===================================================================================
def elapsedTime(start):
# Calculate amount of time since "start" and return time string
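# Typical usage (illustrative): start = time.time(); ...do the work...; PrintMsg("Elapsed time: " + elapsedTime(start), 0)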
try:
# Stop timer
#
end = time.time()
# Calculate total elapsed seconds
eTotal = end - start
# day = 86400 seconds
# hour = 3600 seconds
# minute = 60 seconds
eMsg = ""
# calculate elapsed days
eDay1 = eTotal / 86400
eDay2 = math.modf(eDay1)
eDay = int(eDay2[1])
eDayR = eDay2[0]
if eDay > 1:
eMsg = eMsg + str(eDay) + " days "
elif eDay == 1:
eMsg = eMsg + str(eDay) + " day "
# Calculated elapsed hours
eHour1 = eDayR * 24
eHour2 = math.modf(eHour1)
eHour = int(eHour2[1])
eHourR = eHour2[0]
if eDay > 0 or eHour > 0:
if eHour > 1:
eMsg = eMsg + str(eHour) + " hours "
else:
eMsg = eMsg + str(eHour) + " hour "
# Calculate elapsed minutes
eMinute1 = eHourR * 60
eMinute2 = math.modf(eMinute1)
eMinute = int(eMinute2[1])
eMinuteR = eMinute2[0]
if eDay > 0 or eHour > 0 or eMinute > 0:
if eMinute > 1:
eMsg = eMsg + str(eMinute) + " minutes "
else:
eMsg = eMsg + str(eMinute) + " minute "
# Calculate elapsed seconds
eSeconds = "%.1f" % (eMinuteR * 60)
if eSeconds == "1.0":
eMsg = eMsg + eSeconds + " second "
else:
eMsg = eMsg + eSeconds + " seconds "
return eMsg
except:
errorMsg()
return ""
## ===================================================================================
def Number_Format(num, places=0, bCommas=True):
try:
# Format a number according to locality and given places
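# e.g. Number_Format(1234567.891, 2, True) would return '1,234,567.89' under a US-English locale (illustrative)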
locale.setlocale(locale.LC_ALL, "")
if bCommas:
theNumber = locale.format("%.*f", (places, num), True)
else:
theNumber = locale.format("%.*f", (places, num), False)
return theNumber
except:
errorMsg()
return False
## ===================================================================================
def CreateOutputTableMu(valuTable, depthList, dPct, mukeyList):
#
try:
# If this table does not already exist, create the output table and add required fields
# Populate mukey column using DISTINCT horizon table information
#PrintMsg(" \nAdding new fields to table: " + os.path.basename(theMuTable), 0)
outputDB = os.path.dirname(valuTable)
if not arcpy.Exists(valuTable):
#PrintMsg(" \nCreating new table: " + os.path.basename(valuTable), 0)
arcpy.CreateTable_management(os.path.dirname(valuTable), os.path.basename(valuTable))
# Add fields for AWS
for rng in depthList:
# Create the AWS fields in a loop
#
td = rng[0]
bd = rng[1]
awsField = "aws" + str(td) + "_" + str(bd)
arcpy.AddField_management(valuTable, awsField, "FLOAT", "", "", "", awsField) # Integer is more appropriate
for rng in depthList:
# Create the AWS fields in a loop
#
td = rng[0]
bd = rng[1]
awsField = "tk" + str(td) + "_" + str(bd) + "a"
arcpy.AddField_management(valuTable, awsField, "FLOAT", "", "", "", awsField)
arcpy.AddField_management(valuTable, "musumcpcta", "SHORT", "", "", "")
# Add Fields for SOC
for rng in depthList:
# Create the SOC fields in a loop
#
td = rng[0]
bd = rng[1]
socField = "soc" + str(td) + "_" + str(bd)
arcpy.AddField_management(valuTable, socField, "FLOAT", "", "", "", socField) # Integer is more appropriate
for rng in depthList:
# Create the SOC thickness fields in a loop
#
td = rng[0]
bd = rng[1]
socField = "tk" + str(td) + "_" + str(bd) + "s"
arcpy.AddField_management(valuTable, socField, "FLOAT", "", "", "", socField)
arcpy.AddField_management(valuTable, "musumcpcts", "SHORT", "", "", "")
if mainRuleName == "NCCPI - National Commodity Crop Productivity Index (Ver 2.0)":
# Add fields for NCCPI version 2
#
arcpy.AddField_management(valuTable, "nccpi2cs", "FLOAT", "", "", "")
arcpy.AddField_management(valuTable, "nccpi2sg", "FLOAT", "", "", "")
arcpy.AddField_management(valuTable, "nccpi2co", "FLOAT", "", "", "")
arcpy.AddField_management(valuTable, "nccpi2all", "FLOAT", "", "", "")
elif mainRuleName == "NCCPI - National Commodity Crop Productivity Index (Ver 3.0)":
# Add fields for NCCPI version 3
# "mukey", "NCCPI2CORN", "NCCPI2SOY", "NCCPI2COT","NCCPI2SG", "NCCPI2ALL"
arcpy.AddField_management(valuTable, "nccpi3corn", "FLOAT", "", "", "")
arcpy.AddField_management(valuTable, "nccpi3soy", "FLOAT", "", "", "")
arcpy.AddField_management(valuTable, "nccpi3cot", "FLOAT", "", "", "")
arcpy.AddField_management(valuTable, "nccpi3sg", "FLOAT", "", "", "")
arcpy.AddField_management(valuTable, "nccpi3all", "FLOAT", "", "", "")
else:
PrintMsg(" \n\tNCCPI version 2 or 3 not found", 1)
#raise MyError, "Problem handling mainrule: " + mainRuleName
# Add fields for root zone depth and root zone available water supply
arcpy.AddField_management(valuTable, "pctearthmc", "SHORT", "", "", "")
arcpy.AddField_management(valuTable, "rootznemc", "SHORT", "", "", "")
arcpy.AddField_management(valuTable, "rootznaws", "SHORT", "", "", "")
# Add field for droughty soils
arcpy.AddField_management(valuTable, "droughty", "SHORT", "", "", "")
# Add field for potential wetland soils
arcpy.AddField_management(valuTable, "pwsl1pomu", "SHORT", "", "", "")
# Add field for mapunit-sum of ALL component-comppct_r values
arcpy.AddField_management(valuTable, "musumcpct", "SHORT", "", "", "")
# Add Mukey field (primary key)
arcpy.AddField_management(valuTable, "mukey", "TEXT", "", "", "30", "mukey")
# Populate mukey and mapunit-sum-of-comppct_r values for each survey area
outcur = arcpy.da.InsertCursor(valuTable, ["mukey", "musumcpct"])
for mukey in mukeyList:
try:
comppct = dPct[mukey][0]
except:
comppct = 0
outcur.insertRow([mukey, comppct])
return True
except MyError, e:
# Example: raise MyError("this is an error message")
PrintMsg(str(e) + " \n", 2)
return False
except:
errorMsg()
return False
## ===================================================================================
def CreateOutputTableCo(theCompTable, depthList):
# Create the component level table
# The new input field is created using adaptive code from another script.
#
try:
#PrintMsg(" \nCreating new output table (" + os.path.basename(theCompTable) + ") for component level data", 0)
outputDB = os.path.dirname(theCompTable)
if not arcpy.Exists(theCompTable):
arcpy.CreateTable_management(os.path.dirname(theCompTable), os.path.basename(theCompTable))
# Add fields appropriate for the component level restrictions
# mukey,cokey, compName, localphase, compPct, comppct, resdept, restriction
arcpy.AddField_management(theCompTable, "COKEY", "TEXT", "", "", "30", "COKEY")
arcpy.AddField_management(theCompTable, "COMPNAME", "TEXT", "", "", "60", "COMPNAME")
arcpy.AddField_management(theCompTable, "LOCALPHASE", "TEXT", "", "", "40", "LOCALPHASE")
arcpy.AddField_management(theCompTable, "COMPPCT_R", "SHORT", "", "", "", "COMPPCT_R")
for rng in depthList:
# Create the AWS fields in a loop
#
td = rng[0]
bd = rng[1]
awsField = "AWS" + str(td) + "_" + str(bd)
arcpy.AddField_management(theCompTable, awsField, "FLOAT", "", "", "", awsField)
for rng in depthList:
# Create the TK-AWS fields in a loop
#
td = rng[0]
bd = rng[1]
awsField = "TK" + str(td) + "_" + str(bd) + "A"
arcpy.AddField_management(theCompTable, awsField, "FLOAT", "", "", "", awsField)
arcpy.AddField_management(theCompTable, "MUSUMCPCTA", "SHORT", "", "", "")
for rng in depthList:
# Create the SOC fields in a loop
#
td = rng[0]
bd = rng[1]
awsField = "SOC" + str(td) + "_" + str(bd)
arcpy.AddField_management(theCompTable, awsField, "FLOAT", "", "", "")
for rng in depthList:
# Create the rest of the SOC thickness fields in a loop
#
td = rng[0]
bd = rng[1]
awsField = "TK" + str(td) + "_" + str(bd) + "S"
arcpy.AddField_management(theCompTable, awsField, "FLOAT", "", "", "")
arcpy.AddField_management(theCompTable, "MUSUMCPCTS", "SHORT", "", "", "")
# Root Zone and root zone available water supply
arcpy.AddField_management(theCompTable, "PCTEARTHMC", "SHORT", "", "", "")
arcpy.AddField_management(theCompTable, "ROOTZNEMC", "SHORT", "", "", "")
arcpy.AddField_management(theCompTable, "ROOTZNAWS", "SHORT", "", "", "")
arcpy.AddField_management(theCompTable, "RESTRICTION", "TEXT", "", "", "254", "RESTRICTION")
# Droughty soils
arcpy.AddField_management(theCompTable, "DROUGHTY", "SHORT", "", "", "")
# Add field for potential wetland soils
arcpy.AddField_management(theCompTable, "PWSL1POMU", "SHORT", "", "", "")
# Add primary key field
arcpy.AddField_management(theCompTable, "MUKEY", "TEXT", "", "", "30", "MUKEY")
# add attribute indexes for key fields
arcpy.AddIndex_management(theCompTable, "MUKEY", "Indx_Res2Mukey", "NON_UNIQUE", "NON_ASCENDING")
arcpy.AddIndex_management(theCompTable, "COKEY", "Indx_ResCokey", "UNIQUE", "NON_ASCENDING")
# populate component level table with mukey and component data
sqlClause = ("DISTINCT", "ORDER BY cokey")
#PrintMsg(" \nApparent problem with DISTINCT COKEY clause for " + hzTable, 1)
#lastCokey = 'xxxx'
coCnt = 0
hzCnt = int(arcpy.GetCount_management(hzTable).getOutput(0))
#PrintMsg(" \nInput table " + hzTable + " has " + Number_Format(hzCnt, 0, True) + " records", 1)
uniqueList = list()
with arcpy.da.SearchCursor(hzTable, ["mukey", "cokey", "compname", "localphase", "comppct_r"], sql_clause=sqlClause) as incur:
#incur.reset()
# Populate component-level table from the horizon query table
outcur = arcpy.da.InsertCursor(theCompTable, ["mukey", "cokey", "compname", "localphase", "comppct_r"])
for inrec in incur:
coCnt += 1
outcur.insertRow(inrec)
#PrintMsg(" \nUsing sql_clause " + str(sqlClause) + " we got " + Number_Format(coCnt, 0, True) + " components", 1)
except MyError, e:
# Example: raise MyError("this is an error message")
PrintMsg(str(e) + " \n", 2)
return False
except:
errorMsg()
return False
## ===================================================================================
def CheckTexture(mukey, cokey, desgnmaster, om, texture, lieutex, taxorder, taxsubgrp):
# Is this an organic horizon? Look at desgnmaster and OM first. If those
# don't help, look at chtexturegrp.texture next.
#
# if True: Organic, exclude from root zone calculations unless it is 'buried'
# if False: Mineral, include in root zone calculations
#
# 01-26-2015
#
# According to Bob, if TAXORDER = 'Histosol' and DESGNMASTER = 'O' or 'L' then it should NOT be included in the RZAWS calculations
#
# If desgnmaster = 'O' or 'L' and not (TAXORDER = 'Histosol' OR TAXSUBGRP like 'Histic%') then exclude this horizon from all RZAWS calculations.
#
# lieutext values: Slightly decomposed plant material, Moderately decomposed plant material,
# Bedrock, Variable, Peat, Material, Unweathered bedrock, Sand and gravel, Mucky peat, Muck,
# Highly decomposed plant material, Weathered bedrock, Cemented, Gravel, Water, Cobbles,
# Stones, Channers, Parachanners, Indurated, Cinders, Duripan, Fragmental material, Paragravel,
# Artifacts, Boulders, Marl, Flagstones, Coprogenous earth, Ashy, Gypsiferous material,
# Petrocalcic, Paracobbles, Diatomaceous earth, Fine gypsum material, Undecomposed organic matter
# According to Bob, any of the 'decomposed plant material', 'Muck, 'Mucky peat, 'Peat', 'Coprogenous earth' LIEUTEX
# values qualify.
#
# This function does not determine whether the horizon might be a buried organic. That is done in CalcRZAWS1.
#
lieuList = ['Slightly decomposed plant material', 'Moderately decomposed plant material', \
'Highly decomposed plant material', 'Undecomposed plant material', 'Muck', 'Mucky peat', \
'Peat', 'Coprogenous earth']
txList = ["CE", "COP-MAT", "HPM", "MPM", "MPT", "MUCK", "PDOM", "PEAT", "SPM", "UDOM"]
try:
if str(taxorder) == 'Histosols' or str(taxsubgrp).lower().find('histic') >= 0:
# Always treat histosols and histic components as having all mineral horizons
#if mukey == tmukey:
# PrintMsg("\tHistisol or histic: " + cokey + ", " + str(taxorder) + ", " + str(taxsubgrp), 1)
return False
elif desgnmaster in ["O", "L"]:
# This is an organic horizon according to CHORIZON.DESGNMASTER OR OM_R
#if mukey == tmukey:
# PrintMsg("\tO: " + cokey + ", " + str(taxorder) + ", " + str(taxsubgrp), 1)
return True
#elif om > 19:
# This is an organic horizon according to CHORIZON.DESGNMASTER OR OM_R
# if mukey == tmukey:
# PrintMsg("\tHigh om_r: " + cokey + ", " + str(taxorder) + ", " + str(taxsubgrp), 1)
# return True
elif str(texture) in txList:
# This is an organic horizon according to CHTEXTUREGRP.TEXTURE
#if mukey == tmukey:
# PrintMsg("\tTexture: " + cokey + ", " + str(taxorder) + ", " + str(taxsubgrp), 1)
return True
elif str(lieutex) in lieuList:
# This is an organic horizon according to CHTEXTURE.LIEUTEX
#if mukey == tmukey:
# PrintMsg("\tLieutex: " + cokey + ", " + str(taxorder) + ", " + str(taxsubgrp), 1)
return True
else:
# Default to mineral horizon if it doesn't match any of the criteria
#if mukey == tmukey:
# PrintMsg("\tDefault mineral: " + cokey + ", " + str(taxorder) + ", " + str(taxsubgrp), 1)
return False
except MyError, e:
# Example: raise MyError("this is an error message")
PrintMsg(str(e) + " \n", 2)
return False
except:
errorMsg()
return False
## ===================================================================================
def CheckBulkDensity(sand, silt, clay, bd, mukey, cokey):
# Bob's check for a dense layer
# If sand, silt or clay are missing then we default to Dense layer = False
# If the sum of sand, silt, clay are less than 100 then we default to Dense layer = False
# If a single sand, silt or clay value is NULL, calculate it
try:
#if mukey == tmukey:
# PrintMsg("\tCheck for Dense: " + str(mukey) + ", " + str(cokey) + ", " + \
# str(sand) + ", " + str(silt) + ", " + str(clay) + ", " + str(bd), 1)
txlist = [sand, silt, clay]
if bd is None:
# This is not a Dense Layer
#if mukey == tmukey:
# PrintMsg("\tMissing bulk density", 1)
return False
if txlist.count(None) == 1:
# Missing a single total_r value, calculate it
if txlist[0] is None:
sand = 100.0 - silt - clay
elif silt is None:
silt = 100.0 - sand - clay
else:
clay = 100.0 - sand - silt
txlist = [sand, silt, clay]
if txlist.count(None) > 0:
# Null values for more than one, return False
#if mukey == tmukey:
# PrintMsg("\tDense layer with too many null texture values", 1)
return False
if round(sum(txlist), 1) != 100.0:
# Cannot run calculation, default value is False
#if mukey == tmukey:
# PrintMsg("\tTexture values do not sum to 100", 1)
return False
# All values required to run the Dense Layer calculation are available
a = bd - ((( sand * 1.65 ) / 100.0 ) + (( silt * 1.30 ) / 100.0 ) + (( clay * 1.25 ) / 100.0))
b = ( 0.002081 * sand ) + ( 0.003912 * silt ) + ( 0.0024351 * clay )
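# Illustrative example (not from the source): for sand=30, silt=40, clay=30 and bd=1.8,
# a = 1.8 - ((30*1.65 + 40*1.30 + 30*1.25) / 100.0) = 1.8 - 1.39 = 0.41 and
# b = 0.002081*30 + 0.003912*40 + 0.0024351*30 = 0.292 (rounded), so a > b and the horizon is flagged as Dense.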
if a > b:
# This is a Dense Layer
#if mukey == tmukey:
# PrintMsg("\tDense layer: a = " + str(a) + " and b = " + str(b) + " and BD = " + str(bd), 1)
return True
else:
# This is not a Dense Layer
#if mukey == tmukey:
# PrintMsg("\tNot a Dense layer: a = " + str(a) + " and b = " + str(b) + " and BD = " + str(bd), 1)
return False
except MyError, e:
# Example: raise MyError("this is an error message")
PrintMsg(str(e) + " \n", 2)
return False
except:
errorMsg()
return False
## ===================================================================================
def CalcRZDepth(db, theCompTable, maxD, dPct, dCR):
#
# Look at soil horizon properties to adjust the root zone depth.
# This is in addition to the standard component restrictions
#
# Read the component restrictions into a dictionary, then read through the
# QueryTable_Hz table, calculating the final component rootzone depth
#
# Only major components are used
# Components with COMPKIND = 'Miscellaneous area' or NULL are filtered out.
# Components with no horizon data are assigned a root zone depth of zero.
#
# Horizons with NULL hzdept_r or hzdepb_r are filtered out
# Horizons with hzdept_r => hzdepb_r are filtered out
# O horizons or organic horizons from the surface down to the first mineral horizon
# are filtered out.
#
# Horizon data below 150cm or select component restrictions are filtered out.
# A Dense layer calculation is also included as an additional horizon-specific restriction.
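#
# As used below, dCR is assumed to be the dictionary returned by GetCoRestrictions(), i.e.
# dCR[cokey] = (resdept_r, reskind), and dPct is assumed to be keyed by mukey with the
# mapunit sum of comppct_r as its first element (as in CreateOutputTableMu).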
try:
dComp = dict() # component level data for all component restrictions
dComp2 = dict() # store all component level data plus default values
coList = list()
# Create dictionaries and lists
dMapunit = dict() # store mapunit weighted restriction depths
# FIELDS LIST FOR INPUT TABLE
# areasymbol, mukey, musym, muname, mukname,
# cokey, compct, compname, compkind, localphase,
# taxorder, taxsubgrp, ec, pH, dbthirdbar, hzname,
# hzdesgn, hzdept, hzdepb, hzthk, sand,
# silt, clay, om, reskind, reshard,
# resdept, resthk, texture, lieutex
# All reskind values: Strongly contrasting textural stratification, Lithic bedrock, Densic material,
# Ortstein, Permafrost, Paralithic bedrock, Cemented horizon, Undefined, Fragipan, Plinthite,
# Abrupt textural change, Natric, Petrocalcic, Duripan, Densic bedrock, Salic,
# Human-manufactured materials, Sulfuric, Placic, Petroferric, Petrogypsic
#
# Using these restrictions:
# Lithic bedrock, Paralithic bedrock, Densic bedrock, Fragipan, Duripan, Sulfuric
# Other restrictions include pH < 3.5 and EC > 16
crFlds = ["cokey","reskind", "reshard", "resdept_r"]
sqlClause = (None, "ORDER BY cokey, resdept_r ASC")
# ********************************************************
#
# Read the QueryTable_HZ and adjust the component restrictions for additional
# issues such as pH, EC, etc.
#
# Save these new restriction values to dComp dictionary
#
# Only process major-earthy components...
#whereClause = "component.compkind <> 'Miscellaneous area' and component.compkind is not Null and component.majcompflag = 'Yes'"
whereClause = "compkind <> 'Miscellaneous area' and compkind is not Null and majcompflag = 'Yes'"
sqlClause = (None, "ORDER BY mukey, comppct_r DESC, cokey, hzdept_r ASC")
curFlds = ["mukey", "cokey", "compname", "compkind", "localphase", "comppct_r", "taxorder", "taxsubgrp", "hzname", "desgnmaster", "hzdept_r", "hzdepb_r", "sandtotal_r", "silttotal_r", "claytotal_r", "om_r", "dbthirdbar_r", "ph1to1h2o_r", "ec_r", "awc_r", "texture", "lieutex"]
resList = ['Lithic bedrock','Paralithic bedrock','Densic bedrock', 'Fragipan', 'Duripan', 'Sulfuric']
lastCokey = "xxxx"
lastMukey = 'xxxx'
# Display status of processing input table containing horizon data and component restrictions
with arcpy.da.SearchCursor(hzTable, curFlds, where_clause=whereClause, sql_clause=sqlClause) as cur:
# Reading horizon-level data
for rec in cur:
# ********************************************************
#
# Read QueryTable_HZ record
mukey, cokey, compName, compKind, localPhase, compPct, taxorder, taxsubgrp, hzname, desgnmaster, hzDept, hzDepb, sand, silt, clay, om, bd, pH, ec, awc, texture, lieutex = rec
# Initialize component restriction depth to maxD
dComp2[cokey] = [mukey, compName, localPhase, compPct, maxD, ""]
if lastCokey != cokey:
# Accumulate a list of components for future use
lastCokey = cokey
coList.append(cokey)
if hzDept < maxD:
# ********************************************************
# For horizons above the floor level (maxD), look for other restrictive
# layers based on horizon properties such as pH, EC and bulk density.
# Start with the top horizons and work down.
# initialize list of restrictions
resKind = ""
restriction = list()
bOrganic = CheckTexture(mukey, cokey, desgnmaster, om, texture, lieutex, taxorder, taxsubgrp)
if not bOrganic:
# calculate alternate dense layer per Dobos
bDense = CheckBulkDensity(sand, silt, clay, bd, mukey, cokey)
if bDense:
# use horizon top depth for the dense layer
restriction.append("Dense")
resDept = hzDept
# Not sure whether these horizon property checks should be skipped for Organic
# Bob said to only skip Dense Layer check, but VALU table RZAWS looks like all
# horizon properties were skipped.
#
# If we decide to skip EC and pH horizon checks for histosols/histic, use this query
# Example Pongo muck in North Carolina that have low pH but no other restriction
#
if str(taxorder) != 'Histosols' and str(taxsubgrp).lower().find('histic') == -1:
# Only non histosols/histic soils will be checked for pH or EC restrictive horizons
if pH is not None and pH <= 3.5:
restriction.append("pH")
resDept = hzDept
#if mukey == tmukey:
# PrintMsg("\tpH restriction at " + str(resDept) + "cm", 1)
if ec is not None and ec >= 16.0:
# Originally I understood that EC > 12 is a restriction, but Bob says he is
# now using 16.
restriction.append("EC")
resDept = hzDept
#if mukey == tmukey:
# PrintMsg("\tEC restriction at " + str(resDept) + "cm", 1)
#if bd >= 1.8:
# restriction.append("BD")
# resDept = hzDept
#if awc is None:
# restriction.append("AWC")
# resDept = hzDept
# ********************************************************
#
# Finally, check for one of the standard component restrictions
#
if cokey in dCR:
resDepth2, resKind = dCR[cokey]
if hzDept <= resDepth2 < hzDepb:
# This restriction may not be at the top of the horizon, thus we
# need to override this if one of the other restrictions exists for this
# horizon
if len(restriction) == 0:
# If this is the only restriction, set the restriction depth
# to the value from the corestriction table.
resDept = resDepth2
# Adding this restriction name to the list even if there are others
# May want to take this out later
restriction.append(resKind)
# ********************************************************
#
if len(restriction) > 0:
# Found at least one restriction for this horizon
if not cokey in dComp:
# if there are no higher restrictions for this component, save this one
# to the dComp dictionary as the upper-most restriction
#
dComp[cokey] = [mukey, compName, localPhase, compPct, resDept, restriction]
# Load restrictions from dComp into dComp2 so that there is complete information for all components
for cokey in dComp2:
try:
dComp2[cokey] = dComp[cokey]
except:
pass
# Return the dictionary containing restriction depths and the dictionary containing defaults
return dComp2
except MyError, e:
# Example: raise MyError("this is an error message")
PrintMsg(str(e) + " \n", 2)
return dComp2
except:
errorMsg()
return dComp2
## ===================================================================================
def GetCoRestrictions(crTable, maxD, resList):
#
# Returns a dictionary of top component restrictions for root growth
#
# resList is a comma-delimited string of reskind values, surrounded by parenthesis
#
# Get component root zone depth from QueryTable_CR and load into dictionary (dCR)
# This is NOT the final root zone depth. This information will be compared with the
# horizon soil properties to determine the final root zone depth.
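# Illustrative example of the expected format (reskind values as listed in CalcRZDepth):
#   resList = "('Lithic bedrock','Paralithic bedrock','Densic bedrock','Fragipan','Duripan','Sulfuric')"
# i.e. a string that can be dropped directly into the "reskind in ..." where-clause below.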
try:
rSQL = "resdept_r < " + str(maxD) + " and reskind in " + resList
sqlClause = (None, "ORDER BY cokey, resdept_r ASC")
#PrintMsg("\tGetting corestrictions matching: " + resList, 1)
dRestrictions = dict()
# Get the top component restriction from the sorted table
with arcpy.da.SearchCursor(crTable, ["cokey", "resdept_r", "reskind"], where_clause=rSQL, sql_clause=sqlClause) as cur:
for rec in cur:
cokey, resDept, reskind = rec
#PrintMsg("Restriction: " + str(rec), 1)
if not cokey in dRestrictions:
dRestrictions[str(cokey)] = resDept, reskind
return dRestrictions
except MyError, e:
# Example: raise MyError("this is an error message")
PrintMsg(str(e) + " \n", 2)
return dict()
except:
errorMsg()
return dict()
## ===================================================================================
def CalcRZAWS(inputDB, outputDB, td, bd, theCompTable, valuTable, dRestrictions, maxD, dPct):
# Create a component-level summary table
# Calculate mapunit-weighted average for each mapunit and write to a mapunit-level table
# Need to filter out compkind = 'Miscellaneous area' for RZAWS
# dRestrictions[cokey] = [mukey, compName, localPhase, compPct, resDept, restriction]
try:
import decimal
env.workspace = outputDB
# Using the same component horizon table that has been
queryTbl = hzTable
numRows = int(arcpy.GetCount_management(queryTbl).getOutput(0))
PrintMsg(" \nCalculating Root Zone AWS for " + str(td) + " to " + str(bd) + "cm...", 0)
arcpy.SetProgressorLabel("Calculating Root Zone AWS")
# QueryTable_HZ fields
qFieldNames = ["mukey", "cokey", "comppct_r", "compname", "localphase", "majcompflag", "compkind", "taxorder", "taxsubgrp", "desgnmaster", "om_r", "awc_r", "hzdept_r", "hzdepb_r", "texture", "lieutex"]
#arcpy.SetProgressorLabel("Creating output tables using dominant component...")
#arcpy.SetProgressor("step", "Calculating root zone available water supply..." , 0, numRows, 1)
# Open edit session on geodatabase to allow multiple update cursors
with arcpy.da.Editor(inputDB) as edit:
# initialize list of components with horizon overlaps
#badCo = list()
# Output fields for root zone and droughty
muFieldNames = ["mukey", "pctearthmc", "rootznemc", "rootznaws", "droughty"]
muCursor = arcpy.da.UpdateCursor(valuTable, muFieldNames)
# Open component-level output table for updates
#coCursor = arcpy.da.InsertCursor(theCompTable, coFieldNames)
coFieldNames = ["mukey", "cokey", "compname", "localphase", "comppct_r", "pctearthmc", "rootznemc", "rootznaws", "restriction"]
coCursor = arcpy.da.UpdateCursor(theCompTable, coFieldNames)
# Process query table using cursor, write out horizon data for each major component
sqlClause = [None, "order by mukey, comppct_r DESC, cokey, hzdept_r ASC"]
iCnt = int(arcpy.GetCount_management(queryTbl).getOutput(0))
# For root zone calculations, we only want earthy, major components
#PrintMsg(" \nFiltering components in Query_HZ for CalcRZAWS1 function", 1)
#
# Major-Earthy Components
#hzSQL = "component.compkind <> 'Miscellaneous area' and component.compkind is not NULL and component.majcompflag = 'Yes'"
# All Components