def convert_pixgwktList_to_wgs84wktList(inputRaster, wktPolygonPixList):
    # returns [[GeoWKT, PixWKT], ...]
wgs84WKTList=[]
if os.path.isfile(inputRaster):
srcRaster = gdal.Open(inputRaster)
targetSR = osr.SpatialReference()
targetSR.ImportFromWkt(srcRaster.GetProjectionRef())
geomTransform = srcRaster.GetGeoTransform()
for wktPolygonPix in wktPolygonPixList:
geom_wkt_list = pixelWKTToGeoWKT(wktPolygonPix, inputRaster, targetSR='',
geomTransform=geomTransform,
breakMultiPolygonPix=False)
wgs84WKTList.extend(geom_wkt_list)
# [[GeoWKT, PixWKT], ...]
return wgs84WKTList
def reprojectRaster(src,match_ds,dst_filename):
src_proj = src.GetProjection()
src_geotrans = src.GetGeoTransform()
match_proj = match_ds.GetProjection()
match_geotrans = match_ds.GetGeoTransform()
wide = match_ds.RasterXSize
high = match_ds.RasterYSize
dst = gdal.GetDriverByName('GTiff').Create(dst_filename, wide, high, 1, gdalconst.GDT_Int16)
dst.SetGeoTransform( match_geotrans )
dst.SetProjection( match_proj)
gdal.ReprojectImage(src, dst, src_proj, match_proj, gdalconst.GRA_Bilinear)
del dst # Flush
return(gdal.Open(dst_filename,gdalconst.GA_ReadOnly))
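# Usage sketch for reprojectRaster above, a minimal example assuming the osgeo bindings
# are available and that 'scene.tif' and 'reference.tif' are placeholder paths for an
# input raster and the raster whose grid we want to match.
from osgeo import gdal, gdalconst

src_ds = gdal.Open('scene.tif', gdalconst.GA_ReadOnly)
match_ds = gdal.Open('reference.tif', gdalconst.GA_ReadOnly)
warped_ds = reprojectRaster(src_ds, match_ds, 'scene_matched.tif')
# The returned dataset shares the reference grid dimensions and projection.
print(warped_ds.RasterXSize, warped_ds.RasterYSize)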
#finds the intersecting extent of a series of scenes (left,right,bottom,top are arrays containing the respective lat/lon of the left,right,bottom,top of each image)
def get_lulc_source(ds):
"""For a given input dataset extent, select the appropriate mask source (NLCD vs. global bareground)
"""
ds_geom = geolib.ds_geom(ds)
lulc_fn = get_nlcd_fn()
lulc_ds = gdal.Open(lulc_fn)
lulc_geom = geolib.ds_geom(lulc_ds)
#If the dem geom is within CONUS (nlcd extent), use it
geolib.geom_transform(ds_geom, t_srs=lulc_geom.GetSpatialReference())
if lulc_geom.Contains(ds_geom):
print("Using NLCD 30m data for rockmask")
lulc_source = 'nlcd'
else:
print("Using global 30m bare ground data for rockmask")
#Otherwise for Global, use 30 m Bare Earth percentage
lulc_source = 'bareground'
#lulc_fn = get_bareground_fn()
#lulc_ds = gdal.Open(lulc_fn)
return lulc_source
def read_naip(file_path, bands_to_use):
"""
Read in a NAIP, based on www.machinalis.com/blog/python-for-geospatial-data-processing.
Bands_to_use is an array like [0,0,0,1], designating whether to use each band (R, G, B, IR).
"""
raster_dataset = gdal.Open(file_path, gdal.GA_ReadOnly)
bands_data = []
index = 0
for b in range(1, raster_dataset.RasterCount + 1):
band = raster_dataset.GetRasterBand(b)
if bands_to_use[index] == 1:
bands_data.append(band.ReadAsArray())
index += 1
bands_data = numpy.dstack(bands_data)
return raster_dataset, bands_data
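# Usage sketch for read_naip, assuming 'naip_tile.tif' is a placeholder path to a
# 4-band NAIP GeoTIFF; the band mask below keeps only the infrared band.
raster_dataset, bands_data = read_naip('naip_tile.tif', [0, 0, 0, 1])
print(bands_data.shape)  # (rows, cols, number_of_selected_bands)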
def tag_with_locations(test_images, predictions, tile_size, state_abbrev):
"""Combine image data with label data, so info can be rendered in a map and list UI.
Add location data for convenience too.
"""
combined_data = []
for idx, img_loc_tuple in enumerate(test_images):
raster_filename = img_loc_tuple[2]
raster_dataset = gdal.Open(os.path.join(NAIP_DATA_DIR, raster_filename), gdal.GA_ReadOnly)
raster_tile_x = img_loc_tuple[1][0]
raster_tile_y = img_loc_tuple[1][1]
ne_lat, ne_lon = pixel_to_lat_lon_web_mercator(raster_dataset, raster_tile_x +
tile_size, raster_tile_y)
sw_lat, sw_lon = pixel_to_lat_lon_web_mercator(raster_dataset, raster_tile_x,
raster_tile_y + tile_size)
certainty = predictions[idx][0]
formatted_info = {'certainty': certainty, 'ne_lat': ne_lat, 'ne_lon': ne_lon,
'sw_lat': sw_lat, 'sw_lon': sw_lon, 'raster_tile_x': raster_tile_x,
'raster_tile_y': raster_tile_y, 'raster_filename': raster_filename,
'state_abbrev': state_abbrev, 'country_abbrev': 'USA'
}
combined_data.append(formatted_info)
return combined_data
def import_summary_geojson(geojsonfilename, removeNoBuildings=True):
# driver = ogr.GetDriverByName('geojson')
datasource = ogr.Open(geojsonfilename, 0)
layer = datasource.GetLayer()
print(layer.GetFeatureCount())
buildingList = []
for idx, feature in enumerate(layer):
poly = feature.GetGeometryRef()
if poly:
if removeNoBuildings:
if feature.GetField('BuildingId') != -1:
buildingList.append({'ImageId': feature.GetField('ImageId'), 'BuildingId': feature.GetField('BuildingId'),
'poly': feature.GetGeometryRef().Clone()})
else:
buildingList.append({'ImageId': feature.GetField('ImageId'), 'BuildingId': feature.GetField('BuildingId'),
'poly': feature.GetGeometryRef().Clone()})
return buildingList
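# Usage sketch for import_summary_geojson, assuming 'summary.geojson' is a placeholder
# SpaceNet-style summary file whose features carry ImageId and BuildingId fields.
buildingList = import_summary_geojson('summary.geojson', removeNoBuildings=True)
print(len(buildingList))
if buildingList:
    print(buildingList[0]['ImageId'], buildingList[0]['poly'].ExportToWkt()[:60])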
def import_chip_geojson(geojsonfilename, ImageId=''):
# driver = ogr.GetDriverByName('geojson')
datasource = ogr.Open(geojsonfilename, 0)
if ImageId=='':
ImageId = geojsonfilename
layer = datasource.GetLayer()
print(layer.GetFeatureCount())
polys = []
BuildingId = 0
for idx, feature in enumerate(layer):
poly = feature.GetGeometryRef()
if poly:
BuildingId = BuildingId + 1
polys.append({'ImageId': ImageId, 'BuildingId': BuildingId,
'poly': feature.GetGeometryRef().Clone()})
return polys
def mergePolyList(geojsonfilename):
multipolygon = ogr.Geometry(ogr.wkbMultiPolygon)
datasource = ogr.Open(geojsonfilename, 0)
layer = datasource.GetLayer()
print(layer.GetFeatureCount())
for idx, feature in enumerate(layer):
poly = feature.GetGeometryRef()
if poly:
multipolygon.AddGeometry(feature.GetGeometryRef().Clone())
return multipolygon
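# Usage sketch for mergePolyList, assuming 'buildings.geojson' is a placeholder polygon
# file; the merged result is a single OGR multipolygon that can be exported as WKT.
merged = mergePolyList('buildings.geojson')
print(merged.GetGeometryCount(), merged.ExportToWkt()[:80])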
def import_glcf_tile(glcf_header, cell_num, tilefile):
glcfgrid = grids.GLCFGrid()
glcf_data = np.zeros((glcfgrid.cell_height, glcfgrid.cell_width, 1),
dtype=np.uint8)
with tempfile.NamedTemporaryFile() as f:
# uncompress
with gzip.open(tilefile, 'rb') as f_in, open(f.name, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
gdal_ds = gdal.Open(f.name)
assert gdal_ds is not None, "Failed to open GDAL dataset"
band = gdal_ds.GetRasterBand(1)
nodata_val = band.GetNoDataValue()
print "NoData : ", nodata_val
glcf_data[:, :, 0] = band.ReadAsArray()
glcf_header.write_frac((cell_num, 0), glcf_data)
    print('Finished %d' % cell_num)
return True
def reproject_tiff_from_model(old_name, new_name, model, unit):
"""
    Reprojects a tiff onto the grid of a tiff model. Can be used to warp a tiff.
Keyword arguments:
old_name -- the name of the old tiff file
new_name -- the name of the output tiff file
model -- the gdal dataset which will be used to warp the tiff
unit -- the gdal unit in which the operation will be performed
"""
mem_drv = gdal.GetDriverByName("MEM")
old = gdal.Open(old_name)
new = mem_drv.Create(new_name, model.RasterXSize, model.RasterYSize, 1,
unit)
new.SetGeoTransform(model.GetGeoTransform())
new.SetProjection(model.GetProjection())
res = gdal.ReprojectImage(old, new, old.GetProjection(),
model.GetProjection(), gdal.GRA_NearestNeighbour)
assert res == 0, 'Error in ReprojectImage'
arr = new.ReadAsArray()
new = None
return arr
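# Usage sketch for reproject_tiff_from_model, assuming 'old.tif' and 'model.tif' are
# placeholder paths; the warp happens in an in-memory dataset and only the resampled
# array is returned.
from osgeo import gdal

model_ds = gdal.Open('model.tif')
warped = reproject_tiff_from_model('old.tif', 'warped_in_memory', model_ds, gdal.GDT_Float32)
print(warped.shape)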
# Source: readBandMetaData.py, project: python_scripting_for_spatial_data_processing, author: upsdeepak
def readImageMetaData(inputFile, name):
# Open the dataset in read only mode
dataset = gdal.Open(inputFile, gdal.GA_ReadOnly)
# Check that the image has been opened
    if dataset is not None:
        # Get the metadata value specified
        metaData = dataset.GetMetadataItem(name)
        # Check that it is present
        if metaData is None:
            # If not present, print an error.
            print("Could not find '", name, "' item.")
        else:
            # Else print out the metadata value.
            print(name, "='", metaData, "'")
    else:
        # Print an error message if the file
        # could not be opened.
        print("Could not open the input image file: ", inputFile)
# A function to read a meta-data item
# from an image band
# Source: readBandMetaData.py, project: python_scripting_for_spatial_data_processing, author: upsdeepak
def listImageMetaData(inputFile):
# Open the dataset in Read Only mode
dataset = gdal.Open(inputFile, gdal.GA_ReadOnly)
# Check that the image has been opened.
    if dataset is not None:
        # Get the metadata dictionary
        metaData = dataset.GetMetadata_Dict()
# Check it has entries.
if len(metaData) == 0:
# if it has no entries return
# error message.
print("There is no image meta-data.")
else:
# Otherwise, print out the
# meta-data.
# Loop through each entry.
for metaItem in metaData:
print(metaItem)
else:
# Print an error message if the file
# could not be opened.
print("Could not open the input image file", inputFile)
# This is the first part of the script to
# be executed.
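# Usage sketch for readImageMetaData and listImageMetaData above, assuming 'image.tif'
# is a placeholder GeoTIFF; 'AREA_OR_POINT' is a metadata key that GDAL commonly sets.
listImageMetaData('image.tif')
readImageMetaData('image.tif', 'AREA_OR_POINT')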
# Source: setBandName.py, project: python_scripting_for_spatial_data_processing, author: upsdeepak
def setBandName(inputFile, band, name):
# Open the image file, in update mode
# so that the image can be edited.
    dataset = gdal.Open(inputFile, gdal.GA_Update)
    # Check that the image has opened
    if dataset is not None:
        # Get the image band
        imgBand = dataset.GetRasterBand(band)
        # Check the image band was available
        if imgBand is not None:
# Set the image band name
imgBand.SetDescription(name)
else:
# Print out an error message
print("Could not open the image band: ", band)
else:
# Print the error message if the file
# could not be opened
print("Could not open the input image file: ", inputFile)
# This is the first part of the script
# to be executed
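# Usage sketch for setBandName, assuming 'image.tif' is a placeholder, writable GeoTIFF;
# 'NIR' is a hypothetical description for band 1.
setBandName('image.tif', 1, 'NIR')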
def raster2array(rasterfn):
raster = gdal.Open(rasterfn)
band = raster.GetRasterBand(1)
nodata = band.GetNoDataValue()
array = band.ReadAsArray()
proj = raster.GetProjection()
inproj = osr.SpatialReference()
inproj.ImportFromWkt(proj)
geoTransform = raster.GetGeoTransform()
minx = geoTransform[0]
maxy = geoTransform[3]
maxx = minx + geoTransform[1]*raster.RasterXSize
miny = maxy + geoTransform[5]*raster.RasterYSize
extent = [minx, maxx, miny, maxy]
pixelSizeXY = [geoTransform[1], geoTransform[5]]
del raster, band
return [array, nodata, extent, inproj, pixelSizeXY]
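# Usage sketch for raster2array, assuming 'dem.tif' is a placeholder single-band raster;
# the returned extent and pixel size are convenient for plotting the array.
array, nodata, extent, inproj, pixelSizeXY = raster2array('dem.tif')
print(array.shape, nodata, extent, pixelSizeXY)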
#clip a raster by vector
def gdal2np_dtype(b):
"""
Get NumPy datatype that corresponds with GDAL RasterBand datatype
Input can be filename, GDAL Dataset, GDAL RasterBand, or GDAL integer dtype
"""
dt_dict = gdal_array.codes
if isinstance(b, str):
b = gdal.Open(b)
if isinstance(b, gdal.Dataset):
b = b.GetRasterBand(1)
if isinstance(b, gdal.Band):
b = b.DataType
if isinstance(b, int):
np_dtype = dt_dict[b]
else:
np_dtype = None
print("Input must be GDAL Dataset or RasterBand object")
return np_dtype
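# Usage sketch for gdal2np_dtype, assuming the osgeo bindings are importable; the GDAL
# integer type code gdal.GDT_Float32 should map to a NumPy float32 dtype.
from osgeo import gdal

print(gdal2np_dtype(gdal.GDT_Float32))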
#Replace nodata value in GDAL band
def copyproj(src_fn, dst_fn, gt=True):
"""Copy projection and geotransform from one raster file to another
"""
src_ds = gdal.Open(src_fn, gdal.GA_ReadOnly)
dst_ds = gdal.Open(dst_fn, gdal.GA_Update)
dst_ds.SetProjection(src_ds.GetProjection())
if gt:
src_gt = np.array(src_ds.GetGeoTransform())
src_dim = np.array([src_ds.RasterXSize, src_ds.RasterYSize])
dst_dim = np.array([dst_ds.RasterXSize, dst_ds.RasterYSize])
#This preserves dst_fn resolution
if np.any(src_dim != dst_dim):
res_factor = src_dim/dst_dim.astype(float)
src_gt[[1, 5]] *= max(res_factor)
#src_gt[[1, 5]] *= min(res_factor)
#src_gt[[1, 5]] *= res_factor
dst_ds.SetGeoTransform(src_gt)
src_ds = None
dst_ds = None
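# Usage sketch for copyproj, assuming 'georeferenced.tif' is a placeholder raster that
# carries the desired projection/geotransform and 'plain.tif' is a placeholder raster
# that was written without georeferencing.
copyproj('georeferenced.tif', 'plain.tif', gt=True)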
def split_metatiles(output_dir, metatiles):
# Determine metatile size and bit depth.
mt_ds = gdal.Open(metatiles[0])
assert mt_ds.RasterXSize % UF_TILE_SIZE == 0
assert mt_ds.RasterYSize % UF_TILE_SIZE == 0
    x_uf_count = mt_ds.RasterXSize // UF_TILE_SIZE
    y_uf_count = mt_ds.RasterYSize // UF_TILE_SIZE
    for ytile in range(y_uf_count):
        print('Process Row:', ytile)
        split_row_of_unit_fields(output_dir, metatiles, ytile)
    logging.info('Wrote %d tiles to directory %s.',
                 x_uf_count * y_uf_count, output_dir)
def mask_chip(feature):
'''
Apply polygon mask to bounding-box chips. Chips must be named
'feature_id.tif' and exist in the current working directory.
'''
fn = feature[:-4] + '.geojson'
chip = gdal.Open(feature)
# Create ogr vector file for gdal_rasterize
vectordata = {'type': 'FeatureCollection', 'features': [feature]}
    with open(fn, 'w') as f:
geojson.dump(vectordata, f)
# Mask raster
cmd = 'gdal_rasterize -q -i -b 1 -b 2 -b 3 -burn 0 -burn 0 -burn 0 {} {}'.format(fn, feature)
subprocess.call(cmd, shell=True)
# Remove ogr vector file
os.remove(fn)
def rasterize(self,inRaster,inShape,inField):
filename = tempfile.mktemp('.tif')
data = gdal.Open(inRaster,gdal.GA_ReadOnly)
shp = ogr.Open(inShape)
lyr = shp.GetLayer()
driver = gdal.GetDriverByName('GTiff')
dst_ds = driver.Create(filename,data.RasterXSize,data.RasterYSize, 1,gdal.GDT_Byte)
dst_ds.SetGeoTransform(data.GetGeoTransform())
dst_ds.SetProjection(data.GetProjection())
OPTIONS = 'ATTRIBUTE='+inField
gdal.RasterizeLayer(dst_ds, [1], lyr, None,options=[OPTIONS])
data,dst_ds,shp,lyr=None,None,None,None
return filename
def clip(self):
"""
This function clips the mosaicked, projected images to the size of the
referenceImage
"""
subprocess.call(['gdaltindex', self.extent, self.referenceImagePath])
dataNames = sorted(glob.glob(self.fullPath + '/full*.tif'))
splitAt = len(self.fullPath) + 1
for i in range(len(dataNames)):
x = dataNames[i]
y = dataNames[i][:splitAt] + dataNames[i][splitAt+4:]
subprocess.call(['gdalwarp', '-r', 'near', '-cutline', self.extent, '-crop_to_cutline', x, y, '-dstnodata', '9999'])
for n in dataNames:
os.remove(n)
dataNames = sorted(glob.glob(self.fullPath + '/*.tif'))
test = gdal.Open(dataNames[0]).ReadAsArray()
logger.log('SUCCESS', 'Clipping complete! %d %s files were successfully clipped to the size of %s with dimensions %d rows by %d columns' % (len(dataNames), str(self.outformat), str(self.referenceImagePath), test.shape[0], test.shape[1]))
def rasterize_polygons(polygon_shp, template_raster, out_raster, field):
""" generate a categorical raster based on polygons
:rtype: None
:param polygon_shp: input polygon shapefile
:param template_raster: raster template for cellsize and extent
:param out_raster: output raster file
"""
# Source: https://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html
gdal.UseExceptions()
# Get template raster specs
src_ds = gdal.Open(template_raster)
if src_ds is None:
        print('Unable to open %s' % template_raster)
        sys.exit(1)
    try:
        srcband = src_ds.GetRasterBand(1)
    except RuntimeError as e:
        print('No band %i found' % 1)
        print(e)
        sys.exit(1)
# Open the data source and read in the extent
source_ds = ogr.Open(polygon_shp)
source_layer = source_ds.GetLayer()
target_ds = gdal.GetDriverByName('GTiff').Create(out_raster, src_ds.RasterXSize, src_ds.RasterYSize, 1, gdal.GDT_Float32)
target_ds.SetGeoTransform(src_ds.GetGeoTransform())
target_ds.SetProjection(src_ds.GetProjection())
band = target_ds.GetRasterBand(1)
band.SetNoDataValue(srcband.GetNoDataValue())
# Rasterize
gdal.RasterizeLayer(target_ds, [1], source_layer, options=["ATTRIBUTE={}".format(field)])
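# Usage sketch for rasterize_polygons, assuming placeholder paths for the polygon
# shapefile, template raster, and output raster, plus a hypothetical integer attribute
# field named 'landcover' holding the class values to burn in.
rasterize_polygons('polygons.shp', 'template.tif', 'classes.tif', 'landcover')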
def readTodayBands(path,row):
allFiles=sorted(glob.glob(os.path.join(getCurrentDirectory(),'Today',path,row,'*.TIF')))
allRasters=dict([])
fileNumber=1
    for file in allFiles:
        percent=(fileNumber/len(allFiles))*100
        sys.stdout.write("\rReading today's bands...%d%%" % percent)
        sys.stdout.flush()
        bandName=file[file.rfind('_')+1:-4]
allRasters[bandName]=gdal.Open(file,gdalconst.GA_ReadOnly)
fileNumber+=1
todayExtent = getRasterExtent(allRasters['B4'])#saves the extent of today's rasters (they'll match band 4) so that we can crop during the cloudmask backfill
print('')
return(allRasters,todayExtent)
#large function that will back-fill cloudy areas of most recent imagery using historic imagery
def downloadLandsatScene(jd,year,month,day,path,row,directory):
LandsatID=LSUniqueID(path,row,year,jd,os.path.join(directory,str(row)))
downLoadCommand=getCurrentDirectory() + "/download_landsat_scene.py -o scene -b LC8 -d "+ year + month + day + ' -s ' + path + '0'+str(row)+ " -u usgs.txt --output " + LandsatID
os.system(downLoadCommand)
print('extracting...')
tarName=extractTar(LandsatID,os.path.join(directory,str(row)))
allFiles=glob.glob(os.path.join(directory,str(row),'*.TIF'))
for filename in allFiles:
        bandName=os.path.splitext(filename)[0].split('_')[-1]
if bandName != 'B4' and bandName != 'B5' and bandName != 'B7' and bandName != 'B8' and bandName != 'BQA':
os.remove(filename)
    try:
        shutil.rmtree(os.path.join(directory,str(row),tarName))
    except OSError:
        print('No folder to delete called: ' + os.path.join(directory,str(row),tarName))
    try:
        os.remove(os.path.join(directory,str(row),tarName + '_MTL.txt'))
    except OSError:
        print('No file to delete called: ' + os.path.join(directory,str(row),tarName + '_MTL.txt'))
reprojectPanBand(gdal.Open(os.path.join(directory,str(row),tarName + '_B8.TIF'),gdalconst.GA_ReadOnly),gdal.Open(os.path.join(directory,str(row),tarName + '_B4.TIF'),gdalconst.GA_ReadOnly),os.path.join(directory,str(row),tarName + '_B8.TIF'))
SLIP.model(year+month+day,path,str(row))
def test_hall_rectification(self):
'''Should rectify an image in the expected way.'''
control_sets = {
'High/Bright': [(331501.45,4694346.66), (319495.39,4706820.66), (298527.006,4691417.99)],
'Low/Dark': [(322577.40,4658508.99), (361612.79,4694665.62), (378823.69,4692132.56)]
}
ref_raster = gdal.Open(os.path.join(self.test_dir, 'multi7_raster.tiff'))
sub_raster = gdal.Open(os.path.join(self.test_dir, 'multi7_raster2.tiff'))
# NOTE: Using same control sets for reference, subject
hall_rectification(ref_raster, sub_raster, self.test_dir, control_sets, control_sets)
arr, gt, wkt = as_array(os.path.join(self.test_dir, 'rect_multi7_raster2.tiff'))
self.assertTrue(np.array_equal(arr.shape, (6, 74, 81)))
self.assertTrue(np.array_equiv(arr[:,50,50].round(5), np.array([
17, 1331, 1442, 3422, 2916, 2708
]).round(5)))
def test_spectral_profile(self):
'''
Should correctly retrieve a spectral profile from a raster dataset.
'''
coords = ((-84.5983, 42.7256), (-85.0807, 41.1138))
pixels = [(18, 0), (2, 59)]
file_path = os.path.join(self.test_dir, 'multi3_raster.tiff')
ds = gdal.Open(file_path)
kwargs = {
'gt': ds.GetGeoTransform(),
'wkt': ds.GetProjection(),
'dd': True
}
# The true spectral profile
spectra = np.array([[237, 418, 325], [507, 616, 445]], dtype=np.int16)
sp1 = spectra_at_xy(ds, coords, **kwargs)
sp2 = spectra_at_xy(ds.ReadAsArray(), coords, **kwargs)
sp3 = spectra_at_idx(ds.ReadAsArray().transpose(), pixels)
self.assertEqual(spectra.tolist(), sp1.tolist())
self.assertEqual(spectra.tolist(), sp2.tolist())
self.assertEqual(spectra.tolist(), sp3.tolist())
def array_to_raster_clone(a, proto, xoff=None, yoff=None):
'''
Creates a raster from a given array and a prototype raster dataset, with
optional x- and y-offsets if the array was clipped. Arguments:
a A NumPy array
proto A prototype dataset
xoff The offset in the x-direction; should be provided when clipped
yoff The offset in the y-direction; should be provided when clipped
'''
rast = gdal_array.OpenNumPyArray(a)
kwargs = dict()
if xoff is not None and yoff is not None:
kwargs = dict(xoff=xoff, yoff=yoff)
# Copy the projection info and metadata from a prototype dataset
if type(proto) == str:
proto = gdal.Open(proto)
gdalnumeric.CopyDatasetInfo(proto, rast, **kwargs)
return rast
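# Usage sketch for array_to_raster_clone, assuming 'proto.tif' is a placeholder
# prototype raster and that the array was clipped starting at pixel offset (100, 200).
import numpy as np

a = np.zeros((50, 50), dtype=np.float32)
clone = array_to_raster_clone(a, 'proto.tif', xoff=100, yoff=200)
print(clone.GetGeoTransform())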
def get_lulc_ds_full(ds, lulc_source=None):
if lulc_source is None:
lulc_source = get_lulc_source(ds)
if lulc_source == 'nlcd':
lulc_ds_full = gdal.Open(get_nlcd_fn())
elif lulc_source == 'bareground':
lulc_ds_full = gdal.Open(get_bareground_fn())
return lulc_ds_full
def proc_modscag(fn_list, extent=None, t_srs=None):
"""Process the MODSCAG products for full date range, create composites and reproject
"""
    #Use cubic spline here for improved upsampling
ds_list = warplib.memwarp_multi_fn(fn_list, res='min', extent=extent, t_srs=t_srs, r='cubicspline')
stack_fn = os.path.splitext(fn_list[0])[0] + '_' + os.path.splitext(os.path.split(fn_list[-1])[1])[0] + '_stack_%i' % len(fn_list)
#Create stack here - no need for most of mastack machinery, just make 3D array
#Mask values greater than 100% (clouds, bad pixels, etc)
ma_stack = np.ma.array([np.ma.masked_greater(iolib.ds_getma(ds), 100) for ds in np.array(ds_list)], dtype=np.uint8)
stack_count = np.ma.masked_equal(ma_stack.count(axis=0), 0).astype(np.uint8)
stack_count.set_fill_value(0)
stack_min = ma_stack.min(axis=0).astype(np.uint8)
stack_min.set_fill_value(0)
stack_max = ma_stack.max(axis=0).astype(np.uint8)
stack_max.set_fill_value(0)
stack_med = np.ma.median(ma_stack, axis=0).astype(np.uint8)
stack_med.set_fill_value(0)
out_fn = stack_fn + '_count.tif'
iolib.writeGTiff(stack_count, out_fn, ds_list[0])
out_fn = stack_fn + '_max.tif'
iolib.writeGTiff(stack_max, out_fn, ds_list[0])
out_fn = stack_fn + '_min.tif'
iolib.writeGTiff(stack_min, out_fn, ds_list[0])
out_fn = stack_fn + '_med.tif'
iolib.writeGTiff(stack_med, out_fn, ds_list[0])
ds = gdal.Open(out_fn)
return ds
def _open(self):
if not _gdal:
load_lib()
self._ds = _gdal.Open(self.request.get_local_filename())
def polygonize(self,rasterTemp,outShp):
sourceRaster = gdal.Open(rasterTemp)
band = sourceRaster.GetRasterBand(1)
driver = ogr.GetDriverByName("ESRI Shapefile")
# If shapefile already exist, delete it
if os.path.exists(outShp):
driver.DeleteDataSource(outShp)
outDatasource = driver.CreateDataSource(outShp)
# get proj from raster
srs = osr.SpatialReference()
srs.ImportFromWkt( sourceRaster.GetProjectionRef() )
# create layer with proj
outLayer = outDatasource.CreateLayer(outShp,srs)
# Add class column (1,2...) to shapefile
newField = ogr.FieldDefn('Class', ogr.OFTInteger)
outLayer.CreateField(newField)
gdal.Polygonize(band, None,outLayer, 0,[],callback=None)
outDatasource.Destroy()
sourceRaster=None
band=None
ioShpFile = ogr.Open(outShp, update = 1)
lyr = ioShpFile.GetLayerByIndex(0)
lyr.ResetReading()
for i in lyr:
lyr.SetFeature(i)
        # if the polygon's class isn't 1 (e.g. not forest), remove it
        if i.GetField('Class')!=1:
lyr.DeleteFeature(i.GetFID())
ioShpFile.Destroy()
return outShp