memory problem

lorenzo sitbot at gmail.com
Wed Jun 6 09:36:47 EDT 2012


I am trying to run my Python program on a large image matrix, but I get a
MemoryError. Below is the program (run on a Windows PC); it does not crash
with an image represented by a 3000 x 3000 matrix, but with 7000 x 7000 it
fails. Even after increasing the virtual memory the problem remains.
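
As a rough back-of-the-envelope estimate (not from the original script, just
assuming float64 arrays, which is what .astype(numpy.float) produces), each
7000 x 7000 array alone is close to 400 MB, and the script keeps several of
them alive at once:

rows = cols = 7000
bytes_per_value = 8                              # float64
one_array_mb = rows * cols * bytes_per_value / 1024.0 ** 2
print 'one array:', one_array_mb, 'MB'           # ~374 MB
print 'three arrays:', 3 * one_array_mb, 'MB'    # ~1121 MB, before any
                                                 # temporaries inside
                                                 # is_local_maximum/watershed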

Best regards, Lorenzo Bottai



import os
import time

import numpy
from scipy import ndimage
from skimage.morphology import watershed, is_local_maximum

from osgeo import gdal
from osgeo.gdalconst import GA_ReadOnly, GDT_Int32

startTime = time.time()


os.chdir(r'c:\Users\lorenzo\lidar2')
# register all of the GDAL drivers
gdal.AllRegister()

# open the image

inDs1 = gdal.Open('conv_utm2m.tif',GA_ReadOnly)
print 'reading the convergence image'

#inDs2 = gdal.Open('htree_corretto_filt.rst',GA_ReadOnly)


# get image size
rows = inDs1.RasterYSize
cols = inDs1.RasterXSize
bands = inDs1.RasterCount
transform = inDs1.GetGeoTransform()

driver = gdal.GetDriverByName('GTiff')
#outDs = driver.Create('origf3_flessi.tif', cols, rows, 1, GDT_Int32)
outDs1 = driver.Create('conv_massimo.tif', cols, rows, 1, GDT_Int32)
outDs2 = driver.Create('conv_chiome.tif', cols, rows, 1, GDT_Int32)

print 'rows', rows, 'cols', cols, 'bands', bands

driver1 = inDs1.GetDriver()
#driver2 = inDs2.GetDriver()




# pre-allocate the arrays (note: at 7000 x 7000 each full-size float64 array
# is roughly 390 MB, and all three are reassigned again further down)
convergenza = numpy.ones((rows, cols), numpy.float)
massimi = numpy.zeros((rows, cols), numpy.int)
chiome = numpy.zeros((rows, cols), numpy.float)


inBand1 = inDs1.GetRasterBand(1)


print 'reading the convergence band'

# numpy.float is float64, i.e. 8 bytes per pixel
convergenza = inBand1.ReadAsArray(0, 0, cols, rows).astype(numpy.float)


print 'finding the local maxima of the convergence image'
massimi = is_local_maximum(convergenza)


print 'labelling the maxima sequentially'
markers = ndimage.label(massimi)[0]

print 'flooding the basins ........'


chiome = watershed(-convergenza, markers)


outBand1 = outDs1.GetRasterBand(1)
outBand2 = outDs2.GetRasterBand(1)
outBand1.WriteArray(massimi, 0, 0)

outBand1.FlushCache()
stats1 = outBand1.GetStatistics(0, 1)


outBand2.WriteArray(chiome, 0, 0)
outBand2.FlushCache()
stats2 = outBand2.GetStatistics(0, 1)


outDs1.SetGeoTransform(transform)
outDs2.SetGeoTransform(transform)
outDs1 = None
outDs2 = None

inDs1 = None

print 'script took', time.time() - startTime, 'seconds to run'
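
An untested sketch of a lower-memory variant of the core steps above (using
the same inBand1, cols and rows, and assuming float32 precision is enough for
the convergence values):

# Untested sketch: read the band straight into float32 (4 bytes per pixel
# instead of 8) and drop the numpy.ones / numpy.zeros pre-allocations,
# which get overwritten anyway.
convergenza = inBand1.ReadAsArray(0, 0, cols, rows).astype(numpy.float32)
massimi = is_local_maximum(convergenza)
markers = ndimage.label(massimi)[0]
chiome = watershed(-convergenza, markers)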


