Examples
XTGeo is a Python library to work with surfaces, grids, cubes, wells, etc., possibly in combination. It is easy to make small user scripts that run from the command line on Linux, macOS, and Windows.
Surface operations
See class RegularSurface
for details on
available methods and attributes.
Initialising a Surface object (instance)
import numpy as np

import xtgeo

# Initialising a RegularSurface object from scratch; ``values`` must
# match the (ncol, nrow) dimensions.
surf = xtgeo.RegularSurface(
    ncol=33,
    nrow=50,
    xori=34522.22,
    yori=6433231.21,
    xinc=40.0,
    yinc=40.0,
    rotation=30,
    values=np.zeros((33, 50)),
)

# A more common method is to make an instance from file:
surf = xtgeo.surface_from_file("somename.gri")
Surface object properties
A Surface object will have a number of so-called properties,
see RegularSurface
. Some
of these properties can be changed, which actually changes the map
import xtgeo

# Load a surface from file; printing the object shows a description.
surf3 = xtgeo.surface_from_file('reek.gri')
print(surf3)
print(surf3.xinc, surf3.yinc)
print(surf3.rotation)

# Some properties are writable; assigning to them actually changes the map.
surf3.rotation = 45.0

# Shift the surface 1000 m to the west:
surf3.xori -= 1000.0

# Export the modified surface (Irap binary is the default format):
surf3.to_file('changedsurface.gri')

# Note that changing `nrow` and `ncol` is not possible to do directly.
Sample a surface from a 3D grid
"""
Slice a 3Grid property with a surface, e.g. a FLW map.
In this case 3 maps with constant depth are applied. The maps are refined
for smoother result, and output is exported as Roxar binary *.gri and
quickplots (png)
JRIV
"""
import os
import pathlib
import tempfile
import xtgeo
TMPDIR = pathlib.Path(tempfile.gettempdir())
def slice_a_grid():
    """Slice a 3D grid property with maps (looping)"""
    path_grid = pathlib.Path("../../xtgeo-testdata/3dgrids/reek")
    path_surf = pathlib.Path("../../xtgeo-testdata/surfaces/reek/1")

    rootname = path_grid / "REEK"
    surfacefile = path_surf / "midreek_rota.gri"

    grd = xtgeo.grid_from_file(
        rootname, fformat="eclipserun", initprops=["PORO", "PERMX"]
    )

    # A surface read from file serves as the sampling "template".
    template = xtgeo.surface_from_file(surfacefile)
    template.refine(2)  # finer sampling gives nicer results (but takes time)

    for depth in [1700, 1720, 1740]:
        print("Slice is {}".format(depth))

        for prop in grd.props:
            surf = template.copy()
            surf.values = depth  # a constant-depth surface
            print("Work with {}, slice at {}".format(prop.name, depth))

            surf.slice_grid3d(grd, prop)

            fname = "{}_{}.gri".format(prop.name, depth)
            surf.to_file(TMPDIR / fname)

            pngname = TMPDIR / ("{}_{}.png".format(prop.name, depth))
            if "SKIP_PLOT" not in os.environ:
                surf.quickplot(filename=pngname)
if __name__ == "__main__":
slice_a_grid()
print(f"Running example OK: {pathlib.Path(__file__).name}")
Sample a surface or a window attribute from a cube
"""
Slice a Cube with a surface, and get attributes between two horizons
In this case 3 maps with constant depth are applied. The maps are refined
for smoother result, and output is exported as Roxar binary *.gri and
quickplots (png)
JRIV
"""
import pathlib
import tempfile
import xtgeo
DEBUG = False
EXPATH1 = pathlib.Path("../../xtgeo-testdata/cubes/etc/")
EXPATH2 = pathlib.Path("../../xtgeo-testdata/surfaces/etc")
TMPDIR = pathlib.Path(tempfile.gettempdir())
def slice_a_cube_with_surface():
    """Slice a seismic cube with a surface on OW dat/map format"""
    cubefile = EXPATH1 / "ib_test_cube2.segy"
    surfacefile = EXPATH2 / "h1.dat"

    seismic = xtgeo.cube_from_file(cubefile)

    # Import the map/dat surface using the cube as template
    # (inline/xline numbering must match).
    horizon = xtgeo.surface_from_file(surfacefile, fformat="ijxyz", template=seismic)

    # Sample cube values onto the surface (replacing its current depth
    # values), then export the result.
    horizon.slice_cube(seismic, sampling="trilinear")
    horizon.to_file(TMPDIR / "slice.dat", fformat="ijxyz")
def attribute_around_surface_symmetric():
    """Get attribute around a surface (symmetric)"""
    cubefile = EXPATH1 / "ib_test_cube2.segy"
    surfacefile = EXPATH2 / "h1.dat"

    seismic = xtgeo.cube_from_file(cubefile)
    horizon = xtgeo.surface_from_file(surfacefile, fformat="ijxyz", template=seismic)

    # Extract several attributes within a +/- 10 unit window centred
    # on the surface; result maps are keyed by attribute name.
    result = horizon.slice_cube_window(
        seismic, attribute=["max", "mean"], sampling="trilinear", zrange=10.0
    )

    for name in result:
        result[name].to_file(
            TMPDIR / ("myfile_symmetric_" + name + ".dat"), fformat="ijxyz"
        )
def attribute_around_surface_asymmetric():
    """Get attribute around a surface (asymmetric)"""
    cubefile = EXPATH1 / "ib_test_cube2.segy"
    surfacefile = EXPATH2 / "h1.dat"

    above = 10
    below = 20

    seismic = xtgeo.cube_from_file(cubefile)
    horizon = xtgeo.surface_from_file(surfacefile, fformat="ijxyz", template=seismic)

    # Instead of a symmetric zrange, build two temporary surfaces that
    # bound the asymmetric window around the horizon.
    upper = horizon.copy()
    lower = horizon.copy()
    upper.values -= above
    lower.values += below

    if DEBUG:
        upper.describe()
        lower.describe()

    result = horizon.slice_cube_window(
        seismic, attribute="all", sampling="trilinear", zsurf=upper, other=lower
    )

    for name in result:
        if DEBUG:
            result[name].describe()
        result[name].to_file(
            TMPDIR / ("myfile_asymmetric_" + name + ".dat"), fformat="ijxyz"
        )
def attribute_around_constant_cube_slices():
    """Get attribute around a constant cube slices"""
    cubefile = EXPATH1 / "ib_test_cube2.segy"

    level1 = 1010
    level2 = 1100

    seismic = xtgeo.cube_from_file(cubefile)

    # Build the bounding surfaces by sampling constant-depth slices
    # directly from the cube geometry.
    upper = xtgeo.surface_from_cube(seismic, level1)
    lower = xtgeo.surface_from_cube(seismic, level2)

    if DEBUG:
        upper.describe()
        lower.describe()

    result = upper.slice_cube_window(
        seismic, attribute="all", sampling="trilinear", zsurf=upper, other=lower
    )

    for name in result:
        if DEBUG:
            result[name].describe()
        result[name].to_file(
            TMPDIR / ("myfile_constlevels_" + name + ".dat"), fformat="ijxyz"
        )
if __name__ == "__main__":
slice_a_cube_with_surface()
attribute_around_surface_symmetric()
attribute_around_surface_asymmetric()
attribute_around_constant_cube_slices()
print(f"Running example OK: {pathlib.Path(__file__).name}")
Cube operations
Taking diff of two cubes and export, in SEGY
It is easy to take the difference between two cubes and export to SEGY format. Here is an example:
import xtgeo

# Make two cube objects from file import:
cube1 = xtgeo.cube_from_file('cube1.segy')
cube2 = xtgeo.cube_from_file('cube2.segy')

# Subtract the two underlying numpy value arrays:
cube1.values = cube1.values - cube2.values

# Export the updated cube1 to SEGY:
cube1.to_file('diff.segy')
Reduce cube (e.g. SEGY) data by thinning and cropping
Here is a big data set which gets heavily reduced by thinning and cropping. These are very quick operations! Note that cropping has two numbers (tuple) for each direction, e.g. (20, 30) means removal of 20 columns from front, and 30 from back. The applied order of these routines matters…
import xtgeo

big = xtgeo.cube_from_file("troll.segy")

# Keep every second inline and xline (layers untouched):
big.do_thinning(2, 2, 1)

# Crop ilines/xlines; each tuple is (remove from front, remove from back):
big.do_cropping((20, 30), (250, 20), (0, 0))

# Export a much smaller file to SEGY:
big.to_file("much_smaller.segy")
Reduce or change cube (e.g. SEGY) data by resampling
Here is a big data set which gets heavily reduced by making a new cube with every second inline and xline, and then resample values prior to export.
Also, another small cube with another rotation is made:
import xtgeo

big = xtgeo.cube_from_file("troll.segy")

# Make a cube covering every second iline and xline: double the
# increments and halve the column/row counts, keeping the geometry.
newcube = xtgeo.Cube(xori=big.xori, yori=big.yori, zori=big.zori,
                     xinc=big.xinc * 2,
                     yinc=big.yinc * 2,
                     zinc=big.zinc,
                     ncol=int(big.ncol / 2),
                     nrow=int(big.nrow / 2),
                     nlay=big.nlay,
                     rotation=big.rotation,
                     yflip=big.yflip)

# Fill the new cube by resampling values from the big cube:
newcube.resample(big)
newcube.to_file('newcube.segy')

# You can also make whatever cube you want, e.g. with another rotation:
smallcube = xtgeo.Cube(xori=523380, yori=6735680, zori=big.zori,
                       xinc=50,
                       yinc=50,
                       zinc=12,
                       ncol=100,
                       nrow=200,
                       nlay=100,
                       rotation=0.0,
                       yflip=big.yflip)

smallcube.resample(big)
smallcube.to_file('smallcube.segy')
Combined Surface and Cube operations
To sample cube values into a surface can be quite useful. Both direct sampling, and interval sampling (over a window, or between two surfaces) is supported. For the interval sampling, various attributes can be extracted.
Sampling a surface from a cube
Here is sampling a regular surface from a cube. The two objects can have
different rotation. See xtgeo.surface.RegularSurface.slice_cube()
method
import xtgeo

# Make a surface object and a cube object from file import:
surf = xtgeo.surface_from_file('top.gri')
cube = xtgeo.cube_from_file('cube2.segy')

# Sample cube values at the surface (updates surf in place):
surf.slice_cube(cube)

# Export the updated surface to RMS binary map format:
surf.to_file('myslice.gri')
Sampling the root-mean-square surface over a window from a cube
The root mean square (rms) value over a surface, +/- 10 units (e.g. metres if depth); see the slice_cube_window method.
import xtgeo

# Slice within a window (vertically) around the surface:
surf = xtgeo.surface_from_file('top.gri')
cube = xtgeo.cube_from_file('cube.segy')

# Root-mean-square of cube values within +/- 10 units of the surface:
surf.slice_cube_window(cube, zrange=10, attribute='rms')

# Export the updated surface to Irap (RMS) ascii map format:
surf.to_file('rmsaverage.fgr', fformat='irap_ascii')
3D grid examples
Crop a 3D grid with properties
"""
Crop a 3D grid.
"""
import pathlib
import tempfile
import xtgeo
EXPATH1 = pathlib.Path("../../xtgeo-testdata/3dgrids/reek")
GRIDFILEROOT = EXPATH1 / "REEK"
INITPROPS = ["PORO", "PERMX"]
TMPDIR = pathlib.Path(tempfile.gettempdir())
def cropper():
    """Do a cropping of a 3D grid"""
    grd = xtgeo.grid_from_file(GRIDFILEROOT, fformat="eclipserun", initprops=INITPROPS)
    print(grd.props)

    # Find current NCOL, NROW and divide the grid into 4 pieces.
    ncol, nrow, nlay = grd.ncol, grd.nrow, grd.nlay
    ncol1 = int(ncol / 2)
    nrow1 = int(nrow / 2)

    print("Original grid dimensions are {} {} {}".format(ncol, nrow, nlay))
    print("Crop ranges are {} {} {}".format(ncol1, nrow1, nlay))

    for colrange in [(1, ncol1), (ncol1 + 1, ncol)]:
        nc1, nc2 = colrange
        for rowrange in [(1, nrow1), (nrow1 + 1, nrow)]:
            nr1, nr2 = rowrange
            suffix = "_{}-{}_{}-{}".format(nc1, nc2, nr1, nr2)

            piece = grd.copy()
            piece.crop(colrange, rowrange, (1, nlay), props="all")

            # Save the cropped grid and all its properties as ROFF files.
            piece.to_file(TMPDIR / ("grid" + suffix + ".roff"))
            for prop in piece.props:
                print("{} for {} .. {}".format(prop.name, colrange, rowrange))
                propfile = (prop.name + suffix + ".roff").lower()
                prop.to_file(TMPDIR / propfile)
if __name__ == "__main__":
cropper()
Extract Pandas dataframe from 3D grid and props
"""
Example on how to retrieve a dataframe (Pandas) from a 3D grid.
Explanation:
Both a GridProperties and a Grid instance can return a dataframe.
The `grd.gridprops` attribute below is the GridProperties, and
this will return a a dataframe by default which does not include
XYZ and ACTNUM, as this information is only from the Grid (geometry).
The grid itself can also return a dataframe, and in this case
XYZ and ACNUM will be returned by default. Also properties that
are "attached" to the Grid via a GridProperties attribute will
be shown.
"""
import pathlib
import tempfile
import xtgeo
EXPATH = pathlib.Path("../../xtgeo-testdata/3dgrids/reek")
GRIDFILEROOT = EXPATH / "REEK"
TMPDIR = pathlib.Path(tempfile.gettempdir())
INITS = ["PORO", "PERMX"]
RESTARTS = ["PRESSURE", "SWAT", "SOIL"]
MYDATES = [20001101, 20030101]
def extractdf():
    """Extract dataframe from Eclipse case"""
    # First, a dataframe from the grid geometry only:
    grd = xtgeo.grid_from_file(GRIDFILEROOT.with_suffix(".EGRID"))
    dataframe = grd.get_dataframe()  # will not have any grid props
    print(dataframe)

    # Load as an Eclipse run; this will automatically look for the
    # EGRID, INIT and UNRST files.
    grd = xtgeo.grid_from_file(
        GRIDFILEROOT,
        fformat="eclipserun",
        initprops=INITS,
        restartprops=RESTARTS,
        restartdates=MYDATES,
    )

    # Dataframe from a GridProperties instance (here grd.gridprops);
    # this holds the properties for all cells:
    dataframe = grd.gridprops.get_dataframe()
    print(dataframe)

    # A dataframe for all cells, with ijk and xyz included. In this case
    # a grid key input is required:
    dataframe = grd.get_dataframe()
    print(dataframe)  # default is for all cells

    # For active cells only:
    dataframe = grd.get_dataframe(activeonly=True)
    print(dataframe)

    dataframe.to_csv(TMPDIR / "reek_sim.csv")
if __name__ == "__main__":
extractdf()
Compute a grid property average and stdev
In this example, we show how to extract the mean and standard deviation of some geo properties, filtered on facies. An inside-RMS version is also shown.
"""Compute statistics within one realisation, using ROFF or RMS internal."""
from os.path import join as ojn
import xtgeo
EXPATH1 = "../../xtgeo-testdata/3dgrids/reek2"
ROOT = "geogrid"
EXT = ".roff"
GRIDFILE = ojn(EXPATH1, ROOT + EXT)
PROPS = ["perm", "poro"]
FACIES = "facies"
FACIESFILE = ojn(EXPATH1, ROOT + "--" + FACIES + EXT)
def show_stats():
    """Get statistics for one realisation, poro/perm filtered on facies.

    But note that values here are unweighted as total volume is not present.
    """
    grd = xtgeo.grid_from_file(GRIDFILE)

    # The facies property is used as a filter.
    facies = xtgeo.gridproperty_from_file(FACIESFILE, name=FACIES, grid=grd)
    print("Facies codes are: {}".format(facies.codes))

    for propname in PROPS:
        pfile = ojn(EXPATH1, ROOT + "--" + propname + EXT)
        prop = xtgeo.gridproperty_from_file(
            pfile, name="geogrid--" + propname, grid=grd
        )
        print("Working with {}".format(prop.name))

        # Statistics per facies code ...
        for code, label in facies.codes.items():
            mask = facies.values == code
            avg = prop.values[mask].mean()
            std = prop.values[mask].std()
            print(
                "For property {} in facies {}, avg is {:10.3f} and "
                "stddev is {:9.3f}".format(propname, label, avg, std)
            )

        # ... and over all facies.
        avg = prop.values.mean()
        std = prop.values.std()
        print(
            "For property {} in ALL facies, avg is {:10.3f} and "
            "stddev is {:9.3f}".format(propname, avg, std)
        )
def show_stats_inside_rms():
    """Get statistics for one realisation, poro/perm filtered on facies.

    This is an 'inside RMS' version; should work given runrmsx <project>
    but not tested. Focus on syntax for getting properties, otherwise code
    is quite similar.
    """
    # `project` is injected by RMS when running inside the application.
    prj = project  # type: ignore # noqa

    # Names of RMS icons...
    gridmodel = "Reek"
    faciesname = "Facies"
    propnames = ["Poro", "Perm"]

    # The facies property is used as a filter.
    facies = xtgeo.gridproperty_from_roxar(prj, gridmodel, faciesname)
    print("Facies codes are: {}".format(facies.codes))

    for propname in propnames:
        prop = xtgeo.gridproperty_from_roxar(prj, gridmodel, propname)
        print("Working with {}".format(prop.name))

        # Statistics per facies code ...
        for code, label in facies.codes.items():
            mask = facies.values == code
            avg = prop.values[mask].mean()
            std = prop.values[mask].std()
            print(
                "For property {} in facies {}, avg is {:10.3f} and "
                "stddev is {:9.3f}".format(propname, label, avg, std)
            )

        # ... and over all facies.
        avg = prop.values.mean()
        std = prop.values.std()
        print(
            "For property {} in ALL facies, avg is {:10.3f} and "
            "stddev is {:9.3f}".format(propname, avg, std)
        )
if __name__ == "__main__":
show_stats()
# show_stats_inside_rms()
Compute a grid property average across realisations
In this example, a technique that keeps memory usage under control when computing averages is also presented.
"""
Compute statistics of N realisations. In this the realisations are "faked" by
just adding a constant to each loop. It provides and insight on memory
handling and speed.
"""
from os.path import join as ojn
import numpy.ma as npma
import xtgeo
# from memory_profiler import profile
EXPATH1 = "../../xtgeo-testdata/3dgrids/reek"
GRIDFILEROOT = ojn(EXPATH1, "REEK")
INITPROPS = ["PORO", "PERMX"]
RESTARTPROPS = ["PRESSURE", "SWAT", "SOIL"]
RDATES = [20001101, 20030101]
NRUN = 10
def sum_stats():
    """Accumulate numpies for all realisations and then do stats.

    This will be quite memory intensive, and memory consumption will
    increase linearly.
    """
    collected = {}

    for irel in range(NRUN):
        # Load as an Eclipse run; this will look for EGRID, INIT, UNRST.
        print("Loading realization no {}".format(irel))
        grd = xtgeo.grid_from_file(
            GRIDFILEROOT,
            fformat="eclipserun",
            initprops=INITPROPS,
            restartprops=RESTARTPROPS,
            restartdates=RDATES,
        )

        for prop in grd.props:
            collected.setdefault(prop.name, [])
            # Perturb values to mimic variability across an ensemble.
            if prop.name == "PORO":
                prop.values += irel * 0.001
            else:
                prop.values += irel * 1
            collected[prop.name].append(prop.values1d)

    # Stack all PORO realisations and compute mean/stddev per cell:
    porovalues = npma.vstack(collected["PORO"])
    poromeanarray = porovalues.mean(axis=0)
    porostdarray = porovalues.std(axis=0)

    print(poromeanarray)
    print(poromeanarray.mean())
    print(porostdarray)
    print(porostdarray.mean())

    return poromeanarray.mean()
def sum_running_stats():
    """Find avg per realisation and do a cumulative rolling mean.

    Memory consumption shall be very low.
    """
    for irel in range(NRUN):
        # Load as an Eclipse run; this will look for EGRID, INIT, UNRST.
        print("Loading realization no {}".format(irel))
        grd = xtgeo.grid_from_file(
            GRIDFILEROOT,
            fformat="eclipserun",
            restartprops=RESTARTPROPS,
            restartdates=RDATES,
            initprops=INITPROPS,
        )

        nnum = float(irel + 1)
        for prop in grd.props:
            # Perturb values to mimic variability across an ensemble.
            if prop.name == "PORO":
                prop.values += irel * 0.001
            else:
                prop.values += irel * 1

            if prop.name != "PORO":
                continue

            if irel == 0:
                # The first realisation seeds the running mean.
                pcum = prop.values1d
            else:
                # Rolling mean: downweight the old mean, add the new term.
                pavg = prop.values1d / nnum
                pcum = pcum * (nnum - 1) / nnum
                pcum = npma.vstack([pcum, pavg]).sum(axis=0)

    # Report the final running average:
    print(pcum)
    print(pcum.mean())

    return pcum.mean()
if __name__ == "__main__":
AVG1 = sum_stats()
AVG2 = sum_running_stats()
if AVG1 == AVG2:
print("Same result, OK!")
Make a CSV file from Eclipse INIT data (aka ERT ECL)
Example on how to create a CSV file from all INIT properties. Example is for Eclipse format, but shall work also with ROFF input.
"""
Print a CSV from all INIT vectors
"""
import pathlib
import tempfile
import numpy as np
import xtgeo
EXPATH = pathlib.Path("../../xtgeo-testdata/3dgrids/reek")
GRIDFILEROOT = EXPATH / "REEK"
TMPDIR = pathlib.Path(tempfile.gettempdir())
INITPROPS = "all" # will look for all vectors that looks "gridvalid"
def all_init_as_csv():
    """Get dataframes, print as CSV."""
    print("Loading Eclipse data {}".format(GRIDFILEROOT))
    grd = xtgeo.grid_from_file(GRIDFILEROOT, fformat="eclipserun", initprops=INITPROPS)

    print("Get dataframes...")
    frame = grd.get_dataframe(activeonly=True)
    print(frame.head())

    print("Filter out columns with constant values...")
    # Keep only columns whose variance differs (numerically) from zero.
    frame = frame.iloc[:, ~np.isclose(0, frame.var())]
    print(frame.head())

    print("Write to file...")
    frame.to_csv(TMPDIR / "mycsvdump.csv", index=False)
if __name__ == "__main__":
all_init_as_csv()