The following 50 code examples, extracted from open-source Python projects, illustrate how to use shapely.geometry.Point().
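As a quick orientation before the project examples, here is a minimal illustrative sketch (not taken from any of the projects below) of the core shapely.geometry.Point API that these examples build on:

# Minimal illustrative sketch of shapely.geometry.Point usage (not from the projects below).
from shapely.geometry import Point, Polygon

p = Point(2.0, 3.0)                     # a point at x=2.0, y=3.0
print(p.x, p.y)                         # 2.0 3.0
print(p.distance(Point(5.0, 7.0)))      # Euclidean distance: 5.0

square = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])
print(square.contains(p))               # point-in-polygon test: True
print(p.buffer(1.0).area)               # buffering a point gives a disc-shaped polygon (area ~ pi)
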
def point_projects_to_line(point, line):
    """Get the nearest point index on line"""
    coords = list(line.coords)
    p = Point(point)
    pd = line.project(p)
    for i in range(1, len(coords)):
        pp = Point(coords[i-1])
        cp = Point(coords[i])
        prev = line.project(pp)
        cur = line.project(cp)
        if cur == pd:
            return i
        if prev == pd:
            return i - 1
        if prev < pd < cur:
            pdist = p.distance(pp)
            cdist = p.distance(cp)
            return i - 1 if pdist <= cdist else i
    return None

def validate_intersection(self, line_index, point):
    """
    Validate the intersection between given line and point.
    A spatial tolerance is considered to confirm the validation.
    :param line_index: index of specific line
    :param point: validated point
    :return: coordinates of intersected point on line, otherwise None
    """
    line = self.geoms[line_index]
    coords = list(line.coords)
    buffered_point = Point(point).buffer(self.point_buffer)
    touched = None
    if line.intersects(buffered_point):
        cut = point_projects_to_line(point, line)
        touched = coords[cut]
        self._update_cut(line_index, cut)
    return touched

def get_region(lat, lon, region_path=REGION_PATH):
    """
    Return the 2 character, 1 number code (e.g., IP-3) associated with the
    physiographic region that encloses the point specified by *lat* and *lon*.
    Return `None` if the point is not interior to any region. Look for the
    required files at *region_path*.
    """
    try:
        regions = shpreader.Reader(os.path.join(region_path, 'ConductivityRegions'))
    except:
        initialize()
        regions = shpreader.Reader(os.path.join(region_path, 'ConductivityRegions'))
    geos = regions.geometries()
    names = [x.attributes['Name'] for x in regions.records()]
    point = Point(convert_lon(lon), lat)
    encloses = [name for name, geo in zip(names, geos) if geo.contains(point)]
    if encloses:
        assert len(encloses) == 1
        return encloses[0]
    else:
        return None

def get_contour_mask(dd, id, dosegridpoints, contour):
    """Get the mask for the contour with respect to the dose plane."""
    doselut = dd['lut']

    c = matplotlib.path.Path(list(contour))

    # def inpolygon(polygon, xp, yp):
    #     return np.array(
    #         [Point(x, y).intersects(polygon) for x, y in zip(xp, yp)],
    #         dtype=np.bool)

    # p = Polygon(contour)
    # x, y = np.meshgrid(np.array(dd['lut'][0]), np.array(dd['lut'][1]))
    # mask = inpolygon(p, x.ravel(), y.ravel())
    # return mask.reshape((len(doselut[1]), len(doselut[0])))

    grid = c.contains_points(dosegridpoints)
    grid = grid.reshape((len(doselut[1]), len(doselut[0])))

    return grid

def intersect_point_layer_with_wkt(layer, wkt, buffer_width):
    """
    Return all features from given layer that intersects with wkt
    """
    assert QgsWKBTypes.geometryType(
        int(layer.wkbType())) == QgsWKBTypes.PointGeometry
    line = loads(wkt.replace('Z', ' Z'))
    if not line.is_valid:
        logging.warning('Invalid feature geometry wkt={}'.format(wkt))
        return
    # square cap style for the buffer -> less points
    buf = line.buffer(buffer_width, cap_style=2)
    bbox = QgsRectangle(
        buf.bounds[0], buf.bounds[1], buf.bounds[2], buf.bounds[3])
    # request features inside bounding-box
    for feature in layer.getFeatures(QgsFeatureRequest(bbox)):
        p = feature.geometry().asPoint()
        if buf.contains(Point(p.x(), p.y())):
            yield feature

def create_memory_layer(geometry_type, crs, name, custom_properties=None):
    assert geometry_type in (QGis.Point, QGis.Line, QGis.Polygon)
    layer = QgsVectorLayer(
        "{}?crs={}&index=yes".format(
            {
                QGis.Point: "Point",
                QGis.Line: "LineString",
                QGis.Polygon: "Polygon"
            }[geometry_type],
            crs.authid()
        ),
        name,
        "memory")
    layer.setCrs(crs)
    if custom_properties:
        for key in custom_properties:
            layer.setCustomProperty(key, custom_properties[key])
    return layer

def sampleShapes(shapes, xypoints, attribute):
    """
    Get the attribute value at each of the input XY points for sequence of
    input shapes (decimal degrees). Slower than sampling grids.

    Args:
        shapes: Sequence of shapes (decimal degrees) of predictor variable.
        xypoints: 2D numpy array of XY points, in decimal degrees.
        attribute: String name of attribute to sample in each of the shapes.

    Returns:
        1D array of attribute values at each of XY points.
    """
    samples = []
    for x, y in xypoints:
        for tshape in shapes:
            polygon = shape(tshape['geometry'])
            point = Point(x, y)
            if polygon.contains(point):
                sample = tshape['properties'][attribute]
                samples.append(sample)
    return np.array(samples)

def getPointByBoundsWithFilterHandler(self, index, polygon, step, bounds):
    logger.info('current index %s' % index)
    print 'current index %s' % index
    x1 = bounds[0]
    y1 = bounds[1]
    x2 = bounds[2]
    y2 = bounds[3]
    points = []
    print x1, y1, x2, y2
    print (y2 - y1) / float(step)
    print (x2 - x1) / float(step)
    # TODO
    for startY in frange2(y1, y2, step):
        for startX in frange2(x1, x2, step):
            point = str(startY) + ',' + str(startX)
            point_wkt = Point(startX, startY)
            isIntersects = self.isIntersectsPolygon(polygon, point_wkt)
            if isIntersects:
                points.append(point)
    print "range points size : ", len(points)
    return points

def testTOW(self):
    cm = CAP.CAPMessage(TestCAP._get_test_messages()[0])
    self.assertEqual('TO.W', cm.get_event_type())
    self.assertEqual('http://alerts.weather.gov/cap/wwacapget.php?x=KS1255FCC0A5BC.TornadoWarning.1255FCC0C36CKS.GLDTORGLD.3a1fc090003ef1448f822dfd9b2ddee2', cm.get_event_id())
    self.assertEqual('KGLD.TO.W.0021', cm.vtec[-1].event_id)
    self.assertEqual(cm, cm.vtec[-1].container)
    self.assertEqual(1463877540.0, cm.get_start_time_sec())
    self.assertEqual(1463879700.0, cm.get_end_time_sec())
    self.assertFalse(cm.is_effective(when=1463877539.9))
    self.assertTrue(cm.is_effective(when=1463877540.0))
    self.assertTrue(cm.is_effective(when=1463877541.0))
    self.assertTrue(cm.is_effective(when=1463879700.0))
    self.assertFalse(cm.is_effective(when=1463879700.1))
    self.assertTrue(cm.polygon.contains(Point(38.80, -101.45)))
    self.assertFalse(cm.polygon.contains(Point(38.90, -101.45)))
    self.assertEqual("CAP [ Sun May 22 00:39:00 2016 TO.W /O.NEW.KGLD.TO.W.0021.160522T0039Z-160522T0115Z/ ['020109', '020199'] ]", str(cm))

def red_pixels(self):
    pixels = np.zeros(self._red_pixel_sensors)
    for i in range(self._red_pixel_sensors):
        rotation = self._robot.rotation - self._fov / 2 + i * self._fov / (self._red_pixel_sensors - 1)
        ray = affinity.rotate(
            LineString([np.array(self._robot.pos.coords)[0],
                        np.array(self._robot.pos.coords)[0] + (self._height * 2, 0)]),
            rotation, origin=self._robot.pos, use_radians=True)
        dists_red = np.min(np.array([self._robot.pos.distance(point) if not point.is_empty else np.inf
                                     for point in (ray.intersection(cube) for cube in self._red_cubes)]))
        dists_obs = np.min(np.array([self._robot.pos.distance(point) if not point.is_empty else np.inf
                                     for point in (ray.intersection(cube) for cube in
                                                   # filter(lambda o: np.array([self._robot.pos.distance(Point(p)) < dists_red for p in o.exterior.coords]).any(),
                                                   self._obstacles)]))
        pixels[i] = dists_red < dists_obs
    return pixels

def removeIgnoredPointsRects(rects, polyList):

    ridxs = list(range(len(rects)))
    for ridx in range(len(rects)):
        points = rects[ridx]["annopoints"][0]["point"]
        pidxs = list(range(len(points)))
        for pidx in range(len(points)):
            pt = geometry.Point(points[pidx]["x"][0], points[pidx]["y"][0])
            bIgnore = False
            for poidx in range(len(polyList)):
                poly = polyList[poidx]
                if (poly.contains(pt)):
                    bIgnore = True
                    break
            if (bIgnore):
                pidxs.remove(pidx)
        points = [points[pidx] for pidx in pidxs]
        if (len(points) > 0):
            rects[ridx]["annopoints"][0]["point"] = points
        else:
            ridxs.remove(ridx)
    rects = [rects[ridx] for ridx in ridxs]
    return rects

def removeIgnoredPoints(gtFramesAll, prFramesAll):

    imgidxs = []
    for imgidx in range(len(gtFramesAll)):
        if ("ignore_regions" in gtFramesAll[imgidx].keys() and
            len(gtFramesAll[imgidx]["ignore_regions"]) > 0):
            regions = gtFramesAll[imgidx]["ignore_regions"]
            polyList = []
            for ridx in range(len(regions)):
                points = regions[ridx]["point"]
                pointList = []
                for pidx in range(len(points)):
                    pt = geometry.Point(points[pidx]["x"][0], points[pidx]["y"][0])
                    pointList += [pt]
                poly = geometry.Polygon([[p.x, p.y] for p in pointList])
                polyList += [poly]

            rects = prFramesAll[imgidx]["annorect"]
            prFramesAll[imgidx]["annorect"] = removeIgnoredPointsRects(rects, polyList)
            rects = gtFramesAll[imgidx]["annorect"]
            gtFramesAll[imgidx]["annorect"] = removeIgnoredPointsRects(rects, polyList)

    return gtFramesAll, prFramesAll

def process_item(d, poly_dts):
    key = d.key.name
    lat = d['lat']
    lon = -d['lon']
    dt = d['image_datetime']
    point = Point(lon, lat)
    #
    # Based on photo's time, find the umbra whose center point's time is the same
    # TODO(dek): fix https://b.corp.google.com/issues/64974121
    pdt = dt.replace(tzinfo=None)
    if pdt not in poly_dts:
        print "Point outside eclipse time window:", key, pdt, lat, lon
        return key, False
    current_poly = poly_dts[pdt][0]
    if not current_poly.contains(point):
        print "Point outside eclipse time window:", key, pdt, lat, lon
        return key, False
    # Now we know this photo point/dt is in totality
    return key, True

def generate_polygon(points):
    from shapely.geometry import Polygon, Point, LineString
    p = []
    p.append(points[0][1])
    for point in points:
        p.append(point[0])
    p.append(points[-1][1])
    for point in points[::-1]:
        p.append(point[2])
    eclipse_boundary = Polygon(p)

    p = []
    for point in points:
        p.append(point[1])
    center_line = LineString(p)

    return eclipse_boundary, center_line

def process_item(d, fname, polys, poly_dts):
    lat = d['lat']
    lon = -d['lon']
    dt = d['image_datetime']
    point = Point(lon, lat)
    #
    # Based on photo's time, find the umbra whose center point's time is the same
    # TODO(dek): fix https://b.corp.google.com/issues/64974121
    pdt = dt.replace(tzinfo=None)
    if pdt not in poly_dts:
        print "Point outside eclipse time window:", fname, pdt, lat, lon
        return None
    current_poly = poly_dts[pdt][0]
    if not current_poly.contains(point):
        print "Point outside eclipse time window:", fname, pdt, lat, lon
        return None

    # Now we know this photo point/dt is in totality
    # Find all umbra polys that contain this photo
    x = []
    for j, p in enumerate(polys):
        poly, poly_centroid, poly_dt = p
        if poly.contains(point):
            x.append((j, (poly_centroid.distance(point), fname, lat, lon, dt)))
    return x

def genPolygonFromConf(self, polygon, configuration, refpoint=Point()):
    """
    recover the workspace polygon using a configuration point

    :param configuration:
    :return:
    """
    if refpoint.is_empty:
        xoff = configuration[0]
        yoff = configuration[1]
        rotpoly = self.rotate(polygon, (configuration[2]) / self.scale)
        transpoly = self.translate(rotpoly, Point(xoff, yoff))
        # print configuration[0], configuration[1]
        # print transpoly.centroid.x, transpoly.centroid.y
        return transpoly
    else:
        xoff = configuration[0]
        yoff = configuration[1]
        rotpoly = self.rotate(polygon, configuration[2] / self.scale)
        transpoly = self.translate(rotpoly, Point(xoff, yoff))
        return transpoly

def nodes_for_point(self, point, all_nodes):
    point = Point(point.x, point.y)

    nodes = {}
    if self.nodes:
        for node in self.nodes:
            node = all_nodes[node]
            line = LineString([(node.x, node.y), (point.x, point.y)])
            if line.length < 5 and not self.clear_geometry_prep.intersects(line):
                nodes[node.i] = (None, None)

        if not nodes:
            nearest_node = min(tuple(all_nodes[node] for node in self.nodes),
                               key=lambda node: point.distance(node.point))
            nodes[nearest_node.i] = (None, None)
    else:
        nodes = self.fallback_nodes
    return nodes

def testNoAdditionalFields(self):
    polygons = [Point((0, 0))]
    labels = [1]
    timing = WorkflowTiming()
    info = WorkflowInformation(polygons, labels, timing=timing)

    self.assertEqual(len(info), 1)
    self.assertEqual(timing, info.timing)
    assert_array_equal(labels, info.labels)
    assert_array_equal(np.array(polygons, dtype=np.object), info.polygons)
    self.assertSetEqual(set(info.fields), {info.DATA_FIELD_POLYGONS, info.DATA_FIELD_LABELS})

    object_info = info[0]
    self.assertEqual(len(object_info), 2)
    self.assertEqual(object_info.polygon, polygons[0])
    self.assertEqual(object_info.label, labels[0])

    object_info_iter = next(iter(info))
    self.assertEqual(len(object_info_iter), 2)
    self.assertEqual(object_info_iter.polygon, polygons[0])
    self.assertEqual(object_info_iter.label, labels[0])

def testAdditionalFields(self):
    polygons = [Point((0, 0)), Point((0, 1))]
    labels = [1, 2]
    dispatch = [5, 3]
    timing = WorkflowTiming()
    info = WorkflowInformation(polygons, labels, timing=timing, dispatches=(dispatch, "dispatch"))

    self.assertEqual(len(info), 2)
    self.assertEqual(timing, info.timing)
    assert_array_equal(labels, info.labels)
    assert_array_equal(np.array(polygons, dtype=np.object), info.polygons)
    assert_array_equal(dispatch, info.dispatches)
    self.assertSetEqual(set(info.fields), {info.DATA_FIELD_POLYGONS, info.DATA_FIELD_LABELS, "dispatches"})

    for i, object_info in enumerate(info):
        self.assertEqual(info[i], object_info)
        self.assertEqual(len(object_info), 3)
        self.assertEqual(object_info.polygon, polygons[i])
        self.assertEqual(object_info.label, labels[i])
        self.assertEqual(object_info.dispatch, dispatch[i])

def testErrors(self):
    p1, p2 = Point(0, 0), Point(1, 0)
    timing = WorkflowTiming()

    with self.assertRaises(ValueError):
        WorkflowInformation([p1, p2], [1], timing)

    with self.assertRaises(ValueError):
        WorkflowInformation([p1, p2], [1, 2], timing, others=([2], "other"))

    with self.assertRaises(ValueError):
        WorkflowInformation([p1, p2], [1, 2], timing, __init__=([1, 2], "__init__s"))

    first = WorkflowInformation([p1, p2], [1, 2], timing, others=([2, 1], "other"))
    second = WorkflowInformation([p1, p2], [1, 2], timing)
    third = WorkflowInformation([p1, p2], [1, 2], timing, others=([2, 1], "oo"))

    with self.assertRaises(TypeError):
        first._is_compatible(dict())

    with self.assertRaises(ValueError):
        first._is_compatible(second)

    with self.assertRaises(ValueError):
        first._is_compatible(third)

def _geom_to_array(geom: BaseGeometry):
    if isinstance(geom, geometry.Point):
        yield np.array([(np.nan, GeomTypes.POINT)])
        yield np.asarray(geom.coords)
    elif isinstance(geom, geometry.LineString):
        yield np.array([(np.nan, GeomTypes.LINESTRING)])
        yield np.asarray(geom.coords)
    elif isinstance(geom, geometry.Polygon):
        for interior in geom.interiors:
            yield np.array([(np.nan, GeomTypes.POLYGON_HOLE)])
            yield np.asarray(interior)
        yield np.array([(np.nan, GeomTypes.POLYGON_SHELL)])
        yield np.asarray(geom.exterior)
    elif isinstance(geom, BaseMultipartGeometry):
        # recurse into each part of the multi-part geometry and yield its arrays
        yield from chain.from_iterable(map(_geom_to_array, geom))
    else:
        raise TypeError

def add_geom(fig: Figure, geom: BaseGeometry, **kwargs):
    """Add Shapely geom into Bokeh plot.

    Args:
        fig (Figure):
        geom (BaseGeometry):
    """
    if isinstance(geom, Point):
        fig.circle(*geom.xy, **kwargs)
    elif isinstance(geom, LineString):
        fig.line(*geom.xy, **kwargs)
    elif isinstance(geom, Polygon):
        fig.patch(*geom.exterior.xy, **kwargs)
    elif isinstance(geom, BaseMultipartGeometry):
        for item in geom:
            add_geom(fig, item, **kwargs)
    else:
        raise TypeError('Object geom {geom} no instance of {types}.'.format(
            geom=geom, types=BaseGeometry))

def test_linearize():
    t_start = 1
    t_stop = 6

    xy = np.array(([1., 1.], [2., 2.], [3., 3.], [4., 4.], [5., 5.]))
    time = np.array([0., 1., 2., 3., 4.])
    pos = nept.Position(xy, time)

    trajectory = [[0., 0.], [5., 5.], [10., 10.]]
    line = LineString(trajectory)

    zone_start = Point([1., 1.])
    zone_stop = Point([9., 9.])
    expand_by = 1
    zone = nept.expand_line(zone_start, zone_stop, line, expand_by)

    sliced_pos = pos[t_start:t_stop]

    linear = sliced_pos.linearize(line, zone)

    assert np.allclose(linear.x, np.array([2.82842712, 4.24264069, 5.65685425, 7.07106781]))
    assert np.allclose(linear.time, [1, 2, 3, 4])

def test_position_linearize():
    times = np.array([1.0, 2.0, 3.0])
    data = np.array([[0.0, 0.5], [0.5, 0.1], [1.0, 1.2]])
    pos = nept.Position(data, times)
    line = LineString([(0.0, 0.0), (1.0, 1.0)])

    zone_start = Point([1., 1.])
    zone_stop = Point([9., 9.])
    expand_by = 1
    zone = nept.expand_line(zone_start, zone_stop, line, expand_by)

    linear = pos.linearize(line, zone)

    assert np.allclose(linear.x, np.array([0.35355339, 0.42426407, 1.41421356]))
    assert np.allclose(linear.time, np.array([1., 2., 3.]))

def linearize(self, ideal_path, zone):
    """Projects 2D positions into an 'ideal' linear trajectory.

    Parameters
    ----------
    ideal_path : shapely.LineString
    zone : shapely.Polygon

    Returns
    -------
    pos : nept.Position
        1D position.

    """
    zpos = []
    for point_x, point_y in zip(self.x, self.y):
        point = Point([point_x, point_y])
        if zone.contains(point):
            zpos.append(ideal_path.project(Point(point_x, point_y)))
    zpos = np.array(zpos)

    return Position(zpos, self.time)

def split(line, point):
    distance_on_line = line.project(point)
    coords = list(line.coords)
    for i, p in enumerate(coords):
        pd = line.project(geometry.Point(p))
        if pd == distance_on_line:
            return [
                geometry.LineString(coords[:i+1]),
                geometry.LineString(coords[i:])
            ]
        elif distance_on_line < pd:
            cp = line.interpolate(distance_on_line)
            ls1_coords = coords[:i]
            ls1_coords.append(cp.coords[0])
            ls2_coords = [cp.coords[0]]
            ls2_coords.extend(coords[i:])
            return [geometry.LineString(ls1_coords),
                    geometry.LineString(ls2_coords)]

def is_in_grid(self, shp):
    """Check if arbitrary polygon is within grid bounding box.

    Args:
        shp (shape):

    Returns:
        Bool whether shp is in grid box.

    Depends on self.grid_box (shapely prep type) being defined
    in environment.
    """
    if not hasattr(shp, 'geom_type'):
        raise Exception("CoreMSR [is_in_grid] : invalid shp given")

    if not isinstance(self.grid_box, type(prep(Point(0, 0)))):
        raise Exception("CoreMSR [is_in_grid] : invalid prep_adm0 found")

    return self.grid_box.contains(shp)

def load_nodes(self, numbersuperpoints=None):
    coords = {}
    supercoords = {}
    with open(self.tnxy, 'rb') as f:
        with open(self.tnz, 'rb') as fz:
            i = 0
            while True:
                chunkx = f.read(8)
                chunky = f.read(8)
                chunkz = fz.read(4)
                if any(c == '' for c in [chunkx, chunky, chunkz]):
                    break
                else:
                    i = i + 1
                    if i <= numbersuperpoints:
                        supercoords[i] = Point(struct.unpack('>d', chunkx)[0],
                                               struct.unpack('>d', chunky)[0],
                                               struct.unpack('>f', chunkz)[0])
                    else:
                        coords[i] = Point(struct.unpack('>d', chunkx)[0],
                                          struct.unpack('>d', chunky)[0],
                                          struct.unpack('>f', chunkz)[0])
    return coords, supercoords

def points_along_boundaries(geoseries, distance=1.0):
    """
    Generate a shapely MultiPoint of point features along lines at a specified distance

    :param geoseries:
    :param distance:
    :return:
    """
    list_points = []
    for line3d in iter_line(geoseries):
        line = LineString([xy[0:2] for xy in list(line3d.coords)])
        current_dist = distance
        line_length = line.length
        list_points.append(Point(list(line.coords)[0]))
        while current_dist < line_length:
            list_points.append(line.interpolate(current_dist))
            current_dist += distance
        list_points.append(Point(list(line.coords)[-1]))
    return MultiPoint(list_points)

def CalcBufferGridPoints(x_buffer, y_buffer, polygon_buffer, spacing_grid):
    """Finds the grid points for our buffer around the fault."""
    x_fill_vec = np.arange(np.min(x_buffer), np.max(x_buffer), spacing_grid)
    y_fill_vec = np.arange(np.min(y_buffer), np.max(y_buffer), spacing_grid)
    x_buffer_fill_grid, y_buffer_fill_grid = np.meshgrid(x_fill_vec, y_fill_vec)

    # Select only those grid points inside of buffered region.
    x_buffer_fill_grid = x_buffer_fill_grid.flatten()
    y_buffer_fill_grid = y_buffer_fill_grid.flatten()
    indices_to_delete = []
    for i in range(len(x_buffer_fill_grid)):
        candidate = Point(x_buffer_fill_grid[i], y_buffer_fill_grid[i])
        if not polygon_buffer.contains(candidate):
            indices_to_delete.append(i)
    x_buffer_fill_grid = np.delete(x_buffer_fill_grid, indices_to_delete)
    y_buffer_fill_grid = np.delete(y_buffer_fill_grid, indices_to_delete)
    return (x_buffer_fill_grid, y_buffer_fill_grid)

def split_line(line, max_line_units):
    """Checks the line's length and splits in half if larger than the configured max

    :param line: Shapely line to be split
    :param max_line_units: The maximum allowed length of the line
    """
    if line.length <= max_line_units:
        return [line]

    half_length = line.length / 2
    coords = list(line.coords)
    for idx, point in enumerate(coords):
        proj_dist = line.project(Point(point))
        if proj_dist == half_length:
            return [LineString(coords[:idx + 1]), LineString(coords[idx:])]

        if proj_dist > half_length:
            mid_point = line.interpolate(half_length)
            head_line = LineString(coords[:idx] + [(mid_point.x, mid_point.y)])
            tail_line = LineString([(mid_point.x, mid_point.y)] + coords[idx:])
            return split_line(head_line, max_line_units) + split_line(tail_line, max_line_units)

def is_point_on_land(coords, shape_file=None):
    """Checks if coords are over land or over water.

    This is done using a shape file of world boundaries and looping
    over all Polygons to see if the point is in any of them.
    """
    import fiona
    from shapely.geometry import Point, shape

    if not shape_file:
        shape_file = '/home/data/mapdata/other/tm_world/TM_WORLD_BORDERS-0.3.shp'

    lon, lat = coords
    pt = Point(lon, lat)
    try:
        fc = fiona.open(shape_file)
    except:
        pass
    result = False
    for feature in fc:
        if shape(feature['geometry']).contains(pt):
            result = True
    fc.close()
    return result

def plot_lines(lines, **kwargs):
    import matplotlib.pyplot

    def plot_line(ob):
        x, y = ob.xy
        matplotlib.pyplot.plot(x, y, linewidth=1, solid_capstyle='round', zorder=1, **kwargs)

    for u in lines:
        if u.geom_type in ['LineString', 'LinearRing', 'Point']:
            plot_line(u)
        elif u.geom_type == 'MultiLineString':
            for p in u:
                plot_line(p)

def line_contains_point(line, point, buf=10e-5):
    p = Point(point).buffer(buf)
    return line.intersects(p)

def _register_node(self, p):
    if isinstance(p, Point):
        p = list(p.coords)[0]
    elif isinstance(p, tuple):
        pass
    else:
        raise TypeError('The point should be shapely::Point or a 2-tuple')
    if p not in self.node_ids:
        nid = self.nodes_counter
        self.node_ids[p] = nid
        self.node_xy[nid] = p
        self.nodes_counter += 1
    return self.node_ids[p]

def point_projects_to_edges(self, point, distance_tolerance=0.01):
    """
    Project a point to graph edges considering specific distance tolerance.
    Note the tolerance is measured by great circle distance to point per se.
    :param point: a shapely Point instance or (lon, lat) tuple
    :param distance_tolerance: tolerance of distance in km
    :return: a list of projected edges, reversely sorted by offsets.
    """
    point_buffer = distance_to_buffer(distance_tolerance)
    p_buf = Point(point).buffer(point_buffer)
    projected_edges = []
    projected_segments = []
    major = self.major_component()
    for i in range(0, len(major)):
        line_index = major[i]
        line = self.geoms[line_index]
        if line.intersects(p_buf):
            cuts = self.line_cuts(line_index)
            if cuts is None:
                continue
            for j in range(1, len(cuts)):
                sinx = cuts[j - 1]
                einx = cuts[j]
                segment = line.coords[sinx:einx + 1]
                ls = LineString(segment)
                if ls.intersects(p_buf):
                    edge = self.edge_key(segment[0], segment[-1])
                    offset = ls.distance(Point(point))  # no buffer
                    projected_edges.append((edge, offset))
                    projected_segments.append(segment)
    result = sorted(projected_edges, key=lambda x: x[1], reverse=True)
    edges = list(set([i[0] for i in result]))
    return edges, projected_segments

def test_costdistance_finite(self):
    def zero_one(kv):
        k = kv[0]
        return (k.col == 0 and k.row == 1)

    result = cost_distance(self.raster_rdd, geometries=[Point(13, 13)],
                           max_distance=144000.0)

    tile = result.to_numpy_rdd().filter(zero_one).first()[1]
    point_distance = tile.cells[0][1][3]
    self.assertEqual(point_distance, 0.0)

def test_costdistance_finite_int(self):
    def zero_one(kv):
        k = kv[0]
        return (k.col == 0 and k.row == 1)

    result = cost_distance(self.raster_rdd, geometries=[Point(13, 13)],
                           max_distance=144000)

    tile = result.to_numpy_rdd().filter(zero_one).first()[1]
    point_distance = tile.cells[0][1][3]
    self.assertEqual(point_distance, 0.0)

def test_costdistance_infinite(self):
    def zero_one(kv):
        k = kv[0]
        return (k.col == 0 and k.row == 1)

    result = cost_distance(self.raster_rdd, geometries=[Point(13, 13)],
                           max_distance=float('inf'))

    tile = result.to_numpy_rdd().filter(zero_one).first()[1]
    point_distance = tile.cells[0][0][0]
    self.assertTrue(point_distance > 1250000)

def test_euclideandistance(self):
    def mapTransform(layoutDefinition, spatialKey):
        ex = layoutDefinition.extent
        x_range = ex.xmax - ex.xmin
        xinc = x_range / layoutDefinition.tileLayout.layoutCols
        yrange = ex.ymax - ex.ymin
        yinc = yrange / layoutDefinition.tileLayout.layoutRows

        return {'xmin': ex.xmin + xinc * spatialKey['col'],
                'xmax': ex.xmin + xinc * (spatialKey['col'] + 1),
                'ymin': ex.ymax - yinc * (spatialKey['row'] + 1),
                'ymax': ex.ymax - yinc * spatialKey['row']}

    def gridToMap(layoutDefinition, spatialKey, px, py):
        ex = mapTransform(layoutDefinition, spatialKey)
        x_range = ex['xmax'] - ex['xmin']
        xinc = x_range / layoutDefinition.tileLayout.tileCols
        yrange = ex['ymax'] - ex['ymin']
        yinc = yrange / layoutDefinition.tileLayout.tileRows
        return (ex['xmin'] + xinc * (px + 0.5), ex['ymax'] - yinc * (py + 0.5))

    def distanceToGeom(layoutDefinition, spatialKey, geom, px, py):
        x, y = gridToMap(layoutDefinition, spatialKey, px, py)
        return geom.distance(Point(x, y))

    tiled = euclidean_distance(self.pts_wm, 3857, 7)
    result = tiled.stitch().cells[0]

    arr = np.zeros((256, 256), dtype=float)
    it = np.nditer(arr, flags=['multi_index'])
    while not it.finished:
        py, px = it.multi_index
        arr[py][px] = distanceToGeom(tiled.layer_metadata.layout_definition,
                                     {'col': 64, 'row': 63},
                                     self.pts_wm,
                                     px, py)
        it.iternext()

    self.assertTrue(np.all(abs(result - arr) < 1e-8))

def in_us(lat, lon):
    p = Point(lon, lat)
    for state, poly in state_polygons.iteritems():
        if poly.contains(p):
            return state
    return None

def get_state_from_coordinates(coordnates):
    #coordinates = np.array([(34, -118), (40.7, -74)])
    sf = shapefile.Reader('./data/us_states_st99/st99_d00')
    #sf = shapefile.Reader("./data/states/cb_2015_us_state_20m")
    shapes = sf.shapes()  # shapes[i].points
    fields = sf.fields
    records = sf.records()
    state_polygons = defaultdict(list)
    for i, record in enumerate(records):
        state = record[5]
        points = shapes[i].points
        poly = shape(shapes[i])
        state_polygons[state].append(poly)

    coor_state = {}
    for i in range(coordnates.shape[0]):
        lat, lon = coordnates[i]
        for state, polies in state_polygons.iteritems():
            for poly in polies:
                point = Point(lon, lat)
                if poly.contains(point):
                    coor_state[(lat, lon)] = state.lower()
    return coor_state

def bbox_for_track(track):
    """ Get the bounding box for the track.

    :param MultiPoint|Point track:
    :return:
    """
    return BBox(track.bounds[1], track.bounds[0], track.bounds[3], track.bounds[2])

def __mk_track(track_points):
    if len(track_points) > 1:
        return MultiPoint(track_points)
    elif len(track_points) == 1:
        return Point(track_points[0])
    else:
        __LOG.critical(u'Selected GPX have no valid track points')
        print u'Selected GPX have no valid track points'
        sys.exit(404)

def load_gpx(filename):
    """
    :param filename: The file to load the track for.
    :return [Point|MultiPoint], BBox: The point or line read from the GPX track file.
            And the bounding box as a 4-tuple.
    """
    __LOG.debug(u'Opening GPX file: %s' % filename)

    try:
        with open(filename, 'r') as gpx_file:
            tree = ElementTree.parse(gpx_file)
            root = tree.getroot()
    except IOError as e:
        __LOG.error(u'Failed to read %s: %s' % (filename, e.message))
        raise e

    tracks = []
    for trk in root.findall('{http://www.topografix.com/GPX/1/1}trk'):
        for seg in trk.findall('{http://www.topografix.com/GPX/1/1}trkseg'):
            track_points = []
            for point in seg.findall('{http://www.topografix.com/GPX/1/1}trkpt'):
                trk_pt = ([float(point.get('lon')), float(point.get('lat'))])
                track_points.append(trk_pt)
            tracks.append(__mk_track(track_points))

    for trk in root.findall('{http://www.topografix.com/GPX/1/0}trk'):
        for seg in trk.findall('{http://www.topografix.com/GPX/1/0}trkseg'):
            track_points = []
            for point in seg.findall('{http://www.topografix.com/GPX/1/0}trkpt'):
                trk_pt = ([float(point.get('lon')), float(point.get('lat'))])
                track_points.append(trk_pt)
            tracks.append(__mk_track(track_points))

    return tracks

def store_kml(obj, obj_id, admin_level, name=u'unknown'):
    """
    Store a shapely geometry object as a KML file.

    :param BaseGeometry obj: The object to store
    :param int obj_id: The object ID of region.
    :param int admin_level: Admin level of region [default=0]
    :param str|unicode name: Name of the region to store in KML.
    :return:
    """
    ascii_name = gpx_utils.enforce_unicode(name).encode('ascii', 'replace')
    filename = './%s_%s.kml' % (ascii_name.replace(' ', '_'), obj_id)

    __LOG.info(u'store_kml: storing a %s with size: %s ', obj.geom_type, obj.area)

    ns = '{http://www.opengis.net/kml/2.2}'
    sls = styles.LineStyle(color='ffff0000')
    sas = styles.PolyStyle(color='5500ff00')
    sps = styles.BalloonStyle(bgColor='ff0000ff')
    style = styles.Style(styles=[sls, sas, sps])

    kf = kml.KML(ns)
    if obj.geom_type == 'LineString' or obj.geom_type == 'MultiLineString' or obj.geom_type == 'LinearRing':
        d = kml.Document(ns, str(obj_id), 'Traces', 'GPX Visualization')
    elif obj.geom_type == 'Polygon' or obj.geom_type == 'MultiPolygon':
        d = kml.Document(ns, str(obj_id), 'Border of {0} ({1})'.format(ascii_name, obj_id),
                         'Border visualization')
    else:
        d = kml.Document(ns, str(obj_id), 'Points', 'Point visualization')
    kf.append(d)

    p = kml.Placemark(ns, str(obj_id), ascii_name, '{0}'.format(ascii_name), styles=[style])
    p.geometry = obj
    d.append(p)

    fil = open(filename, 'w')
    fil.write(kf.to_string(prettyprint=True))
    fil.flush()
    fil.close()
    __LOG.debug(u'store_kml: store successful (%s/%s) -> %s' % (admin_level, obj_id, filename))

def points_to_shapely_polygon(sets_of_points):
    # sets of points are lists using str(z) as keys
    # each item is an ordered list of points representing a polygon, each point is a 3-item list [x, y, z]
    # polygon n is inside polygon n-1, then the current accumulated polygon is
    #   polygon n subtracted from the accumulated polygon up to and including polygon n-1
    #   Same method DICOM uses to handle rings and islands

    composite_polygon = []
    for set_of_points in sets_of_points:
        if len(set_of_points) > 3:
            points = [(point[0], point[1]) for point in set_of_points]
            points.append(points[0])  # Explicitly connect the final point to the first

            # if there are multiple sets of points in a slice, each set is a polygon,
            # interior polygons are subtractions, exterior are addition
            # Only need to check one point for interior vs exterior
            current_polygon = Polygon(points).buffer(0)  # clean stray points
            if composite_polygon:
                if Point((points[0][0], points[0][1])).disjoint(composite_polygon):
                    composite_polygon = composite_polygon.union(current_polygon)
                else:
                    composite_polygon = composite_polygon.symmetric_difference(current_polygon)
            else:
                composite_polygon = current_polygon

    return composite_polygon

def geom(self):
    return ShapelyPoint(self.co)