import-google-app-engine.py
# Read an SRTM data file and turn it into a CSV file.
from osgeo import gdal, gdal_array
import sys
import re
from math import sqrt
from read_data import loadTile, posFromLatLon
from data import util
# Main functions
def writeTileCsvFile(tile, lat0, lon0, top_row=1, bottom_row=1200, left_col=0, right_col=1199):
    # Calculate the begin position of this tile.
    begin = posFromLatLon(lat0, lon0)
    # First we write the data into a temporary file.
    f = open('data/tile.csv', 'w')
    # We drop the top row and the rightmost column.
    for row in range(top_row, bottom_row + 1):
        for col in range(left_col, right_col + 1):
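            # Each CSV line is "key, value": the key is the tile's start
            # position (from posFromLatLon) plus the sample's offset within
            # the 1200x1200 grid, the value is the altitude at that sample.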
            f.write(str(begin + (row - 1) * 1200 + col) + ", " + str(tile[row][col]) + "\n")
    f.close()
if __name__ == '__main__':
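    # Usage:
    #   python import-google-app-engine.py offline
    #   python import-google-app-engine.py online <row 1-1200>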
    # We will only upload 1 tile to the Google App Engine. This will take quite
    # a bit of time. For the offline data store, we will only "upload" the city
    # of Heidelberg; the offline data store is very slow.
    # For this we need tile N49E008.
    name = "N49E008"
    tile = loadTile("Eurasia", name)
    [lat, lon] = util.getLatLonFromFileName(name)

    if not ("online" in sys.argv or "offline" in sys.argv):
        print "Online or offline?"
        exit()
    if sys.argv[1] == "offline":
        # If we are offline, we only look at the center of Heidelberg:
        #   latitude  49.39 -- 49.42
        #   longitude  8.67 --  8.71
        # That corresponds to:
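        # (Rows count from the northern edge of the tile, so the latitude
        # fraction is flipped with 1.0 - x before scaling to the 1200-sample
        # grid; columns count from the western edge.)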
        row_top = int((1.0 - 0.42) * 1200.)
        row_bottom = int((1.0 - 0.39) * 1200.)
        col_left = int(0.67 * 1200.)
        col_right = int(0.71 * 1200.)
        # That gives 1813 records.
        writeTileCsvFile(tile, lat, lon, row_top, row_bottom, col_left, col_right)
        print "Now run this command to insert the data into your local datastore:"
        print "/path/to/app-engine-sdk/tools/bulkload_client.py --filename data/tile.csv --kind Altitude --url http://localhost:8080/load --batch_size 100 --cookie 'dev_appserver_login=test@example.com:True'"
    else:
        # Because the bulk upload script does not support resume, we upload the
        # data row by row. sys.argv[2] is the row number to insert.
        # You should create a script to drive the upload (see the sketch at the
        # end of this file). Start at 1, end at 1200.
        row = int(sys.argv[2])
        print "About to make csv for row " + str(row) + " of 1200..."
        row_top = row
        row_bottom = row
        col_left = 0
        col_right = 1199
        writeTileCsvFile(tile, lat, lon, row_top, row_bottom, col_left, col_right)
        # To insert, you need to run something like this:
        # /path/to/app-engine-sdk/bulkload_client.py --filename data/tile.csv --kind Altitude --url http://something.appspot.com/load --batch_size 100
        # You also need to add a --cookie argument: just surf to
        # http://something.appspot.com/load to see how.
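
# A minimal driver sketch for the online path (hypothetical, not part of the
# original script): it regenerates data/tile.csv for each of the 1200 rows and
# feeds it to the App Engine bulkload client. The SDK path, app URL and cookie
# value are placeholders you must adapt to your own deployment.
#
#   import subprocess
#   for row in range(1, 1201):
#       subprocess.check_call(
#           ["python", "import-google-app-engine.py", "online", str(row)])
#       subprocess.check_call(
#           ["/path/to/app-engine-sdk/bulkload_client.py",
#            "--filename", "data/tile.csv",
#            "--kind", "Altitude",
#            "--url", "http://something.appspot.com/load",
#            "--batch_size", "100",
#            "--cookie", "<value obtained from http://something.appspot.com/load>"])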