
Commit e52f51b

Merge pull request #4 from open-datastudio/tunnel
Move tunnel creation to staroid package
2 parents c297b43 + 98b19ce commit e52f51b

File tree

3 files changed: +5 -92 lines changed

ods/ods.py
ods/spark_cluster/spark_cluster.py
setup.py
ods/ods.py

Lines changed: 3 additions & 85 deletions
@@ -4,17 +4,6 @@
 import requests
 import os, stat, time
 from pathlib import Path
-from shutil import which
-import platform
-import subprocess
-import wget
-import atexit
-
-CHISEL_VERSION="1.6.0"
-CHISEL_ARCH_MAP={
-    "x86_64": "amd64",
-    "i386": "386"
-}
 
 class Ods:
     def __init__(self, staroid=None, ske=None, cache_dir=None, chisel_path=None):
@@ -48,40 +37,8 @@ def create_or_get_cache_dir(self, module = ""):
             os.makedirs(cache_dir)
         return cache_dir
 
-    def __check_cmd(self, cmd):
-        if which(cmd) == None:
-            raise Exception("'{}' command not found".format(cmd))
-
     def download_chisel_if_not_exists(self):
-        # check gunzip available
-        self.__check_cmd("gunzip")
-
-        if self.__chisel_path == None:
-            # download chisel binary for secure tunnel if not exists
-            uname = platform.uname()
-            uname.system.lower()
-            if uname.machine not in CHISEL_ARCH_MAP.keys():
-                raise Exception("Can not download chisel automatically. Please download manually from 'https://github.com/jpillora/chisel/releases/download/v{}' and set 'chisel_path' argument".format(CHISEL_VERSION))
-
-            download_url = "https://github.com/jpillora/chisel/releases/download/v{}/chisel_{}_{}_{}.gz".format(
-                CHISEL_VERSION, CHISEL_VERSION, uname.system.lower(), CHISEL_ARCH_MAP[uname.machine])
-            cache_bin = self.create_or_get_cache_dir("bin")
-            chisel_path = "{}/chisel".format(cache_bin)
-
-            if not os.path.exists(chisel_path):
-                # download
-                filename = wget.download(download_url, cache_bin)
-
-                # extract
-                subprocess.run(["gunzip", "-f", filename])
-
-                # rename
-                subprocess.run(["mv", filename.replace(".gz", ""), chisel_path])
-
-                # chmod
-                os.chmod(chisel_path, stat.S_IRWXU)
-
-            self.__chisel_path = chisel_path
+        self._staroid.get_chisel_path()
 
     def _start_instance_on_staroid(self, instance_name, commit_url):
         cluster = self._staroid.cluster().get(self.__ske)
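
With this change, download_chisel_if_not_exists() no longer probes for gunzip, detects the platform, or shells out to wget, gunzip and mv itself; it simply calls self._staroid.get_chisel_path() and lets the staroid package own downloading and caching the chisel binary. Below is one plausible shape for that staroid-side helper, reconstructed from the lines deleted above. Only the name get_chisel_path() appears in this diff; the cache_bin parameter and everything else in the sketch are assumptions.

    import os, stat, platform, subprocess
    import wget

    CHISEL_VERSION = "1.6.0"
    CHISEL_ARCH_MAP = {"x86_64": "amd64", "i386": "386"}

    def get_chisel_path(cache_bin):
        # Hypothetical reconstruction of the logic removed from ods.py above:
        # download the chisel binary into cache_bin (if missing) and return its path.
        uname = platform.uname()
        if uname.machine not in CHISEL_ARCH_MAP:
            raise Exception(
                "Can not download chisel automatically. Please download manually from "
                "'https://github.com/jpillora/chisel/releases/download/v{}'".format(CHISEL_VERSION))

        download_url = "https://github.com/jpillora/chisel/releases/download/v{}/chisel_{}_{}_{}.gz".format(
            CHISEL_VERSION, CHISEL_VERSION, uname.system.lower(), CHISEL_ARCH_MAP[uname.machine])
        chisel_path = os.path.join(cache_bin, "chisel")

        if not os.path.exists(chisel_path):
            filename = wget.download(download_url, cache_bin)                  # download the .gz archive
            subprocess.run(["gunzip", "-f", filename])                         # extract in place
            subprocess.run(["mv", filename.replace(".gz", ""), chisel_path])   # move to the cached path
            os.chmod(chisel_path, stat.S_IRWXU)                                # make it executable

        return chisel_path
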
@@ -118,53 +75,14 @@ def _start_tunnel(self, instance_name, tunnels):
         ns_api = self._staroid.namespace(cluster)
         ns = ns_api.get(instance_name)
         ns_api.shell_start(instance_name)
-        resources = ns_api.get_all_resources(instance_name)
-
-        shell_service = None
-        for s in resources["services"]["items"]:
-            if "labels" in s["metadata"]:
-                if "resource.staroid.com/system" in s["metadata"]["labels"]:
-                    if s["metadata"]["labels"]["resource.staroid.com/system"] == "shell":
-                        shell_service = s
-                        break
-
-        if shell_service == None:
-            raise Exception("Shell service not found")
-
-        tunnel_server = "https://p{}-{}--{}".format("57682", shell_service["metadata"]["name"], ns.url()[len("https://"):])
-        cmd = [
-            self.__chisel_path,
-            "client",
-            "--header",
-            "Authorization: token {}".format(self._staroid.get_access_token()),
-            "--keepalive",
-            "10s",
-            tunnel_server
-        ]
-        cmd.extend(tunnels)
-        self.__tunnel_processes[instance_name]=subprocess.Popen(cmd)
-        atexit.register(self.cleanup)
-
-    def cleanup(self):
-        timeout_sec = 5
-        for p in self.__tunnel_processes.values(): # list of your processes
-            p_sec = 0
-            for second in range(timeout_sec):
-                if p.poll() == None:
-                    time.sleep(1)
-                    p_sec += 1
-            if p_sec >= timeout_sec:
-                p.kill() # supported from python 2.6
+        ns_api.start_tunnel(instance_name, tunnels)
 
     def _stop_tunnel(self, instance_name):
-        if self._is_tunnel_running(instance_name):
-            self.__tunnel_processes[instance_name].kill()
-            del self.__tunnel_processes[instance_name]
-
         cluster = self._staroid.cluster().get(self.__ske)
         if cluster == None:
             raise Exception("Can't get ske cluster")
         ns_api = self._staroid.namespace(cluster)
+        ns_api.stop_tunnel(instance_name)
         ns_api.shell_stop(instance_name)
 
     def _stop_instance_on_staroid(self, instance_name):
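
_start_tunnel() and _stop_tunnel() are thinned out the same way: the code that located the shell service, built the chisel client command line, spawned the subprocess and reaped it at exit is gone, replaced by single calls to ns_api.start_tunnel(instance_name, tunnels) and ns_api.stop_tunnel(instance_name). A minimal, standalone sketch of the resulting tunnel lifecycle, driving the staroid namespace API directly, is below. The shell_start/start_tunnel/stop_tunnel/shell_stop calls and the cluster/namespace lookups come from this diff and its context lines; the Staroid() constructor, the cluster and instance names, and the "8080:localhost:8080" tunnel spec are assumptions for illustration.

    from staroid import Staroid   # assumes the staroid client package is installed

    star = Staroid()
    cluster = star.cluster().get("my-ske-cluster")    # hypothetical SKE cluster name
    ns_api = star.namespace(cluster)

    ns_api.shell_start("spark-serverless")            # hypothetical instance name
    ns_api.start_tunnel("spark-serverless", ["8080:localhost:8080"])
    # ... use the tunneled local port while working with the instance ...
    ns_api.stop_tunnel("spark-serverless")
    ns_api.shell_stop("spark-serverless")
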

ods/spark_cluster/spark_cluster.py

Lines changed: 1 addition & 6 deletions
@@ -10,17 +10,12 @@
 SPARK_ARTIFACTS={
     "3.0.0": {
         "image": "opendatastudio/spark-py:v3.0.0-staroid-20200830-01",
-        "dist": "https://downloads.apache.org/spark/spark-3.0.0/spark-3.0.0-bin-hadoop3.2.tgz",
+        "dist": "https://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop3.2.tgz",
         "commit_url": "GITHUB/open-datastudio/spark-serverless:master",
         "jars": {
             "hadoop-aws": "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.2.0/hadoop-aws-3.2.0.jar",
             "aws-java-sdk": "https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.563/aws-java-sdk-bundle-1.11.563.jar"
         }
-    },
-    "2.4.6": {
-        "image": "opendatastudio/spark-py:v2.4.6-staroid",
-        "dist": "https://downloads.apache.org/spark/spark-2.4.6/spark-2.4.6-bin-hadoop2.7.tgz",
-        "commit_url": "GITHUB/open-datastudio/spark-serverless:master"
     }
 }
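
Two things change in SPARK_ARTIFACTS: the Spark 3.0.0 dist URL moves from downloads.apache.org, which only hosts the latest releases, to archive.apache.org, which keeps old releases permanently, and the 2.4.6 entry is dropped, leaving 3.0.0 as the only listed version. A small defensive lookup against the SPARK_ARTIFACTS dict shown above might look like the sketch below; the helper name and error message are illustrative, not part of the package.

    def resolve_spark_artifacts(spark_version):
        # Hypothetical helper: fail clearly for versions no longer listed, e.g. "2.4.6".
        if spark_version not in SPARK_ARTIFACTS:
            raise Exception("Spark {} is not supported. Available: {}".format(
                spark_version, ", ".join(sorted(SPARK_ARTIFACTS))))
        return SPARK_ARTIFACTS[spark_version]

    artifacts = resolve_spark_artifacts("3.0.0")
    print(artifacts["dist"])   # https://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop3.2.tgz
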

setup.py

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 
 setuptools.setup(
     name="ods",
-    version="0.0.3",
+    version="0.0.4",
     license='MIT',
     author="Open Data Studio",
     author_email="moon@staroid.com",