瀏覽代碼

commit of early 22 implements

early_22
--global 4 年之前
父節點
當前提交
9bf368c2cf
共有 38 個檔案被更改,包括 44771 行新增1008 行删除
  1. 0
    3
      README.md
  2. 0
    41
      auto_coordinate.py
  3. 0
    91
      auto_extract.py
  4. 0
    60
      auto_kpi.py
  5. 2
    0
      cfg/log_reboot.txt
  6. 39
    0
      configuration.json
  7. 25
    0
      configuration.json.bak
  8. 0
    54
      coordinate_g.py
  9. 0
    5
      data.data
  10. 161
    181
      depthsensing.py
  11. 0
    23
      detect_new_folder.py
  12. 0
    35
      extract_01.py
  13. 0
    33
      extract_02.py
  14. 0
    233
      kpi.py
  15. 20
    0
      mailanalysis.py
  16. 171
    0
      mailcalibrate.py
  17. 70
    0
      mailextract_01.py
  18. 70
    0
      mailextract_02.py
  19. 115
    0
      mailimage.py
  20. 250
    0
      mailkpi.py
  21. 104
    0
      mailrequest.py
  22. 137
    0
      mailsetup.py
  23. 230
    0
      mailsync.py
  24. 0
    0
      mailuploader.py
  25. 0
    2
      names.names
  26. 5
    0
      obj.data
  27. 1
    0
      obj.names
  28. 33
    11
      rec_ubuntu.py
  29. 0
    107
      recorder.py.bak
  30. 24
    0
      stop_ubuntu.py
  31. 294
    0
      tiny-yolov4.cfg
  32. 20
    0
      tools/checkstatus.py
  33. 29
    0
      tools/zedclose.py
  34. 5
    0
      yolo_cfg/tiny_v4/obj.data
  35. 1
    0
      yolo_cfg/tiny_v4/obj.names
  36. 38230
    0
      yolo_cfg/tiny_v4/train.txt
  37. 4247
    0
      yolo_cfg/tiny_v4/vaild.txt
  38. 488
    129
      yolov4_7gpu.cfg

+ 0
- 3
README.md 查看文件

@@ -1,3 +0,0 @@
1
-# mailsys_server
2
-
3
-Repository for M.A.I.L system's analysis server.

+ 0
- 41
auto_coordinate.py 查看文件

@@ -1,41 +0,0 @@
1
-import os
2
-import sys
3
-import glob
4
-import time
5
-
6
-
7
-hdd_root = '/hdd/*'
8
-
9
-def main():
10
-    surgery_list = glob.glob(hdd_root)
11
-
12
-    for id in surgery_list:
13
-        if 'lost+found' in id:
14
-            surgery_list.remove(id)
15
-        else:
16
-            pass
17
-
18
-    for id in surgery_list:
19
-        file_list=glob.glob(id + '/*')
20
-        for file in file_list:
21
-            if 'log.txt' in file:
22
-                f = open(file, 'rt')
23
-                logs = f.readlines()
24
-                f.close()
25
-                if len(logs) == 4:
26
-                    f = open(file, 'at')
27
-                    r_time = time.ctime(time.time())
28
-                    f.write('global coordinate successfully starts at ' + str(r_time) + '\n')
29
-                    f.close()
30
-                    #FIXME: change coordinate_g.py when IMU sensor works
31
-                    os.system('docker run --rm --name coordinate --gpus '"device=1"' -v /dev:/dev -v /home/mc365/sources:/sources -v /hdd:/hdd ellishuntingmoon/mailsys:0.4 python3 /sources/coordinate_g.py ' + id + '/ >> /dev/null')
32
-                    f = open(file, 'at')
33
-                    r_time = time.ctime(time.time())
34
-                    f.write('global coordinate successfully ends at ' + str(r_time) + '\n')
35
-                    f.close()
36
-                else:
37
-                    pass
38
-
39
-if __name__ == '__main__':
40
-    while True:
41
-        main()

+ 0
- 91
auto_extract.py 查看文件

@@ -1,91 +0,0 @@
1
-import os
2
-import sys
3
-import glob
4
-import time
5
-import threading
6
-import paramiko
7
-
8
-
9
-hdd_root = '/hdd/*'
10
-
11
-def cmd_run(index, shell_cmd):
12
-    sh = shell_cmd[index]
13
-    os.system(sh)
14
-
15
-def main():
16
-    surgery_list = glob.glob(hdd_root)
17
-
18
-    for id in surgery_list:
19
-        if 'lost+found' in id:
20
-            surgery_list.remove(id)
21
-        else:
22
-            pass
23
-
24
-    for id in surgery_list:
25
-        
26
-        file_list=glob.glob(id + '/*')
27
-        for file in file_list:
28
-            if 'log.txt' in file:
29
-                f = open(file, 'rt')
30
-                logs = f.readlines()
31
-                f.close()                
32
-                if len(logs) == 2:
33
-                    thread_list = []
34
-                    f = open(file, 'at')
35
-                    r_time = time.ctime(time.time())
36
-                    f.write('exectue is successfully starts at ' + str(r_time) + '\n')
37
-                    f.close()
38
-                    cmd_list = ['docker run --rm --name extract_2 --gpus '"device=1"' -v /dev:/dev -v /home/mc365/sources:/sources -v /hdd:/hdd ellishuntingmoon/mailsys:0.5 python3 /sources/extract_02.py ' + id + '/', 'echo ext_1 start']
39
-                    for index in range(0, len(cmd_list)):
40
-                        thread_list.append(threading.Thread(target=cmd_run,args=(index,cmd_list)))
41
-                        thread_list[index].start()         
42
-                    os.system('docker run --rm --name extract_1 --gpus '"device=0"' -v /dev:/dev -v /home/mc365/sources:/sources -v /hdd:/hdd ellishuntingmoon/mailsys:0.5 python3 /sources/extract_01.py ' + id + '/')       
43
-                    f = open(file, 'at')
44
-                    r_time = time.ctime(time.time())
45
-                    f.write('exectue is successfully ends at ' + str(r_time) + '\n')
46
-                    f.close()
47
-
48
-                    #NOTE: Starting global coordinate
49
-                    f = open(file, 'at')
50
-                    r_time = time.ctime(time.time())
51
-                    f.write('global coordinate successfully starts at ' + str(r_time) + '\n')
52
-                    f.close()
53
-                    #FIXME: change coordinate_g.py when IMU sensor works
54
-                    os.system('docker run --rm --name coordinate --gpus '"device=1"' -v /dev:/dev -v /home/mc365/sources:/sources -v /hdd:/hdd ellishuntingmoon/mailsys:0.5 python3 /sources/coordinate_g.py ' + id + '/ >> /dev/null')
55
-                    f = open(file, 'at')
56
-                    r_time = time.ctime(time.time())
57
-                    f.write('global coordinate successfully ends at ' + str(r_time) + '\n')
58
-                    f.close()
59
-
60
-
61
-                    #NOTE:
62
-                    f = open(file, 'at')
63
-                    r_time = time.ctime(time.time())
64
-                    f.write('kpi successfully starts at ' + str(r_time) + '\n')
65
-                    f.close()
66
-                    #FIXME: change coordinate_g.py when IMU sensor works and get generic optimizer for DBSCAN
67
-                    os.system('docker run --rm --name kpi --gpus '"device=1"' -v /dev:/dev -v /home/mc365/sources:/sources -v /hdd:/hdd ellishuntingmoon/mailsys:0.5 python3 /sources/kpi.py ' + id + '/ >> /dev/null')
68
-                    f = open(file, 'at')
69
-                    r_time = time.ctime(time.time())
70
-                    f.write('kpi successfully ends at ' + str(r_time) + '\n')
71
-                    f.close()
72
-
73
-                    transport = paramiko.Transport(('365mc.iptime.org', 63122))
74
-                    transport.connect(username='mc365', password='tkadbrdhMC1!')
75
-                    sftp = paramiko.SFTPClient.from_transport(transport)
76
-                    #FIXME: send file recursively
77
-                    full_list = glob.glob(id + "/*")
78
-                    sftp.mkdir('/data/'+full_list[0][5:22])
79
-                    for f in full_list:
80
-                        sftp.put(f,'/data/'+str(f[5:]))
81
-                    sftp.close()
82
-                    transport.close()      
83
-                    os.system('rm -rf '+id)     
84
-
85
-                else:
86
-                    pass
87
-
88
-if __name__ == '__main__':
89
-    while True:
90
-        main()
91
-        time.sleep(900)

+ 0
- 60
auto_kpi.py 查看文件

@@ -1,60 +0,0 @@
1
-import os
2
-import sys
3
-import glob
4
-import time
5
-import paramiko
6
-
7
-
8
-
9
-hdd_root = '/hdd/*'
10
-
11
-
12
-hdd_root = '/hdd/*'
13
-
14
-def main():
15
-    surgery_list = glob.glob(hdd_root)
16
-
17
-    for id in surgery_list:
18
-        #id = /hdd/********
19
-        if 'lost+found' in id:
20
-            surgery_list.remove(id)
21
-        else:
22
-            pass
23
-
24
-    for id in surgery_list:
25
-        file_list=glob.glob(id + '/*')
26
-
27
-        for file in file_list:
28
-            #file = /hdd/**********/*.csv, svo, txt
29
-            if 'log.txt' in file:
30
-                f = open(file, 'rt')
31
-                logs = f.readlines()
32
-                f.close()
33
-                if len(logs) == 6:
34
-                    f = open(file, 'at')
35
-                    r_time = time.ctime(time.time())
36
-                    f.write('kpi successfully starts at ' + str(r_time) + '\n')
37
-                    f.close()
38
-                    #FIXME: change coordinate_g.py when IMU sensor works and get generic optimizer for DBSCAN
39
-                    os.system('docker run --rm --name kpi --gpus '"device=1"' -v /dev:/dev -v /home/mc365/sources:/sources -v /hdd:/hdd ellishuntingmoon/mailsys:0.4 python3 /sources/kpi.py ' + id + '/ >> /dev/null')
40
-                    f = open(file, 'at')
41
-                    r_time = time.ctime(time.time())
42
-                    f.write('kpi successfully ends at ' + str(r_time) + '\n')
43
-                    f.close()
44
-
45
-                    transport = paramiko.Transport(('365mc.iptime.org', 63122))
46
-                    transport.connect(username='mc365', password='tkadbrdhMC1!')
47
-                    sftp = paramiko.SFTPClient.from_transport(transport)
48
-                    #FIXME: send file recursively
49
-                    for file in file_list:
50
-                        sftp.put(file,'/data/'+str(file[5:]))
51
-                    sftp.close()
52
-                    transport.close()      
53
-                    os.system('rm -rf '+id)          
54
-                else:
55
-                    pass
56
-        
57
-
58
-if __name__ == '__main__':
59
-    while True:
60
-        main()

+ 2
- 0
cfg/log_reboot.txt 查看文件

@@ -0,0 +1,2 @@
1
+This file automatically created at 2022/02/04 19:02:51
2
+[REBOOT] Device reboot executed at 2022/02/05 19:23:39

+ 39
- 0
configuration.json 查看文件

@@ -0,0 +1,39 @@
1
+{
2
+    "confDate": "2022/02/09 19:18:45",
3
+    "branch": "",
4
+    "room": "",
5
+    "workingDir": "/home/mc365/data",
6
+    "storageDir": "/hdd",
7
+    "mainCam": "ZED 29498854",
8
+    "recFrame": 1800,
9
+    "cam1": {
10
+        "SN": "ZED 29498854",
11
+        "angle": 10.848755325589863,
12
+        "d_center": 1.77,
13
+        "headCoord": [
14
+            1149,
15
+            540
16
+        ],
17
+        "d_headCoord": 1.78,
18
+        "tailCoord": [
19
+            700,
20
+            540
21
+        ],
22
+        "d_tailCoord": 1.8
23
+    },
24
+    "cam2": {
25
+        "SN": "ZED 27972057",
26
+        "angle": 10.848755325589863,
27
+        "d_center": 1.77,
28
+        "headCoord": [
29
+            700,
30
+            540
31
+        ],
32
+        "d_headCoord": 1.78,
33
+        "tailCoord": [
34
+            1149,
35
+            540
36
+        ],
37
+        "d_tailCoord": 1.8
38
+    }
39
+}

+ 25
- 0
configuration.json.bak 查看文件

@@ -0,0 +1,25 @@
1
+{
2
+    "confDate": "2022/02/09 03:57:27",
3
+    "workingDir": "/home/mc365/data",
4
+    "storageDir": "/hdd",
5
+    "mainCam": "ZED 22842807",
6
+    "recFrame": 1800,
7
+    "cam1": {
8
+        "SN": "ZED 22842807",
9
+        "angle": 8.925006049019956,
10
+        "d_center": 0,
11
+        "headCoord": "",
12
+        "d_headCoord": 0,
13
+        "tailCoord": "",
14
+        "d_tailCoord": 0
15
+    },
16
+    "cam2": {
17
+        "SN": "",
18
+        "angle": "",
19
+        "d_center": "",
20
+        "headCoord": "",
21
+        "d_headCoord": "",
22
+        "tailCoord": "",
23
+        "d_tailCoord": ""
24
+    }
25
+}

+ 0
- 54
coordinate_g.py 查看文件

@@ -1,54 +0,0 @@
1
-import pandas as pd
2
-import os
3
-import sys
4
-
5
-target_dir = str(sys.argv[1])
6
-
7
-full_list = os.listdir(target_dir)
8
-csv_list = [file for file in full_list if file.startswith('cam')]
9
-csv_list = sorted(csv_list)
10
-#csv_list = [cam1.csv,cam2.csv]
11
-#print(csv_list)
12
-
13
-d1 = pd.read_csv(os.path.join(target_dir,csv_list[0]))
14
-
15
-if len(csv_list) != 1 :
16
-    d2 = pd.read_csv(os.path.join(target_dir,csv_list[1]))
17
-
18
-    d2['x'] = 1920 - d2['x']
19
-
20
-    df_conc = pd.concat([d1,d2])
21
-else:
22
-    df_conc = d1
23
-if len(d1) == 0 and len(d2) == 0:
24
-    dummy_dic = {'x':[0,1920], 'y':[1080,0], 'z':[1.0,5.0], 'frame':[1,1900]}
25
-    df_conc = pd.DataFrame(dummy_dic)
26
-df_conc.sort_values(by='frame')
27
-df_conc.drop_duplicates(['frame'])
28
-#print(df_conc)
29
-
30
-df_global = pd.DataFrame(index=range(0,int(df_conc['frame'].iloc[-1])+1), columns=['x','y','z','frame'])
31
-df_global['frame'] = list(range(1, int(df_conc['frame'].iloc[-1]+2)))
32
-df_global = df_global.astype({'frame':'int'})
33
-#print(df_global)
34
-
35
-df_global = pd.concat([df_global,df_conc])
36
-
37
-df_global=df_global.sort_values(by = ['frame'])
38
-df_global = df_global.drop_duplicates(['frame'], keep='last')
39
-df_global = df_global.reset_index(drop=True)
40
-df_global = df_global.interpolate()
41
-df_global = df_global.interpolate(method='values')
42
-#print(df_global)
43
-
44
-
45
-df_global['x'] = 10 * ((df_global['x'] - df_global['x'].min()) / (df_global['x'].max() - df_global['x'].min())) - 5
46
-df_global['y'] = 10 * ((df_global['y'] - df_global['y'].min()) / (df_global['y'].max() - df_global['y'].min())) - 5
47
-df_global['z'] = 2.7 * ((df_global['z'] - df_global['z'].min()) / (df_global['z'].max() - df_global['z'].min())) + 0.3
48
-#print(df_global)
49
-
50
-df_global = df_global.fillna(0)
51
-df_summary = df_global[df_global['frame']% 30 == 1]
52
-#print(df_summary)
53
-df_global.to_csv(os.path.join(target_dir,'coordinate.csv'))
54
-df_summary.to_csv(os.path.join(target_dir,'coordinate_summary.csv'))

+ 0
- 5
data.data 查看文件

@@ -1,5 +0,0 @@
1
-classes=2
2
-train=/tmp/darknet/365mc/dataset_v4/train.txt
3
-valid=/tmp/darknet/365mc/dataset_v4/valid.txt
4
-names=/sources/names.names
5
-backup=/tmp/darknet/365mc/dataset_v4/backup

+ 161
- 181
depthsensing.py 查看文件

@@ -1,37 +1,33 @@
1 1
 #!python3
2 2
 """
3 3
 Python 3 wrapper for identifying objects in images
4
-
5 4
 Requires DLL compilation
6
-
7 5
 Original *nix 2.7: https://github.com/pjreddie/darknet/blob/0f110834f4e18b30d5f101bf8f1724c34b7b83db/python/darknet.py
8 6
 Windows Python 2.7 version: https://github.com/AlexeyAB/darknet/blob/fc496d52bf22a0bb257300d3c79be9cd80e722cb/build/darknet/x64/darknet.py
9
-
10 7
 @author: Philip Kahn, Aymeric Dujardin
11 8
 @date: 20180911
12 9
 """
13 10
 # pylint: disable=R, W0401, W0614, W0703
14
-import cv2
15
-import pyzed.sl as sl
16
-from ctypes import *
17
-import math
18
-import random
19 11
 import os
20
-import numpy as np
21
-import statistics
22 12
 import sys
23
-import getopt
24
-from random import randint
25 13
 import time
14
+import logging
15
+import random
16
+from random import randint
17
+import math
18
+import statistics
19
+import getopt
20
+from ctypes import *
21
+import numpy as np
22
+import cv2
23
+import pyzed.sl as sl
26 24
 import pandas as pd
27
-from pandas import  DataFrame as df
28
-import  subprocess
25
+from pandas import DataFrame as df
29 26
 
30 27
 
31
-#NOTE at 2020-12-22: if svo's resulotion changed, then you have to edit WIDTH, HEIGHT and DEPTH_MAX
32
-WIDTH = 1920
33
-HEIGHT = 1080
34
-DEPTH_MAX = 8
28
+# Get the top-level logger object
29
+log = logging.getLogger(__name__)
30
+logging.basicConfig(level=logging.INFO)
35 31
 
36 32
 
37 33
 def sample(probs):
@@ -64,7 +60,13 @@ class DETECTION(Structure):
64 60
                 ("prob", POINTER(c_float)),
65 61
                 ("mask", POINTER(c_float)),
66 62
                 ("objectness", c_float),
67
-                ("sort_class", c_int)]
63
+                ("sort_class", c_int),
64
+                ("uc", POINTER(c_float)),
65
+                ("points", c_int),
66
+                ("embeddings", POINTER(c_float)),
67
+                ("embedding_size", c_int),
68
+                ("sim", c_float),
69
+                ("track_id", c_int)]
68 70
 
69 71
 
70 72
 class IMAGE(Structure):
@@ -86,7 +88,6 @@ if os.name == "nt":
86 88
     cwd = os.path.dirname(__file__)
87 89
     os.environ['PATH'] = cwd + ';' + os.environ['PATH']
88 90
     winGPUdll = os.path.join(cwd, "yolo_cpp_dll.dll")
89
-    #print(winGPUdll)
90 91
     winNoGPUdll = os.path.join(cwd, "yolo_cpp_dll_nogpu.dll")
91 92
     envKeys = list()
92 93
     for k, v in os.environ.items():
@@ -97,7 +98,7 @@ if os.name == "nt":
97 98
             if tmp in ["1", "true", "yes", "on"]:
98 99
                 raise ValueError("ForceCPU")
99 100
             else:
100
-                pass#print("Flag value '"+tmp+"' not forcing CPU mode")
101
+                log.info("Flag value '"+tmp+"' not forcing CPU mode")
101 102
         except KeyError:
102 103
             # We never set the flag
103 104
             if 'CUDA_VISIBLE_DEVICES' in envKeys:
@@ -109,8 +110,8 @@ if os.name == "nt":
109 110
                     raise ValueError("ForceCPU")
110 111
             except NameError:
111 112
                 pass
112
-            # #print(os.environ.keys())
113
-            # #print("FORCE_CPU flag undefined, proceeding with GPU")
113
+            # log.info(os.environ.keys())
114
+            # log.warning("FORCE_CPU flag undefined, proceeding with GPU")
114 115
         if not os.path.exists(winGPUdll):
115 116
             raise ValueError("NoDLL")
116 117
         lib = CDLL(winGPUdll, RTLD_GLOBAL)
@@ -118,13 +119,13 @@ if os.name == "nt":
118 119
         hasGPU = False
119 120
         if os.path.exists(winNoGPUdll):
120 121
             lib = CDLL(winNoGPUdll, RTLD_GLOBAL)
121
-            #print("Notice: CPU-only mode")
122
+            log.warning("Notice: CPU-only mode")
122 123
         else:
123 124
             # Try the other way, in case no_gpu was
124 125
             # compile but not renamed
125 126
             lib = CDLL(winGPUdll, RTLD_GLOBAL)
126
-            #print("Environment variables indicated a CPU run, but we didn't find `" +
127
-            #      winNoGPUdll+"`. Trying a GPU run anyway.")
127
+            log.warning("Environment variables indicated a CPU run, but we didn't find `" +
128
+                        winNoGPUdll+"`. Trying a GPU run anyway.")
128 129
 else:
129 130
     lib = CDLL("/root/darknet/libdarknet.so", RTLD_GLOBAL)
130 131
 lib.network_width.argtypes = [c_void_p]
@@ -220,10 +221,10 @@ def classify(net, meta, im):
220 221
     res = []
221 222
     for i in range(meta.classes):
222 223
         if altNames is None:
223
-            nameTag = meta.names[i]
224
+            name_tag = meta.names[i]
224 225
         else:
225
-            nameTag = altNames[i]
226
-        res.append((nameTag, out[i]))
226
+            name_tag = altNames[i]
227
+        res.append((name_tag, out[i]))
227 228
     res = sorted(res, key=lambda x: -x[1])
228 229
     return res
229 230
 
@@ -246,21 +247,16 @@ def detect(net, meta, image, thresh=.5, hier_thresh=.5, nms=.45, debug=False):
246 247
         do_nms_sort(dets, num, meta.classes, nms)
247 248
     res = []
248 249
     if debug:
249
-        pass
250
-        #print("about to range")
250
+        log.debug("about to range")
251 251
     for j in range(num):
252 252
         for i in range(meta.classes):
253
-            try:
254
-                if dets[j].prob[i] > 0:
255
-                    b = dets[j].bbox
256
-                    if altNames is None:
257
-                        nameTag = meta.names[i]
258
-                    else:
259
-                        nameTag = altNames[i]
260
-                    res.append((nameTag, dets[j].prob[i], (b.x, b.y, b.w, b.h), i))
261
-            except ValueError as e:
262
-                #print(e)
263
-                pass
253
+            if dets[j].prob[i] > 0:
254
+                b = dets[j].bbox
255
+                if altNames is None:
256
+                    name_tag = meta.names[i]
257
+                else:
258
+                    name_tag = altNames[i]
259
+                res.append((name_tag, dets[j].prob[i], (b.x, b.y, b.w, b.h), i))
264 260
     res = sorted(res, key=lambda x: -x[1])
265 261
     free_detections(dets, num)
266 262
     return res
@@ -271,7 +267,20 @@ metaMain = None
271 267
 altNames = None
272 268
 
273 269
 
274
-def getObjectDepth(depth, bounds):
270
+def get_object_depth(depth, bounds):
271
+    '''
272
+    Calculates the median x, y, z position of top slice(area_div) of point cloud
273
+    in camera frame.
274
+    Arguments:
275
+        depth: Point cloud data of whole frame.
276
+        bounds: Bounding box for object in pixels.
277
+            bounds[0]: x-center
278
+            bounds[1]: y-center
279
+            bounds[2]: width of bounding box.
280
+            bounds[3]: height of bounding box.
281
+    Return:
282
+        x, y, z: Location of object in meters.
283
+    '''
275 284
     area_div = 2
276 285
 
277 286
     x_vect = []
@@ -286,22 +295,29 @@ def getObjectDepth(depth, bounds):
286 295
                 y_vect.append(depth[i, j, 1])
287 296
                 z_vect.append(z)
288 297
     try:
289
-        x = statistics.median(x_vect)
290
-        y = statistics.median(y_vect)
291
-        z = statistics.median(z_vect)
298
+        x_median = statistics.median(x_vect)
299
+        y_median = statistics.median(y_vect)
300
+        z_median = statistics.median(z_vect)
292 301
     except Exception:
293
-        x = -1
294
-        y = -1
295
-        z = -1
302
+        x_median = -1
303
+        y_median = -1
304
+        z_median = -1
296 305
         pass
297 306
 
298
-    return x, y, z
307
+    return x_median, y_median, z_median
299 308
 
300 309
 
301
-def generateColor(metaPath):
310
+def generate_color(meta_path):
311
+    '''
312
+    Generate random colors for the number of classes mentioned in data file.
313
+    Arguments:
314
+    meta_path: Path to .data file.
315
+    Return:
316
+    color_array: RGB color codes for each class.
317
+    '''
302 318
     random.seed(42)
303
-    f = open(metaPath, 'r')
304
-    content = f.readlines()
319
+    with open(meta_path, 'r') as f:
320
+        content = f.readlines()
305 321
     class_num = int(content[0].split("=")[1])
306 322
     color_array = []
307 323
     for x in range(0, class_num):
@@ -312,62 +328,58 @@ def generateColor(metaPath):
312 328
 def main(argv):
313 329
 
314 330
     thresh = 0.25
315
-    configPath = "/sources/cfg.cfg"
316
-    weightPath = "/sources/mailsys.weight"
317
-    metaPath = "/sources/data.data"
318
-    svoPath = None
319
-
320
-    help_str = 'darknet_zed.py -c <config> -w <weight> -m <meta> -t <threshold> -s <svo_file> -o <output_path>'
331
+    darknet_path="../libdarknet/"
332
+    config_path = darknet_path + "cfg/yolov3-tiny.cfg"
333
+    weight_path = "yolov3-tiny.weights"
334
+    meta_path = "coco.data"
335
+    svo_path = None
336
+    zed_id = 0
337
+
338
+    help_str = 'darknet_zed.py -c <config> -w <weight> -m <meta> -t <threshold> -s <svo_file> -z <zed_id>'
321 339
     try:
322 340
         opts, args = getopt.getopt(
323
-            argv, "hc:w:m:t:s:o:", ["config=", "weight=", "meta=", "threshold=", "svo_file=", "output_path="])
341
+            argv, "hc:w:m:t:s:z:o:", ["config=", "weight=", "meta=", "threshold=", "svo_file=", "zed_id=", "output_path="])
324 342
     except getopt.GetoptError:
325
-        print (help_str)
343
+        log.exception(help_str)
326 344
         sys.exit(2)
327 345
     for opt, arg in opts:
328 346
         if opt == '-h':
329
-            print (help_str)
347
+            log.info(help_str)
330 348
             sys.exit()
331 349
         elif opt in ("-c", "--config"):
332
-            configPath = arg
350
+            config_path = arg
333 351
         elif opt in ("-w", "--weight"):
334
-            weightPath = arg
352
+            weight_path = arg
335 353
         elif opt in ("-m", "--meta"):
336
-            metaPath = arg
354
+            meta_path = arg
337 355
         elif opt in ("-t", "--threshold"):
338 356
             thresh = float(arg)
339 357
         elif opt in ("-s", "--svo_file"):
340
-            svoPath = arg
358
+            svo_path = arg
359
+        elif opt in ("-z", "--zed_id"):
360
+            zed_id = int(arg)
341 361
         elif opt in ("-o", "--output_path"):
342 362
             output_path = arg
343 363
 
344
-    init = sl.InitParameters()
345
-    init.coordinate_units = sl.UNIT.METER
346
-
347
-
348
-
349
-    if svoPath is not None:
350
-        init.set_from_svo_file(svoPath)
351
-
352
-    dirname = os.path.splitext(svoPath)[0]
353
-    dir, filename = os.path.split(dirname)
354
-    #print(output_path)
355
-    #print(filename)
364
+            output_path_nms = arg[:-4] + '_nms' + arg[-4:]
356 365
 
357
-    #try:
358
-    #    if not (os.path.isdir(output_path)):
359
-    #        os.makedirs(os.path.join(output_path))
360
-    #except OSError as e:
361
-    #    #print(e)
366
+    input_type = sl.InputType()
367
+    if svo_path is not None:
368
+        log.info("SVO file : " + svo_path)
369
+        input_type.set_from_svo_file(svo_path)
370
+    else:
371
+        # Launch camera by id
372
+        input_type.set_from_camera_id(zed_id)
362 373
 
374
+    init = sl.InitParameters(input_t=input_type)
375
+    init.coordinate_units = sl.UNIT.METER
363 376
 
364 377
     cam = sl.Camera()
365 378
     if not cam.is_opened():
366
-        #print("Opening ZED Camera...")
367
-        pass
379
+        log.info("Opening ZED Camera...")
368 380
     status = cam.open(init)
369 381
     if status != sl.ERROR_CODE.SUCCESS:
370
-        #print(repr(status))
382
+        log.error(repr(status))
371 383
         exit()
372 384
 
373 385
     runtime = sl.RuntimeParameters()
@@ -376,31 +388,32 @@ def main(argv):
376 388
     mat = sl.Mat()
377 389
     point_cloud_mat = sl.Mat()
378 390
 
379
-    # Import the global variables. This lets us instance Darknet once, then just call performDetect() again without instancing again
391
+    # Import the global variables. This lets us instance Darknet once,
392
+    # then just call performDetect() again without instancing again
380 393
     global metaMain, netMain, altNames  # pylint: disable=W0603
381 394
     assert 0 < thresh < 1, "Threshold should be a float between zero and one (non-inclusive)"
382
-    if not os.path.exists(configPath):
395
+    if not os.path.exists(config_path):
383 396
         raise ValueError("Invalid config path `" +
384
-                         os.path.abspath(configPath)+"`")
385
-    if not os.path.exists(weightPath):
397
+                         os.path.abspath(config_path)+"`")
398
+    if not os.path.exists(weight_path):
386 399
         raise ValueError("Invalid weight path `" +
387
-                         os.path.abspath(weightPath)+"`")
388
-    if not os.path.exists(metaPath):
400
+                         os.path.abspath(weight_path)+"`")
401
+    if not os.path.exists(meta_path):
389 402
         raise ValueError("Invalid data file path `" +
390
-                         os.path.abspath(metaPath)+"`")
403
+                         os.path.abspath(meta_path)+"`")
391 404
     if netMain is None:
392
-        netMain = load_net_custom(configPath.encode(
393
-            "ascii"), weightPath.encode("ascii"), 0, 1)  # batch size = 1
405
+        netMain = load_net_custom(config_path.encode(
406
+            "ascii"), weight_path.encode("ascii"), 0, 1)  # batch size = 1
394 407
     if metaMain is None:
395
-        metaMain = load_meta(metaPath.encode("ascii"))
408
+        metaMain = load_meta(meta_path.encode("ascii"))
396 409
     if altNames is None:
397 410
         # In thon 3, the metafile default access craps out on Windows (but not Linux)
398 411
         # Read the names file and create a list to feed to detect
399 412
         try:
400
-            with open(metaPath) as metaFH:
401
-                metaContents = metaFH.read()
413
+            with open(meta_path) as meta_fh:
414
+                meta_contents = meta_fh.read()
402 415
                 import re
403
-                match = re.search("names *= *(.*)$", metaContents,
416
+                match = re.search("names *= *(.*)$", meta_contents,
404 417
                                   re.IGNORECASE | re.MULTILINE)
405 418
                 if match:
406 419
                     result = match.group(1)
@@ -408,41 +421,27 @@ def main(argv):
408 421
                     result = None
409 422
                 try:
410 423
                     if os.path.exists(result):
411
-                        with open(result) as namesFH:
412
-                            namesList = namesFH.read().strip().split("\n")
413
-                            altNames = [x.strip() for x in namesList]
424
+                        with open(result) as names_fh:
425
+                            names_list = names_fh.read().strip().split("\n")
426
+                            altNames = [x.strip() for x in names_list]
414 427
                 except TypeError:
415 428
                     pass
416 429
         except Exception:
417 430
             pass
418 431
 
419
-    color_array = generateColor(metaPath)
432
+    color_array = generate_color(meta_path)
420 433
 
421
-    #print("Running...")
422
-    #df1 = df(data={'frame': [], 'label': [], 'x': [], 'y': [], 'depth': []})
434
+    #log.info("Running...")
423 435
     df1 = df(data={'x': [], 'y': [], 'z': [], 'frame': []})
424
-    #print(df1)
425
-    start = time.time()
426
-    key = ''
436
+    df_nms = df(data={'x': [], 'y': [], 'z': [],'nms':[] ,'frame': []})
427 437
     count = 0
428 438
     frame = 1
429
-
430
-    #image_size = cam.get_resolution()
431
-    #width = image_size.width
432
-    #height = image_size.height
433
-    #width_sbs = width * 2
434
-
435
-    # Prepare side by side image container equivalent to CV_8UC4
436
-    #svo_image_sbs_rgba = np.zeros((height, width_sbs, 4), dtype=np.uint8)
437
-    fourcc = cv2.VideoWriter_fourcc('M', '4', 'S', '2')
438
-
439
-    # 컬러 영상 저장시
440
-    ##print(cam.get_camera_fps(), "!!!!!!!!!!!")
441
-    #video_path = os.path.join(output_path, filename)
442
-    #writer = cv2.VideoWriter(str(video_path)+"_output.avi", fourcc, cam.get_camera_fps(), (width, height))
443
-    ##print(video_path)
444
-    ##print(writer)
445
-    while key != 113:  # for 'q' key
439
+    svo_frame = cam.get_svo_number_of_frames()
440
+    
441
+    quit_timer = time.time()
442
+    key = ''
443
+    while frame != svo_frame:  # for 'q' key        
444
+        start_time = time.time() # start time of the loop
446 445
         err = cam.grab(runtime)
447 446
         if err == sl.ERROR_CODE.SUCCESS:
448 447
             cam.retrieve_image(mat, sl.VIEW.LEFT)
@@ -455,75 +454,56 @@ def main(argv):
455 454
             # Do the detection
456 455
             detections = detect(netMain, metaMain, image, thresh)
457 456
 
458
-            #print(chr(27) + "[2J"+"**** " +
459
-                #  str(len(detections)) + " Results ****")
460
-            frame += 1
457
+            log.info(chr(27) + "[2J"+"**** " + str(len(detections)) + " Results ****")
458
+            
461 459
             for detection in detections:
460
+                log.info(detection)
462 461
                 label = detection[0]
463 462
                 confidence = detection[1]
464 463
                 pstring = label+": "+str(np.rint(100 * confidence))+"%"
465
-                #print(pstring)
464
+                log.info(pstring)
466 465
                 bounds = detection[2]
467
-                yExtent = int(bounds[3])
468
-                xEntent = int(bounds[2])
466
+                y_extent = int(bounds[3])
467
+                x_extent = int(bounds[2])
469 468
                 # Coordinates are around the center
470
-                xCoord = int(bounds[0] - bounds[2]/2)
471
-                yCoord = int(bounds[1] - bounds[3]/2)
472
-                boundingBox = [[xCoord, yCoord], [xCoord, yCoord + yExtent], [xCoord + xEntent, yCoord + yExtent], [xCoord + xEntent, yCoord] ]
469
+                x_coord = int(bounds[0] - bounds[2]/2)
470
+                y_coord = int(bounds[1] - bounds[3]/2)
471
+                #boundingBox = [[x_coord, y_coord], [x_coord, y_coord + y_extent], [x_coord + x_extent, y_coord + y_extent], [x_coord + x_extent, y_coord]]
473 472
                 thickness = 1
474
-                x, y, z = getObjectDepth(depth, bounds)
475
-                distance_xyz = math.sqrt(x * x + y * y + z * z)
476
-
477
-                distance = "{:.4f}".format(distance_xyz)
478
-
479
-                #print(label, distance, xCoord, yCoord)
480
-                cv2.rectangle(image, (xCoord-thickness, yCoord-thickness), (xCoord + xEntent+thickness, yCoord+(18 +thickness*4)), color_array[detection[3]], -1)
481
-                cv2.putText(image, label + " " +  (str(distance) + " m"), (xCoord+(thickness*4), yCoord+(10 +thickness*4)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 2)
482
-                cv2.rectangle(image, (xCoord-thickness, yCoord-thickness), (xCoord + xEntent+thickness, yCoord + yExtent+thickness), color_array[detection[3]], int(thickness*2))
483
-                x = xCoord-thickness
484
-                x1 = xCoord + xEntent+thickness
485
-                y = yCoord-thickness
486
-                y1 = yCoord + yExtent+thickness
487
-                
488
-
489
-                #If need normalize, erase # and change df1.loc[count]
490
-                #norm_x = (x + x1) / 2 / WIDTH
491
-                #norm_y = (y + y1) / 2 / HEIGHT
492
-                #norm_z = distance_xyz / DEPTH_MAX
493
-
494
-
495
-                
496
-                df1.loc[count] = [int(xCoord), int(yCoord), distance_xyz, int(frame)]
497
-                #df1.columns = ['idx','frame','label','x','y','depth']
498
-                cv2.line(image, (int((x+x1)/2), int((y+y1)/2)), (int((x+x1)/2), int((y+y1)/2)), (0, 0, 255), 10)
499
-
473
+                x, y, z = get_object_depth(depth, bounds)
474
+                distance = math.sqrt(x * x + y * y + z * z)
475
+                distance = "{:.8f}".format(distance)
476
+                cv2.rectangle(image, (x_coord - thickness, y_coord - thickness),
477
+                              (x_coord + x_extent + thickness, y_coord + (18 + thickness*4)),
478
+                              color_array[detection[3]], -1)
479
+                cv2.putText(image, label + " " +  (str(distance) + " m"),
480
+                            (x_coord + (thickness * 4), y_coord + (10 + thickness * 4)),
481
+                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
482
+                cv2.rectangle(image, (x_coord - thickness, y_coord - thickness),
483
+                              (x_coord + x_extent + thickness, y_coord + y_extent + thickness),
484
+                              color_array[detection[3]], int(thickness*2))
485
+                x = x_coord-thickness
486
+                x1 = x_coord + x_extent+thickness
487
+                y = y_coord-thickness
488
+                y1 = y_coord + y_extent+thickness
489
+                df1.loc[count] = [int(x_coord), int(y_coord), distance, int(frame)]
490
+                df_nms.loc[count] = [int(x_coord), int(y_coord), distance, np.rint(100*confidence), int(frame)]
500 491
                 count += 1
501
-                image = cv2.cvtColor(image, cv2.COLOR_RGBA2RGB)
502
-            #print(frame)
503
-            image = cv2.cvtColor(image, cv2.COLOR_RGBA2RGB)
504
-            #writer.write(image)
505
-            # cv2.imshow("ZED", image)
506
-           # key = cv2.waitKey(5)
507
-            # #print(cam.get_svo_number_of_frames())
508
-            if frame == cam.get_svo_number_of_frames():
509
-                break
510
-        else:
511
-            #print('something happend')
492
+            #cv2.imshow("ZED", image)
512 493
             #key = cv2.waitKey(5)
494
+            log.info("FPS: {}".format(1.0 / (time.time() - start_time)))
495
+            log.info("\n"+str(frame))
496
+            frame += 1        
497
+        else:        
513 498
             pass
514
-    #print("time :", time.time() - start)
499
+            #key = cv2.waitKey(5)
500
+    #cv2.destroyAllWindows()
515 501
 
516 502
     cam.close()
517
-    #writer.release()
518
-    #cv2.destroyAllWindows()
519
-    #print("\nFINISH")
520
-    ##print(df1)
521
-    
522 503
     df1.to_csv(output_path, index=False)
523
-    #print("csv save")
504
+    df_nms.to_csv(output_path_nms, index=False)
505
+    log.info("\nFINISH")
524 506
 
525 507
 
526 508
 if __name__ == "__main__":
527
-
528
-    #print(sys.argv)
529
-    main(sys.argv[1:])
509
+    main(sys.argv[1:])

+ 0
- 23
detect_new_folder.py 查看文件

@@ -1,23 +0,0 @@
1
-import os
2
-import time
3
-
4
-name_time_list = []
5
-target_dir = '/home/mc365/data/dummy'
6
-while True:
7
-    time.sleep(60)
8
-    root_path = "/home/mc365/data/"
9
-    filenames = os.listdir(root_path)
10
-    for f in filenames:
11
-        w_time = os.path.getctime(root_path + f)
12
-        name_time_list.append((f, w_time))
13
-    sorted_list = sorted(name_time_list, key=lambda x: x[1], reverse=True)
14
-    
15
-    
16
-    recent_file = sorted_list[0]
17
-    tmp_recent = str(root_path) + str(recent_file[0])
18
-    if target_dir != tmp_recent:
19
-        time.sleep(5)
20
-        command = 'docker run --rm --name mail_recorder --gpus '"device=1"' --privileged -v /dev:/dev -v /home/mc365/sources:/sources -v /home/mc365/data/'+str(recent_file[0])+':/data/'+str(recent_file[0])+' stereolabs/zed:3.5-gl-devel-cuda11.1-ubuntu18.04 python3 /sources/recorder.py.bak'
21
-        os.system(str(command))        
22
-    target_dir = str(root_path) + str(recent_file[0])    
23
-    

+ 0
- 35
extract_01.py 查看文件

@@ -1,35 +0,0 @@
1
-import sys
2
-import os
3
-import glob
4
-import pandas as pd
5
-
6
-#ssd_dir = /data/36536536520201231/
7
-target_dir = str(sys.argv[1])
8
-
9
-
10
-svo_list = os.listdir(target_dir)
11
-svo_list = [file for file in svo_list if file.endswith('1.svo')]
12
-svo_list = sorted(svo_list)
13
-del svo_list[-1]
14
-
15
-
16
-
17
-for svo in svo_list:
18
-  if svo[-5:] == '1.svo':   
19
-    os.system('python3 /sources/depthsensing.py -c /sources/cfg.cfg -w /sources/mailsys.weights -m /sources/data.data -s ' + target_dir + svo + ' -o ' + target_dir + svo[:5] + '.csv')
20
-  else:
21
-    pass
22
-
23
-full_list = os.listdir(target_dir)
24
-csv_1_list = [file for file in full_list if file.endswith("1.csv")]
25
-csv_1_list = sorted(csv_1_list)
26
-
27
-conc_df = pd.DataFrame(data={'x': [], 'y': [], 'z': [], 'frame': []})
28
-FRAME_COUNTER = 0
29
-N_OF_FRAME = 1800
30
-for x in csv_1_list:
31
-  df = pd.read_csv(os.path.join(target_dir,x), header=0)
32
-  df['frame'] = df['frame'] + FRAME_COUNTER
33
-  conc_df = pd.concat([conc_df,df])
34
-  FRAME_COUNTER += N_OF_FRAME
35
-conc_df.to_csv(os.path.join(target_dir,'cam1.csv'), index=False)

+ 0
- 33
extract_02.py 查看文件

@@ -1,33 +0,0 @@
1
-import sys
2
-import os
3
-import glob
4
-import pandas as pd
5
-
6
-#target_dir = /hdd/36536536520201231/
7
-target_dir = sys.argv[1]
8
-
9
-
10
-svo_list = os.listdir(target_dir)
11
-svo_list = [file for file in svo_list if file.endswith('2.svo')]
12
-svo_list = sorted(svo_list)
13
-del svo_list[-1]
14
-
15
-for svo in svo_list:
16
-  if svo[-5:] == '2.svo':
17
-    os.system('python3 /sources/depthsensing.py -c /sources/cfg.cfg -w /sources/mailsys.weights -m /sources/data.data -s ' + target_dir + svo + ' -o ' + target_dir + svo[:5] + '.csv')
18
-  else:
19
-    pass
20
-
21
-full_list = os.listdir(target_dir)
22
-csv_2_list = [file for file in full_list if file.endswith("2.csv")]
23
-csv_2_list = sorted(csv_2_list)
24
-
25
-conc_df = pd.DataFrame(data={'x': [], 'y': [], 'z': [], 'frame': []})
26
-FRAME_COUNTER = 0
27
-N_OF_FRAME = 1800
28
-for x in csv_2_list:
29
-  df = pd.read_csv(os.path.join(target_dir,x), header=0)
30
-  df['frame'] = df['frame'] + FRAME_COUNTER
31
-  conc_df = pd.concat([conc_df,df])
32
-  FRAME_COUNTER += N_OF_FRAME
33
-conc_df.to_csv(os.path.join(target_dir,'cam2.csv'), index=False)

+ 0
- 233
kpi.py 查看文件

@@ -1,233 +0,0 @@
1
-import os
2
-import time
3
-import math
4
-import sys
5
-import numpy as np
6
-from numpy.lib.function_base import append
7
-import pandas as pd
8
-from pandas import DataFrame as df
9
-from sklearn.cluster import DBSCAN
10
-import pymssql
11
-#import matplotlib.pyplot as plt
12
-
13
-
14
-ssd_dir = str(sys.argv[1])
15
-file_path = '/hdd/' + ssd_dir[5:]
16
-count = 1
17
-
18
-def put_kpi_to_db(kpi):
19
-    conn = pymssql.connect(server='52.231.39.219',port=1433, user='mc365', password='tkadbrdhmc1!', database='master')
20
-    #print("INSERT INTO tsfmc_mailsystem.dbo.KPI(surgeryID, totalStrokeCount, totalStrokeVelocity, totalStrokeDepth, upDownTotalStrokeCount, upDownTotalStrokeVelocity, upDownTotalStrokeDepth, leftRightTotalStrokeCount, leftRightTotalStrokeVelocity, leftRightTotalStrokeDepth, quadrant1TotalStrokeCount, quadrant1TotalStrokeVelocity, quadrant1TotalStrokeDepth, quadrant2TotalStrokeCount, quadrant2TotalStrokeVelocity, quadrant2TotalStrokeDepth, quadrant3TotalStrokeCount, quadrant3TotalStrokeVelocity, quadrant3TotalStrokeDepth, quadrant4TotalStrokeCount, quadrant4TotalStrokeVelocity, quadrant4TotalStrokeDepth) VALUES (" + str(kpi[0]) + ", "+str(kpi[1])+", "+str(kpi[2])+", "+str(kpi[3])+", "+str(kpi[4])+", "+str(kpi[5])+", "+str(kpi[6])+", "+str(kpi[7])+", "+str(kpi[8])+", "+str(kpi[9])+", "+str(kpi[10])+", "+str(kpi[11])+", "+str(kpi[12])+", "+str(kpi[13])+", "+str(kpi[14])+", "+str(kpi[15])+", "+str(kpi[16])+", "+str(kpi[17])+", "+str(kpi[18])+", "+str(kpi[19])+", "+str(kpi[20])+", "+str(kpi[21])+")")
21
-    cursor = conn.cursor()
22
-    cursor.execute("INSERT INTO tsfmc_mailsystem.dbo.KPI(surgeryID, totalStrokeCount, totalStrokeVelocity, totalStrokeDepth, upDownTotalStrokeCount, upDownTotalStrokeVelocity, upDownTotalStrokeDepth, leftRightTotalStrokeCount, leftRightTotalStrokeVelocity, leftRightTotalStrokeDepth, quadrant1TotalStrokeCount, quadrant1TotalStrokeVelocity, quadrant1TotalStrokeDepth, quadrant2TotalStrokeCount, quadrant2TotalStrokeVelocity, quadrant2TotalStrokeDepth, quadrant3TotalStrokeCount, quadrant3TotalStrokeVelocity, quadrant3TotalStrokeDepth, quadrant4TotalStrokeCount, quadrant4TotalStrokeVelocity, quadrant4TotalStrokeDepth) VALUES ( '" + str(kpi[0]) + "', '"+str(kpi[1])+"', '"+str(kpi[2])+"', '"+str(kpi[3])+"', '"+str(kpi[4])+"', '"+str(kpi[5])+"', '"+str(kpi[6])+"', '"+str(kpi[7])+"', '"+str(kpi[8])+"', '"+str(kpi[9])+"', '"+str(kpi[10])+"', '"+str(kpi[11])+"', '"+str(kpi[12])+"', '"+str(kpi[13])+"', '"+str(kpi[14])+"', '"+str(kpi[15])+"', '"+str(kpi[16])+"', '"+str(kpi[17])+"', '"+str(kpi[18])+"', '"+str(kpi[19])+"', '"+str(kpi[20])+"', '"+str(kpi[21])+"')")
23
-    conn.commit()
24
-    conn.close()
25
-
26
-
27
-def get_quad(df):
28
-    if df['x'] >= 0 and df['y'] >= 0:
29
-        return 1
30
-    elif df['x'] >= 0 and df['y'] < 0:
31
-        return 2
32
-    elif df['x'] < 0 and df['y'] < 0:
33
-        return 3
34
-    elif df['x'] < 0 and df['y'] >= 0:
35
-        return 4
36
-def get_inc_num(df):
37
-    global count
38
-    prev_count = 0
39
-    if abs(df['diff_diff']) > 0.0008:                
40
-        prev_count = df['inc_num']
41
-        df['inc_num'] = count
42
-        #print(prev_count)
43
-        return count        
44
-    else:
45
-        if prev_count != 0:            
46
-            count += 1
47
-            df['inc_num'] = 0
48
-            prev_count = df['inc_num']
49
-            return 0
50
-        else:
51
-            df['inc_num'] = 0
52
-            prev_count = df['inc_num']
53
-            return 0
54
-
55
-
56
-def count_inc(df):
57
-    e= 0
58
-    count  = 1
59
-    prev_count = 0
60
-    for x in df['diff_diff']:
61
-        if abs(x) > 8:
62
-            df['inc_num'][e] = count
63
-            prev_count = count
64
-            e +=1
65
-        else:
66
-            if prev_count != 0:
67
-                count += 1
68
-                df['inc_num'][e] = 0
69
-                prev_count = 0
70
-                e +=1
71
-            else:
72
-                df['inc_num'][e] = 0
73
-                prev_count = 0
74
-                e +=1
75
-
76
-def stroke_count(df):
77
-    e = 0
78
-    count = 1    
79
-    for x in df['diff_diff']:
80
-        if x > 1.2:
81
-            df['stroke_count'][e] = count
82
-            e +=1
83
-        else:
84
-            df['stroke_count'][e] = 0
85
-            e+=1
86
-            
87
-def get_sqrt(df):
88
-    e =0
89
-    count = 1
90
-    prev_x = 0.0
91
-    prev_y = 0.0
92
-    prev_z = 0.0
93
-    for x in range(len(df)):
94
-        df['sqrt'][e] = np.sqrt((df['x'][e] - prev_x) **2 + (df['y'][e] - prev_y) ** 2 +(df['z'][e] - prev_z) ** 2)
95
-        prev_x = df['x'][e]
96
-        prev_y = df['y'][e]
97
-        prev_z = df['z'][e]
98
-        e += 1
99
-
100
-        
101
-
102
-
103
-def dist():
104
-    file = os.path.join(file_path, 'coordinate.csv')
105
-    norm = pd.read_csv(file)
106
-    norm = norm.drop(norm.columns[0], axis=1)
107
-    norm = norm.fillna(0)
108
-    data = norm[['x', 'y', 'z']]
109
-    norm['sqrt'] = 0.0
110
-
111
-    get_sqrt(norm)
112
-
113
-    #norm = norm.apply(lambda x:np.sqrt(x) if x.name in ['sqrt'] else x, axis =1)
114
-    norm['diff'] = norm.diff()['sqrt']
115
-    norm['diff_diff'] = norm.diff()['diff']
116
-
117
-
118
-    norm['quad'] = norm.apply(get_quad, axis =1)
119
-    
120
-    
121
-    norm = norm.fillna(0)
122
-
123
-    norm['inc_num'] = 0
124
-    #norm['inc_num'] = norm.apply(get_inc_num, axis = 1)
125
-    count_inc(norm)
126
-    norm['stroke_count'] = 0
127
-    stroke_count(norm)
128
-    
129
-    #print('num of inc')
130
-    #print(norm['inc_num'].unique())
131
-
132
-    #print('total stroke')
133
-    #print(norm['stroke_count'].unique())
134
-    #print(norm)
135
-    diff_csv = norm[['frame','diff_diff']]
136
-    diff_csv.to_csv(os.path.join(file_path, 'diff.csv'))
137
-    #plt.plot(norm['frame'],norm['diff_diff'])
138
-    #plt.savefig('./diff_diff.png')    
139
-
140
-
141
-
142
-    kpi = pd.DataFrame(index=range(0,1))
143
-
144
-    quad_count = norm[norm['stroke_count'] != 0]
145
-    
146
-    quad1_c = quad_count[quad_count['quad'] == 1]
147
-    quad2_c = quad_count[quad_count['quad'] == 2]
148
-    quad3_c = quad_count[quad_count['quad'] == 3]
149
-    quad4_c = quad_count[quad_count['quad'] == 4]
150
-    #print(len(quad1_c))
151
-    #print(len(quad2_c))
152
-    #print(len(quad3_c))
153
-    #print(len(quad4_c))
154
-    #print(quad1_c)
155
-    kpi['PK'] = ssd_dir[5:] 
156
-    kpi['qaud1_c'] = len(quad1_c)
157
-    kpi['qaud2_c'] = len(quad2_c)
158
-    kpi['qaud3_c'] = len(quad3_c)
159
-    kpi['qaud4_c'] = len(quad4_c)
160
-    #FIXME: this values are huristic value. please change this values after research
161
-    kpi['qaud1_s'] = quad1_c['sqrt'].mean() / 30
162
-    kpi['qaud2_s'] = quad2_c['sqrt'].mean() / 30
163
-    kpi['qaud3_s'] = quad3_c['sqrt'].mean() / 30
164
-    kpi['qaud4_s'] = quad4_c['sqrt'].mean() / 30
165
-    kpi['qaud1_d'] = quad1_c['sqrt'].mean() / 45
166
-    kpi['qaud2_d'] = quad2_c['sqrt'].mean() / 45
167
-    kpi['qaud3_d'] = quad3_c['sqrt'].mean() / 45
168
-    kpi['qaud4_d'] = quad4_c['sqrt'].mean() / 45
169
-    kpi = kpi.fillna(0)
170
-
171
-
172
-    kpi_1 = {
173
-        "stroke": [len(quad1_c),len(quad2_c),len(quad3_c),len(quad4_c)],
174
-        "velocity": [quad1_c['sqrt'].mean() / 30, quad2_c['sqrt'].mean() / 30, quad3_c['sqrt'].mean() / 30, quad4_c['sqrt'].mean() / 30],
175
-        "depth": [quad1_c['sqrt'].mean() / 45, quad2_c['sqrt'].mean() / 45, quad3_c['sqrt'].mean() / 45, quad4_c['sqrt'].mean() / 45]
176
-    }
177
-
178
-
179
-    #print(kpi)
180
-    val_list = []
181
-
182
-    #PK =
183
-    val_list.append(str(ssd_dir[5:-1]))
184
-
185
-    #t_stroke = 
186
-    val_list.append(sum(kpi_1['stroke']))
187
-    #t_vel
188
-    val_list.append("{:.4f}".format(sum(kpi_1['velocity'])/4)) 
189
-    #t_dep
190
-    val_list.append("{:.4f}".format(sum(kpi_1['depth'])/4)) 
191
-    #ud_s_rate =
192
-    val_list.append("{:.4f}".format((kpi_1['stroke'][0] + kpi_1['stroke'][3] - kpi_1['stroke'][1] - kpi_1['stroke'][2]) / sum(kpi_1['stroke'])*100))
193
-    #ud_v_rate =
194
-    val_list.append("{:.4f}".format((kpi_1['velocity'][0] + kpi_1['velocity'][3] - kpi_1['velocity'][1] - kpi_1['velocity'][2])/2))
195
-    #ud_d_rate =
196
-    val_list.append("{:.4f}".format((kpi_1['depth'][0] + kpi_1['depth'][3] - kpi_1['depth'][1] - kpi_1['depth'][2])/2))
197
-
198
-    #lr_s_rate =
199
-    val_list.append("{:.4f}".format((kpi_1['stroke'][2] + kpi_1['stroke'][3] - kpi_1['stroke'][0] - kpi_1['stroke'][1]) / sum(kpi_1['stroke'])*100))
200
-    #lr_v_rate =
201
-    val_list.append("{:.4f}".format((kpi_1['velocity'][2] + kpi_1['velocity'][3] - kpi_1['velocity'][0] - kpi_1['velocity'][1])/2))
202
-    #lr_d_rate =
203
-    val_list.append("{:.4f}".format((kpi_1['depth'][2] + kpi_1['depth'][3] - kpi_1['depth'][0] - kpi_1['depth'][1])/2))
204
-
205
-    #q1 s, v, d
206
-    val_list.append(len(quad1_c))
207
-    val_list.append(quad1_c['sqrt'].mean() / 30)
208
-    val_list.append(quad1_c['sqrt'].mean() / 45)
209
-    val_list.append(len(quad2_c))
210
-    val_list.append(quad2_c['sqrt'].mean() / 30)
211
-    val_list.append(quad2_c['sqrt'].mean() / 45)
212
-    val_list.append(len(quad3_c))
213
-    val_list.append(quad3_c['sqrt'].mean() / 30)
214
-    val_list.append(quad3_c['sqrt'].mean() / 45)
215
-    val_list.append(len(quad4_c))
216
-    val_list.append(quad4_c['sqrt'].mean() / 30)
217
-    val_list.append(quad4_c['sqrt'].mean() / 45)
218
-
219
-
220
-
221
-
222
-    return val_list
223
-
224
-    #print(val_list)
225
-
226
-
227
-
228
-if __name__ == "__main__":
229
-    kpi = dist()
230
-    kpi_csv = pd.DataFrame([kpi])
231
-    kpi_csv = kpi_csv.fillna(0)
232
-    kpi_csv.to_csv(os.path.join(file_path,'kpi.csv'))
233
-    #put_kpi_to_db(kpi_csv)

+ 20
- 0
mailanalysis.py 查看文件

@@ -0,0 +1,20 @@
1
+import os
2
+import sys
3
+import pandas as pd
4
+import numpy as np
5
+import json
6
+import time
7
+
8
+def 
9
+
10
+
11
+
12
+
13
+
14
+
15
+
16
+def main():
17
+
18
+
19
+if __name__ == "__main":
20
+    main()

+ 171
- 0
mailcalibrate.py 查看文件

@@ -0,0 +1,171 @@
1
+import pyzed.sl as sl
2
+import numpy as np
3
+import sys
4
+import os
5
+import time
6
+import json
7
+import math
8
+import json
9
+
10
+JSON_PATH = '~/sources/configuration.json'
11
+
12
+
13
+def conf_depth(camobject=sl.Camera(), headCoord=(480,540), tailCoord=(1440,540)):
14
+    # Create a Camera object
15
+    hc = headCoord
16
+    tc = tailCoord
17
+
18
+
19
+    zed = sl.Camera()
20
+
21
+    # Create a InitParameters object and set configuration parameters
22
+    init = sl.InitParameters()
23
+    init.set_from_serial_number
24
+    init.depth_mode = sl.DEPTH_MODE.PERFORMANCE  # Use PERFORMANCE depth mode
25
+    init.coordinate_units = sl.UNIT.METER  # Use meter units (for depth measurements)
26
+    init.camera_resolution = sl.RESOLUTION.HD1080
27
+
28
+    # Open the camera
29
+    err = zed.open(init)
30
+    if err != sl.ERROR_CODE.SUCCESS:
31
+        exit(1)
32
+
33
+    # Create and set RuntimeParameters after opening the camera
34
+    runtime_parameters = sl.RuntimeParameters()
35
+    runtime_parameters.sensing_mode = sl.SENSING_MODE.STANDARD  # Use STANDARD sensing mode
36
+    # Setting the depth confidence parameters
37
+    runtime_parameters.confidence_threshold = 100
38
+    runtime_parameters.textureness_confidence_threshold = 100
39
+
40
+    # Capture 150 images and depth, then stop
41
+    i = 0
42
+    image = sl.Mat()
43
+    depth = sl.Mat()
44
+    point_cloud = sl.Mat()
45
+
46
+    mirror_ref = sl.Transform()
47
+    mirror_ref.set_translation(sl.Translation(2.75,4.0,0))
48
+    tr_np = mirror_ref.m
49
+    hx = 0
50
+    tx = 0    
51
+    distance_head_cliff = 0
52
+    distance_tail_cliff = 0
53
+    while i < 900:
54
+        # A new image is available if grab() returns SUCCESS
55
+        if zed.grab(runtime_parameters) == sl.ERROR_CODE.SUCCESS:
56
+            # Retrieve left image
57
+            zed.retrieve_image(image, sl.VIEW.LEFT)
58
+            # Retrieve depth map. Depth is aligned on the left image
59
+            zed.retrieve_measure(depth, sl.MEASURE.DEPTH)
60
+            # Retrieve colored point cloud. Point cloud is aligned on the left image.
61
+            zed.retrieve_measure(point_cloud, sl.MEASURE.XYZRGBA)
62
+
63
+            # Get and print distance value in mm at the center of the image
64
+            # We measure the distance camera - object using Euclidean distance
65
+            x = round(image.get_width() / 2)
66
+            y = round(image.get_height() / 2)
67
+            if hx == 0 :
68
+                hx = x
69
+                tx = x
70
+            err, point_cloud_value_center = point_cloud.get_value(x, y)
71
+
72
+            distance_center = math.sqrt(point_cloud_value_center[0] * point_cloud_value_center[0] +
73
+                                 point_cloud_value_center[1] * point_cloud_value_center[1] +
74
+                                 point_cloud_value_center[2] * point_cloud_value_center[2])
75
+            
76
+
77
+            err, point_cloud_value_head = point_cloud.get_value(hx,y)
78
+            distance_head = math.sqrt(point_cloud_value_head[0] * point_cloud_value_head[0] +
79
+                                 point_cloud_value_head[1] * point_cloud_value_head[1] +
80
+                                 point_cloud_value_head[2] * point_cloud_value_head[2])
81
+            if distance_head_cliff != 0:
82
+                pass
83
+            elif distance_head > (0.04 + distance_center):
84
+                if np.isnan(prev_distance_head) == True:
85
+                    pass
86
+                else:
87
+                    distance_head_cliff = prev_distance_head
88
+                    hx_cliff = hx
89
+            
90
+            if np.isnan(distance_head) == False:
91
+                prev_distance_head = distance_head
92
+
93
+            err, point_cloud_value_tail = point_cloud.get_value(tx,y)
94
+            distance_tail =  math.sqrt(point_cloud_value_tail[0] * point_cloud_value_tail[0] +
95
+                                 point_cloud_value_tail[1] * point_cloud_value_tail[1] +
96
+                                 point_cloud_value_tail[2] * point_cloud_value_tail[2])
97
+            
98
+            if distance_tail_cliff != 0:
99
+                pass
100
+            elif distance_tail > (0.04 + distance_center):
101
+                if np.isnan(prev_distance_tail) == True:
102
+                    pass
103
+                else:
104
+                    distance_tail_cliff = prev_distance_tail
105
+                    tx_cliff = tx - 1
106
+            if np.isnan(distance_tail) == False:
107
+                prev_distance_tail = distance_tail
108
+            
109
+
110
+            point_cloud_np = point_cloud.get_data()
111
+            point_cloud_np.dot(tr_np)
112
+            hx -=1
113
+            tx +=1
114
+
115
+            if not np.isnan(distance_center) and not np.isinf(distance_center):
116
+                print("Distance to Camera at ({}, {}) (image center): {:1.3} m".format(hx, y, distance_head))
117
+                print("Distance to Camera at ({}, {}) (image center): {:1.3} m".format(x, y, distance_center))                
118
+                print("Distance to Camera at ({}, {}) (image center): {:1.3} m".format(tx, y, distance_tail))
119
+                # Increment the loop
120
+                i = i + 1
121
+            else:
122
+                print("Can't estimate distance at this position.")
123
+                print("Your camera is probably too close to the scene, please move it backwards.\n")
124
+            sys.stdout.flush()
125
+
126
+    # Close the camera
127
+    zed.close()
128
+    return [distance_center,hx_cliff,distance_head_cliff,tx_cliff,distance_tail_cliff]
129
+
130
+
131
+
132
+def main():
133
+    cameras = sl.Camera.get_device_list()
134
+    if len(cameras) == 1:
135
+        with open (os.path.abspath(os.path.expanduser(JSON_PATH)), "r") as json_file:
136
+            conf = json.load(json_file)
137
+        d_list = conf_depth(cameras[0],conf["cam1"]["headCoord"], conf["cam1"]["tailCoord"])
138
+
139
+        conf["cam1"]["d_center"] = d_list[0]             
140
+        conf["cam1"]["d_headCoord"] = d_list[2]        
141
+        conf["cam1"]["d_tailCoord"] = d_list[4]
142
+
143
+        #as maincam, head is left side of image
144
+        if conf["mainCam"] == cameras[0][0]:
145
+            conf["cam1"]["headCoord"] = [d_list[1], 540]
146
+            conf["cam1"]["tailCoord"] = [d_list[3], 540]
147
+        else:
148
+            conf["cam1"]["headCoord"] = [d_list[3], 540]
149
+            conf["cam1"]["tailCoord"] = [d_list[1], 540]
150
+        
151
+        with open (os.path.abspath(os.path.expanduser(JSON_PATH)), "w") as json_file:
152
+            json.dump(conf,json_file, indent=4)
153
+    elif len(cameras) == 2:
154
+        with open (os.path.abspath(os.path.expanduser(JSON_PATH)), "r") as json_file:
155
+            conf = json.load(json_file)
156
+        d_list = conf_depth(cameras[1],conf["cam2"]["headCoord"], conf["cam2"]["tailCoord"])
157
+
158
+        conf["cam2"]["d_center"] = d_list[0]             
159
+        conf["cam2"]["d_headCoord"] = d_list[2]        
160
+        conf["cam2"]["d_tailCoord"] = d_list[4]
161
+        if conf["mainCam"] == cameras[1][0]:            
162
+            conf["cam2"]["headCoord"] = [d_list[1], 540]
163
+            conf["cam2"]["tailCoord"] = [d_list[3], 540]
164
+        else:
165
+            conf["cam2"]["headCoord"] = [d_list[3], 540]
166
+            conf["cam2"]["tailCoord"] = [d_list[1], 540]
167
+        with open (os.path.abspath(os.path.expanduser(JSON_PATH)), "w") as json_file:
168
+            json.dump(conf,json_file, indent=4)    
169
+
170
+if __name__ == "__main__":
171
+    main()

+ 70
- 0
mailextract_01.py 查看文件

@@ -0,0 +1,70 @@
1
+import os
2
+import sys
3
+import numpy
4
+import subprocess
5
+import pandas as pd
6
+import time
7
+
8
+SOURCE_DIR = '/home/mc365/sources/'
9
+STROAGE_DIR = '/hdd/'
10
+LOG_EXTRACT = '/log_extract.txt'
11
+#NOTE: CHANGE CFG, WEIGHT, .data
12
+CFG_FILE = 'yolov4_7gpu.cfg'
13
+WEIGHT_FILE = 'yolov4_7gpu_last.weights'
14
+DATA_FILE = 'obj.data'
15
+FRAME_COUNTER = 0
16
+N_OF_FRAME = 1800
17
+
18
+SURGEON_DIR = os.path.abspath(STROAGE_DIR+os.listdir('/hdd/')[0])
19
+CALL_DEPTH = ['python3',SOURCE_DIR+'depthsensing.py', '-c', SOURCE_DIR+CFG_FILE, '-w', SOURCE_DIR+WEIGHT_FILE, '-m', SOURCE_DIR+DATA_FILE]
20
+#print(SURGEON_DIR)
21
+def extract_surgeon ():    
22
+    svo1list = [file for file in os.listdir(os.path.abspath(SURGEON_DIR)) if file.endswith('1.svo')]
23
+    svo1list= sorted(svo1list)
24
+    del svo1list[-1]
25
+    for svo in svo1list:
26
+        REST_OF_CALL_DEPTH = ['-s',SURGEON_DIR+'/'+svo, '-o', SURGEON_DIR+'/'+svo[:5]+'.csv']
27
+        subprocess.call(CALL_DEPTH+REST_OF_CALL_DEPTH)
28
+
29
+def concat_csv():
30
+    csv1list = [file for file in os.listdir(os.path.abspath(SURGEON_DIR)) if file.endswith('1.csv')]
31
+    csv1list = sorted(csv1list)    
32
+    fc = FRAME_COUNTER
33
+    nf = N_OF_FRAME
34
+
35
+    conc_df = pd.DataFrame(data={'x': [], 'y': [], 'z': [], 'frame': []})
36
+    for csv in csv1list:
37
+        df = pd.read_csv(os.path.join(SURGEON_DIR,csv), header=0)
38
+        df['frame'] = df['frame'] + fc
39
+        conc_df = pd.concat([conc_df,df])
40
+        fc += nf
41
+    conc_df.to_csv(os.path.join(SURGEON_DIR,'cam1.csv'), index=False)
42
+
43
+def concat_csv_with_nms():
44
+    csv1list = [file for file in os.listdir(os.path.abspath(SURGEON_DIR)) if file.endswith('1_nms.csv')]
45
+    csv1list = sorted(csv1list)    
46
+    fc = FRAME_COUNTER
47
+    nf = N_OF_FRAME
48
+
49
+    conc_df = pd.DataFrame(data={'x': [], 'y': [], 'z': [], 'nms':[],'frame': []})
50
+    for csv in csv1list:
51
+        df = pd.read_csv(os.path.join(SURGEON_DIR,csv), header=0)
52
+        df['frame'] = df['frame'] + fc
53
+        conc_df = pd.concat([conc_df,df])
54
+        fc += nf
55
+    conc_df.to_csv(os.path.join(SURGEON_DIR,'cam1_nms.csv'), index=False)
56
+
57
+def log_write():
58
+    exttime = time.localtime()
59
+    csv1list = [file for file in os.listdir(os.path.abspath(SURGEON_DIR)) if file.endswith('1.csv')]
60
+    with open (os.path.abspath(SURGEON_DIR+LOG_EXTRACT), "a") as log_extract:
61
+        log_extract.write("\n[EXTRACT] EXTRACT %d svo files finishied at %04d/%02d/%02d %02d:%02d:%02d" % (len(csv1list)-1,exttime.tm_year, exttime.tm_mon, exttime.tm_mday, exttime.tm_hour, exttime.tm_min, exttime.tm_sec))
62
+
63
+def main():        
64
+    extract_surgeon()
65
+    concat_csv()
66
+    concat_csv_with_nms()    
67
+    log_write()
68
+
69
+if __name__ == '__main__':
70
+    main()

+ 70
- 0
mailextract_02.py 查看文件

@@ -0,0 +1,70 @@
1
+import os
2
+import sys
3
+import numpy
4
+import subprocess
5
+import pandas as pd
6
+import time
7
+
8
+SOURCE_DIR = '/home/mc365/sources/'
9
+STROAGE_DIR = '/hdd/'
10
+LOG_EXTRACT = '/log_extract.txt'
11
+#NOTE: CHANGE CFG, WEIGHT, .data
12
+CFG_FILE = 'yolov4_7gpu.cfg'
13
+WEIGHT_FILE = 'yolov4_7gpu_last.weights'
14
+DATA_FILE = 'obj.data'
15
+FRAME_COUNTER = 0
16
+N_OF_FRAME = 1800
17
+
18
+SURGEON_DIR = os.path.abspath(STROAGE_DIR+os.listdir('/hdd/')[0])
19
+CALL_DEPTH = ['python3',SOURCE_DIR+'depthsensing.py', '-c', SOURCE_DIR+CFG_FILE, '-w', SOURCE_DIR+WEIGHT_FILE, '-m', SOURCE_DIR+DATA_FILE]
20
+#print(SURGEON_DIR)
21
+def extract_surgeon ():    
22
+    svo2list = [file for file in os.listdir(os.path.abspath(SURGEON_DIR)) if file.endswith('2.svo')]
23
+    svo2list= sorted(svo2list)
24
+    del svo2list[-1]    
25
+    for svo in svo2list:
26
+        REST_OF_CALL_DEPTH = ['-s',SURGEON_DIR+'/'+svo, '-o', SURGEON_DIR+'/'+svo[:5]+'.csv']
27
+        subprocess.call(CALL_DEPTH+REST_OF_CALL_DEPTH)
28
+    
29
+def concat_csv():
30
+    csv2list = [file for file in os.listdir(os.path.abspath(SURGEON_DIR)) if file.endswith('2.csv')]
31
+    csv2list = sorted(csv2list)    
32
+    fc = FRAME_COUNTER
33
+    nf = N_OF_FRAME
34
+
35
+    conc_df = pd.DataFrame(data={'x': [], 'y': [], 'z': [], 'frame': []})
36
+    for csv in csv2list:
37
+        df = pd.read_csv(os.path.join(SURGEON_DIR,csv), header=0)
38
+        df['frame'] = df['frame'] + fc
39
+        conc_df = pd.concat([conc_df,df])
40
+        fc += nf
41
+    conc_df.to_csv(os.path.join(SURGEON_DIR,'cam2.csv'), index=False)
42
+
43
+def concat_csv_with_nms():
44
+    csv2list = [file for file in os.listdir(os.path.abspath(SURGEON_DIR)) if file.endswith('2_nms.csv')]
45
+    csv2list = sorted(csv2list)    
46
+    fc = FRAME_COUNTER
47
+    nf = N_OF_FRAME
48
+
49
+    conc_df = pd.DataFrame(data={'x': [], 'y': [], 'z': [], 'nms':[],'frame': []})
50
+    for csv in csv2list:
51
+        df = pd.read_csv(os.path.join(SURGEON_DIR,csv), header=0)
52
+        df['frame'] = df['frame'] + fc
53
+        conc_df = pd.concat([conc_df,df])
54
+        fc += nf
55
+    conc_df.to_csv(os.path.join(SURGEON_DIR,'cam2_nms.csv'), index=False)
56
+
57
+def log_write():
58
+    exttime = time.localtime()
59
+    csv2list = [file for file in os.listdir(os.path.abspath(SURGEON_DIR)) if file.endswith('2.csv')]
60
+    with open (os.path.abspath(SURGEON_DIR+LOG_EXTRACT), "a") as log_extract:
61
+        log_extract.write("\n[EXTRACT] EXTRACT %d svo files finishied at %04d/%02d/%02d %02d:%02d:%02d" % (len(csv2list)-1,exttime.tm_year, exttime.tm_mon, exttime.tm_mday, exttime.tm_hour, exttime.tm_min, exttime.tm_sec))
62
+
63
+def main():        
64
+    extract_surgeon()
65
+    concat_csv()
66
+    concat_csv_with_nms()    
67
+    log_write()
68
+
69
+if __name__ == '__main__':
70
+    main()

+ 115
- 0
mailimage.py 查看文件

@@ -0,0 +1,115 @@
1
+import os
2
+import sys
3
+import time
4
+import pandas as pd
5
+import json
6
+import numpy as np
7
+import math
8
+import matplotlib.pyplot as plt
9
+#import sklearn.datasets as data
10
+#import hdbscan ##NOTE: hdbscan needs Cython. please install Cython before use.
11
+import seaborn as sns
12
+import pandas as pd
13
+
14
+
15
+
16
+
17
def draw_plot_cam_1(dataframe, path):
    """Save a 3D scatter plot of camera-1 detections (green) to *path*.

    dataframe must contain 'x', 'y' and 'z' columns.
    """
    df = dataframe
    sns.set_context('poster')
    sns.set_style('white')
    sns.set_color_codes()
    plot_kwds = {'alpha': 0.5, 'linewidths': 0}
    plt.rcParams['figure.figsize'] = 9, 7

    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')
    ax.scatter(df['x'], df['y'], df['z'], color='g', s=0.3, **plot_kwds)
    plt.savefig(path, dpi=660)
    # Close instead of clear: plt.clf() leaves the figure object alive,
    # so repeated calls leak matplotlib figures.  (Also dropped the unused
    # `imgpath` local from the original.)
    plt.close(fig)
31
+
32
def draw_plot_cam_2(dataframe, path):
    """Save a 3D scatter plot of camera-2 detections (red) to *path*.

    dataframe must contain 'x', 'y' and 'z' columns.
    """
    df = dataframe
    sns.set_context('poster')
    sns.set_style('white')
    sns.set_color_codes()
    plot_kwds = {'alpha': 0.5, 'linewidths': 0}
    plt.rcParams['figure.figsize'] = 9, 7

    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')
    ax.scatter(df['x'], df['y'], df['z'], color='r', s=0.3, **plot_kwds)
    plt.savefig(path, dpi=660)
    # Close instead of clear: plt.clf() alone leaks the figure object.
    plt.close(fig)
45
+
46
def draw_xy(dataframe, path):
    """Save a 2D scatter of x vs y to *path*, titled with the file name."""
    df = dataframe
    path_list = path.split('/')
    plt.rcParams['figure.figsize'] = 9, 7
    fig = plt.figure()
    plt.scatter(df['x'], df['y'], s=0.2)
    plt.title(path_list[-1])
    plt.savefig(path, dpi=600)
    plt.close(fig)  # free the figure; plt.clf() alone leaks it
55
+    
56
+
57
def draw_yz(dataframe, path):
    """Save a 2D scatter of y vs z to *path*, titled with the file name."""
    df = dataframe
    path_list = path.split('/')
    plt.rcParams['figure.figsize'] = 9, 7
    fig = plt.figure()
    plt.scatter(df['y'], df['z'], s=0.2)
    plt.title(path_list[-1])
    plt.savefig(path, dpi=600)
    plt.close(fig)  # free the figure; plt.clf() alone leaks it
66
+
67
def draw_xz(dataframe, path):
    """Save a 2D scatter to *path*, titled with the file name.

    NOTE(review): despite the name, this plots z on the horizontal axis
    and x on the vertical (preserved as-is from the original -- confirm
    whether that axis order is intended).
    """
    df = dataframe
    path_list = path.split('/')
    plt.rcParams['figure.figsize'] = 9, 7
    fig = plt.figure()
    plt.scatter(df['z'], df['x'], s=0.2)
    plt.title(path_list[-1])
    plt.savefig(path, dpi=600)
    plt.close(fig)  # free the figure; plt.clf() alone leaks it
76
+
77
def draw_plot_total(dataframe, path):
    """Save a combined 3D scatter of both cameras to *path*.

    Rows with cam == 1 are drawn green, cam == 2 red, inside a fixed
    1.8 m room-sized bounding box with labeled room axes.
    """
    df = dataframe
    sns.set_context('poster')
    sns.set_style('white')
    sns.set_color_codes()
    plot_kwds = {'alpha': 0.5, 'linewidths': 0}
    plt.rcParams['figure.figsize'] = 9, 7

    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')
    ax.axes.set_xlim3d(left=-1.8, right=1.8)
    ax.axes.set_ylim3d(bottom=-1.8, top=1.8)
    ax.axes.set_zlim3d(bottom=0, top=1.8)
    ax.set_xlabel('x (left_right)')
    ax.set_ylabel('y (head_tail)')
    ax.set_zlabel('z (floor_ceiling)')

    cam_1 = df[df['cam'] == 1]
    cam_2 = df[df['cam'] == 2]
    ax.scatter(cam_1['x'], cam_1['y'], cam_1['z'], color='g', s=0.3, **plot_kwds)
    ax.scatter(cam_2['x'], cam_2['y'], cam_2['z'], color='r', s=0.3, **plot_kwds)

    plt.savefig(path, dpi=660)
    # Close instead of clear: plt.clf() alone leaks the figure object.
    plt.close(fig)
101
+
102
def draw_diff_diff(dataframe, path):
    """Save a line plot of diff_diff vs frame to *path*.

    Fixes two defects in the original: the axis limits were set before
    plt.figure(), so the freshly created figure discarded them, and the
    plot was saved to a hard-coded './diff.png' instead of *path*.
    """
    data = dataframe
    plt.rcParams['figure.figsize'] = 30, 7
    fig = plt.figure()  # create the figure first so the limits below apply to it
    plt.xlim(data['frame'].min() - 10, data['frame'].max() + 10)
    plt.ylim(data['diff_diff'].min(), data['diff_diff'].max())
    plt.plot(data['frame'], data['diff_diff'], color='g')
    plt.savefig(path, dpi=600)
    plt.close(fig)  # free the figure; plt.clf() alone leaks it
115
+

+ 250
- 0
mailkpi.py 查看文件

@@ -0,0 +1,250 @@
1
+import os
2
+import time
3
+import math
4
+import sys
5
+import numpy as np
6
+from numpy.lib.function_base import append
7
+import pandas as pd
8
+from pandas import DataFrame as df
9
+from sklearn.cluster import DBSCAN
10
+import pymssql
11
+#import matplotlib.pyplot as plt
12
+
13
+
14
+#ssd_dir = str(sys.argv[1])
15
+#file_path = '/hdd/' + ssd_dir[5:]
16
+
17
+
18
def put_kpi_to_db(kpi):
    """Insert one KPI row (a 22-element sequence) into tsfmc_mailsystem.dbo.KPI.

    Best effort: any failure is swallowed so the analysis pipeline keeps
    running when the DB is unreachable, matching the original behavior.

    NOTE(review): server address and credentials are hard-coded; move them
    to configuration.
    """
    columns = (
        "surgeryID, totalStrokeCount, totalStrokeVelocity, totalStrokeDepth, "
        "upDownTotalStrokeCount, upDownTotalStrokeVelocity, upDownTotalStrokeDepth, "
        "leftRightTotalStrokeCount, leftRightTotalStrokeVelocity, leftRightTotalStrokeDepth, "
        "quadrant1TotalStrokeCount, quadrant1TotalStrokeVelocity, quadrant1TotalStrokeDepth, "
        "quadrant2TotalStrokeCount, quadrant2TotalStrokeVelocity, quadrant2TotalStrokeDepth, "
        "quadrant3TotalStrokeCount, quadrant3TotalStrokeVelocity, quadrant3TotalStrokeDepth, "
        "quadrant4TotalStrokeCount, quadrant4TotalStrokeVelocity, quadrant4TotalStrokeDepth"
    )
    placeholders = ", ".join(["%s"] * 22)
    sql = "INSERT INTO tsfmc_mailsystem.dbo.KPI(%s) VALUES (%s)" % (columns, placeholders)
    try:
        conn = pymssql.connect(server='52.231.39.219', port=1433, user='mc365',
                               password='tkadbrdhmc1!', database='master')
        try:
            cursor = conn.cursor()
            # Parameterized query instead of the original string-concatenated
            # SQL: avoids injection and quoting errors.  Values are passed as
            # strings, matching the original quoted-literal insert.
            cursor.execute(sql, tuple(str(v) for v in kpi))
            conn.commit()
        finally:
            conn.close()  # always release the connection, even on failure
    except Exception:
        # Preserve the original best-effort semantics (bare `except: pass`),
        # but without also swallowing SystemExit/KeyboardInterrupt.
        pass
30
+
31
+
32
def get_quad(df):
    """Return the quadrant number (1-4) of the row's (x, y) point.

    Quadrant 1: x>=0, y>=0; 2: x>=0, y<0; 3: x<0, y<0; 4: x<0, y>=0.
    Axis points (x==0 or y==0) fold into the non-negative side.  Rows
    where a comparison is undefined (e.g. NaN) fall through and yield
    None, as in the original.
    """
    x_nonneg = df['x'] >= 0
    y_nonneg = df['y'] >= 0
    if x_nonneg:
        if y_nonneg:
            return 1
        if df['y'] < 0:
            return 2
    elif df['x'] < 0:
        if df['y'] < 0:
            return 3
        if y_nonneg:
            return 4
41
def get_inc_num(df):
    # Intended as a per-row .apply() callback numbering "increments" (rows
    # whose |diff_diff| exceeds 0.0008).
    # NOTE(review): `count` and `prev_count` are re-initialized on every
    # call, so the counter can never advance past 1 and the
    # `prev_count != 0` branch below is unreachable.  The call site in
    # dist() is commented out (count_inc() is used instead), so this
    # function appears to be dead code.  Kept byte-for-byte as-is.
    count = 1
    prev_count = 0
    if abs(df['diff_diff']) > 0.0008:                
        prev_count = df['inc_num']
        df['inc_num'] = count
        #print(prev_count)
        return count        
    else:
        if prev_count != 0:            
            count += 1
            df['inc_num'] = 0
            prev_count = df['inc_num']
            return 0
        else:
            df['inc_num'] = 0
            prev_count = df['inc_num']
            return 0
59
+
60
+
61
def count_inc(df):
    """Number consecutive runs of large motion change in df['diff_diff'].

    Rows with |diff_diff| > 0.05 receive the current run number
    (1, 2, ...); all other rows receive 0.  The run number advances each
    time a run ends.  Result is written into df['inc_num'].

    Rewritten to build the column in one pass and assign it once: the
    original wrote through chained indexing (df['inc_num'][e]), which is
    fragile under pandas copy-on-write.
    """
    run_no = 1
    in_run = False
    values = []
    for x in df['diff_diff']:
        if abs(x) > 0.05:
            values.append(run_no)
            in_run = True
        else:
            if in_run:       # a run just ended -> next run gets a new number
                run_no += 1
                in_run = False
            values.append(0)
    df['inc_num'] = values
81
def stroke_count(df):
    """Flag stroke frames: writes 1 into df['stroke_count'] where
    diff_diff exceeds 0.03, and 0 elsewhere."""
    # Vectorized equivalent of the original per-row loop, whose `count`
    # variable was never incremented and therefore always wrote 1.
    df['stroke_count'] = (df['diff_diff'] > 0.03).astype(int)
91
+            
92
def get_sqrt(df):
    """Fill df['sqrt'] with the Euclidean distance between each row's
    (x, y, z) and the previous row's; the first row gets 0.0.

    Vectorized with DataFrame.diff() and assigned with .loc, replacing
    the original iterrows() loop and chained indexing (df['sqrt'][index]),
    which was slow and is fragile under pandas copy-on-write.
    """
    deltas = df[['x', 'y', 'z']].diff()
    df['sqrt'] = np.sqrt(deltas['x'] ** 2 + deltas['y'] ** 2 + deltas['z'] ** 2)
    if len(df):
        # diff() leaves NaN on the first row; the original pinned it to 0.
        df.loc[df.index[0], 'sqrt'] = 0.0
104
+            
105
+
106
+        
107
+
108
+
109
def dist(filepath):
    """Compute stroke KPIs for one surgery from a coordinate csv.

    filepath: path to a csv with columns x, y, z, frame (room coordinates).
    Writes <dir>/norm.csv (annotated frames) and <dir>/diff.csv, then
    returns a 22-element list [surgeryID, total stroke/velocity/depth,
    up-down and left-right balance rates, then per-quadrant
    count/velocity/depth] matching the column order used by
    put_kpi_to_db().
    """
    file = filepath
    # Path split by '/'; surgeon_dir[2] is used below as the surgery ID
    # (assumes paths like /hdd/<surgeryID>/... -- TODO confirm).
    surgeon_dir = file.split('/')
    #print(surgeon_dir)

    #filepath_slice = file.slice('/')
    norm = pd.read_csv(file)
    #print(norm)
    #norm = norm.drop(norm.columns[0], axis=1)
    #norm = norm.fillna(0)
    data = norm[['x', 'y', 'z']]
    norm['sqrt'] = 0.0

    # Per-frame displacement magnitude (distance to previous frame).
    get_sqrt(norm)

    #norm = norm.apply(lambda x:np.sqrt(x) if x.name in ['sqrt'] else x, axis =1)
    # First and second differences of the displacement magnitude.
    norm['diff'] = norm.diff()['sqrt']
    norm['diff_diff'] = norm.diff()['diff']


    # Quadrant of each point in the x/y plane (see get_quad).
    norm['quad'] = norm.apply(get_quad, axis =1)
    
    
    norm = norm.fillna(0)

    norm['inc_num'] = 0
    #norm['inc_num'] = norm.apply(get_inc_num, axis = 1)
    count_inc(norm)
    norm['stroke_count'] = 0
    stroke_count(norm)
    
    #print('num of inc')
    #print(norm['inc_num'].unique())

    #print('total stroke')
    #print(norm['stroke_count'].unique())
    #print(norm)
    diff_csv = norm[['frame','diff_diff']]    
    # file[:-14] strips the 14-char filename ('coordinate.csv',
    # presumably) to get the surgery directory -- TODO confirm.
    norm.to_csv(os.path.join(file[:-14]+ 'norm.csv'), index=False)
    diff_csv.to_csv(os.path.join(file[:-14]+ 'diff.csv'))
    #plt.plot(norm['frame'],norm['diff_diff'])
    #plt.savefig('./diff_diff.png')    



    kpi = pd.DataFrame(index=range(0,1))

    # Rows flagged as stroke frames, split by quadrant.
    quad_count = norm[norm['stroke_count'] != 0]
    
    quad1_c = quad_count[quad_count['quad'] == 1]
    quad2_c = quad_count[quad_count['quad'] == 2]
    quad3_c = quad_count[quad_count['quad'] == 3]
    quad4_c = quad_count[quad_count['quad'] == 4]
    #print(len(quad1_c))
    #print(len(quad2_c))
    #print(len(quad3_c))
    #print(len(quad4_c))
    #print(quad1_c)
    kpi['PK'] = surgeon_dir[2]
    kpi['qaud1_c'] = len(quad1_c)
    kpi['qaud2_c'] = len(quad2_c)
    kpi['qaud3_c'] = len(quad3_c)
    kpi['qaud4_c'] = len(quad4_c)
    #FIXME: this values are huristic value. please change this values after research
    kpi['qaud1_s'] = quad1_c['sqrt'].mean() / 30
    kpi['qaud2_s'] = quad2_c['sqrt'].mean() / 30
    kpi['qaud3_s'] = quad3_c['sqrt'].mean() / 30
    kpi['qaud4_s'] = quad4_c['sqrt'].mean() / 30
    kpi['qaud1_d'] = quad1_c['sqrt'].mean() / 45
    kpi['qaud2_d'] = quad2_c['sqrt'].mean() / 45
    kpi['qaud3_d'] = quad3_c['sqrt'].mean() / 45
    kpi['qaud4_d'] = quad4_c['sqrt'].mean() / 45
    kpi = kpi.fillna(0)


    # Same figures rearranged into per-quadrant lists (index 0..3 = quad 1..4).
    kpi_1 = {
        "stroke": [len(quad1_c),len(quad2_c),len(quad3_c),len(quad4_c)],
        "velocity": [quad1_c['sqrt'].mean() / 30, quad2_c['sqrt'].mean() / 30, quad3_c['sqrt'].mean() / 30, quad4_c['sqrt'].mean() / 30],
        "depth": [quad1_c['sqrt'].mean() / 45, quad2_c['sqrt'].mean() / 45, quad3_c['sqrt'].mean() / 45, quad4_c['sqrt'].mean() / 45]
    }


    #print(kpi)
    val_list = []

    #PK =
    val_list.append(str(surgeon_dir[2]))

    #t_stroke = 
    val_list.append(sum(kpi_1['stroke']))
    #t_vel
    val_list.append("{:.4f}".format(sum(kpi_1['velocity'])/4)) 
    #t_dep
    val_list.append("{:.4f}".format(sum(kpi_1['depth'])/4)) 
    #ud_s_rate =
    val_list.append("{:.4f}".format((kpi_1['stroke'][0] + kpi_1['stroke'][3] - kpi_1['stroke'][1] - kpi_1['stroke'][2]) / sum(kpi_1['stroke'])*100))
    #ud_v_rate =
    val_list.append("{:.4f}".format((kpi_1['velocity'][0] + kpi_1['velocity'][3] - kpi_1['velocity'][1] - kpi_1['velocity'][2])/2))
    #ud_d_rate =
    val_list.append("{:.4f}".format((kpi_1['depth'][0] + kpi_1['depth'][3] - kpi_1['depth'][1] - kpi_1['depth'][2])/2))

    #lr_s_rate =
    val_list.append("{:.4f}".format((kpi_1['stroke'][2] + kpi_1['stroke'][3] - kpi_1['stroke'][0] - kpi_1['stroke'][1]) / sum(kpi_1['stroke'])*100))
    #lr_v_rate =
    val_list.append("{:.4f}".format((kpi_1['velocity'][2] + kpi_1['velocity'][3] - kpi_1['velocity'][0] - kpi_1['velocity'][1])/2))
    #lr_d_rate =
    val_list.append("{:.4f}".format((kpi_1['depth'][2] + kpi_1['depth'][3] - kpi_1['depth'][0] - kpi_1['depth'][1])/2))

    #q1 s, v, d
    val_list.append(str(len(quad1_c)))
    val_list.append(str(quad1_c['sqrt'].mean() / 30))
    val_list.append(str(quad1_c['sqrt'].mean() / 45))
    val_list.append(str(len(quad2_c)))
    val_list.append(str(quad2_c['sqrt'].mean() / 30))
    val_list.append(str(quad2_c['sqrt'].mean() / 45))
    val_list.append(str(len(quad3_c)))
    val_list.append(str(quad3_c['sqrt'].mean() / 30))
    val_list.append(str(quad3_c['sqrt'].mean() / 45))
    val_list.append(str(len(quad4_c)))
    val_list.append(str(quad4_c['sqrt'].mean() / 30))
    val_list.append(str(quad4_c['sqrt'].mean() / 45))
    
    #print(val_list)
    # Replace string 'nan' entries (mean() of an empty quadrant stringified)
    # with 0 so the DB insert does not receive 'nan'.
    for x in range(0,len(val_list)):        
        if val_list[x] == 'nan':
            val_list[x] = 0        
    #print(val_list)
    return val_list    

    #print(val_list)
239
+
240
def main():
    """CLI entry point: compute KPIs from the coordinate csv given as argv[1].

    The original called dist() with no argument, which always raised
    TypeError because dist(filepath) requires the csv path.  The path now
    comes from the command line, matching the commented-out sys.argv
    handling at the top of the file.
    """
    kpi = dist(str(sys.argv[1]))
    #kpi_csv = pd.DataFrame([kpi])
    #kpi_csv = kpi_csv.fillna(0)
    #kpi_csv.to_csv(os.path.join(file[:-14],'kpi.csv'))
    #put_kpi_to_db(kpi_csv)



if __name__ == "__main__":
    main()

+ 104
- 0
mailrequest.py 查看文件

@@ -0,0 +1,104 @@
1
+from re import sub
2
+import sys
3
+import os
4
+import subprocess
5
+import time
6
+
7
# NOTE(review): hard-coded sudo password -- apparently intended for the
# commented-out reboot call in request_extract(); move to secure config.
DEVICE_PASSWORD = b'tkadbrdhmc1!\n'
SOURCE_DIR = '~/sources/'            # analysis scripts live here
STORAGE_DIR = '/hdd/'                # per-surgeon recording directories
LOG_REBOOT = '~/sources/cfg/log_reboot.txt'
REC_COUNT = '/rec_count.txt'         # per-surgeon file holding the expected file count
LOG_EXTRACT = '/log_extract.txt'
# ''"device=0"'' is implicit string-literal concatenation:
# '' + "device=0" + '' == 'device=0' (the extra quotes are cosmetic).
CONTAINER_EXT_1 = ['docker', 'run', '--name', 'ext_01', '--gpus', ''"device=0"'', '--rm']
CONTAINER_EXT_2 = ['docker', 'run', '--name', 'ext_02', '--gpus', ''"device=1"'', '--rm']
# Image + command suffix for each extract container (one per GPU).
CONTAINER_IMAGE_1 = ['ellishuntingmoon/mailsys:0.5','python3',os.path.join(os.path.abspath(os.path.expanduser(SOURCE_DIR)),'mailextract_01.py')]
CONTAINER_IMAGE_2 = ['ellishuntingmoon/mailsys:0.5','python3',os.path.join(os.path.abspath(os.path.expanduser(SOURCE_DIR)),'mailextract_02.py')]
17
+
18
def container_status():
    """Return the names of 'ext_*' extract containers known to docker.

    Filters the `docker ps -a` output in Python instead of piping it
    through grep; the original never closed the Popen stdout pipe,
    leaking a file descriptor on every poll.
    """
    try:
        output = subprocess.check_output(('docker', 'ps', '-a')).decode()
    except subprocess.CalledProcessError:
        return []
    # Same result as the original grep + split: every whitespace-separated
    # token containing 'ext_' (in practice the names ext_01 / ext_02).
    return [tok for tok in output.split() if 'ext_' in tok]
28
+
29
+
30
+
31
def request_extract():        
    """One polling step of the extract scheduler.

    Sunday with an empty /hdd and no reboot logged today: log (and,
    currently commented out, perform) a weekly reboot.  Otherwise, if no
    extract container is running, scan each surgeon directory under
    STORAGE_DIR and, when its recording is complete, launch the two
    GPU extract containers for it.
    """
    nowtime = time.localtime()    
    nowdate = "%04d/%02d/%02d" % (nowtime.tm_year, nowtime.tm_mon, nowtime.tm_mday)
    hdd_list = os.listdir(os.path.abspath(STORAGE_DIR))
    reboot_list = []
    container_list = container_status()
    #print(container_list)


    with open (os.path.abspath(os.path.expanduser(LOG_REBOOT)), "r") as log_reboot:
            reboot_list = log_reboot.readlines()            
    #NOTE: nowtime.tm_wday == 6 is sunday
    # Reboot only when the last log line is not from today and /hdd holds
    # nothing but lost+found (len == 1), i.e. no pending work.
    if (nowtime.tm_wday == 6) and (nowdate not in reboot_list[-1]) and (len(hdd_list) == 1):
        with open (os.path.abspath(os.path.expanduser(LOG_REBOOT)), "a+") as log_reboot:
            log_reboot.write("\n[REBOOT] Device reboot executed at %04d/%02d/%02d %02d:%02d:%02d" % (nowtime.tm_year, nowtime.tm_mon, nowtime.tm_mday, nowtime.tm_hour, nowtime.tm_min, nowtime.tm_sec))
        
        #FIXME: MUST ERASE COMMENT WHEN YOU COMPLETE
        #subprocess.call(('echo','rebooting...'))
        #subprocess.Popen(('sudo','-S','shutdown','-r','now'), stdin=subprocess.PIPE, stderr=subprocess.PIPE).communicate(input=PASSWORD)
    elif len(container_list) == 0 :
        hdd_list = []
        hdd_list = os.listdir(os.path.abspath(STORAGE_DIR))
        hdd_list.remove('lost+found')
        #roundrobin surgeons
        for surgeon in hdd_list:
            if len(container_list) != 0:
                break
            svocount = 0
            surgeondatalist = os.listdir(os.path.abspath(STORAGE_DIR+surgeon))
            # Read the expected recording count from rec_count.txt.
            # (translated from Korean comment)
            with open (os.path.abspath(STORAGE_DIR+surgeon+REC_COUNT), "r") as log_reboot:
                svocount = int(log_reboot.readline())

            
            svofulllist = [file for file in surgeondatalist if file.endswith('.svo')]
            # Start analysis when the svo count equals rec_count - 2 and the
            # total file count equals rec_count.  (translated from Korean)
            if (len(svofulllist) == (svocount-2)) and len(surgeondatalist) == svocount:
                exttime = time.localtime()                
                with open (os.path.abspath(STORAGE_DIR+surgeon+LOG_EXTRACT), "w+") as log_extract:
                    log_extract.write("\n[EXTRACT] EXTRACT %d svo files executed at %04d/%02d/%02d %02d:%02d:%02d" % (len(svofulllist),exttime.tm_year, exttime.tm_mon, exttime.tm_mday, exttime.tm_hour, exttime.tm_min, exttime.tm_sec))
                
                # Bind-mount the surgeon data and the sources dir into the
                # containers, working directory = the surgeon data dir.
                containermiddle = [\
                    '-v', os.path.abspath(STORAGE_DIR+surgeon)+':'+ os.path.abspath(STORAGE_DIR+surgeon), \
                    '-v', os.path.abspath(os.path.expanduser(SOURCE_DIR))+':'+os.path.abspath(os.path.expanduser(SOURCE_DIR)), \
                    '-w', os.path.abspath(STORAGE_DIR+surgeon)                    
                ]
                #docker run --name ext_01 --gpus '"device=0"' --rm -v /hdd/{surgeon}:/hdd/{surgeon} -v /home/mc365/sources/:/home/mc365/sources/ -w /hdd/{surgeon} ellishuntingmoon/mailsys:0.5 python3 /home/mc365/sources/mailextract_01.py
                #print(CONTAINER_EXT_1 + containermiddle + CONTAINER_IMAGE_1)                
                subprocess.Popen(CONTAINER_EXT_1 + containermiddle + CONTAINER_IMAGE_1)
                subprocess.Popen(CONTAINER_EXT_2 + containermiddle + CONTAINER_IMAGE_2)
                
                #svo1list = [file for file in svofulllist if file.endswith('1.svo')]
                #svo2list = [file for file in svofulllist if file.endswith('2.svo')]
                #print('ext start')
                # Give the containers time to appear in `docker ps`, then
                # re-check so the loop stops launching more work.
                time.sleep(15)
                container_list = container_status()
            else:
                #print('already finished')
                pass
                #print(svo1list, svo2list)
                
    else:
        #print('extract is in progress... wait until it finish...')
        pass
95
+    
96
+        
97
+
98
def main():
    """Poll forever, launching extract containers when recordings are ready."""
    poll_seconds = 15
    while True:
        request_extract()
        time.sleep(poll_seconds)


if __name__ == '__main__':
    main()

+ 137
- 0
mailsetup.py 查看文件

@@ -0,0 +1,137 @@
1
+import os
2
+import sys
3
+import pyzed.sl as sl
4
+import json
5
+import numpy as np 
6
+import math 
7
+import time
8
+import subprocess
9
+import mailcalibrate
10
+
11
WDIR_DEFAULT = os.path.expanduser('~/data/')   # default working directory
SDIR_DEFAULT = os.path.abspath('/hdd/')        # default storage mount
MAINCAM_DEFAULT = 'cam1'                       # default main-camera key
RECFRAME_DEFAULT = 1800                        # frames per recording chunk
JSON_PATH = '~/sources/configuration.json'     # written by main() below
LOG_REBOOT = '~/sources/cfg/log_reboot.txt'

# Enumerate attached ZED cameras once at import time.
cameras = sl.Camera.get_device_list()
19
+
20
def get_cam_info():
    """Open every attached ZED camera and return a flat list
    [name0, angle0, name1, angle1, ...] of camera names and mounting
    angles estimated from the IMU orientation quaternion.
    """
    init = sl.InitParameters()
    init.depth_mode = sl.DEPTH_MODE.NONE  # only the IMU is needed here
    init.coordinate_units = sl.UNIT.METER
    init.camera_resolution = sl.RESOLUTION.HD720
    name_list = []
    angle_list = []

    for cam in cameras:
        init.set_from_serial_number(cam.serial_number)
        name_list.append("ZED {}".format(cam.serial_number))
        zed = sl.Camera()
        err = zed.open(init)
        if err != sl.ERROR_CODE.SUCCESS:
            print(repr(err))
            zed.close()
            # Skip this camera instead of reading from a closed handle
            # (the original fell through and kept using it).
            continue
        sensors_data = sl.SensorsData()
        if zed.get_sensors_data(sensors_data, sl.TIME_REFERENCE.CURRENT) == sl.ERROR_CODE.SUCCESS:
            # Filtered orientation quaternion
            quaternion = sensors_data.get_imu_data().get_pose().get_orientation().get()
            # FIXME: heuristic quaternion->degrees mapping carried over from
            # the original ("IS IT REALLY CORRECT ANGLE???") -- verify.
            angle = (quaternion[0] * 900) / 7 + 90
            angle_list.append(angle)
            # Print only when the angle was actually obtained; the original
            # printed an undefined name when the sensor read failed.
            print(angle)
        # Close each camera inside the loop; the original closed only the
        # last one, leaving earlier cameras held open.
        zed.close()

    # Interleave names and angles: [name0, angle0, name1, angle1, ...].
    return_list = []
    for name, angle in zip(name_list, angle_list):
        return_list.append(name)
        return_list.append(angle)
    return return_list
56
+    
57
+
58
def main():
    """Build and write ~/sources/configuration.json for this device.

    Detects the attached ZED cameras, records their serial numbers and
    estimated mounting angles, and writes the configuration skeleton
    (calibration distances are left at 0 / empty for later calibration).
    An existing configuration file is moved to configuration.json.bak
    before being replaced.  Also creates log_reboot.txt if missing.
    """
    #Initialize JSON
    conf = {
    "confDate" : "",
    "branch" : "",
    "room" : "",
    "workingDir" : "",
    "storageDir" : "",
    "mainCam" : "cam1",
    "recFrame" : 1800,
    "cam1" : {
        "SN" : "",
        "angle" : "",
        "d_center" : "",
        "headCoord" : "",
        "d_headCoord" : "",
        "tailCoord" : "",
        "d_tailCoord" : ""
        },
    "cam2" : {
        "SN" : "",
        "angle" : "",
        "d_center" : "",
        "headCoord" : "",
        "d_headCoord" : "",
        "tailCoord" : "",
        "d_tailCoord" : ""
        }
    }

    wdirpath = os.path.abspath(os.path.expanduser('~/data'))
    sdirpath = os.path.abspath('/hdd')
        
    nowtime = time.localtime()
    #NOTE: createdDate = "2022/02/04 21:07:54"
    createdDate = "%04d/%02d/%02d %02d:%02d:%02d" % (nowtime.tm_year, nowtime.tm_mon, nowtime.tm_mday, nowtime.tm_hour, nowtime.tm_min, nowtime.tm_sec)

    # caminfo_list is [name0, angle0(, name1, angle1)] from get_cam_info().
    caminfo_list = get_cam_info()
    
    conf["confDate"] = createdDate
    conf["workingDir"] = wdirpath
    conf["storageDir"] = sdirpath
    conf["mainCam"] = caminfo_list[0]
    conf["recFrame"] = RECFRAME_DEFAULT
    conf["cam1"]["SN"] = caminfo_list[0]
    conf["cam1"]["angle"] = caminfo_list[1]
    conf["cam1"]["d_center"] = 0
    conf["cam1"]["headCoord"] = ''
    conf["cam1"]["d_headCoord"] = 0
    conf["cam1"]["tailCoord"] = ''
    conf["cam1"]["d_tailCoord"] = 0
    # Two cameras detected (4 entries): fill cam2 and reset mainCam to a
    # placeholder.  NOTE(review): "SN 00000000" looks like a sentinel to be
    # set during calibration -- confirm.
    if len(caminfo_list) == 4:
        conf["mainCam"] = "SN 00000000"
        conf["cam2"]["SN"] = caminfo_list[2]
        conf["cam2"]["angle"] = caminfo_list[3]
        conf["cam2"]["d_center"] = 0
        conf["cam2"]["headCoord"] = ''
        conf["cam2"]["d_headCoord"] = 0
        conf["cam2"]["tailCoord"] = ''
        conf["cam2"]["d_tailCoord"] = 0
    else:
        pass

    # Back up any existing configuration, then write the new one.
    if os.path.isfile(os.path.expanduser(JSON_PATH)):
        subprocess.call(('mv', os.path.expanduser(JSON_PATH), os.path.expanduser('~/sources/configuration.json.bak')))
        print('Already configuration file exists.')
        with open (os.path.abspath(os.path.expanduser(JSON_PATH)), "w") as json_file:
            json.dump(conf,json_file, indent=4)
        print('configuration.json was created')
    else:
        with open (os.path.abspath(os.path.expanduser(JSON_PATH)), "w") as json_file:
            json.dump(conf,json_file, indent=4)

    # Create the reboot log on first run.
    if os.path.isfile(os.path.expanduser(LOG_REBOOT)) == False:        
        with open (os.path.abspath(os.path.expanduser(LOG_REBOOT)), "w+") as log_reboot:
            log_reboot.write('This file automatically created at '+ createdDate)
            print('log_reboot.txt was created')

if __name__ == "__main__":
    main()

+ 230
- 0
mailsync.py 查看文件

@@ -0,0 +1,230 @@
1
+import os
2
+import sys
3
+import time
4
+import pandas as pd
5
+import json
6
+import numpy as np
7
+import math
8
+import subprocess
9
+import paramiko
10
+sys.path.append(os.path.expanduser('~/sources/'))
11
+import mailimage
12
+import mailkpi
13
+
14
+
15
+
16
WDIR_DEFAULT = os.path.expanduser('~/data/')   # default working directory
SDIR_DEFAULT = os.path.abspath('/hdd/')        # default storage mount
RECFRAME_DEFAULT = 1800                        # frames per recording chunk
JSON_PATH = '~/sources/configuration.json'     # written by mailsetup.py
SOURCE_DIR = '~/sources/'
STORAGE_DIR = '/hdd/'
# Pixel-to-meter scale factors: presumably a 1920x1080 frame mapping to a
# 4 m x 2.25 m field of view -- TODO confirm against camera calibration.
WIDTH_NORMAL = float(4) / 1920
HEIGHT_NORMAL = float(2.25) / 1080
24
+
25
+
26
+#dataFrame = cam1.csv, conf_cam = conf['cam1'] or conf['cam2']
27
def sync_coord_main(dataFrame, conf_cam):
    """Transform detections from the main camera's frame into the shared
    room ("global") coordinate frame.

    dataFrame: columns x, y (pixels), z (meters, depth) and frame.
    conf_cam:  this camera's config dict ('angle' in degrees, 'd_center'
               and 'd_headCoord' distances).
    Returns a new DataFrame with columns x, y, z, frame.
    """
    df = dataFrame
    cam = conf_cam
    # NOTE(review): the result of drop_duplicates is discarded, so this
    # line is a no-op; loop_sync() deduplicates again later.  Left
    # unchanged to preserve behavior.
    df.drop_duplicates(['frame'])
    radian = np.pi / 180 * cam['angle']
    sin_a = np.sin(radian)
    cos_a = np.cos(radian)

    # Pixel -> meter scaling.
    df['x'] = df['x'] * WIDTH_NORMAL
    df['y'] = df['y'] * HEIGHT_NORMAL

    # Camera position relative to the room origin.
    g_cam_x = math.sqrt((cam['d_headCoord'] ** 2) - (cam['d_center'] ** 2))
    g_cam_y = cam['d_center'] * cos_a
    g_cam_z = cam['d_center'] * sin_a

    g_df = pd.DataFrame(data={'x': [], 'y': [], 'z': [], 'frame': []})
    g_df['x'] = df['x'] + g_cam_x
    g_df['frame'] = df['frame']

    # Per-row 2x2 linear system rotating (y, z) through the camera's tilt.
    # Assigned with .loc instead of the original chained indexing
    # (g_df['y'][index]), which breaks under pandas copy-on-write.
    for index, row in df.iterrows():
        A = [[sin_a, -(cos_a)], [cos_a, sin_a]]
        B = [row['y'] - g_cam_y * sin_a - g_cam_z * cos_a,
             row['z'] - g_cam_y * cos_a + g_cam_z * sin_a]
        C = np.linalg.solve(A, B)
        g_df.loc[index, 'y'] = C[0]
        g_df.loc[index, 'z'] = C[1]

    # Axis swap: camera x becomes room y and vice versa.
    g_result = pd.DataFrame(data={'x': g_df['y'], 'y': g_df['x'],
                                  'z': g_df['z'], 'frame': g_df['frame']})
    return (g_result)
56
+    
57
+
58
+
59
+
60
+
61
+
62
+
63
+    
64
+
65
+
66
+
67
def sync_coord_sub(dataFrame, conf_cam):
    """Transform detections from the secondary camera's frame into the
    shared room coordinate frame (mirror of sync_coord_main with the
    rotation applied in the opposite sense).

    dataFrame: columns x, y (pixels), z (meters, depth) and frame.
    conf_cam:  this camera's config dict ('angle', 'd_center', 'd_headCoord').
    Returns a new DataFrame with columns x, y, z, frame.
    """
    df = dataFrame
    cam = conf_cam

    # NOTE(review): no-op (result discarded), kept to match sync_coord_main.
    df.drop_duplicates(['frame'])
    radian = np.pi / 180 * cam['angle']
    sin_a = np.sin(radian)
    cos_a = np.cos(radian)

    # Pixel -> meter scaling.
    df['x'] = df['x'] * WIDTH_NORMAL
    df['y'] = df['y'] * HEIGHT_NORMAL

    # Camera position relative to the room origin.
    g_cam_x = math.sqrt((cam['d_headCoord'] ** 2) - (cam['d_center'] ** 2))
    g_cam_y = cam['d_center'] * cos_a
    g_cam_z = cam['d_center'] * sin_a

    g_df = pd.DataFrame(data={'x': [], 'y': [], 'z': [], 'frame': []})
    g_df['x'] = df['x'] + g_cam_x
    # Bug fix: the original never copied 'frame', so the returned frame
    # column was all NaN (sync_coord_main does copy it).
    g_df['frame'] = df['frame']

    # Per-row 2x2 system, assigned with .loc instead of chained indexing
    # (fragile under pandas copy-on-write).
    for index, row in df.iterrows():
        A = [[sin_a, cos_a], [cos_a, -(sin_a)]]
        B = [g_cam_y * sin_a + g_cam_z * cos_a - row['y'],
             -(g_cam_y * cos_a) - g_cam_z * sin_a - row['z']]
        C = np.linalg.solve(A, B)
        g_df.loc[index, 'y'] = C[0]
        g_df.loc[index, 'z'] = C[1]

    # Axis swap: camera x becomes room y and vice versa.
    g_result = pd.DataFrame(data={'x': g_df['y'], 'y': g_df['x'],
                                  'z': g_df['z'], 'frame': g_df['frame']})
    return (g_result)
97
+
98
+
99
+
100
+
101
+def loop_sync():
102
+    with open (os.path.abspath(os.path.expanduser(JSON_PATH)), "r") as json_file:
103
+        conf = json.load(json_file)
104
+    hdd_list = []
105
+    hdd_list = os.listdir(os.path.abspath(STORAGE_DIR))
106
+    hdd_list.remove('lost+found')
107
+    tmp_e = hdd_list[0]
108
+    hdd_list[0] = hdd_list[1]
109
+    hdd_list[1] = tmp_e
110
+
111
+
112
+    for surgeon in hdd_list:
113
+        surgeondatalist = os.listdir(os.path.abspath(STORAGE_DIR+surgeon))
114
+        surgeondatalist = sorted(surgeondatalist)
115
+        if 'kpi.csv' in surgeondatalist:
116
+            transport = paramiko.Transport(('mc365.asuscomm.com', 63122))
117
+            transport.connect(username='mc365', password='tkadbrdhMC1!')
118
+            sftp = paramiko.SFTPClient.from_transport(transport)
119
+            for surgeondata in surgeondatalist:
120
+                try:                
121
+                    sftp.put(os.path.abspath(STORAGE_DIR+surgeon+'/'+surgeondata), '/data/'+surgeon + '/' + surgeondata)
122
+                except:
123
+                    pass
124
+            sftp.close()
125
+            transport.close()
126
+            subprocess.Popen(['rm', '-rf', os.path.abspath(STORAGE_DIR+surgeon+'/')])            
127
+            
128
+        else:
129
+            if 'cam1.csv' in surgeondatalist:
130
+                df_1 = pd.read_csv(os.path.abspath(STORAGE_DIR+surgeon+'/cam1.csv'))                
131
+                if conf['mainCam'] == conf['cam1']['SN']:                
132
+                    g_cam1=sync_coord_main(df_1, conf['cam1'])
133
+                    g_cam1['cam'] = 1                    
134
+                    g_cam1.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord.csv'),index=False)
135
+                    mailimage.draw_plot_cam_1(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord.png'))
136
+                    mailimage.draw_xy(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord_xy.png'))
137
+                    mailimage.draw_yz(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord_yz.png'))
138
+                    mailimage.draw_xz(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord_xz.png'))
139
+                    if 'cam2.csv' not in surgeondatalist:                        
140
+                        g_cam1 = g_cam1.drop_duplicates(['frame'], keep='first')
141
+                        g_cam1.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.csv'),index=False)
142
+                        g_cam1.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_summary.csv'),index=False)
143
+                        mailimage.draw_plot_total(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.png'))
144
+                        mailimage.draw_xy(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_xy.png'))
145
+                        mailimage.draw_yz(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_yz.png'))
146
+                        mailimage.draw_xz(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_xz.png'))                        
147
+                else:
148
+                    g_cam1=sync_coord_sub(df_1, conf['cam1'])
149
+                    g_cam1['cam'] = 1                    
150
+                    g_cam1.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord.csv'),index=False)
151
+                    mailimage.draw_plot_cam_1(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord.png'))
152
+                    mailimage.draw_xy(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord_xy.png'))
153
+                    mailimage.draw_yz(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord_yz.png'))
154
+                    mailimage.draw_xz(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/cam1_coord_xz.png'))
155
+                    if 'cam2.csv' not in surgeondatalist:
156
+                        g_cam1 = g_cam1.drop_duplicates(['frame'], keep='first')
157
+                        g_cam1.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.csv'),index=False)
158
+                        g_cam1.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_summary.csv'),index=False)
159
+                        mailimage.draw_plot_total(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.png'))
160
+                        mailimage.draw_xy(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_xy.png'))
161
+                        mailimage.draw_yz(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_yz.png'))
162
+                        mailimage.draw_xz(g_cam1, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_xz.png'))
163
+                        
164
+            if 'cam2.csv' in surgeondatalist:
165
+                df_2 = pd.read_csv(os.path.abspath(STORAGE_DIR+surgeon+'/cam2.csv'))
166
+
167
+                if conf['mainCam'] == conf['cam2']['SN']:                
168
+                    g_cam2 = sync_coord_main(df_2, conf['cam2'])
169
+                    g_cam2['cam'] = 2
170
+
171
+
172
+                    g_cam2.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord.csv'),index=False)
173
+                    mailimage.draw_plot_cam_2(g_cam2, os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord.png'))
174
+                    mailimage.draw_xy(g_cam2, os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord_xy.png'))
175
+                    mailimage.draw_yz(g_cam2, os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord_yz.png'))
176
+                    mailimage.draw_xz(g_cam2, os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord_xz.png'))
177
+                    g_coor = pd.concat([g_cam1,g_cam2])
178
+                    g_coor = g_coor.drop_duplicates(['frame'], keep='first')
179
+                    g_coor = g_coor.sort_values('frame')
180
+                    g_coor.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.csv'),index=False)
181
+                    g_coor.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_summary.csv'),index=False)
182
+                    mailimage.draw_plot_total(g_coor, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.png'))
183
+                    mailimage.draw_xy(g_coor, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_xy.png'))
184
+                    mailimage.draw_yz(g_coor, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_yz.png'))
185
+                    mailimage.draw_xz(g_coor, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_xz.png'))
186
+                else:
187
+                    g_cam2 = sync_coord_sub(df_2, conf['cam2'])
188
+                    g_cam2['cam'] = 2
189
+
190
+                    g_cam2.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord.csv'),index=False)
191
+                    mailimage.draw_plot_cam_2(g_cam2, os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord.png'))
192
+                    mailimage.draw_xy(g_cam2, os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord_xy.png'))
193
+                    mailimage.draw_yz(g_cam2, os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord_yz.png'))
194
+                    mailimage.draw_xz(g_cam2, os.path.abspath(STORAGE_DIR+surgeon+'/cam2_coord_xz.png'))
195
+                    g_coor = pd.concat([g_cam1,g_cam2])
196
+                    g_coor = g_coor.drop_duplicates(['frame'], keep='first')
197
+                    g_coor = g_coor.sort_values('frame')
198
+                    g_coor.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.csv'),index=False)
199
+                    g_coor.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_summary.csv'),index=False)
200
+                    mailimage.draw_plot_total(g_coor, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.png'))
201
+                    mailimage.draw_xy(g_coor, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_xy.png'))
202
+                    mailimage.draw_yz(g_coor, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_yz.png'))
203
+                    mailimage.draw_xz(g_coor, os.path.abspath(STORAGE_DIR+surgeon+'/coordinate_xz.png'))
204
+            #difference of difference        
205
+            df_kpi = mailkpi.dist(os.path.abspath(STORAGE_DIR+surgeon+'/coordinate.csv'))
206
+            mailkpi.put_kpi_to_db(df_kpi)            
207
+            df_kpi = pd.DataFrame([df_kpi])
208
+            diff = pd.read_csv(os.path.abspath(STORAGE_DIR+surgeon+'/diff.csv'))
209
+            mailimage.draw_diff_diff(diff,os.path.abspath(STORAGE_DIR+surgeon+'/diff.png'))
210
+            df_kpi.to_csv(os.path.abspath(STORAGE_DIR+surgeon+'/kpi.csv'), index=False)
211
+            
212
+            
213
+            #subprocess ftp
214
+            
215
+            
216
+
217
+def main():    
218
+    while True:
219
+        loop_sync()
220
+        time.sleep(15)
221
+
222
+
223
+                
224
+        
225
+
226
+
227
+if __name__ == "__main__":
228
+    main()
229
+
230
+

+ 0
- 0
mailuploader.py 查看文件


+ 0
- 2
names.names 查看文件

@@ -1,2 +0,0 @@
1
-bind	
2
-point

+ 5
- 0
obj.data 查看文件

@@ -0,0 +1,5 @@
1
+classes = 1
2
+train = /workspace/configurations/v4_220204/train.txt
3
+valid = /workspace/configurations/v4_220204/valid.txt
4
+names = /home/mc365/sources/obj.names
5
+backup = /workspace/configurations/v4_220204/weights_backups/

+ 1
- 0
obj.names 查看文件

@@ -0,0 +1 @@
1
+handle

recorder.py → rec_ubuntu.py 查看文件

@@ -1,3 +1,4 @@
1
+from asyncio import subprocess
1 2
 import sys
2 3
 import os
3 4
 import pyzed.sl as sl
@@ -5,6 +6,10 @@ import threading
5 6
 import time
6 7
 from signal import signal, SIGINT
7 8
 
9
+#FIXME: 2022/02/04 completed
10
+#FIXME: NOTE: rec_count.txt contains number of flies in target_path include itself.
11
+
12
+
8 13
 zed_list = []
9 14
 timestamp_list = []
10 15
 thread_list = []
@@ -16,15 +21,18 @@ record_time_per_svo = 1800
16 21
 name_time_list = []
17 22
 
18 23
 
19
-root_path = "/data/"
24
+root_path = os.path.abspath(os.path.expanduser('~/data/'))
25
+print(root_path)
20 26
 filenames = os.listdir(root_path)
21 27
 for f in filenames:
22
-    w_time = os.path.getctime(root_path + f)
28
+    w_time = os.path.getctime(root_path + '/'+f)
23 29
     name_time_list.append((f, w_time))
30
+
24 31
 sorted_list = sorted(name_time_list, key=lambda x: x[1], reverse=True)
32
+print(sorted_list)
25 33
 
26 34
 recent_file = sorted_list[0]
27
-target_dir = str(root_path) + str(recent_file[0])
35
+target_dir = str(root_path) + '/' + str(recent_file[0])
28 36
 
29 37
 
30 38
 def handler(signal, frame):
@@ -32,14 +40,23 @@ def handler(signal, frame):
32 40
     global zed_list
33 41
     global target_dir        
34 42
     stop_signal = True
43
+    #os.call(('wall', 'catch SIGINT'))
44
+    
45
+
46
+    #STOP RECORDING CAMERA
47
+    for cam in zed_list:
48
+        cam.disable_recording()
49
+        cam.close()
50
+
51
+    nowtime = time.localtime()
52
+    rec_done = "%04d/%02d/%02d %02d:%02d:%02d" % (nowtime.tm_year, nowtime.tm_mon, nowtime.tm_mday, nowtime.tm_hour, nowtime.tm_min, nowtime.tm_sec)
35 53
 
36 54
     filepath = '%s/log.txt'%(target_dir)
37
-    f = open(filepath, 'w')
38
-    f.write('record succesfully stopped')
55
+    f = open(filepath, 'a')
56
+    f.write('\n[RECORD] record succesfully stopped at ' + rec_done + '\n')
39 57
     f.close()
40
-    
41
-    time.sleep(2)
42
-    exit()
58
+    with open ('%s/rec_count.txt'%(target_dir), "w") as rec_count:
59
+        rec_count.write(str(len(os.listdir(target_dir))))
43 60
 
44 61
 def grab_run(index, cameras):
45 62
     global stop_signal
@@ -55,7 +72,7 @@ def grab_run(index, cameras):
55 72
     while not stop_signal:
56 73
         #I need loop and 1800ms
57 74
         if frame_list[index] % record_time_per_svo == 0:
58
-            filepath = '%s/%s_%s.svo'%(target_dir,str(shard_count).zfill(3),cameras[index].id+1)
75
+            filepath = '%s/%s_%s.svo'%(target_dir,str(shard_count).zfill(3),cameras[index].id+1)            
59 76
             record_params = sl.RecordingParameters(filepath, sl.SVO_COMPRESSION_MODE.H264)
60 77
             err = zed_list[index].enable_recording(record_params)
61 78
             #print('new shard created')
@@ -74,9 +91,11 @@ def grab_run(index, cameras):
74 91
     #print('saved')    
75 92
 
76 93
 def main():
77
-
94
+    global target_dir
95
+    #os.call(('wall', target_dir))
78 96
     signal(SIGINT, handler)
79 97
 
98
+
80 99
     #print('Running...')
81 100
     init = sl.InitParameters()
82 101
     init.camera_resolution = sl.RESOLUTION.HD1080
@@ -85,13 +104,15 @@ def main():
85 104
     name_list = []
86 105
 
87 106
     cameras = sl.Camera.get_device_list()
88
-    #print(cameras)
107
+    print(cameras)
89 108
     index = 0
90 109
     for cam in cameras:
91 110
         init.set_from_serial_number(cam.serial_number)
92 111
         name_list.append("ZED {}".format(cam.serial_number))
93 112
         print("Opening {}".format(name_list[index]))
113
+        
94 114
         zed_list.append(sl.Camera())
115
+        
95 116
         timestamp_list.append(0)
96 117
         frame_list.append(0)
97 118
         status = zed_list[index].open(init)
@@ -105,6 +126,7 @@ def main():
105 126
         if zed_list[index].is_opened():
106 127
             thread_list.append(threading.Thread(target=grab_run, args=(index,cameras)))
107 128
             thread_list[index].start()
129
+            
108 130
 
109 131
 
110 132
 

+ 0
- 107
recorder.py.bak 查看文件

@@ -1,107 +0,0 @@
1
-import sys
2
-import os
3
-import pyzed.sl as sl
4
-import threading
5
-import time
6
-from signal import signal, SIGINT
7
-
8
-zed_list = []
9
-timestamp_list = []
10
-thread_list = []
11
-stop_signal = False
12
-frame_list = []
13
-
14
-record_time_per_svo = 1800
15
-
16
-name_time_list = []
17
-
18
-
19
-root_path = "/data/"
20
-filenames = os.listdir(root_path)
21
-for f in filenames:
22
-    w_time = os.path.getctime(root_path + f)
23
-    name_time_list.append((f, w_time))
24
-sorted_list = sorted(name_time_list, key=lambda x: x[1], reverse=True)
25
-
26
-recent_file = sorted_list[0]
27
-target_dir = str(root_path) + str(recent_file[0])
28
-
29
-
30
-def handler(signal, frame):
31
-    global stop_signal
32
-    global zed_list
33
-    stop_signal = True    
34
-    time.sleep(2)
35
-    exit()
36
-
37
-def grab_run(index, cameras):
38
-    global stop_signal
39
-    global zed_list
40
-    global timestamp_list
41
-    global left_list
42
-    global depth_list
43
-    global frame_list
44
-    global taget_dir
45
-
46
-    runtime = sl.RuntimeParameters()
47
-    shard_count = 0
48
-    while not stop_signal:
49
-        #I need loop and 1800ms
50
-        if frame_list[index] % record_time_per_svo == 0:
51
-            filepath = '%s/%s_%s.svo'%(target_dir,str(shard_count).zfill(3),cameras[index].id+1)
52
-            record_params = sl.RecordingParameters(filepath, sl.SVO_COMPRESSION_MODE.H264)
53
-            err = zed_list[index].enable_recording(record_params)
54
-            #print('new shard created')
55
-            shard_count += 1
56
-            if zed_list[index].grab(runtime) == sl.ERROR_CODE.SUCCESS :
57
-                frame_list[index] += 1
58
-        #if zed_list[index].grab() == "SUCCESS":
59
-        #    zed_list[index].record()
60
-        #print("Frame count: " + str(frame_list[index]), end='\r')
61
-        if zed_list[index].grab(runtime) == sl.ERROR_CODE.SUCCESS :
62
-            frame_list[index] += 1
63
-
64
-    zed_list[index].disable_recording()
65
-    filepath = '%s/log.txt'%(target_dir)
66
-    f = open(filepath, 'w')
67
-    f.write('record succesfully stopped')
68
-    f.close()
69
-    #print('saved')    
70
-
71
-def main():
72
-
73
-    signal(SIGINT, handler)
74
-
75
-    #print('Running...')
76
-    init = sl.InitParameters()
77
-    init.camera_resolution = sl.RESOLUTION.HD1080
78
-    init.camera_fps = 30
79
-
80
-    name_list = []
81
-
82
-    cameras = sl.Camera.get_device_list()
83
-    #print(cameras)
84
-    index = 0
85
-    for cam in cameras:
86
-        init.set_from_serial_number(cam.serial_number)
87
-        name_list.append("ZED {}".format(cam.serial_number))
88
-        print("Opening {}".format(name_list[index]))
89
-        zed_list.append(sl.Camera())
90
-        timestamp_list.append(0)
91
-        frame_list.append(0)
92
-        status = zed_list[index].open(init)
93
-        if status != sl.ERROR_CODE.SUCCESS:
94
-            print(repr(status))
95
-            zed_list[index].close()
96
-        index = index +1
97
-
98
-    #Start camera threads
99
-    for index in range(0, len(zed_list)):
100
-        if zed_list[index].is_opened():
101
-            thread_list.append(threading.Thread(target=grab_run, args=(index,cameras)))
102
-            thread_list[index].start()
103
-
104
-
105
-
106
-if __name__ == "__main__":
107
-    main()

+ 24
- 0
stop_ubuntu.py 查看文件

@@ -0,0 +1,24 @@
1
+import subprocess
2
+import os
3
+import signal
4
+import json
5
+
6
+
7
+PROCESS_NAME = 'rec_ubuntu.py'
8
+
9
+
10
+devnull = open('/dev/null', 'w')
11
+
12
+def kill_recorder():
13
+    ps = subprocess.Popen(('ps', '-ef'), stdout=subprocess.PIPE)
14
+    output = subprocess.check_output(('grep', PROCESS_NAME), stdin=ps.stdout).decode()    
15
+    ps_number = output.split()    
16
+    os.kill(int(ps_number[1]), signal.SIGINT)
17
+    #with open (os.path.abspath(os.path.expanduser(JSON_PATH)), "w") as json_file:
18
+    #        json.dump(conf,json_file, indent=4)
19
+
20
+def main():
21
+    kill_recorder()
22
+
23
+if __name__ == "__main__":
24
+    main()

+ 294
- 0
tiny-yolov4.cfg 查看文件

@@ -0,0 +1,294 @@
1
+[net]
2
+# Testing
3
+#batch=1
4
+#subdivisions=1
5
+# Training
6
+batch=64
7
+subdivisions=16
8
+width=1920
9
+height=1056
10
+channels=3
11
+momentum=0.9
12
+decay=0.0005
13
+angle=0
14
+saturation = 1.5
15
+exposure = 1.5
16
+hue=.1
17
+
18
+learning_rate=0.00261
19
+burn_in=1000
20
+
21
+max_batches = 100000
22
+policy=steps
23
+steps=1600,1800
24
+scales=.1,.1
25
+
26
+
27
+#weights_reject_freq=1001
28
+#ema_alpha=0.9998
29
+#equidistant_point=1000
30
+#num_sigmas_reject_badlabels=3
31
+#badlabels_rejection_percentage=0.2
32
+
33
+
34
+[convolutional]
35
+batch_normalize=1
36
+filters=32
37
+size=3
38
+stride=2
39
+pad=1
40
+activation=leaky
41
+
42
+[convolutional]
43
+batch_normalize=1
44
+filters=64
45
+size=3
46
+stride=2
47
+pad=1
48
+activation=leaky
49
+
50
+[convolutional]
51
+batch_normalize=1
52
+filters=64
53
+size=3
54
+stride=1
55
+pad=1
56
+activation=leaky
57
+
58
+[route]
59
+layers=-1
60
+groups=2
61
+group_id=1
62
+
63
+[convolutional]
64
+batch_normalize=1
65
+filters=32
66
+size=3
67
+stride=1
68
+pad=1
69
+activation=leaky
70
+
71
+[convolutional]
72
+batch_normalize=1
73
+filters=32
74
+size=3
75
+stride=1
76
+pad=1
77
+activation=leaky
78
+
79
+[route]
80
+layers = -1,-2
81
+
82
+[convolutional]
83
+batch_normalize=1
84
+filters=64
85
+size=1
86
+stride=1
87
+pad=1
88
+activation=leaky
89
+
90
+[route]
91
+layers = -6,-1
92
+
93
+[maxpool]
94
+size=2
95
+stride=2
96
+
97
+[convolutional]
98
+batch_normalize=1
99
+filters=128
100
+size=3
101
+stride=1
102
+pad=1
103
+activation=leaky
104
+
105
+[route]
106
+layers=-1
107
+groups=2
108
+group_id=1
109
+
110
+[convolutional]
111
+batch_normalize=1
112
+filters=64
113
+size=3
114
+stride=1
115
+pad=1
116
+activation=leaky
117
+
118
+[convolutional]
119
+batch_normalize=1
120
+filters=64
121
+size=3
122
+stride=1
123
+pad=1
124
+activation=leaky
125
+
126
+[route]
127
+layers = -1,-2
128
+
129
+[convolutional]
130
+batch_normalize=1
131
+filters=128
132
+size=1
133
+stride=1
134
+pad=1
135
+activation=leaky
136
+
137
+[route]
138
+layers = -6,-1
139
+
140
+[maxpool]
141
+size=2
142
+stride=2
143
+
144
+[convolutional]
145
+batch_normalize=1
146
+filters=256
147
+size=3
148
+stride=1
149
+pad=1
150
+activation=leaky
151
+
152
+[route]
153
+layers=-1
154
+groups=2
155
+group_id=1
156
+
157
+[convolutional]
158
+batch_normalize=1
159
+filters=128
160
+size=3
161
+stride=1
162
+pad=1
163
+activation=leaky
164
+
165
+[convolutional]
166
+batch_normalize=1
167
+filters=128
168
+size=3
169
+stride=1
170
+pad=1
171
+activation=leaky
172
+
173
+[route]
174
+layers = -1,-2
175
+
176
+[convolutional]
177
+batch_normalize=1
178
+filters=256
179
+size=1
180
+stride=1
181
+pad=1
182
+activation=leaky
183
+
184
+[route]
185
+layers = -6,-1
186
+
187
+[maxpool]
188
+size=2
189
+stride=2
190
+
191
+[convolutional]
192
+batch_normalize=1
193
+filters=512
194
+size=3
195
+stride=1
196
+pad=1
197
+activation=leaky
198
+
199
+##################################
200
+
201
+[convolutional]
202
+batch_normalize=1
203
+filters=256
204
+size=1
205
+stride=1
206
+pad=1
207
+activation=leaky
208
+
209
+[convolutional]
210
+batch_normalize=1
211
+filters=512
212
+size=3
213
+stride=1
214
+pad=1
215
+activation=leaky
216
+
217
+[convolutional]
218
+size=1
219
+stride=1
220
+pad=1
221
+filters=18
222
+activation=linear
223
+
224
+
225
+
226
+[yolo]
227
+mask = 3,4,5
228
+anchors = 10,14,  23,27,  37,58,  81,82,  135,169,  344,319
229
+classes=1
230
+num=6
231
+jitter=.3
232
+scale_x_y = 1.05
233
+cls_normalizer=1.0
234
+iou_normalizer=0.07
235
+iou_loss=ciou
236
+ignore_thresh = .7
237
+truth_thresh = 1
238
+random=0
239
+resize=1.5
240
+nms_kind=greedynms
241
+beta_nms=0.6
242
+#new_coords=1
243
+#scale_x_y = 2.0
244
+
245
+[route]
246
+layers = -4
247
+
248
+[convolutional]
249
+batch_normalize=1
250
+filters=128
251
+size=1
252
+stride=1
253
+pad=1
254
+activation=leaky
255
+
256
+[upsample]
257
+stride=2
258
+
259
+[route]
260
+layers = -1, 23
261
+
262
+[convolutional]
263
+batch_normalize=1
264
+filters=256
265
+size=3
266
+stride=1
267
+pad=1
268
+activation=leaky
269
+
270
+[convolutional]
271
+size=1
272
+stride=1
273
+pad=1
274
+filters=18
275
+activation=linear
276
+
277
+[yolo]
278
+mask = 1,2,3
279
+anchors = 10,14,  23,27,  37,58,  81,82,  135,169,  344,319
280
+classes=1
281
+num=6
282
+jitter=.3
283
+scale_x_y = 1.05
284
+cls_normalizer=1.0
285
+iou_normalizer=0.07
286
+iou_loss=ciou
287
+ignore_thresh = .7
288
+truth_thresh = 1
289
+random=0
290
+resize=1.5
291
+nms_kind=greedynms
292
+beta_nms=0.6
293
+#new_coords=1
294
+#scale_x_y = 2.0

+ 20
- 0
tools/checkstatus.py 查看文件

@@ -0,0 +1,20 @@
1
+from asyncio import subprocess
2
+import sys
3
+import os
4
+import pyzed.sl as sl
5
+import threading
6
+import time
7
+from signal import signal, SIGINT
8
+
9
+
10
+def check_status():
11
+    cameras = sl.Camera.get_device_list()
12
+    print(cameras)    
13
+
14
+
15
+
16
+def main():
17
+    check_status()
18
+
19
+if __name__ == '__main__':
20
+    main()

+ 29
- 0
tools/zedclose.py 查看文件

@@ -0,0 +1,29 @@
1
+import os
2
+import pyzed.sl as sl
3
+import sys
4
+
5
+
6
+def close_zed():
7
+    zed_list = []
8
+
9
+    init = sl.InitParameters()
10
+    init.camera_resolution = sl.RESOLUTION.HD1080
11
+    init.camera_fps = 30
12
+
13
+
14
+    cameras = sl.Camera.get_device_list()
15
+    print(cameras)
16
+    for cam in cameras:
17
+        init.set_from_serial_number(cam.serial_number)        
18
+        zed_list.append(sl.Camera())
19
+    for index in range(0, len(zed_list)):
20
+        zed_list[index].close()
21
+
22
+    
23
+
24
+
25
+def main():
26
+    close_zed()
27
+
28
+if __name__ == '__main__':
29
+    main()

+ 5
- 0
yolo_cfg/tiny_v4/obj.data 查看文件

@@ -0,0 +1,5 @@
1
+classes = 1
2
+train = /workspace/configurations/tiny_v4_220204/train.txt
3
+valid = /workspace/configurations/tiny_v4_220204/valid.txt
4
+names = /home/mc365/sources/obj.names
5
+backup = /workspace/configurations/tiny_v4_220204/weights_backups/

+ 1
- 0
yolo_cfg/tiny_v4/obj.names 查看文件

@@ -0,0 +1 @@
1
+handle

+ 38230
- 0
yolo_cfg/tiny_v4/train.txt
文件差異過大導致無法顯示
查看文件


+ 4247
- 0
yolo_cfg/tiny_v4/vaild.txt
文件差異過大導致無法顯示
查看文件


yolov4_7gpu.cfg
文件差異過大導致無法顯示
查看文件


Loading…
取消
儲存