Commit 38666208 authored by YONG-LIN SU's avatar YONG-LIN SU

change api rules to select employees by id instead of name

parent 91760e33
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"import threading\n",
"# from ipcamCapture import ipcamCapture\n",
"import time\n",
"import subprocess as sp"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ipcam=ipcamCapture(URL='rtsp://iCRc0en0w:VEZ3LCKK@192.168.5.1:2555/d878278e67636fbb')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ipcam.start()\n",
"time.sleep(1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"face_cascade = cv2.CascadeClassifier(\"Face-Clock/model/cv2/haarcascade_frontalface_alt2.xml\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"while(True):\n",
" frame=ipcam.getframe()\n",
" faces = face_cascade.detectMultiScale(frame,scaleFactor=1.1,minNeighbors=3)\n",
" if(len(faces)>0):\n",
" for f in faces:\n",
" x,y,w,h=f\n",
" margin=10\n",
" cv2.rectangle(frame,(x-margin//2,y-margin//2),(x+w+margin//2,y+h+margin//2),(0,0,255))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cap=cv2.VideoCapture('rtsp://iCRc0en0w:VEZ3LCKK@192.168.5.1:2555/d878278e67636fbb')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from matplotlib import pyplot as plt"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cap.read()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
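The notebook above depends on the ipcamCapture class (its import is restored in the first cell) and its detection loop draws rectangles without ever displaying a frame or leaving the loop. A minimal end-to-end sketch of the same idea, assuming the ipcamCapture module added in this commit and the cascade path used above; the grayscale conversion, the Esc-key exit and the placeholder RTSP credentials are additions for illustration:

import time
import cv2
from ipcamCapture import ipcamCapture  # threaded RTSP reader added in this commit

face_cascade = cv2.CascadeClassifier("Face-Clock/model/cv2/haarcascade_frontalface_alt2.xml")
ipcam = ipcamCapture(URL='rtsp://<user>:<password>@<camera-ip>:<port>/<stream-id>')  # placeholder URL
ipcam.start()
time.sleep(1)  # give the reader thread time to capture the first frame

while True:
    frame = ipcam.getframe()
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)  # grayscale, the usual input for Haar cascades
    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=3)
    for (x, y, w, h) in faces:
        margin = 10
        cv2.rectangle(frame, (x - margin // 2, y - margin // 2),
                      (x + w + margin // 2, y + h + margin // 2), (0, 0, 255), 2)
    cv2.imshow('faces', frame)
    if cv2.waitKey(33) == 27:  # Esc to quit
        break

ipcam.stop()
cv2.destroyAllWindows()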
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"from darkflow.net.build import TFNet\n",
"import time\n",
"import threading\n",
"import subprocess as sp"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"option={\n",
" 'model':'cfg/yolo.cfg',\n",
" 'load':'bin/yolov2.weights',\n",
" 'threshold':0.3,\n",
"# 'gpu':0.8\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"tfnet=TFNet(option)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# capture=cv2.VideoCapture('Khare_testvideo_03.mp4')\n",
"rtmpUrl='rtmp://stream.mivs.isrg.tw/analyze/yolov2_kr_1'\n",
"URL=\"http://91.201.117.136/mjpg/video.mjpg#.W4ez7ApQikk.link\"\n",
"capture=cv2.VideoCapture(URL)\n",
"# colors=[tuple(255*np.random.rand(3)) for i in range(20)]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 取得影片參數\n",
"size = (int(capture.get(cv2.CAP_PROP_FRAME_WIDTH)), int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT)))\n",
"sizeStr = str(size[0]) + 'x' + str(size[1])\n",
"fps = capture.get(cv2.CAP_PROP_FPS) # 30p/self\n",
"fps = int(fps)\n",
"hz = int(1000.0 / fps)\n",
"print('size:'+ sizeStr + ' fps:' + str(fps) + ' hz:' + str(hz))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 管道输出 ffmpeg推送rtmp 重点 : 通过管道 共享数据的方式\n",
"command = ['ffmpeg',\n",
" '-y',\n",
" '-f', 'rawvideo',\n",
" '-vcodec','rawvideo',\n",
" '-pix_fmt', 'bgr24',\n",
" '-s', sizeStr,\n",
" '-r', str(fps),\n",
" '-i', '-',\n",
" '-c:v', 'libx264',\n",
" '-pix_fmt', 'yuv420p',\n",
" '-preset', 'ultrafast',\n",
" '-f', 'flv', \n",
" rtmpUrl]\n",
"# pipe = sp.Popen(command, stdout = sp.PIPE, bufsize=10**8)\n",
"pipe = sp.Popen(command, stdin=sp.PIPE) #,shell=False"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"class ipcamCapture:\n",
" def __init__(self, URL):\n",
" self.Frame = []\n",
" self.status = False\n",
" self.isstop = False\n",
"\t\t\n",
"\t# 攝影機連接。\n",
" self.capture = cv2.VideoCapture(URL)\n",
"\n",
" def start(self):\n",
"\t# 把程式放進子執行緒,daemon=True 表示該執行緒會隨著主執行緒關閉而關閉。\n",
" print('ipcam started!')\n",
" threading.Thread(target=self.queryframe, daemon=True, args=()).start()\n",
"\n",
" def stop(self):\n",
"\t# 記得要設計停止無限迴圈的開關。\n",
" self.isstop = True\n",
" print('ipcam stopped!')\n",
" \n",
" def getframe(self):\n",
"\t# 當有需要影像時,再回傳最新的影像。\n",
" return self.Frame\n",
" \n",
" def queryframe(self):\n",
" while (not self.isstop):\n",
" self.status, self.Frame = self.capture.read()\n",
" \n",
" self.capture.release()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# URL=\"http://91.201.117.136/mjpg/video.mjpg#.W4ez7ApQikk.link\"\n",
"\n",
"# 連接攝影機\n",
"ipcam = ipcamCapture(URL)\n",
"\n",
"# 啟動子執行緒\n",
"ipcam.start()\n",
"\n",
"# 暫停1秒,確保影像已經填充\n",
"time.sleep(1)\n",
"\n",
"# 使用無窮迴圈擷取影像,直到按下Esc鍵結束\n",
"while True:\n",
" stime=time.time()\n",
" # 使用 getframe 取得最新的影像\n",
" frame = ipcam.getframe()\n",
" \n",
" results = tfnet.return_predict(frame)\n",
" for result in results:\n",
" tl=(result['topleft']['x'],result['topleft']['y'])\n",
" br=(result['bottomright']['x'],result['bottomright']['y'])\n",
" label=result['label']\n",
" frame=cv2.rectangle(frame,tl,br,(0,255,0),2)\n",
" frame=cv2.putText(frame,label,tl,cv2.FONT_HERSHEY_COMPLEX,0.5, (255,255,255), 1, cv2.LINE_AA)\n",
" cv2.imshow('frame',frame)\n",
" pipe.stdin.write(frame.tostring())\n",
" pass\n",
" print('FPS {:.1f}'.format(1 / (time.time() - stime)))\n",
" if cv2.waitKey(33) == 27:\n",
" cv2.destroyAllWindows()\n",
" ipcam.stop()\n",
" break"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.6"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
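The streaming loop above pushes raw BGR frames into ffmpeg's stdin but never shuts anything down when it exits. A small cleanup sketch that could run after the loop, assuming the pipe, ipcam and capture objects defined in the cells above (on newer NumPy versions, frame.tobytes() is the non-deprecated spelling of the frame.tostring() call used in the loop):

ipcam.stop()            # stop the capture thread
pipe.stdin.close()      # signal EOF so ffmpeg can flush and finalize the FLV stream
pipe.wait()             # wait for the ffmpeg process to exit
capture.release()       # release the original cv2.VideoCapture handle
cv2.destroyAllWindows()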
{
"cells": [
{
"cell_type": "code",
"execution_count": 38,
"metadata": {},
"outputs": [],
"source": [
"import xlwt\n",
"from influxdb import InfluxDBClient\n",
"import calendar\n",
"import datetime\n",
"import pytz\n",
"import json"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"# 參數\n",
"name=\"Allen\"\n",
"year=2019\n",
"month=7"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"# 時區設定\n",
"tw = pytz.timezone('Asia/Taipei')"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# 獲取當月第一天的星期與當月的總天數\n",
"firstDayWeekDay, monthRange = calendar.monthrange(year, month)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"# 獲取當月第一天跟最後一天 localtime\n",
"firstDay = datetime.datetime(year=year, month=month, day=1,hour=0,minute=0,second=0)\n",
"lastDay = datetime.datetime(year=year, month=month, day=monthRange,hour=23,minute=59,second=59)\n",
"# 時區本地化\n",
"tw_firstDay=tw.localize(firstDay)\n",
"tw_lastDay=tw.localize(lastDay)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"# 轉換為utc時間\n",
"utc_firstDay=tw_firstDay.astimezone(pytz.utc)\n",
"utc_lastDay=tw_lastDay.astimezone(pytz.utc)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"# 字串格式化轉為influxdb時間格式\n",
"format_firstDay=utc_firstDay.strftime(\"%Y-%m-%d %H:%M:%S\")\n",
"format_lastDay=utc_lastDay.strftime(\"%Y-%m-%d %H:%M:%S\")"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"# 建立influxdb連線\n",
"client=InfluxDBClient(host='192.168.0.6',port=8086,username='root',password='xMd2k5aK',database='rd')\n",
"# client=InfluxDBClient(host='192.168.5.17',port=8086,username='root',password='root',database='RD')"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [],
"source": [
"login_response=client.query(\"SELECT * FROM login_ WHERE time > '{0}' AND time < '{1}' AND \\\"name\\\" = '{2}'\".format(format_firstDay,format_lastDay,name))\n",
"logout_response=client.query(\"SELECT * FROM logout_ WHERE time > '{0}' AND time < '{1}' AND \\\"name\\\" = '{2}'\".format(format_firstDay,format_lastDay,name))"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
"login_history=login_response.raw['series'][0]['values']\n",
"logout_history=logout_response.raw['series'][0]['values']"
]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {},
"outputs": [],
"source": [
"def utc2local(strtime):\n",
" strtime=strtime.split('Z')[0]\n",
" strtime=strtime.split('.')[0]\n",
" dt=datetime.datetime.strptime(strtime,\"%Y-%m-%dT%H:%M:%S\")\n",
" loacl_time=dt.astimezone(pytz.timezone('Asia/Taipei'))\n",
" return loacl_time.strftime(\"%Y-%m-%d %H:%M:%S\")"
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {},
"outputs": [],
"source": [
"log_json={}\n",
"for login,logout in zip(login_history,logout_history):\n",
" local_login=utc2local(login[0])\n",
" local_logout=utc2local(logout[0])\n",
" \n",
" login_date,login_time=local_login.split(' ')\n",
" logout_date,logout_time=local_logout.split(' ')\n",
" \n",
" d1 = datetime.datetime.strptime(local_logout,'%Y-%m-%d %H:%M:%S')\n",
" \n",
" overtime=0\n",
" if(d1.hour ==17 and d1.minute >= 45):\n",
" overtime=0.5\n",
" elif(d1.hour >17 and d1.minute >=15):\n",
" overtime=d1.hour-17\n",
" if(d1.minute>=45):\n",
" overtime=overtime+0.5\n",
" \n",
" date_json={'login':login_time,'logout':logout_time,'overtime':overtime}\n",
" log_json[login_date]=date_json"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {},
"outputs": [],
"source": [
"# 新增Excel檔案\n",
"workbook = xlwt.Workbook(encoding='utf-8')"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [],
"source": [
"# 新建Sheet\n",
"sheet=workbook.add_sheet(\"{0}月出勤考核表\".format(month))"
]
},
{
"cell_type": "code",
"execution_count": 35,
"metadata": {},
"outputs": [],
"source": [
"# 基本欄位\n",
"sheet.write(0,0,\"姓名\")\n",
"sheet.write(0,1,name)\n",
"sheet.write(1,0,\"日期\")\n",
"sheet.write(1,1,\"上班打卡時間\")\n",
"sheet.write(1,2,\"下班打卡時間\")\n",
"sheet.write(1,3,\"加班時數\")"
]
},
{
"cell_type": "code",
"execution_count": 36,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'login': '08:21:52', 'logout': '17:53:55', 'overtime': 0.5}\n"
]
}
],
"source": [
"x,y=2,0\n",
"for k in log_json:\n",
" sheet.write(x,0,k)\n",
" sheet.write(x,1,log_json[k]['login'])\n",
" sheet.write(x,2,log_json[k]['logout'])\n",
" sheet.write(x,3,log_json[k]['overtime'])\n",
" print(log_json[k])\n",
" x=x+1"
]
},
{
"cell_type": "code",
"execution_count": 37,
"metadata": {},
"outputs": [],
"source": [
"# 存儲\n",
"workbook.save(\"docs/{0}{1}月出勤紀錄.xlsx\".format(name,month))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 41,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{\"2019-07-01\": {\"login\": \"08:21:52\", \"logout\": \"17:53:55\", \"overtime\": 0.5}}\n"
]
}
],
"source": [
"print(json.dumps(log_json))"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'2019-07-01T00:21:52'"
]
},
"execution_count": 28,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"'2019-07-01T00:21:52Z'.split('Z')[0].split('.')[0]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 上下班時間編碼為字典\n",
"def encode_logtime(log_history):\n",
" log_json={}\n",
" for log in log_history:\n",
" local_log=utc2local(log[0].split('.')[0])\n",
" log_date,log_time=local_log.split(' ')\n",
"\n",
" if(log_date in log_json):\n",
" tmp=log_json[log_date]\n",
" if(type(tmp) == list):\n",
" tmp.append(log_time)\n",
" log_json[log_date]=tmp\n",
" else:\n",
" log_json[log_date]=[tmp,log_time]\n",
" else:\n",
" log_json[log_date]=log_time\n",
" return log_json"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"login_json=encode_logtime(login_history)\n",
"logout_json=encode_logtime(logout_history)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
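The overtime rule buried in the loop above is easier to read and test as a standalone function. This sketch mirrors the notebook's logic exactly (the function name and example punch-out times are made up for illustration); note that, as written, a punch-out less than 15 minutes past the hour earns no overtime for that day:

import datetime

def overtime_hours(logout_dt):
    # Same rule as the notebook cell above.
    overtime = 0
    if logout_dt.hour == 17 and logout_dt.minute >= 45:
        overtime = 0.5
    elif logout_dt.hour > 17 and logout_dt.minute >= 15:
        overtime = logout_dt.hour - 17
        if logout_dt.minute >= 45:
            overtime += 0.5
    return overtime

print(overtime_hours(datetime.datetime(2019, 7, 1, 17, 53)))  # 0.5
print(overtime_hours(datetime.datetime(2019, 7, 1, 19, 50)))  # 2.5
print(overtime_hours(datetime.datetime(2019, 7, 1, 19, 10)))  # 0 (minutes below the 15-minute threshold)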
File added
{
"cells": [
{
"cell_type": "code",
"execution_count": 38,
"metadata": {},
"outputs": [],
"source": [
"import xlwt\n",
"from influxdb import InfluxDBClient\n",
"import calendar\n",
"import datetime\n",
"import pytz\n",
"import json"
]
},
{
"cell_type": "code",
"execution_count": 52,
"metadata": {},
"outputs": [],
"source": [
"# 參數\n",
"name=\"Allen\"\n",
"year=2019\n",
"month=5"
]
},
{
"cell_type": "code",
"execution_count": 53,
"metadata": {},
"outputs": [],
"source": [
"# 時區設定\n",
"tw = pytz.timezone('Asia/Taipei')"
]
},
{
"cell_type": "code",
"execution_count": 54,
"metadata": {},
"outputs": [],
"source": [
"# 獲取當月第一天的星期與當月的總天數\n",
"firstDayWeekDay, monthRange = calendar.monthrange(year, month)"
]
},
{
"cell_type": "code",
"execution_count": 55,
"metadata": {},
"outputs": [],
"source": [
"# 獲取當月第一天跟最後一天 localtime\n",
"firstDay = datetime.datetime(year=year, month=month, day=1,hour=0,minute=0,second=0)\n",
"lastDay = datetime.datetime(year=year, month=month, day=monthRange,hour=23,minute=59,second=59)\n",
"# 時區本地化\n",
"tw_firstDay=tw.localize(firstDay)\n",
"tw_lastDay=tw.localize(lastDay)"
]
},
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [],
"source": [
"# 轉換為utc時間\n",
"utc_firstDay=tw_firstDay.astimezone(pytz.utc)\n",
"utc_lastDay=tw_lastDay.astimezone(pytz.utc)"
]
},
{
"cell_type": "code",
"execution_count": 57,
"metadata": {},
"outputs": [],
"source": [
"# 字串格式化轉為influxdb時間格式\n",
"format_firstDay=utc_firstDay.strftime(\"%Y-%m-%d %H:%M:%S\")\n",
"format_lastDay=utc_lastDay.strftime(\"%Y-%m-%d %H:%M:%S\")"
]
},
{
"cell_type": "code",
"execution_count": 58,
"metadata": {},
"outputs": [],
"source": [
"# 建立influxdb連線\n",
"client=InfluxDBClient(host='192.168.0.6',port=8086,username='root',password='xMd2k5aK',database='rd')\n",
"# client=InfluxDBClient(host='192.168.5.17',port=8086,username='root',password='root',database='RD')"
]
},
{
"cell_type": "code",
"execution_count": 59,
"metadata": {},
"outputs": [],
"source": [
"login_response=client.query(\"SELECT * FROM login_ WHERE time > '{0}' AND time < '{1}' AND \\\"name\\\" = '{2}'\".format(format_firstDay,format_lastDay,name))\n",
"logout_response=client.query(\"SELECT * FROM logout_ WHERE time > '{0}' AND time < '{1}' AND \\\"name\\\" = '{2}'\".format(format_firstDay,format_lastDay,name))"
]
},
{
"cell_type": "code",
"execution_count": 60,
"metadata": {},
"outputs": [
{
"ename": "KeyError",
"evalue": "'series'",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-60-4d6e74a66a90>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mlogin_history\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mlogin_response\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mraw\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'series'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'values'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mlogout_history\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mlogout_response\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mraw\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'series'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'values'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mKeyError\u001b[0m: 'series'"
]
}
],
"source": [
"login_history=login_response.raw['series'][0]['values']\n",
"logout_history=logout_response.raw['series'][0]['values']"
]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {},
"outputs": [],
"source": [
"def utc2local(strtime):\n",
" strtime=strtime.split('Z')[0]\n",
" strtime=strtime.split('.')[0]\n",
" dt=datetime.datetime.strptime(strtime,\"%Y-%m-%dT%H:%M:%S\")\n",
" loacl_time=dt.astimezone(pytz.timezone('Asia/Taipei'))\n",
" return loacl_time.strftime(\"%Y-%m-%d %H:%M:%S\")"
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {},
"outputs": [],
"source": [
"log_json={}\n",
"for login,logout in zip(login_history,logout_history):\n",
" local_login=utc2local(login[0])\n",
" local_logout=utc2local(logout[0])\n",
" \n",
" login_date,login_time=local_login.split(' ')\n",
" logout_date,logout_time=local_logout.split(' ')\n",
" \n",
" d1 = datetime.datetime.strptime(local_logout,'%Y-%m-%d %H:%M:%S')\n",
" \n",
" overtime=0\n",
" if(d1.hour ==17 and d1.minute >= 45):\n",
" overtime=0.5\n",
" elif(d1.hour >17 and d1.minute >=15):\n",
" overtime=d1.hour-17\n",
" if(d1.minute>=45):\n",
" overtime=overtime+0.5\n",
" \n",
" date_json={'login':login_time,'logout':logout_time,'overtime':overtime}\n",
" log_json[login_date]=date_json"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {},
"outputs": [],
"source": [
"# 新增Excel檔案\n",
"workbook = xlwt.Workbook(encoding='utf-8')"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [],
"source": [
"# 新建Sheet\n",
"sheet=workbook.add_sheet(\"{0}月出勤考核表\".format(month))"
]
},
{
"cell_type": "code",
"execution_count": 35,
"metadata": {},
"outputs": [],
"source": [
"# 基本欄位\n",
"sheet.write(0,0,\"姓名\")\n",
"sheet.write(0,1,name)\n",
"sheet.write(1,0,\"日期\")\n",
"sheet.write(1,1,\"上班打卡時間\")\n",
"sheet.write(1,2,\"下班打卡時間\")\n",
"sheet.write(1,3,\"加班時數\")"
]
},
{
"cell_type": "code",
"execution_count": 36,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'login': '08:21:52', 'logout': '17:53:55', 'overtime': 0.5}\n"
]
}
],
"source": [
"x,y=2,0\n",
"for k in log_json:\n",
" sheet.write(x,0,k)\n",
" sheet.write(x,1,log_json[k]['login'])\n",
" sheet.write(x,2,log_json[k]['logout'])\n",
" sheet.write(x,3,log_json[k]['overtime'])\n",
" print(log_json[k])\n",
" x=x+1"
]
},
{
"cell_type": "code",
"execution_count": 37,
"metadata": {},
"outputs": [],
"source": [
"# 存儲\n",
"workbook.save(\"docs/{0}{1}月出勤紀錄.xlsx\".format(name,month))"
]
},
{
"cell_type": "code",
"execution_count": 62,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"0"
]
},
"execution_count": 62,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(login_response)"
]
},
{
"cell_type": "code",
"execution_count": 41,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{\"2019-07-01\": {\"login\": \"08:21:52\", \"logout\": \"17:53:55\", \"overtime\": 0.5}}\n"
]
}
],
"source": [
"print(json.dumps(log_json))"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'2019-07-01T00:21:52'"
]
},
"execution_count": 28,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"'2019-07-01T00:21:52Z'.split('Z')[0].split('.')[0]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 上下班時間編碼為字典\n",
"def encode_logtime(log_history):\n",
" log_json={}\n",
" for log in log_history:\n",
" local_log=utc2local(log[0].split('.')[0])\n",
" log_date,log_time=local_log.split(' ')\n",
"\n",
" if(log_date in log_json):\n",
" tmp=log_json[log_date]\n",
" if(type(tmp) == list):\n",
" tmp.append(log_time)\n",
" log_json[log_date]=tmp\n",
" else:\n",
" log_json[log_date]=[tmp,log_time]\n",
" else:\n",
" log_json[log_date]=log_time\n",
" return log_json"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"login_json=encode_logtime(login_history)\n",
"logout_json=encode_logtime(logout_history)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
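The KeyError: 'series' in the notebook version above comes from querying a month (month=5) with no records: an empty InfluxDB ResultSet has no 'series' entry in its raw payload. The API change in this commit guards against this with a length check before indexing (see the Report resource in the diff below); a minimal sketch of the same guard for the notebook, assuming the client, format_firstDay, format_lastDay and name variables from the cells above:

login_response = client.query(
    "SELECT * FROM login_ WHERE time > '{0}' AND time < '{1}' AND \"name\" = '{2}'".format(
        format_firstDay, format_lastDay, name))

if len(login_response) == 0:
    # nothing recorded for this month, so .raw has no 'series' key
    print('no login records in the selected month')
    login_history = []
else:
    login_history = login_response.raw['series'][0]['values']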
import cv2
import threading
# 接收攝影機串流影像,採用多執行緒的方式,降低緩衝區堆疊圖幀的問題。
class ipcamCapture:
def __init__(self, URL):
self.Frame = []
self.status = False
self.isstop = False
# 攝影機連接。
self.capture = cv2.VideoCapture(URL)
def start(self):
# 把程式放進子執行緒,daemon=True 表示該執行緒會隨著主執行緒關閉而關閉。
print('ipcam started!')
threading.Thread(target=self.queryframe, daemon=True, args=()).start()
def stop(self):
# 記得要設計停止無限迴圈的開關。
self.isstop = True
print('ipcam stopped!')
def getframe(self):
# 當有需要影像時,再回傳最新的影像。
return self.Frame
def queryframe(self):
while (not self.isstop):
self.status, self.Frame = self.capture.read()
self.capture.release()
def get_info(self):
# 取得影片參數
size = (int(self.capture.get(cv2.CAP_PROP_FRAME_WIDTH)), int(self.capture.get(cv2.CAP_PROP_FRAME_HEIGHT)))
sizeStr = str(size[0]) + 'x' + str(size[1])
fps = self.capture.get(cv2.CAP_PROP_FPS) # 30p/self
fps = int(fps)
hz = int(1000.0 / fps)
print('size:'+ sizeStr + ' fps:' + str(fps) + ' hz:' + str(hz))
return sizeStr,fps,hz
\ No newline at end of file
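A short usage sketch for the module above; the RTSP URL is a placeholder:

import time
import cv2
from ipcamCapture import ipcamCapture

ipcam = ipcamCapture('rtsp://<user>:<password>@<camera-ip>:<port>/<stream-id>')
ipcam.start()                  # spawns the daemon reader thread
time.sleep(1)                  # let the first frame arrive
sizeStr, fps, hz = ipcam.get_info()

while True:
    frame = ipcam.getframe()   # always the most recent frame, no buffered backlog
    cv2.imshow('ipcam', frame)
    if cv2.waitKey(hz) == 27:  # Esc to quit
        break

ipcam.stop()
cv2.destroyAllWindows()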
@@ -75,3 +75,42 @@
192.168.5.202 - - [22/May/2019:09:47:52 +0000] "GET /page/factory/production/info HTTP/1.1" 500 32 "-" "Nx Witness/3.2.0.28738 (Network Optix) Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:36.0)"
192.168.5.202 - - [22/May/2019:09:55:45 +0000] "GET /page/factory/production/info HTTP/1.1" 500 32 "-" "Nx Witness/3.2.0.28738 (Network Optix) Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:36.0)"
192.168.5.202 - - [22/May/2019:10:09:14 +0000] "GET /page/factory/production/info HTTP/1.1" 500 32 "-" "Nx Witness/3.2.0.28738 (Network Optix) Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:36.0)"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "POST /whois HTTP/1.1" 200 86 "-" "PostmanRuntime/7.6.0"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "GET /apidocs HTTP/1.1" 308 273 "-" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "GET /apidocs/ HTTP/1.1" 200 1326 "-" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "GET /flasgger_static/swagger-ui.css HTTP/1.1" 200 153930 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "GET /flasgger_static/swagger-ui-standalone-preset.js HTTP/1.1" 200 440437 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "GET /flasgger_static/lib/jquery.min.js HTTP/1.1" 200 85578 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "GET /flasgger_static/swagger-ui-bundle.js HTTP/1.1" 200 1428809 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "GET /flasgger_static/favicon-32x32.png HTTP/1.1" 200 1141 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:11:19 +0000] "GET /apispec_1.json HTTP/1.1" 200 6528 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:12:28 +0000] "POST /whois HTTP/1.1" 504 192 "-" "PostmanRuntime/7.6.0"
192.168.5.103 - - [01/Jul/2019:06:12:41 +0000] "POST /whois HTTP/1.1" 200 232 "-" "PostmanRuntime/7.6.0"
192.168.5.103 - - [01/Jul/2019:06:12:47 +0000] "POST /whois HTTP/1.1" 200 168 "-" "PostmanRuntime/7.6.0"
192.168.5.103 - - [01/Jul/2019:06:13:25 +0000] "GET /apidocs/ HTTP/1.1" 200 1326 "http://60.249.6.104:30003/issues/398?issue_count=31&issue_position=7&next_issue_id=397&prev_issue_id=399" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:13:26 +0000] "GET /apispec_1.json HTTP/1.1" 200 6528 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:13:29 +0000] "GET /apispec_1.json HTTP/1.1" 200 6528 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [01/Jul/2019:06:13:29 +0000] "GET /favicon.ico HTTP/1.1" 404 209 "http://192.168.5.205:800/apispec_1.json" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [03/Jul/2019:00:53:27 +0000] "GET /apidocs/ HTTP/1.1" 200 1326 "http://60.249.6.104:30003/issues/398" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [03/Jul/2019:00:53:27 +0000] "GET /flasgger_static/swagger-ui.css HTTP/1.1" 304 0 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [03/Jul/2019:00:53:27 +0000] "GET /flasgger_static/swagger-ui-bundle.js HTTP/1.1" 200 1428809 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [03/Jul/2019:00:53:27 +0000] "GET /flasgger_static/swagger-ui-standalone-preset.js HTTP/1.1" 200 440437 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [03/Jul/2019:00:53:27 +0000] "GET /flasgger_static/lib/jquery.min.js HTTP/1.1" 304 0 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.103 - - [03/Jul/2019:00:53:27 +0000] "GET /apispec_1.json HTTP/1.1" 200 6528 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:03 +0000] "GET / HTTP/1.1" 404 209 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:03 +0000] "GET /favicon.ico HTTP/1.1" 404 209 "http://192.168.5.205:800/" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:27 +0000] "GET / HTTP/1.1" 404 209 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:36 +0000] "GET /apidoc HTTP/1.1" 404 209 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:40 +0000] "GET /apidocs HTTP/1.1" 308 273 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:40 +0000] "GET /apidocs/ HTTP/1.1" 200 1326 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:40 +0000] "GET /flasgger_static/lib/jquery.min.js HTTP/1.1" 200 85578 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:40 +0000] "GET /flasgger_static/swagger-ui.css HTTP/1.1" 200 153930 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:40 +0000] "GET /flasgger_static/swagger-ui-standalone-preset.js HTTP/1.1" 200 440437 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:40 +0000] "GET /flasgger_static/swagger-ui-bundle.js HTTP/1.1" 200 1428809 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:41 +0000] "GET /flasgger_static/favicon-32x32.png HTTP/1.1" 200 1141 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:03:41 +0000] "GET /apispec_1.json HTTP/1.1" 200 6528 "http://192.168.5.205:800/apidocs/" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.101 - - [03/Jul/2019:01:04:05 +0000] "GET /report/json/2019/7/Allen HTTP/1.1" 404 209 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36"
192.168.5.103 - - [03/Jul/2019:02:10:38 +0000] "POST /whois HTTP/1.1" 200 231 "-" "PostmanRuntime/7.6.0"
192.168.5.103 - - [03/Jul/2019:02:11:14 +0000] "POST /whois HTTP/1.1" 200 232 "-" "PostmanRuntime/7.6.0"
192.168.5.103 - - [03/Jul/2019:02:11:42 +0000] "POST /whois HTTP/1.1" 200 86 "-" "PostmanRuntime/7.6.0"
192.168.5.103 - - [03/Jul/2019:02:12:01 +0000] "POST /whois HTTP/1.1" 413 208 "-" "PostmanRuntime/7.6.0"
2019/07/01 06:12:28 [error] 22#22: *1 upstream timed out (110: Connection timed out) while reading response header from upstream, client: 192.168.5.103, server: localhost, request: "POST /whois HTTP/1.1", upstream: "uwsgi://unix:/notebooks/uwsgi.sock", host: "192.168.5.205:800"
2019/07/03 02:12:01 [error] 22#22: *53 client intended to send too large body: 1052719 bytes, client: 192.168.5.103, server: localhost, request: "POST /whois HTTP/1.1", host: "192.168.5.205:800"
@@ -544,3 +544,92 @@ spawned uWSGI worker 2 (pid: 71, cores: 1)
spawned uWSGI worker 3 (pid: 72, cores: 1)
spawned uWSGI worker 4 (pid: 73, cores: 1)
spawned uWSGI worker 5 (pid: 74, cores: 1)
*** Starting uWSGI 2.0.18 (64bit) on [Mon Jul 1 06:10:43 2019] ***
compiled with version: 7.4.0 on 15 May 2019 04:49:44
os: Linux-4.15.0-29-generic #31-Ubuntu SMP Tue Jul 17 15:39:52 UTC 2018
nodename: 75475320ed5e
machine: x86_64
clock source: unix
detected number of CPU cores: 8
current working directory: /notebooks
detected binary path: /usr/local/bin/uwsgi
!!! no internal routing support, rebuild with pcre support !!!
uWSGI running as root, you can use --uid/--gid/--chroot options
*** WARNING: you are running uWSGI as root !!! (use the --uid flag) ***
chdir() to /notebooks
*** WARNING: you are running uWSGI without its master process manager ***
your memory page size is 4096 bytes
detected max file descriptor number: 1048576
lock engine: pthread robust mutexes
thunder lock: disabled (you can enable it with --thunder-lock)
uwsgi socket 0 bound to TCP address 0.0.0.0:5000 fd 3
uwsgi socket 1 bound to UNIX address /notebooks/uwsgi.sock fd 4
uWSGI running as root, you can use --uid/--gid/--chroot options
*** WARNING: you are running uWSGI as root !!! (use the --uid flag) ***
Python version: 3.6.7 (default, Oct 22 2018, 11:32:17) [GCC 8.2.0]
*** Python threads support is disabled. You can enable it with --enable-threads ***
Python main interpreter initialized at 0x55968a720f10
uWSGI running as root, you can use --uid/--gid/--chroot options
*** WARNING: you are running uWSGI as root !!! (use the --uid flag) ***
your server socket listen backlog is limited to 100 connections
your mercy for graceful operations on workers is 60 seconds
mapped 364520 bytes (355 KB) for 5 cores
*** Operational MODE: preforking ***
Using TensorFlow backend.
人臉偵測cascade分類器載入完成
unable to load configuration from from multiprocessing.semaphore_tracker import main;main(5)
svm version: 20190628102915
SVM分類器載入完成
WSGI app 0 (mountpoint='') ready in 35 seconds on interpreter 0x55968a720f10 pid: 7 (default app)
uWSGI running as root, you can use --uid/--gid/--chroot options
*** WARNING: you are running uWSGI as root !!! (use the --uid flag) ***
*** uWSGI is running in multiple interpreter mode ***
spawned uWSGI worker 1 (pid: 7, cores: 1)
spawned uWSGI worker 2 (pid: 56, cores: 1)
spawned uWSGI worker 3 (pid: 57, cores: 1)
spawned uWSGI worker 4 (pid: 58, cores: 1)
spawned uWSGI worker 5 (pid: 59, cores: 1)
2019_07_01_14_11_19.jpg
[pid: 56|app: 0|req: 1/1] 192.168.5.103 () {44 vars in 741 bytes} [Mon Jul 1 06:11:18 2019] POST /whois => generated 86 bytes in 96 msecs (HTTP/1.1 200) 2 headers in 71 bytes (1 switches on core 0)
[pid: 57|app: 0|req: 1/2] 192.168.5.103 () {42 vars in 1192 bytes} [Mon Jul 1 06:11:18 2019] GET /apidocs => generated 273 bytes in 89 msecs (HTTP/1.1 308) 3 headers in 141 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 1/3] 192.168.5.103 () {42 vars in 1186 bytes} [Mon Jul 1 06:11:19 2019] GET /apidocs/ => generated 2985 bytes in 62 msecs (HTTP/1.1 200) 2 headers in 81 bytes (2 switches on core 0)
[pid: 7|app: 0|req: 2/4] 192.168.5.103 () {42 vars in 1144 bytes} [Mon Jul 1 06:11:19 2019] GET /flasgger_static/swagger-ui.css => generated 153930 bytes in 5 msecs via sendfile() (HTTP/1.1 200) 8 headers in 311 bytes (1 switches on core 0)
[pid: 56|app: 0|req: 2/5] 192.168.5.103 () {42 vars in 1163 bytes} [Mon Jul 1 06:11:19 2019] GET /flasgger_static/swagger-ui-standalone-preset.js => generated 440437 bytes in 18 msecs via sendfile() (HTTP/1.1 200) 8 headers in 325 bytes (1 switches on core 0)
[pid: 58|app: 0|req: 1/6] 192.168.5.103 () {42 vars in 1135 bytes} [Mon Jul 1 06:11:19 2019] GET /flasgger_static/lib/jquery.min.js => generated 85578 bytes in 79 msecs via sendfile() (HTTP/1.1 200) 8 headers in 321 bytes (0 switches on core 0)
[pid: 59|app: 0|req: 1/7] 192.168.5.103 () {42 vars in 1141 bytes} [Mon Jul 1 06:11:19 2019] GET /flasgger_static/swagger-ui-bundle.js => generated 1428809 bytes in 91 msecs via sendfile() (HTTP/1.1 200) 8 headers in 324 bytes (6 switches on core 0)
[pid: 57|app: 0|req: 2/8] 192.168.5.103 () {42 vars in 1171 bytes} [Mon Jul 1 06:11:19 2019] GET /flasgger_static/favicon-32x32.png => generated 1141 bytes in 11 msecs via sendfile() (HTTP/1.1 200) 8 headers in 292 bytes (0 switches on core 0)
[pid: 59|app: 0|req: 2/9] 192.168.5.103 () {42 vars in 1114 bytes} [Mon Jul 1 06:11:19 2019] GET /apispec_1.json => generated 6528 bytes in 63 msecs (HTTP/1.1 200) 2 headers in 73 bytes (2 switches on core 0)
2019_07_01_14_11_28.jpg
2019_07_01_14_12_38.jpg
[pid: 7|app: 0|req: 3/10] 192.168.5.103 () {44 vars in 743 bytes} [Mon Jul 1 06:12:38 2019] POST /whois => generated 232 bytes in 2520 msecs (HTTP/1.1 200) 2 headers in 72 bytes (1 switches on core 0)
2019_07_01_14_12_46.jpg
[pid: 7|app: 0|req: 4/11] 192.168.5.103 () {44 vars in 743 bytes} [Mon Jul 1 06:12:46 2019] POST /whois => generated 168 bytes in 280 msecs (HTTP/1.1 200) 2 headers in 72 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 5/12] 192.168.5.103 () {44 vars in 1306 bytes} [Mon Jul 1 06:13:25 2019] GET /apidocs/ => generated 2985 bytes in 5 msecs (HTTP/1.1 200) 2 headers in 81 bytes (1 switches on core 0)
[pid: 58|app: 0|req: 2/13] 192.168.5.103 () {42 vars in 1114 bytes} [Mon Jul 1 06:13:26 2019] GET /apispec_1.json => generated 6528 bytes in 61 msecs (HTTP/1.1 200) 2 headers in 73 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 6/14] 192.168.5.103 () {44 vars in 1247 bytes} [Mon Jul 1 06:13:29 2019] GET /apispec_1.json => generated 6528 bytes in 56 msecs (HTTP/1.1 200) 2 headers in 73 bytes (1 switches on core 0)
[pid: 58|app: 0|req: 3/15] 192.168.5.103 () {42 vars in 1133 bytes} [Mon Jul 1 06:13:29 2019] GET /favicon.ico => generated 232 bytes in 7 msecs (HTTP/1.1 404) 2 headers in 72 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 7/16] 192.168.5.103 () {44 vars in 1227 bytes} [Wed Jul 3 00:53:27 2019] GET /apidocs/ => generated 2985 bytes in 17 msecs (HTTP/1.1 200) 2 headers in 81 bytes (1 switches on core 0)
[pid: 58|app: 0|req: 4/17] 192.168.5.103 () {46 vars in 1248 bytes} [Wed Jul 3 00:53:27 2019] GET /flasgger_static/swagger-ui.css => generated 0 bytes in 28 msecs (HTTP/1.1 304) 5 headers in 212 bytes (0 switches on core 0)
[pid: 56|app: 0|req: 3/18] 192.168.5.103 () {42 vars in 1152 bytes} [Wed Jul 3 00:53:27 2019] GET /flasgger_static/swagger-ui-standalone-preset.js => generated 440437 bytes in 29 msecs via sendfile() (HTTP/1.1 200) 8 headers in 325 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 8/19] 192.168.5.103 () {42 vars in 1130 bytes} [Wed Jul 3 00:53:27 2019] GET /flasgger_static/swagger-ui-bundle.js => generated 1428809 bytes in 32 msecs via sendfile() (HTTP/1.1 200) 8 headers in 324 bytes (6 switches on core 0)
[pid: 57|app: 0|req: 3/20] 192.168.5.103 () {46 vars in 1236 bytes} [Wed Jul 3 00:53:27 2019] GET /flasgger_static/lib/jquery.min.js => generated 0 bytes in 14 msecs (HTTP/1.1 304) 5 headers in 209 bytes (0 switches on core 0)
[pid: 57|app: 0|req: 4/21] 192.168.5.103 () {42 vars in 1103 bytes} [Wed Jul 3 00:53:27 2019] GET /apispec_1.json => generated 6528 bytes in 67 msecs (HTTP/1.1 200) 2 headers in 73 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 9/22] 192.168.5.101 () {42 vars in 1120 bytes} [Wed Jul 3 01:03:03 2019] GET / => generated 232 bytes in 21 msecs (HTTP/1.1 404) 2 headers in 72 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 10/23] 192.168.5.101 () {42 vars in 1069 bytes} [Wed Jul 3 01:03:03 2019] GET /favicon.ico => generated 232 bytes in 1 msecs (HTTP/1.1 404) 2 headers in 72 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 11/24] 192.168.5.101 () {42 vars in 1120 bytes} [Wed Jul 3 01:03:27 2019] GET / => generated 232 bytes in 2 msecs (HTTP/1.1 404) 2 headers in 72 bytes (2 switches on core 0)
[pid: 7|app: 0|req: 12/25] 192.168.5.101 () {42 vars in 1132 bytes} [Wed Jul 3 01:03:36 2019] GET /apidoc => generated 232 bytes in 2 msecs (HTTP/1.1 404) 2 headers in 72 bytes (2 switches on core 0)
[pid: 56|app: 0|req: 4/26] 192.168.5.101 () {42 vars in 1134 bytes} [Wed Jul 3 01:03:40 2019] GET /apidocs => generated 273 bytes in 25 msecs (HTTP/1.1 308) 3 headers in 141 bytes (2 switches on core 0)
[pid: 7|app: 0|req: 13/27] 192.168.5.101 () {42 vars in 1136 bytes} [Wed Jul 3 01:03:40 2019] GET /apidocs/ => generated 2985 bytes in 7 msecs (HTTP/1.1 200) 2 headers in 81 bytes (1 switches on core 0)
[pid: 7|app: 0|req: 14/28] 192.168.5.101 () {42 vars in 1094 bytes} [Wed Jul 3 01:03:40 2019] GET /flasgger_static/swagger-ui.css => generated 153930 bytes in 4 msecs via sendfile() (HTTP/1.1 200) 8 headers in 311 bytes (0 switches on core 0)
[pid: 7|app: 0|req: 15/29] 192.168.5.101 () {42 vars in 1085 bytes} [Wed Jul 3 01:03:40 2019] GET /flasgger_static/lib/jquery.min.js => generated 85578 bytes in 2 msecs via sendfile() (HTTP/1.1 200) 8 headers in 321 bytes (0 switches on core 0)
[pid: 57|app: 0|req: 5/30] 192.168.5.101 () {42 vars in 1113 bytes} [Wed Jul 3 01:03:40 2019] GET /flasgger_static/swagger-ui-standalone-preset.js => generated 440437 bytes in 17 msecs via sendfile() (HTTP/1.1 200) 8 headers in 325 bytes (1 switches on core 0)
[pid: 58|app: 0|req: 5/31] 192.168.5.101 () {42 vars in 1091 bytes} [Wed Jul 3 01:03:40 2019] GET /flasgger_static/swagger-ui-bundle.js => generated 1428809 bytes in 30 msecs via sendfile() (HTTP/1.1 200) 8 headers in 324 bytes (5 switches on core 0)
[pid: 7|app: 0|req: 16/32] 192.168.5.101 () {42 vars in 1121 bytes} [Wed Jul 3 01:03:41 2019] GET /flasgger_static/favicon-32x32.png => generated 1141 bytes in 2 msecs via sendfile() (HTTP/1.1 200) 8 headers in 292 bytes (0 switches on core 0)
[pid: 56|app: 0|req: 5/33] 192.168.5.101 () {42 vars in 1064 bytes} [Wed Jul 3 01:03:41 2019] GET /apispec_1.json => generated 6528 bytes in 66 msecs (HTTP/1.1 200) 2 headers in 73 bytes (2 switches on core 0)
[pid: 7|app: 0|req: 17/34] 192.168.5.101 () {42 vars in 1168 bytes} [Wed Jul 3 01:04:05 2019] GET /report/json/2019/7/Allen => generated 232 bytes in 3 msecs (HTTP/1.1 404) 2 headers in 72 bytes (1 switches on core 0)
2019_07_03_10_10_38.jpg
[pid: 7|app: 0|req: 18/35] 192.168.5.103 () {44 vars in 743 bytes} [Wed Jul 3 02:10:38 2019] POST /whois => generated 231 bytes in 631 msecs (HTTP/1.1 200) 2 headers in 72 bytes (1 switches on core 0)
2019_07_03_10_11_13.jpg
[pid: 7|app: 0|req: 19/36] 192.168.5.103 () {44 vars in 743 bytes} [Wed Jul 3 02:11:13 2019] POST /whois => generated 232 bytes in 448 msecs (HTTP/1.1 200) 2 headers in 72 bytes (1 switches on core 0)
2019_07_03_10_11_42.jpg
[pid: 58|app: 0|req: 6/37] 192.168.5.103 () {44 vars in 741 bytes} [Wed Jul 3 02:11:42 2019] POST /whois => generated 86 bytes in 37 msecs (HTTP/1.1 200) 2 headers in 71 bytes (1 switches on core 0)
from run import app from run import app
from flask_restful import Resource, reqparse from flask_restful import Resource, reqparse
from flask import redirect,jsonify
from werkzeug import datastructures from werkzeug import datastructures
import datetime import datetime
import time import time
...@@ -12,6 +13,9 @@ import svm_classification ...@@ -12,6 +13,9 @@ import svm_classification
import train import train
from influxdb import InfluxDBClient from influxdb import InfluxDBClient
import pymysql import pymysql
import xlwt
import calendar
import json
class Whois(Resource): class Whois(Resource):
def __init__(self): def __init__(self):
...@@ -22,7 +26,8 @@ class Whois(Resource): ...@@ -22,7 +26,8 @@ class Whois(Resource):
def save(self): def save(self):
save_time=time.time() save_time=time.time()
dt = datetime.datetime.now(pytz.timezone('Asia/Taipei')) dt = datetime.datetime.now(pytz.timezone('Asia/Taipei'))
dt = dt.strftime('%Y_%m_%d_%H_%M_%S_%f')+'.jpg' dt = dt.strftime('%Y_%m_%d_%H_%M_%S')+'.jpg'
print(dt)
filepath = os.path.abspath(os.path.join(app.config['UPLOAD_FOLDER'], dt)) filepath = os.path.abspath(os.path.join(app.config['UPLOAD_FOLDER'], dt))
args =self.reqparse.parse_args() args =self.reqparse.parse_args()
args['file'].save(filepath) args['file'].save(filepath)
...@@ -40,7 +45,18 @@ class Whois(Resource): ...@@ -40,7 +45,18 @@ class Whois(Resource):
return {'error':faces} return {'error':faces}
embs=facenet_predict.calc_embs(faces[1]) embs=facenet_predict.calc_embs(faces[1])
result=svm_classification.result(embs) result=svm_classification.result(embs)
return {'time':save_time,'face_location':str(faces[0]),'name':result[0],'confidence':result[1]}
# 建立mysql連線
db=pymysql.connect('192.168.0.5','root','xMd2k5aK','rd')
cursor = db.cursor()
sql="SELECT name FROM `employee` WHERE id='{0}'".format(result[0][0])
results=cursor.execute(sql)
if(results==0):
return jsonify({'error':'查詢錯誤請重新輸入'})
name=cursor.fetchall()[0][0]
db.close()
return {'time':save_time,'face_location':str(faces[0]),'uid':result[0],'name':name,'confidence':result[1]}
# if(len(result)!=3): # if(len(result)!=3):
# return {'error':result} # return {'error':result}
...@@ -54,22 +70,35 @@ class Whois(Resource): ...@@ -54,22 +70,35 @@ class Whois(Resource):
class Upload_image(Resource): class Upload_image(Resource):
def __init__(self): def __init__(self):
self.reqparse = reqparse.RequestParser() self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('name',required=True,help="Please write down your name and we will save your image to folder of train") self.reqparse.add_argument('uid',required=True,help="Please write down your name and we will save your image to folder of train")
self.reqparse.add_argument('file',type=datastructures.FileStorage, location='files',required=True,help="Please slecet a image which incloud car to upload.") self.reqparse.add_argument('file',type=datastructures.FileStorage, location='files',required=True,help="Please slecet a image which incloud car to upload.")
@staticmethod @staticmethod
def save(self): def save(self):
dt = datetime.datetime.now(pytz.timezone('Asia/Taipei')) dt = datetime.datetime.now(pytz.timezone('Asia/Taipei'))
dt = dt.strftime('%Y_%m_%d_%H_%M_%S_%f')+'.jpg' dt = dt.strftime('%Y_%m_%d_%H_%M_%S')+'.jpg'
args =self.reqparse.parse_args() args =self.reqparse.parse_args()
fileroot=os.path.abspath(os.path.join('./nas/Face/train',args['name']))
# 建立mysql連線
db=pymysql.connect('192.168.0.5','root','xMd2k5aK','rd')
cursor = db.cursor()
sql="SELECT name FROM `employee` WHERE id='{0}'".format(args['uid'])
results=cursor.execute(sql)
if(results==0):
return 'error','查詢錯誤請重新輸入'
name=cursor.fetchall()[0][0]
db.close()
fileroot=os.path.abspath(os.path.join('./nas/Face/train',args['uid']))
filepath = os.path.join(fileroot,dt) filepath = os.path.join(fileroot,dt)
if(not os.path.isdir(fileroot)): if(not os.path.isdir(fileroot)):
os.mkdir(fileroot) os.mkdir(fileroot)
args['file'].save(filepath) args['file'].save(filepath)
return args['name'],filepath return name,filepath
def post(self): def post(self):
name,img_path=self.save(self) name,img_path=self.save(self)
if(name=='error'):
return {'error':'查詢錯誤請重新輸入'}
img=cv2.imread(img_path) img=cv2.imread(img_path)
faces=face_cascade.get_face(img) faces=face_cascade.get_face(img)
if(len(faces)!=2): if(len(faces)!=2):
...@@ -85,34 +114,53 @@ class Train(Resource): ...@@ -85,34 +114,53 @@ class Train(Resource):
class Clock(Resource): class Clock(Resource):
def __init__(self): def __init__(self):
self.reqparse = reqparse.RequestParser() self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('name',required=True,help="Please write down your name") self.reqparse.add_argument('uid',required=True,help="Please write down your name")
self.reqparse.add_argument('time',required=True,help="Please write down save time") self.reqparse.add_argument('time',required=True,help="Please write down save time")
self.reqparse.add_argument('log',required=True,help="Please write down your login or logout") self.reqparse.add_argument('log',required=True,help="Please write down your login or logout")
self.reqparse.add_argument('status',required=True,help="Please write down your status") self.reqparse.add_argument('status',required=True,help="Please write down your status")
def post(self): def post(self):
args =self.reqparse.parse_args() args =self.reqparse.parse_args()
save_time=args['time'] save_time=args['time']
save_name=args['name'] save_uid=args['uid']
save_log=args['log'] save_log=args['log']
status=args['status'] status=args['status']
# 建立mysql連線
db=pymysql.connect('192.168.0.5','root','xMd2k5aK','rd')
cursor = db.cursor()
sql="SELECT name FROM `employee` WHERE id='{0}'".format(save_uid)
results=cursor.execute(sql)
if(results==0):
return jsonify({'error':'查詢錯誤請重新輸入'})
save_name=cursor.fetchall()[0][0]
db.close()
if(status == 'error_'): if(status == 'error_'):
save_log=status save_path=status
utc_time=datetime.datetime.utcfromtimestamp(float(save_time)) else:
tzutc_8 = datetime.timezone(datetime.timedelta(hours=8)) save_path=save_log
local_dt = utc_time.astimezone(tzutc_8) # 轉換程標準UTC時間
img_file = local_dt.strftime('%Y_%m_%d_%H_%M_%S_%f')+'.jpg' # 在轉換時區
dt=datetime.datetime.utcfromtimestamp(float(save_time))
utc_time=dt.replace(tzinfo=pytz.timezone('UTC'))
local_dt=utc_time.astimezone(pytz.timezone('Asia/Taipei'))
img_file = local_dt.strftime('%Y_%m_%d_%H_%M_%S')+'.jpg'
img=cv2.imread(os.path.join('./tmp/upload',img_file)) img=cv2.imread(os.path.join('./tmp/upload',img_file))
cv2.imwrite(os.path.abspath(os.path.join('./nas/Face',save_name,save_log,img_file)),img) print(img_file)
cv2.imwrite(os.path.abspath(os.path.join('./nas/Face/history',save_uid,save_path,img_file)),img)
# 建立資料庫連線 # 建立資料庫連線
client=InfluxDBClient(host='192.168.0.6',port=8086,username='root',password='xMd2k5aK',database='rd') #client=InfluxDBClient(host='192.168.0.6',port=8086,username='root',password='xMd2k5aK',database='rd')
client=InfluxDBClient(host='192.168.5.17',port=8086,username='root',password='root',database='RD')
json_body = [ json_body = [
{ {
"measurement": save_log, "measurement": save_log,
"time": utc_time, "time": datetime.datetime.utcfromtimestamp(float(save_time)),
"fields": { "fields": {
"id": save_uid,
"name": save_name, "name": save_name,
'snapshot':save_log+'/'+img_file+'.jpg' 'snapshot':save_path+'/'+img_file
} }
} }
] ]
...@@ -140,12 +188,15 @@ class Insert_employee(Resource):
# query the database for this group's last id to avoid duplicates
sql="SELECT id FROM employee WHERE id LIKE 'RD{0}%'".format(years)
a=cursor.execute(sql)
if(a == 0):
full_num='RD'+str(years)+'00'
else:
result=cursor.fetchall()
num=int(max(result)[0].split(years)[1])+1
if(num < 10):
num='0'+str(num)
full_num='RD'+str(years)+str(num)
sql="""INSERT INTO `employee` (`id`, `name`, `english_name`, `phone`) VALUES ('{0}', '{1}', '{2}', '{3}')""".format(full_num,name,engname,phone)
try:
# execute the SQL statement
...@@ -158,4 +209,106 @@ class Insert_employee(Resource):
db.close()
save_path=os.path.abspath(os.path.join('./nas/Face/history',full_num))
if(not os.path.isdir(save_path)):
os.mkdir(save_path)
os.mkdir(os.path.join(save_path,'error_'))
os.mkdir(os.path.join(save_path,'login_'))
os.mkdir(os.path.join(save_path,'logout_'))
return {'message':full_num}
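# Illustrative, hypothetical sketch of the employee id scheme applied above:
# 'RD' + years + a two-digit running number, zero-padded below 10. The helper
# name and argument shapes are assumptions made for illustration only.
def next_employee_id(years, existing_ids):
    prefix = 'RD' + str(years)
    if not existing_ids:
        return prefix + '00'
    # take the largest existing suffix for this prefix and add one
    num = max(int(i.split(str(years))[1]) for i in existing_ids) + 1
    return prefix + str(num).zfill(2)

# next_employee_id('105', ['RD10501', 'RD10502', 'RD10503']) -> 'RD10504'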
class Report(Resource):
def get(self,file_type,year,month,uid):
# establish a MySQL connection
db=pymysql.connect('192.168.0.5','root','xMd2k5aK','rd')
cursor = db.cursor()
sql="SELECT name FROM `employee` WHERE id='{0}'".format(uid)
results=cursor.execute(sql)
if(results==0):
return jsonify({'error':'查詢錯誤請重新輸入'})
name=cursor.fetchall()[0][0]
db.close()
# name = request.args.get('name')
# time zone setup
tw = pytz.timezone('Asia/Taipei')
# get the weekday of the month's first day and the total number of days in the month
firstDayWeekDay, monthRange = calendar.monthrange(int(year), int(month))
# get the first and last day of the month in local time
firstDay = datetime.datetime(year=year, month=month, day=1,hour=0,minute=0,second=0)
lastDay = datetime.datetime(year=year, month=month, day=monthRange,hour=23,minute=59,second=59)
# localize to the time zone
tw_firstDay=tw.localize(firstDay)
tw_lastDay=tw.localize(lastDay)
# convert to UTC
utc_firstDay=tw_firstDay.astimezone(pytz.utc)
utc_lastDay=tw_lastDay.astimezone(pytz.utc)
# format the strings into the InfluxDB time format
format_firstDay=utc_firstDay.strftime("%Y-%m-%d %H:%M:%S")
format_lastDay=utc_lastDay.strftime("%Y-%m-%d %H:%M:%S")
# establish an InfluxDB connection
client=InfluxDBClient(host='192.168.0.6',port=8086,username='root',password='xMd2k5aK',database='rd')
login_response=client.query("SELECT * FROM login_ WHERE time > '{0}' AND time < '{1}' AND \"id\" = '{2}'".format(format_firstDay,format_lastDay,uid))
logout_response=client.query("SELECT * FROM logout_ WHERE time > '{0}' AND time < '{1}' AND \"id\" = '{2}'".format(format_firstDay,format_lastDay,uid))
# handle query errors
if(len(login_response) == 0 or len(logout_response) == 0):
return jsonify({'error':'查詢錯誤請重新輸入'})
# parse the InfluxDB response
login_history=login_response.raw['series'][0]['values']
logout_history=logout_response.raw['series'][0]['values']
def utc2local(strtime):
strtime=strtime.split('Z')[0]
strtime=strtime.split('.')[0]
dt=datetime.datetime.strptime(strtime,"%Y-%m-%dT%H:%M:%S")
local_time=dt.replace(tzinfo=pytz.utc).astimezone(pytz.timezone('Asia/Taipei'))
return local_time.strftime("%Y-%m-%d %H:%M:%S")
log_json={}
for login,logout in zip(login_history,logout_history):
local_login=utc2local(login[0])
local_logout=utc2local(logout[0])
login_date,login_time=local_login.split(' ')
logout_date,logout_time=local_logout.split(' ')
d1 = datetime.datetime.strptime(local_logout,'%Y-%m-%d %H:%M:%S')
overtime=0
if(d1.hour ==17 and d1.minute >= 45):
overtime=0.5
elif(d1.hour >17 and d1.minute >=15):
overtime=d1.hour-17
if(d1.minute>=45):
overtime=overtime+0.5
date_json={'login':login_time,'logout':logout_time,'overtime':overtime}
log_json[login_date]=date_json
if(file_type == 'excel'):
# create a new Excel workbook
workbook = xlwt.Workbook(encoding='utf-8')
# add a new sheet
sheet=workbook.add_sheet("{0}月出勤考核表".format(month))
# basic header fields
sheet.write(0,0,"ID")
sheet.write(0,1,uid)
sheet.write(0,2,"姓名")
sheet.write(0,3,name)
sheet.write(1,0,"日期")
sheet.write(1,1,"上班打卡時間")
sheet.write(1,2,"下班打卡時間")
sheet.write(1,3,"加班時數")
x,y=2,0
for k in log_json:
sheet.write(x,0,k)
sheet.write(x,1,log_json[k]['login'])
sheet.write(x,2,log_json[k]['logout'])
sheet.write(x,3,log_json[k]['overtime'])
print(log_json[k])
x=x+1
# save the file
Fname="{0}{1}月出勤紀錄.xlsx".format(name,month)
workbook.save('docs/'+Fname)
return redirect('/download/'+Fname)
else:
return jsonify({'range':"{0}年{1}月".format(year,month),'name':name,'log':log_json})
\ No newline at end of file
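# Illustrative, hypothetical sketch of the overtime rule applied inside Report.get,
# pulled out into a standalone helper for clarity (the helper itself is not part of
# the codebase). Hours past 17:00 are counted, with an extra half hour once the
# minutes reach 45.
import datetime

def overtime_hours(logout_dt):
    if logout_dt.hour == 17 and logout_dt.minute >= 45:
        return 0.5
    if logout_dt.hour > 17 and logout_dt.minute >= 15:
        hours = logout_dt.hour - 17
        if logout_dt.minute >= 45:
            hours += 0.5
        return hours
    return 0

# overtime_hours(datetime.datetime(2019, 7, 1, 17, 53)) -> 0.5
# overtime_hours(datetime.datetime(2019, 7, 2, 19, 50)) -> 2.5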
from flask import Flask,send_from_directory
from flask_restful import Api
from flask_sqlalchemy import SQLAlchemy
from flask_jwt_extended import JWTManager
from flasgger import Swagger
def warn(*args, **kwargs):
pass
...@@ -10,12 +12,16 @@ warnings.warn = warn
app = Flask(__name__)
Swagger(app)
api = Api(app)
import os
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SECRET_KEY'] = 'some-secret-string'
app.config['UPLOAD_FOLDER'] = 'tmp/upload'
app.config['DOWNLOAD_PATH'] = os.path.abspath('./docs')
app.config['JSON_AS_ASCII'] = False
os.environ["CUDA_VISIBLE_DEVICES"] = "2"
db = SQLAlchemy(app)
...@@ -33,12 +39,252 @@ def check_if_token_in_blacklist(decrypted_token):
import views, models, resources
api.add_resource(resources.Whois,'/whois')
@app.route('/whois/', methods=['POST'])
def whois_info():
"""
人臉辨識
---
tags:
- 人臉辨識回傳結果
description:
人臉辨識接口,img file格式
parameters:
- name: body
in: body
required: true
schema:
id: 人臉辨識
required:
- file
properties:
file:
type: image/jpeg
description: 預辨識照片.
responses:
201:
description: 辨識成功
example: { "time": 1561950037.7547767,"face_location": "[[125 7 270 270]]","uid":["RD10502"],"name": "李宗翰","confidence": [0.2725946425242175]}
406:
description: 偵測不到人臉請重新調整
"""
pass
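# Illustrative client call for the /whois route documented above. Host, port and
# the sample image path are assumptions; the response fields follow the example
# given in the docstring.
import cv2
import requests

frame = cv2.imread('sample_face.jpg')                      # hypothetical test image
ok, buf = cv2.imencode('.jpg', frame)
files = {'file': ('image.jpg', buf.tobytes(), 'image/jpeg')}
resp = requests.post('http://localhost:8000/whois', files=files)
print(resp.status_code, resp.json())                       # 201 with uid/name/confidence on success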
api.add_resource(resources.Upload_image,'/upload/image')
@app.route('/upload/image', methods=['POST'])
def upload_image_info():
"""
上傳訓練樣本
---
tags:
- 上傳訓練樣本
description:
上傳訓練樣本,img file格式
parameters:
- name: body
in: body
required: true
schema:
id: 上傳訓練樣本
required:
- uid
- file
properties:
uid:
type: string
description: 預訓練人員id.
file:
type: image/jpeg
description: 預訓練照片.
responses:
201:
description: 上傳成功
example: { "message": "test已新增訓練樣本" }
406:
description: 偵測不到人臉 請重新上傳
"""
pass
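# Illustrative client call for /upload/image documented above. The host, the file
# path and the uid 'RD10501' are placeholders used only for this sketch.
import requests

with open('train_sample.jpg', 'rb') as f:
    files = {'file': ('train_sample.jpg', f, 'image/jpeg')}
    data = {'uid': 'RD10501'}
    resp = requests.post('http://localhost:8000/upload/image', files=files, data=data)
print(resp.status_code, resp.json())   # 201 on success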
api.add_resource(resources.Train,'/train')
@app.route('/train', methods=['GET'])
def train_info():
"""
開始訓練
---
tags:
- 開始訓練
description:
開始自動加載訓練新模型
responses:
201:
description: 訓練完成
example: { "message": "訓練完成" }
406:
description: 樣本錯誤請重新檢查
"""
pass
api.add_resource(resources.Clock,'/clock')
@app.route('/clock', methods=['POST'])
def clock_info():
"""
開始打卡
---
tags:
- 開始打卡
description:
上下班打卡,寫入資料庫並儲存至Nas
parameters:
- name: body
in: body
required: true
schema:
id: 開始打卡
required:
- time
- uid
- log
- status
properties:
time:
type: float
description: 打卡時間戳記.
uid:
type: string
description: 人員id.
log:
type: string
description: 上班或下班.
status:
type: string
description: 辨識成功或失敗.
responses:
201:
description: 上班(下班)打卡成功
example: { "message": "Bruce打卡成功" }
406:
description: 輸入格式錯誤
"""
pass
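# Illustrative client call for /clock documented above. Host and uid are
# assumptions; 'log' selects the login_/logout_ folder and any status other
# than 'error_' files the snapshot under that folder.
import time
import requests

payload = {
    'time': time.time(),   # POSIX timestamp of the punch
    'uid': 'RD10502',      # placeholder id
    'log': 'login_',
    'status': 'success',
}
resp = requests.post('http://localhost:8000/clock', data=payload)
print(resp.status_code, resp.json())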
api.add_resource(resources.Insert_employee,'/insert')
@app.route('/insert', methods=['POST'])
def insert_info():
"""
新增人員資料
---
tags:
- 新增人員資料
description:
新增人員到資料庫,以供後續訓練
parameters:
- name: body
in: body
required: true
schema:
id: 新增人員
required:
- name
- engname
- phone
- years
properties:
name:
type: string
description: 預新增人員人員姓名.
engname:
type: string
description: 預新增人員英文名.
phone:
type: string
description: 手機號碼.
years:
type: string
description: 入學年或職稱代碼以供編號.
responses:
201:
description: 新增成功
example: { "message": "Bruce新增成功" }
406:
description: 輸入格式錯誤
"""
pass
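# Illustrative client call for /insert documented above. The host and the sample
# employee record are assumptions; 'years' feeds the RD<years><nn> id scheme.
import requests

payload = {
    'name': 'Bruce',
    'engname': 'Bruce',
    'phone': '0912345678',
    'years': '105',
}
resp = requests.post('http://localhost:8000/insert', data=payload)
print(resp.status_code, resp.json())   # e.g. {'message': 'RD10504'}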
api.add_resource(resources.Report,'/report/<file_type>/<int:year>/<int:month>/<uid>')
@app.route('/report', methods=['GET'])
def report_info():
"""
產生報表
---
tags:
- 產生報表
description:
產生出勤報表以供人事審查
parameters:
- name: body
in: body
required: true
schema:
id: 產生報表
required:
- file_type
- year
- month
- uid
properties:
file_type:
type: string
description: 產生excel或json格式報表.
year:
type: int
description: 預查詢之年度.
month:
type: int
description: 預查詢之月份.
uid:
type: string
description: 預查詢之員工標號.
responses:
201:
description: 查詢成功產生報表
example: {
"log": {
"2019-07-01": {
"login": "08:21:52",
"logout": "17:53:55",
"overtime": 0.5
},
"2019-07-02": {
"login": "08:13:44",
"logout": "17:34:27",
"overtime": 0
}
},
"name": "李宗翰",
"range": "2019年7月"
}
406:
description: 查詢錯誤請重新輸入
"""
pass
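# Illustrative client calls for the /report route registered above. Host and uid
# are assumptions; 'json' returns the attendance dict, while 'excel' redirects to
# /download/<filename>.
import requests

resp = requests.get('http://localhost:8000/report/json/2019/7/RD10502')
print(resp.json())        # {'range': ..., 'name': ..., 'log': {...}}

excel = requests.get('http://localhost:8000/report/excel/2019/7/RD10502')
with open('report.xlsx', 'wb') as f:
    f.write(excel.content)   # requests follows the redirect to the generated file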
@app.route('/download/<filename>')
def download_zh_file(filename):
path = app.config['DOWNLOAD_PATH']
# filename = 'Allen7月出勤紀錄.xlsx'
return send_from_directory(path, filename)
# @app.route("/docs/<fname>", methods=['GET'])
# def download_file(fname):
# MyFolder=os.path.abspath('./docs')
# # filepath here is the path to the file, but the file must be stored under the static folder, e.g. images\test.jpg
# return send_from_directory(MyFolder, fname, mimetype='application/octet-stream')
@app.before_first_request
def create_tables():
......
...@@ -104,7 +104,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -138,7 +138,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -167,75 +167,16 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"prewhiten(load_and_align_images(['nas/Face/train/Bruce/Bruce02.jpg'],10))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -246,7 +187,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -255,7 +196,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -264,7 +205,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -274,7 +215,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -283,7 +224,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -292,7 +233,7 @@
},
{
"cell_type": "code",
"execution_count": 54,
"metadata": {},
"outputs": [],
"source": [
...@@ -301,7 +242,7 @@
},
{
"cell_type": "code",
"execution_count": 55,
"metadata": {},
"outputs": [],
"source": [
...@@ -310,7 +251,7 @@
},
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [],
"source": [
...@@ -319,55 +260,69 @@
},
{
"cell_type": "code",
"execution_count": 57,
"metadata": {},
"outputs": [],
"source": [
"# sql=\"SELECT id FROM employee WHERE id LIKE 'RD{0}%'\".format('105')\n",
"\n",
"# sql=\"SELECT name FROM `employee` WHERE id='RD10501'\"\n",
"\n",
"sql=\"SELECT id FROM employee WHERE id LIKE 'RD{0}%'\".format(100)"
]
},
{
"cell_type": "code",
"execution_count": 58,
"metadata": {},
"outputs": [],
"source": [
"a=cursor.execute(sql)"
]
},
{
"cell_type": "code",
"execution_count": 52,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"()"
]
},
"execution_count": 52,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"cursor.fetchall()"
]
},
{
"cell_type": "code",
"execution_count": 60,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"int"
]
},
"execution_count": 60,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"type(a)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
...@@ -376,31 +331,143 @@
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cursor.execute(sql)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"db.commit()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from influxdb import InfluxDBClient\n",
"import requests\n",
"import datetime"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"frame=cv2.imread('./nas/Face/train/Allen/Allen03.jpg')\n",
"imencoded = cv2.imencode(\".jpg\", frame)[1]\n",
"file = {'file': ('image.jpg', imencoded.tostring(), 'image/jpeg', {'Expires': '0'})}\n",
"response=requests.post(os.path.join('http://192.168.5.205:8000','whois'),files=file)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"t=response.json()['time']"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"client=InfluxDBClient(host='192.168.0.6',port=8086,username='root',password='xMd2k5aK',database='rd')\n",
"json_body = [\n",
" {\n",
" \"measurement\": 'login_',\n",
" \"time\": datetime.datetime.utcfromtimestamp(float(t)),\n",
" \"fields\": {\n",
" \"name\": 'test',\n",
" 'snapshot':'test.jpg'\n",
" }\n",
" }\n",
"]\n",
"client.write_points(json_body)\n",
"client.close()"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [],
"source": [
"a = \"2019-07-01 00:26:22\"\n",
"import time\n",
"timeArray = time.strptime(a, \"%Y-%m-%d %H:%M:%S\")\n",
"timeStamp = time.mktime(timeArray)"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {
"scrolled": true
},
"outputs": [
{
"data": {
"text/plain": [
"1561940782.0"
]
},
"execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"timeStamp"
]
},
{
"cell_type": "code",
"execution_count": 43,
"metadata": {},
"outputs": [],
"source": [
"import time"
]
},
{
"cell_type": "code",
"execution_count": 44,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1562126418.3251102"
]
},
"execution_count": 44,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"time.time()"
] ]
},
{
......