Compare commits

2 Commits

1ce899f56b ... 258393cbeb

| Author | SHA1 | Date |
|---|---|---|
|  | 258393cbeb |  |
|  | 96462c532c |  |
```diff
@@ -10,9 +10,9 @@ RUN mkdir -p /app
 WORKDIR /app
 COPY requirements.txt /app
 RUN python -m venv .
-RUN pip install pip==20.1.1
+RUN pip install pip==23.0.1
 RUN pip install setuptools==46.1.3
-RUN pip install --no-cache-dir -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
+RUN pip install -r requirements.txt -i https://mirrors.aliyun.com/pypi/simple/
 COPY ./app /app
 EXPOSE 5000
 CMD ["gunicorn", "--bind", ":5000", "server:app"]
```

README.md (+11 lines)
```diff
@@ -44,3 +44,14 @@
 | Original image | img_src | string | base64-encoded string of the image |
 | Crack present | crack | bool | whether a crack was detected |
 | Pothole present | pothole | bool | whether a pothole was detected |
+
+
+## Build instructions
+
+### Building the Docker image for x86
+
+```docker build -t hpds-road-detection:v1.0  .```
+
+### Building the Docker image for arm64
+
+```docker buildx build -t hpds-road-detection-edge:v1.0 . --platform=linux/arm64```
```
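Once built, the x86 image above would typically be started with a port mapping that matches the `EXPOSE 5000` / gunicorn bind in the Dockerfile, e.g. ```docker run -p 5000:5000 hpds-road-detection:v1.0```; the run options here are an assumption given for illustration, not taken from the repository.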
```diff
@@ -3,7 +3,7 @@ import numpy as np
 
 
 def serve_unet_model():
-    TFLITE_MODEL = "/app/UNet_25_Crack.tflite"
+    TFLITE_MODEL = "../app/UNet_25_Crack.tflite"
 
     tflite_interpreter = tf.lite.Interpreter(model_path=TFLITE_MODEL)
 
@@ -20,7 +20,7 @@ def serve_rcnn_model():
     detection_graph = tf.Graph()
     with detection_graph.as_default():
         od_graph_def = tf.compat.v1.GraphDef()
-        with tf.compat.v1.gfile.GFile("/app/frozen_inference_graph.pb", 'rb') as fid:
+        with tf.compat.v1.gfile.GFile("../app/frozen_inference_graph.pb", 'rb') as fid:
             serialized_graph = fid.read()
             od_graph_def.ParseFromString(serialized_graph)
             tf.import_graph_def(od_graph_def, name='')
```

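For context, a TFLite model loaded the way `serve_unet_model()` does is typically driven as in the sketch below; the single input/output tensor and the preprocessing responsibilities are assumptions, not code from the repository.

```python
import numpy as np
import tensorflow as tf

TFLITE_MODEL = "../app/UNet_25_Crack.tflite"

# Load the model and allocate its tensors once, at startup.
interpreter = tf.lite.Interpreter(model_path=TFLITE_MODEL)
interpreter.allocate_tensors()

input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()


def run_unet(batch: np.ndarray) -> np.ndarray:
    """Run one forward pass; `batch` must already match the model's input shape and dtype."""
    interpreter.set_tensor(input_details[0]["index"], batch)
    interpreter.invoke()
    return interpreter.get_tensor(output_details[0]["index"])
```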
```diff
@@ -191,13 +191,17 @@ def index():
             result = result > 0.5
             result = result * 255
             mask = np.squeeze(result)
             bg = np.asarray(img_segment).copy()
+            is_crack = False
             for i in range(len(mask)):
                 for j in range(len(mask[i])):
                     if mask[i][j] > 0:
                         bg[i][j][0] = 0
                         bg[i][j][1] = 0
                         bg[i][j][2] = 255
+                        is_crack = True
+                        break
 
             img = Image.fromarray(bg.astype("uint8"))
 
             # start pothole detection
             image_np = load_image_into_numpy_array(img_src)
```
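The per-pixel loops above can also be written with vectorized NumPy operations; a minimal sketch (not part of the patch, and unlike the patched loop, which `break`s out of the inner loop at the first positive pixel in a row, it marks every positive pixel):

```python
import numpy as np


def overlay_crack(bg: np.ndarray, mask: np.ndarray):
    """Mark crack pixels in place and report whether any were found.

    `bg` is an H x W x 3 uint8 copy of the segmented image and `mask` the
    thresholded UNet output, as in the hunk above.
    """
    hit = mask > 0
    bg[hit] = (0, 0, 255)        # same channel values the loop writes
    return bg, bool(hit.any())   # second value plays the role of is_crack
```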
```diff
@@ -216,15 +220,22 @@ def index():
                 skip_scores=True,
                 skip_labels=True)
             raw_bytes = io.BytesIO()
-            img_src.save(raw_bytes, "JPEG")
+            raw_src = io.BytesIO()
+            img.save(raw_bytes, "JPEG")
+            img_src.save(raw_src,"JPEG")
             raw_bytes.seek(0)
+            raw_src.seek(0)
             img_byte = raw_bytes.getvalue()
-            img_str = base64.b64encode(img_byte)
+            img_src_byte = raw_src.getvalue()
+            img_str = base64.b64encode(img_src_byte)
+            img_discern = base64.b64encode(img_byte)
 
             data = {
                 "code": 0,
+                "crack": is_crack,
                 "pothole": is_pothole,
-                "img_src": img_str.decode('utf-8')
+                "img_src": img_str.decode('utf-8'),
+                "img_discern": img_discern.decode('utf-8')
             }
             return jsonify(data)
         else:
```

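For illustration, a client consuming the JSON shape produced by this hunk could look like the following; the endpoint URL and the upload field name are assumptions, while the response fields (`code`, `crack`, `pothole`, `img_src`, `img_discern`) come from the diff and the README table above.

```python
import base64

import requests  # any HTTP client works; requests is used here for brevity

# Hypothetical endpoint and form field name; adjust to the actual Flask route.
with open("road.jpg", "rb") as f:
    resp = requests.post("http://localhost:5000/", files={"file": f})

data = resp.json()
print("crack:", data["crack"], "pothole:", data["pothole"])

# img_src carries the original image and img_discern the annotated one,
# both as base64-encoded JPEG strings.
with open("annotated.jpg", "wb") as out:
    out.write(base64.b64decode(data["img_discern"]))
```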
```
@@ -1,48 +1,6 @@
absl-py==0.9.0
astunparse==1.6.3
cachetools==4.1.0
certifi==2020.4.5.1
chardet==3.0.4
click==7.1.2
cycler==0.10.0
Flask==1.1.2
gast==0.3.3
gevent==20.5.0
google-auth==1.15.0
google-auth-oauthlib==0.4.1
google-pasta==0.2.0
greenlet==0.4.15
grpcio==1.29.0
gunicorn==20.0.4
h5py==2.10.0
idna==2.9
importlib-metadata==1.6.0
itsdangerous==1.1.0
Jinja2==2.11.2
Keras-Preprocessing==1.1.2
Markdown==3.2.2
MarkupSafe==1.1.1
matplotlib==3.2.1
numpy==1.18.4
oauthlib==3.1.0
opt-einsum==3.2.1
Pillow==7.1.2
protobuf==3.11.3
pyasn1==0.4.8
pyasn1-modules==0.2.8
pyparsing==2.4.7
python-dateutil==2.8.1
requests==2.23.0
requests-oauthlib==1.3.0
rsa==4.0
scipy==1.4.1
six==1.15.0
tensorboard==2.2.1
tensorboard-plugin-wit==1.6.0.post3
tensorflow==2.2.0
tensorflow-estimator==2.2.0
termcolor==1.1.0
urllib3==1.25.9
Werkzeug==1.0.1
wrapt==1.12.1
zipp==3.1.0
```