@@ -251,24 +251,25 @@ def visulization(render_norm, render_tex=None):
     render_size = 256
 
-    render_norm = render_norm.detach() * 255.0
-    render_norm = torch.rot90(render_norm, 1, [0, 1]).permute(2, 0, 1).unsqueeze(0)
-    render_norm = F.interpolate(render_norm, size=(render_size, render_size))
-    render_norm = render_norm[0].cpu().numpy().transpose(1, 2, 0)
-    # render_norm = cv2.cvtColor(render_norm, cv2.COLOR_BGR2RGB)
+    if render_norm is not None:
+        render_norm = render_norm.detach() * 255.0
+        render_norm = torch.rot90(render_norm, 1, [0, 1]).permute(2, 0, 1).unsqueeze(0)
+        render_norm = F.interpolate(render_norm, size=(render_size, render_size))
+        render_norm = render_norm[0].cpu().numpy().transpose(1, 2, 0)
+        reference = render_norm
 
     if render_tex is not None:
         render_tex = render_tex.detach() * 255.0
         render_tex = torch.rot90(render_tex, 1, [0, 1]).permute(2, 0, 1).unsqueeze(0)
         render_tex = F.interpolate(render_tex, size=(render_size, render_size))
         render_tex = render_tex[0].cpu().numpy().transpose(1, 2, 0)
-        # render_tex = cv2.cvtColor(render_tex, cv2.COLOR_BGR2RGB)
+        reference = render_tex
 
     bg = np.logical_and(
         np.logical_and(
-            render_norm[:, :, 0] == 255,
-            render_norm[:, :, 1] == 255),
-        render_norm[:, :, 2] == 255,
+            reference[:, :, 0] == 255,
+            reference[:, :, 1] == 255),
+        reference[:, :, 2] == 255,
     ).reshape(render_size, render_size, 1)
     mask = ~bg
 
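A note on this hunk: the background mask is now computed from `reference`, i.e. whichever map the call actually produced, so a texture-only invocation no longer indexes a `None` normal map. A minimal standalone sketch of that mask logic (the function name is hypothetical, not part of the repo):

```python
import numpy as np

def white_background_mask(reference: np.ndarray, render_size: int = 256) -> np.ndarray:
    """Foreground mask: True where the rendered map is not pure white (255, 255, 255)."""
    bg = np.logical_and(
        np.logical_and(reference[:, :, 0] == 255,
                       reference[:, :, 1] == 255),
        reference[:, :, 2] == 255,
    ).reshape(render_size, render_size, 1)
    return ~bg  # shape (render_size, render_size, 1), broadcasts over the RGB channels
```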
@@ -368,7 +369,8 @@ def update_camera():
             **data_dict,
             "feat_tensor_C": netC.filter(
                 data_dict["input_netC"].to(cuda_backbone_C, non_blocking=True),
-                feat_prior=data_dict["feat_tensor_G"][-1][-1]) if netC else None
+                feat_prior=data_dict["feat_tensor_G"][-1][-1]) \
+                if (netC is not None) and (DESKTOP_MODE == 'TEXTURE' or SERVER_MODE == 'TEXTURE') else None
         },
 
         # move feature to cuda_recon device
@@ -417,7 +419,7 @@ def update_camera():
             data_dict["Y"],
             data_dict["Z"],
             data_dict["calib_tensor"],
-            data_dict["norm"])
+            data_dict["norm"]) if (DESKTOP_MODE == 'NORM' or SERVER_MODE == 'NORM') else None
         },
 
         # pifu render texture
@@ -430,7 +432,7 @@ def update_camera():
             data_dict["Y"],
             data_dict["Z"],
             data_dict["calib_tensor"],
-            None) if netC else None
+            None) if data_dict["feat_tensor_C"] else None
         },
 
         # visualization
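The three hunks above apply the same pattern: an expensive stage (the netC color filter, the PIFu normal render, the PIFu texture render) is only evaluated when a desktop or server mode that displays its output is active, and is `None` otherwise, which the visualization code above already tolerates. A self-contained sketch of the pattern under hypothetical names (the repo simply inlines `<expr> if <condition> else None`):

```python
from typing import Callable, Optional, TypeVar

T = TypeVar("T")

def run_if(needed: bool, compute: Callable[[], T]) -> Optional[T]:
    """Evaluate an expensive stage only when some active mode needs its output."""
    return compute() if needed else None

# Illustration only: skip the normal rendering unless a NORM view is being shown.
DESKTOP_MODE, SERVER_MODE = 'SEGM', None
render_norm = run_if(DESKTOP_MODE == 'NORM' or SERVER_MODE == 'NORM',
                     lambda: "placeholder for the PIFu normal rendering")
assert render_norm is None
```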
@@ -463,14 +465,13 @@ def main_loop():
     window_server = np.ones((256, 256, 3), dtype=np.uint8) * 255
     window_desktop = np.ones((512, 1024, 3), dtype=np.uint8) * 255
 
-    create_opengl_context(128, 128)
-    renderer = AlbedoRender(width=128, height=128, multi_sample_rate=1)
+    create_opengl_context(256, 256)
+    renderer = AlbedoRender(width=256, height=256, multi_sample_rate=1)
     renderer.set_attrib(0, scene.vert_data)
     renderer.set_attrib(1, scene.uv_data)
     renderer.set_texture('TargetTexture', scene.texture_image)
 
     def render(extrinsic, intrinsic):
-        renderer.set_texture('TargetTexture', scene.texture_image)
         uniform_dict = {'ModelMat': extrinsic, 'PerspMat': intrinsic}
         renderer.draw(uniform_dict)
         color = (renderer.get_color() * 255).astype(np.uint8)
@@ -496,14 +497,14 @@ def render(extrinsic, intrinsic):
             ])) # RGB
         elif DESKTOP_MODE == 'NORM':
             if render_norm is None:
-                render_norm = np.ones((512, 512, 3), dtype=np.float32) * 255
+                render_norm = np.ones((256, 256, 3), dtype=np.float32) * 255
             window_desktop = np.uint8(np.hstack([
                 input * 255,
                 cv2.resize(render_norm, (512, 512))
             ])) # RGB
         elif DESKTOP_MODE == 'TEXTURE':
             if render_tex is None:
-                render_tex = np.ones((512, 512, 3), dtype=np.float32) * 255
+                render_tex = np.ones((256, 256, 3), dtype=np.float32) * 255
             window_desktop = np.uint8(np.hstack([
                 input * 255,
                 cv2.resize(render_tex, (512, 512))
@@ -512,9 +513,41 @@ def render(extrinsic, intrinsic):
             window_desktop = None
 
         if DESKTOP_MODE is not None:
-            window_desktop = cv2.resize(window_desktop, (2400, 1200))
+            # window_desktop = cv2.resize(window_desktop, (2400, 1200))
+            cv2.namedWindow('window_desktop', cv2.WINDOW_NORMAL)
+            cv2.setWindowProperty('window_desktop', cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)
             cv2.imshow('window_desktop', window_desktop[:, :, ::-1])
 
+        if args.use_server:
+            if DESKTOP_MODE == 'NORM':
+                if SERVER_MODE is None:
+                    background = np.ones((256, 256, 3), dtype=np.float32) * 255
+                else:
+                    background = render(extrinsic, intrinsic)
+                if mask is None:
+                    window_server = background
+                else:
+                    window_server = np.uint8(mask * render_norm + (1 - mask) * background)
+            elif DESKTOP_MODE == 'TEXTURE':
+                if SERVER_MODE is None:
+                    background = np.ones((256, 256, 3), dtype=np.float32) * 255
+                else:
+                    background = render(extrinsic, intrinsic)
+                if mask is None:
+                    window_server = background
+                else:
+                    window_server = np.uint8(mask * render_tex + (1 - mask) * background)
+            else:
+                if render_norm is not None:
+                    window_server = np.uint8(render_norm)
+
+            # yield window_desktop, window_server
+            (flag, encodedImage) = cv2.imencode(".jpg", window_server[:, :, ::-1])
+            if not flag:
+                continue
+            yield (b'--frame\r\n' b'Content-Type: image/jpeg\r\n\r\n' +
+                   bytearray(encodedImage) + b'\r\n')
+
         key = cv2.waitKey(1)
         if key == ord('q'):
             DESKTOP_MODE = 'SEGM'
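The bytes yielded in this hunk follow the MJPEG-over-HTTP convention: each frame is one part of a `multipart/x-mixed-replace` body, opened by the `--frame` boundary and carrying its own `Content-Type: image/jpeg` header. The Flask side is outside this diff; a typical wiring (a sketch only, assuming the `video_feed` route named in the last hunk wraps `main_loop()` this way) looks like:

```python
from flask import Flask, Response

app = Flask(__name__)

@app.route("/video_feed")
def video_feed():
    # main_loop() yields b'--frame\r\nContent-Type: image/jpeg\r\n\r\n<jpeg>\r\n' chunks,
    # so the browser keeps replacing the displayed image as new parts arrive.
    return Response(main_loop(),
                    mimetype="multipart/x-mixed-replace; boundary=frame")
```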
@@ -527,10 +560,10 @@ def render(extrinsic, intrinsic):
 
         elif key == ord('a'):
             SERVER_MODE = 'SEGM'
-        elif key == ord('s'):
-            SERVER_MODE = 'NORM'
-        elif key == ord('d'):
-            SERVER_MODE = 'TEXTURE'
+        # elif key == ord('s'):
+        #     SERVER_MODE = 'NORM'
+        # elif key == ord('d'):
+        #     SERVER_MODE = 'TEXTURE'
         elif key == ord('f'):
             SERVER_MODE = None
 
@@ -547,30 +580,7 @@ def render(extrinsic, intrinsic):
         elif key == ord('n'):
             VIEW_MODE = 'LOAD'
 
-        if args.use_server:
-            if SERVER_MODE == 'NORM':
-                background = render(extrinsic, intrinsic)
-                if mask is None:
-                    window_server = background
-                else:
-                    window_server = np.uint8(mask * render_norm + (1 - mask) * background)
-            elif SERVER_MODE == 'TEXTURE':
-                background = render(extrinsic, intrinsic)
-                if mask is None:
-                    window_server = background
-                else:
-                    window_server = np.uint8(mask * render_tex + (1 - mask) * background)
-            else:
-                if render_norm is not None:
-                    window_server = np.uint8(render_norm)
-
-            # yield window_desktop, window_server
-            (flag, encodedImage) = cv2.imencode(".jpg", window_server[:, :, ::-1])
-            if not flag:
-                continue
-            yield (b'--frame\r\n' b'Content-Type: image/jpeg\r\n\r\n' +
-                   bytearray(encodedImage) + b'\r\n')
-
+
 
 
 if __name__ == '__main__':
     if args.use_server:
@@ -579,15 +589,6 @@ def render(extrinsic, intrinsic):
         ########################################
         app = Flask(__name__)
 
-        def img_base64(img_path):
-            with open(img_path, "rb") as f:
-                data = f.read()
-                print("data:", getsizeof(data))
-                assert data[-2:] == b'\xff\xd9'
-                base64_str = b64encode(data).decode('utf-8')
-                print("base64:", getsizeof(base64_str))
-                return base64_str
-
         @app.route("/")
         def index():
             return render_template("test_flask.html")
@@ -604,128 +605,3 @@ def video_feed():
         print('start main_loop.')
         for _ in main_loop():
             pass
-
-# @torch.no_grad()
-# def main_loop():
-#     for data_dict in tqdm.tqdm(loader):
-#         # for visualization on the ubuntu main screen
-#         input4c = data_dict["segm"].cpu().numpy()[0].transpose(1, 2, 0) # [512, 512, 4]
-#         input = (input4c[:, :, 0:3] * 0.5) + 0.5
-#         segmentation = (input4c[:, :, 0:3] * input4c[:, :, 3:4] * 0.5) + 0.5
-
-#         render_norm = data_dict["render_norm"] # [256, 256, 3] RGB
-#         render_tex = data_dict["render_tex"] # [256, 256, 3] RGB
-#         mask = data_dict["mask"]
-#         extrinsic = data_dict["extrinsic"]
-#         intrinsic = data_dict["intrinsic"]
-
-#         if DESKTOP_MODE == 'SEGM':
-#             window_desktop = np.uint8(np.hstack([
-#                 input * 255,
-#                 segmentation * 255
-#             ])) # RGB
-#         elif DESKTOP_MODE == 'NORM':
-#             if render_norm is None:
-#                 render_norm = np.zeros((512, 512, 3), dtype=np.float32)
-#             window_desktop = np.uint8(np.hstack([
-#                 input * 255,
-#                 cv2.resize(render_norm, (512, 512))
-#             ])) # RGB
-#         elif DESKTOP_MODE == 'TEXTURE':
-#             if render_tex is None:
-#                 render_tex = np.zeros((512, 512, 3), dtype=np.float32)
-#             window_desktop = np.uint8(np.hstack([
-#                 input * 255,
-#                 cv2.resize(render_tex, (512, 512))
-#             ])) # RGB
-#         else:
-#             window_desktop = None
-
-#         # if SERVER_MODE == 'NORM':
-#         #     background = scene.render(extrinsic, intrinsic)
-#         #     if mask is None:
-#         #         window_server = background
-#         #     else:
-#         #         window_server = np.uint8(mask * render_norm + (1 - mask) * background)
-#         # elif SERVER_MODE == 'TEXTURE':
-#         #     background = scene.render(extrinsic, intrinsic)
-#         #     if mask is None:
-#         #         window_server = background
-#         #     else:
-#         #         window_server = np.uint8(mask * render_tex + (1 - mask) * background)
-#         # else:
-#         #     window_server = None
-
-#         yield window_desktop
-
-
-# # access server:
-# # http://localhost:9999/scripts/unit_tests/test_server.html
-# if __name__ == '__main__':
-#     import asyncio
-#     import websockets
-#     import threading
-#     import time
-#     import random
-#     import glob
-#     from base64 import b64encode
-#     from sys import getsizeof
-#     from io import BytesIO
-#     from PIL import Image
-
-
-#     def img_base64(img_path):
-#         with open(img_path, "rb") as f:
-#             data = f.read()
-#             print("data:", getsizeof(data))
-#             assert data[-2:] == b'\xff\xd9'
-#             base64_str = b64encode(data).decode('utf-8')
-#             print("base64:", getsizeof(base64_str))
-#             return base64_str
-
-#     async def send(client, data):
-#         await client.send(data)
-
-#     async def handler(client, path):
-#         # Register.
-#         print("Websocket Client Connected.", client)
-#         clients.append(client)
-#         while True:
-#             try:
-#                 # print("ping", client)
-#                 pong_waiter = await client.ping()
-#                 await pong_waiter
-#                 # print("pong", client)
-#                 time.sleep(3)
-#             except Exception as e:
-#                 clients.remove(client)
-#                 print("Websocket Client Disconnected", client)
-#                 break
-
-#     clients = []
-#     start_server = websockets.serve(handler, "192.168.1.232", 5555)
-
-#     asyncio.get_event_loop().run_until_complete(start_server)
-#     threading.Thread(target=asyncio.get_event_loop().run_forever).start()
-
-#     print(f"Socket Server Running on 192.168.1.232:5555. Starting main loop.")
-
-#     for window_desktop in main_loop():
-#         # message_clients = clients.copy()
-#         # for client in message_clients:
-#         #     pil_img = Image.fromarray(window_server)
-#         #     buff = BytesIO()
-#         #     pil_img.save(buff, format="JPEG")
-#         #     data = b64encode(buff.getvalue()).decode("utf-8")
-
-#         #     print("Sending data to client")
-#         #     try:
-#         #         asyncio.run(send(client, data))
-#         #     except:
-#         #         # Clients might have disconnected during the messaging process,
-#         #         # just ignore that, they will have been removed already.
-#         #         pass
-#         window_desktop = cv2.resize(window_desktop, (0, 0), fx=2, fy=2)
-#         cv2.imshow('window_desktop', window_desktop[:, :, ::-1])
-#         cv2.waitKey(1)
-